diff --git a/.gitattributes b/.gitattributes index a6344aac8c09253b3b630fb776ae94478aa0275b..184885227f7cdf790d385ce9a332ba2b3fc7b55f 100644 --- a/.gitattributes +++ b/.gitattributes @@ -33,3 +33,37 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/ComfyUI-FaceSwap/workflows/pexels-photo-11338397.jpeg filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/example_highres.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/workflow.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/workflow2.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/All_in_one_v1_3.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/real0.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/real1.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/test_vfi_schedule.gif filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/images/2023-10-31_22-43-17.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/images/2023-11-04_22-32-57.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/images/2023-11-04_22-46-20.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/images/2023-11-04_22-47-09.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/images/2023-11-04_22-57-28.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/images/2023-11-05_00-02-34.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/images/ComfyUI_temp_vpose_00005_.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/images/nodes/AnimateDiff[[:space:]]&[[:space:]]HiResFix[[:space:]]Scripts.gif filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/images/nodes/AnimateDiff[[:space:]]-[[:space:]]Node[[:space:]]Example.gif filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/images/nodes/HighResFix[[:space:]]-[[:space:]]Node[[:space:]]Example.gif filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/images/nodes/Image[[:space:]]Overlay[[:space:]]-[[:space:]]Node[[:space:]]Example.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/images/nodes/Tiled[[:space:]]Upscaler[[:space:]]-[[:space:]]Node[[:space:]]Example.gif filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/images/nodes/XY[[:space:]]Plot[[:space:]]-[[:space:]]Node[[:space:]]Example.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/workflows/AnimateDiff[[:space:]]&[[:space:]]HiResFix[[:space:]]Scripts.gif filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/workflows/Eff_XYPlot[[:space:]]-[[:space:]]LoRA[[:space:]]Model[[:space:]]vs[[:space:]]Clip[[:space:]]Strengths01.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/workflows/Eff_multiKsampler_withScriptsSDXL.png filter=lfs 
diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/workflows/HiResFix[[:space:]]Script.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/workflows/SDXL[[:space:]]Refining[[:space:]]&[[:space:]]Noise[[:space:]]Control[[:space:]]Script.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/workflows/SDXL_base_refine_noise_workflow.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/workflows/Tiled[[:space:]]Upscaler[[:space:]]Script.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/workflows/XYPlot[[:space:]]-[[:space:]]LoRA[[:space:]]Model[[:space:]]vs[[:space:]]Clip[[:space:]]Strengths.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/workflows/XYPlot[[:space:]]-[[:space:]]Seeds[[:space:]]vs[[:space:]]Checkpoints[[:space:]]&[[:space:]]Stacked[[:space:]]Scripts.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/efficiency-nodes-comfyui/workflows/eff_animatescriptWF001.gif filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/rgthree-comfy/docs/rgthree_context_metadata.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/custom_nodes/x-flux-comfyui/assets/image1.png filter=lfs diff=lfs merge=lfs -text +src/comfyui/output/it_flux_up_00001_.png filter=lfs diff=lfs merge=lfs -text diff --git a/src/comfyui/.ci/update_windows/update.py b/src/comfyui/.ci/update_windows/update.py new file mode 100644 index 0000000000000000000000000000000000000000..6a04e5e1689b9629d5bef1e8f9e771de7111f03e --- /dev/null +++ b/src/comfyui/.ci/update_windows/update.py @@ -0,0 +1,146 @@ +import pygit2 +from datetime import datetime +import sys +import os +import shutil +import filecmp + +def pull(repo, remote_name='origin', branch='master'): + for remote in repo.remotes: + if remote.name == remote_name: + remote.fetch() + remote_master_id = repo.lookup_reference('refs/remotes/origin/%s' % (branch)).target + merge_result, _ = repo.merge_analysis(remote_master_id) + # Up to date, do nothing + if merge_result & pygit2.GIT_MERGE_ANALYSIS_UP_TO_DATE: + return + # We can just fastforward + elif merge_result & pygit2.GIT_MERGE_ANALYSIS_FASTFORWARD: + repo.checkout_tree(repo.get(remote_master_id)) + try: + master_ref = repo.lookup_reference('refs/heads/%s' % (branch)) + master_ref.set_target(remote_master_id) + except KeyError: + repo.create_branch(branch, repo.get(remote_master_id)) + repo.head.set_target(remote_master_id) + elif merge_result & pygit2.GIT_MERGE_ANALYSIS_NORMAL: + repo.merge(remote_master_id) + + if repo.index.conflicts is not None: + for conflict in repo.index.conflicts: + print('Conflicts found in:', conflict[0].path) + raise AssertionError('Conflicts, ahhhhh!!') + + user = repo.default_signature + tree = repo.index.write_tree() + commit = repo.create_commit('HEAD', + user, + user, + 'Merge!', + tree, + [repo.head.target, remote_master_id]) + # We need to do this or git CLI will think we are still merging. 
+ repo.state_cleanup() + else: + raise AssertionError('Unknown merge analysis result') + +pygit2.option(pygit2.GIT_OPT_SET_OWNER_VALIDATION, 0) +repo_path = str(sys.argv[1]) +repo = pygit2.Repository(repo_path) +ident = pygit2.Signature('comfyui', 'comfy@ui') +try: + print("stashing current changes") + repo.stash(ident) +except KeyError: + print("nothing to stash") +backup_branch_name = 'backup_branch_{}'.format(datetime.today().strftime('%Y-%m-%d_%H_%M_%S')) +print("creating backup branch: {}".format(backup_branch_name)) +try: + repo.branches.local.create(backup_branch_name, repo.head.peel()) +except: + pass + +print("checking out master branch") +branch = repo.lookup_branch('master') +if branch is None: + ref = repo.lookup_reference('refs/remotes/origin/master') + repo.checkout(ref) + branch = repo.lookup_branch('master') + if branch is None: + repo.create_branch('master', repo.get(ref.target)) +else: + ref = repo.lookup_reference(branch.name) + repo.checkout(ref) + +print("pulling latest changes") +pull(repo) + +if "--stable" in sys.argv: + def latest_tag(repo): + versions = [] + for k in repo.references: + try: + prefix = "refs/tags/v" + if k.startswith(prefix): + version = list(map(int, k[len(prefix):].split("."))) + versions.append((version[0] * 10000000000 + version[1] * 100000 + version[2], k)) + except: + pass + versions.sort() + if len(versions) > 0: + return versions[-1][1] + return None + latest_tag = latest_tag(repo) + if latest_tag is not None: + repo.checkout(latest_tag) + +print("Done!") + +self_update = True +if len(sys.argv) > 2: + self_update = '--skip_self_update' not in sys.argv + +update_py_path = os.path.realpath(__file__) +repo_update_py_path = os.path.join(repo_path, ".ci/update_windows/update.py") + +cur_path = os.path.dirname(update_py_path) + + +req_path = os.path.join(cur_path, "current_requirements.txt") +repo_req_path = os.path.join(repo_path, "requirements.txt") + + +def files_equal(file1, file2): + try: + return filecmp.cmp(file1, file2, shallow=False) + except: + return False + +def file_size(f): + try: + return os.path.getsize(f) + except: + return 0 + + +if self_update and not files_equal(update_py_path, repo_update_py_path) and file_size(repo_update_py_path) > 10: + shutil.copy(repo_update_py_path, os.path.join(cur_path, "update_new.py")) + exit() + +if not os.path.exists(req_path) or not files_equal(repo_req_path, req_path): + import subprocess + try: + subprocess.check_call([sys.executable, '-s', '-m', 'pip', 'install', '-r', repo_req_path]) + shutil.copy(repo_req_path, req_path) + except: + pass + + +stable_update_script = os.path.join(repo_path, ".ci/update_windows/update_comfyui_stable.bat") +stable_update_script_to = os.path.join(cur_path, "update_comfyui_stable.bat") + +try: + if not file_size(stable_update_script_to) > 10: + shutil.copy(stable_update_script, stable_update_script_to) +except: + pass diff --git a/src/comfyui/.ci/update_windows/update_comfyui.bat b/src/comfyui/.ci/update_windows/update_comfyui.bat new file mode 100644 index 0000000000000000000000000000000000000000..bb08c0de0c7bfcf7cdecfda45edc72805bc3adea --- /dev/null +++ b/src/comfyui/.ci/update_windows/update_comfyui.bat @@ -0,0 +1,8 @@ +@echo off +..\python_embeded\python.exe .\update.py ..\ComfyUI\ +if exist update_new.py ( + move /y update_new.py update.py + echo Running updater again since it got updated. 
+ ..\python_embeded\python.exe .\update.py ..\ComfyUI\ --skip_self_update +) +if "%~1"=="" pause diff --git a/src/comfyui/.ci/update_windows/update_comfyui_stable.bat b/src/comfyui/.ci/update_windows/update_comfyui_stable.bat new file mode 100644 index 0000000000000000000000000000000000000000..e18010da39d37b063f44ec811340c43d4739e874 --- /dev/null +++ b/src/comfyui/.ci/update_windows/update_comfyui_stable.bat @@ -0,0 +1,8 @@ +@echo off +..\python_embeded\python.exe .\update.py ..\ComfyUI\ --stable +if exist update_new.py ( + move /y update_new.py update.py + echo Running updater again since it got updated. + ..\python_embeded\python.exe .\update.py ..\ComfyUI\ --skip_self_update --stable +) +if "%~1"=="" pause diff --git a/src/comfyui/.ci/windows_base_files/README_VERY_IMPORTANT.txt b/src/comfyui/.ci/windows_base_files/README_VERY_IMPORTANT.txt new file mode 100644 index 0000000000000000000000000000000000000000..d46acbcbf1d77440dbf4bfaa7bb5cbe6090d6fde --- /dev/null +++ b/src/comfyui/.ci/windows_base_files/README_VERY_IMPORTANT.txt @@ -0,0 +1,31 @@ +HOW TO RUN: + +if you have an NVIDIA GPU: + +run_nvidia_gpu.bat + + + +To run it in slow CPU mode: + +run_cpu.bat + + + +IF YOU GET A RED ERROR IN THE UI MAKE SURE YOU HAVE A MODEL/CHECKPOINT IN: ComfyUI\models\checkpoints + +You can download the Stable Diffusion 1.5 checkpoint from: https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/blob/main/v1-5-pruned-emaonly-fp16.safetensors + + +RECOMMENDED WAY TO UPDATE: +To update the ComfyUI code: update\update_comfyui.bat + + + +To update ComfyUI along with its Python dependencies (ONLY run this if you have issues with the Python dependencies): +update\update_comfyui_and_python_dependencies.bat + + +TO SHARE MODELS BETWEEN COMFYUI AND ANOTHER UI: +In the ComfyUI directory you will find a file: extra_model_paths.yaml.example +Rename this file to: extra_model_paths.yaml and edit it with your favorite text editor.
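A note on the extra_model_paths.yaml step above: the file maps another UI's model folders into ComfyUI's search paths. Below is a minimal sketch, assuming the a111 section layout used by the bundled extra_model_paths.yaml.example; every path is a placeholder to adjust for your own install:

    # Layout assumed from extra_model_paths.yaml.example; change base_path to your install.
    a111:
        base_path: path/to/stable-diffusion-webui/
        checkpoints: models/Stable-diffusion
        vae: models/VAE
        loras: models/Lora
        embeddings: embeddings
        controlnet: models/ControlNet

After saving the file, restart ComfyUI so the extra search paths are picked up.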
diff --git a/src/comfyui/.ci/windows_base_files/run_cpu.bat b/src/comfyui/.ci/windows_base_files/run_cpu.bat new file mode 100644 index 0000000000000000000000000000000000000000..c3ba4172161a996b0187dd981286c521bcc65846 --- /dev/null +++ b/src/comfyui/.ci/windows_base_files/run_cpu.bat @@ -0,0 +1,2 @@ +.\python_embeded\python.exe -s ComfyUI\main.py --cpu --windows-standalone-build +pause diff --git a/src/comfyui/.ci/windows_base_files/run_nvidia_gpu.bat b/src/comfyui/.ci/windows_base_files/run_nvidia_gpu.bat new file mode 100644 index 0000000000000000000000000000000000000000..274d7c9486e0ee182ef8fbe16d34ca95e05e90ec --- /dev/null +++ b/src/comfyui/.ci/windows_base_files/run_nvidia_gpu.bat @@ -0,0 +1,2 @@ +.\python_embeded\python.exe -s ComfyUI\main.py --windows-standalone-build +pause diff --git a/src/comfyui/.ci/windows_nightly_base_files/run_nvidia_gpu_fast.bat b/src/comfyui/.ci/windows_nightly_base_files/run_nvidia_gpu_fast.bat new file mode 100644 index 0000000000000000000000000000000000000000..ca6d6868af4e19fb9ee764241030451d0d15f42d --- /dev/null +++ b/src/comfyui/.ci/windows_nightly_base_files/run_nvidia_gpu_fast.bat @@ -0,0 +1,2 @@ +.\python_embeded\python.exe -s ComfyUI\main.py --windows-standalone-build --fast +pause diff --git a/src/comfyui/.gitattributes b/src/comfyui/.gitattributes new file mode 100644 index 0000000000000000000000000000000000000000..4391de6789a22b1f650463f3dcbf0182f1a2f865 --- /dev/null +++ b/src/comfyui/.gitattributes @@ -0,0 +1,2 @@ +/web/assets/** linguist-generated +/web/** linguist-vendored diff --git a/src/comfyui/.github/ISSUE_TEMPLATE/bug-report.yml b/src/comfyui/.github/ISSUE_TEMPLATE/bug-report.yml new file mode 100644 index 0000000000000000000000000000000000000000..39d1992d7623a70bd23aa4bc3d26e8996a55705a --- /dev/null +++ b/src/comfyui/.github/ISSUE_TEMPLATE/bug-report.yml @@ -0,0 +1,48 @@ +name: Bug Report +description: "Something is broken inside of ComfyUI. (Do not use this if you're just having issues and need help, or if the issue relates to a custom node)" +labels: ["Potential Bug"] +body: + - type: markdown + attributes: + value: | + Before submitting a **Bug Report**, please ensure the following: + + - **1:** You are running the latest version of ComfyUI. + - **2:** You have looked at the existing bug reports and made sure this isn't already reported. + - **3:** You confirmed that the bug is not caused by a custom node. You can disable all custom nodes by passing the + `--disable-all-custom-nodes` command line argument. + - **4:** This is an actual bug in ComfyUI, not just a support question. A bug is when you can specify exact + steps to replicate what went wrong and others will be able to repeat your steps and see the same issue happen. + + If unsure, ask on the [ComfyUI Matrix Space](https://app.element.io/#/room/%23comfyui_space%3Amatrix.org) or the [Comfy Org Discord](https://discord.gg/comfyorg) first. + - type: textarea + attributes: + label: Expected Behavior + description: "What you expected to happen." + validations: + required: true + - type: textarea + attributes: + label: Actual Behavior + description: "What actually happened. Please include a screenshot of the issue if possible." + validations: + required: true + - type: textarea + attributes: + label: Steps to Reproduce + description: "Describe how to reproduce the issue. Please be sure to attach a workflow JSON or PNG, ideally one that doesn't require custom nodes to test.
If the bug only happens when certain custom nodes are used, most likely that custom node is what has the bug rather than ComfyUI, in which case it should be reported to the node's author." + validations: + required: true + - type: textarea + attributes: + label: Debug Logs + description: "Please copy the output from your terminal logs here." + render: powershell + validations: + required: true + - type: textarea + attributes: + label: Other + description: "Any other additional information you think might be helpful." + validations: + required: false diff --git a/src/comfyui/.github/ISSUE_TEMPLATE/config.yml b/src/comfyui/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000000000000000000000000000000000000..09fea712edc0df7bada8c850abde6370cbf35719 --- /dev/null +++ b/src/comfyui/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,11 @@ +blank_issues_enabled: true +contact_links: + - name: ComfyUI Frontend Issues + url: https://github.com/Comfy-Org/ComfyUI_frontend/issues + about: Issues related to the ComfyUI frontend (display issues, user interaction bugs); please go to the frontend repo to file the issue + - name: ComfyUI Matrix Space + url: https://app.element.io/#/room/%23comfyui_space%3Amatrix.org + about: The ComfyUI Matrix Space is available for support and general discussion related to ComfyUI (Matrix is like Discord but open source). + - name: Comfy Org Discord + url: https://discord.gg/comfyorg + about: The Comfy Org Discord is available for support and general discussion related to ComfyUI. diff --git a/src/comfyui/.github/ISSUE_TEMPLATE/feature-request.yml b/src/comfyui/.github/ISSUE_TEMPLATE/feature-request.yml new file mode 100644 index 0000000000000000000000000000000000000000..419721b63b4c0df62989862ea12cf2274d5788d7 --- /dev/null +++ b/src/comfyui/.github/ISSUE_TEMPLATE/feature-request.yml @@ -0,0 +1,32 @@ +name: Feature Request +description: "You have an idea for something new you would like to see added to ComfyUI's core." +labels: [ "Feature" ] +body: + - type: markdown + attributes: + value: | + Before submitting a **Feature Request**, please ensure the following: + + **1:** You are running the latest version of ComfyUI. + **2:** You have looked to make sure there is not already a feature that does what you need, and there is not already a Feature Request listed for the same idea. + **3:** This is something that makes sense to add to ComfyUI Core, and wouldn't make more sense as a custom node. + + If unsure, ask on the [ComfyUI Matrix Space](https://app.element.io/#/room/%23comfyui_space%3Amatrix.org) or the [Comfy Org Discord](https://discord.gg/comfyorg) first. + - type: textarea + attributes: + label: Feature Idea + description: "Describe the feature you want to see." + validations: + required: true + - type: textarea + attributes: + label: Existing Solutions + description: "Please search through available custom nodes / extensions to see if there are existing custom solutions for this. If so, please link the options you found here as a reference." + validations: + required: false + - type: textarea + attributes: + label: Other + description: "Any other additional information you think might be helpful."
+ validations: + required: false diff --git a/src/comfyui/.github/ISSUE_TEMPLATE/user-support.yml b/src/comfyui/.github/ISSUE_TEMPLATE/user-support.yml new file mode 100644 index 0000000000000000000000000000000000000000..df28804c6e950845d7204e5def64b33669d48a9d --- /dev/null +++ b/src/comfyui/.github/ISSUE_TEMPLATE/user-support.yml @@ -0,0 +1,32 @@ +name: User Support +description: "Use this if you need help with something, or you're experiencing an issue." +labels: [ "User Support" ] +body: + - type: markdown + attributes: + value: | + Before submitting a **User Support** issue, please ensure the following: + + **1:** You are running the latest version of ComfyUI. + **2:** You have made an effort to find public answers to your question before asking here. In other words, you googled it first, and scrolled through recent help topics. + + If unsure, ask on the [ComfyUI Matrix Space](https://app.element.io/#/room/%23comfyui_space%3Amatrix.org) or the [Comfy Org Discord](https://discord.gg/comfyorg) first. + - type: textarea + attributes: + label: Your question + description: "Post your question here. Please be as detailed as possible." + validations: + required: true + - type: textarea + attributes: + label: Logs + description: "If your question relates to an issue you're experiencing, please go to `Server` -> `Logs` -> potentially set `View Type` to `Debug` as well, then copy and paste all the text here." + render: powershell + validations: + required: false + - type: textarea + attributes: + label: Other + description: "Any other additional information you think might be helpful." + validations: + required: false diff --git a/src/comfyui/.github/workflows/pullrequest-ci-run.yml b/src/comfyui/.github/workflows/pullrequest-ci-run.yml new file mode 100644 index 0000000000000000000000000000000000000000..a2a4b265c9847f7d46f7625f68f86abb5dbda304 --- /dev/null +++ b/src/comfyui/.github/workflows/pullrequest-ci-run.yml @@ -0,0 +1,53 @@ +# This is the GitHub Workflow that drives full-GPU-enabled tests of pull requests to ComfyUI, when the 'Run-CI-Test' label is added +# Results are reported as checkmarks on the commits, as well as onto https://ci.comfy.org/ +name: Pull Request CI Workflow Runs +on: + pull_request_target: + types: [labeled] + +jobs: + pr-test-stable: + if: ${{ github.event.label.name == 'Run-CI-Test' }} + strategy: + fail-fast: false + matrix: + os: [macos, linux, windows] + python_version: ["3.9", "3.10", "3.11", "3.12"] + cuda_version: ["12.1"] + torch_version: ["stable"] + include: + - os: macos + runner_label: [self-hosted, macOS] + flags: "--use-pytorch-cross-attention" + - os: linux + runner_label: [self-hosted, Linux] + flags: "" + - os: windows + runner_label: [self-hosted, Windows] + flags: "" + runs-on: ${{ matrix.runner_label }} + steps: + - name: Test Workflows + uses: comfy-org/comfy-action@main + with: + os: ${{ matrix.os }} + python_version: ${{ matrix.python_version }} + torch_version: ${{ matrix.torch_version }} + google_credentials: ${{ secrets.GCS_SERVICE_ACCOUNT_JSON }} + comfyui_flags: ${{ matrix.flags }} + use_prior_commit: 'true' + comment: + if: ${{ github.event.label.name == 'Run-CI-Test' }} + runs-on: ubuntu-latest + permissions: + pull-requests: write + steps: + - uses: actions/github-script@v6 + with: + script: | + github.rest.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: '(Automated Bot Message) CI Tests are running, you can view the results at https://ci.comfy.org/?branch=${{
github.event.pull_request.number }}%2Fmerge' + }) diff --git a/src/comfyui/.github/workflows/pylint.yml b/src/comfyui/.github/workflows/pylint.yml new file mode 100644 index 0000000000000000000000000000000000000000..5effbea35fc735fab327c6270feccecf65d0ec3d --- /dev/null +++ b/src/comfyui/.github/workflows/pylint.yml @@ -0,0 +1,23 @@ +name: Python Linting + +on: [push, pull_request] + +jobs: + pylint: + name: Run Pylint + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.x + + - name: Install Pylint + run: pip install pylint + + - name: Run Pylint + run: pylint --rcfile=.pylintrc $(find . -type f -name "*.py") diff --git a/src/comfyui/.github/workflows/stable-release.yml b/src/comfyui/.github/workflows/stable-release.yml new file mode 100644 index 0000000000000000000000000000000000000000..0bdd5a3bd3116ce509a9f781d7e3026f8e83b4f4 --- /dev/null +++ b/src/comfyui/.github/workflows/stable-release.yml @@ -0,0 +1,104 @@ + +name: "Release Stable Version" + +on: + workflow_dispatch: + inputs: + git_tag: + description: 'Git tag' + required: true + type: string + cu: + description: 'CUDA version' + required: true + type: string + default: "124" + python_minor: + description: 'Python minor version' + required: true + type: string + default: "12" + python_patch: + description: 'Python patch version' + required: true + type: string + default: "7" + + +jobs: + package_comfy_windows: + permissions: + contents: "write" + packages: "write" + pull-requests: "read" + runs-on: windows-latest + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ inputs.git_tag }} + fetch-depth: 0 + persist-credentials: false + - uses: actions/cache/restore@v4 + id: cache + with: + path: | + cu${{ inputs.cu }}_python_deps.tar + update_comfyui_and_python_dependencies.bat + key: ${{ runner.os }}-build-cu${{ inputs.cu }}-${{ inputs.python_minor }} + - shell: bash + run: | + mv cu${{ inputs.cu }}_python_deps.tar ../ + mv update_comfyui_and_python_dependencies.bat ../ + cd .. + tar xf cu${{ inputs.cu }}_python_deps.tar + pwd + ls + + - shell: bash + run: | + cd .. + cp -r ComfyUI ComfyUI_copy + curl https://www.python.org/ftp/python/3.${{ inputs.python_minor }}.${{ inputs.python_patch }}/python-3.${{ inputs.python_minor }}.${{ inputs.python_patch }}-embed-amd64.zip -o python_embeded.zip + unzip python_embeded.zip -d python_embeded + cd python_embeded + echo ${{ env.MINOR_VERSION }} + echo 'import site' >> ./python3${{ inputs.python_minor }}._pth + curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py + ./python.exe get-pip.py + ./python.exe -s -m pip install ../cu${{ inputs.cu }}_python_deps/* + sed -i '1i../ComfyUI' ./python3${{ inputs.python_minor }}._pth + cd .. + + git clone --depth 1 https://github.com/comfyanonymous/taesd + cp taesd/*.pth ./ComfyUI_copy/models/vae_approx/ + + mkdir ComfyUI_windows_portable + mv python_embeded ComfyUI_windows_portable + mv ComfyUI_copy ComfyUI_windows_portable/ComfyUI + + cd ComfyUI_windows_portable + + mkdir update + cp -r ComfyUI/.ci/update_windows/* ./update/ + cp -r ComfyUI/.ci/windows_base_files/* ./ + cp ../update_comfyui_and_python_dependencies.bat ./update/ + + cd .. 
+ + "C:\Program Files\7-Zip\7z.exe" a -t7z -m0=lzma2 -mx=8 -mfb=64 -md=32m -ms=on -mf=BCJ2 ComfyUI_windows_portable.7z ComfyUI_windows_portable + mv ComfyUI_windows_portable.7z ComfyUI/ComfyUI_windows_portable_nvidia.7z + + cd ComfyUI_windows_portable + python_embeded/python.exe -s ComfyUI/main.py --quick-test-for-ci --cpu + + ls + + - name: Upload binaries to release + uses: svenstaro/upload-release-action@v2 + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + file: ComfyUI_windows_portable_nvidia.7z + tag: ${{ inputs.git_tag }} + overwrite: true + prerelease: true + make_latest: false diff --git a/src/comfyui/.github/workflows/stale-issues.yml b/src/comfyui/.github/workflows/stale-issues.yml new file mode 100644 index 0000000000000000000000000000000000000000..0459960706dccca86452bc82804dc79b8bcadeda --- /dev/null +++ b/src/comfyui/.github/workflows/stale-issues.yml @@ -0,0 +1,21 @@ +name: 'Close stale issues' +on: + schedule: + # Run daily at 430 am PT + - cron: '30 11 * * *' +permissions: + issues: write + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v9 + with: + stale-issue-message: "This issue is being marked stale because it has not had any activity for 30 days. Reply below within 7 days if your issue still isn't solved, and it will be left open. Otherwise, the issue will be closed automatically." + days-before-stale: 30 + days-before-close: 7 + stale-issue-label: 'Stale' + only-labels: 'User Support' + exempt-all-assignees: true + exempt-all-milestones: true diff --git a/src/comfyui/.github/workflows/test-build.yml b/src/comfyui/.github/workflows/test-build.yml new file mode 100644 index 0000000000000000000000000000000000000000..444d6b2548c47954e0dab5e5830809f67d89d219 --- /dev/null +++ b/src/comfyui/.github/workflows/test-build.yml @@ -0,0 +1,31 @@ +name: Build package + +# +# This workflow is a test of the python package build. +# Install Python dependencies across different Python versions. 
+# + +on: + push: + paths: + - "requirements.txt" + - ".github/workflows/test-build.yml" + +jobs: + build: + name: Build Test + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11"] + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt \ No newline at end of file diff --git a/src/comfyui/.github/workflows/test-ci.yml b/src/comfyui/.github/workflows/test-ci.yml new file mode 100644 index 0000000000000000000000000000000000000000..ce4598b27861efe1a3e0c384acd53120a34aebfc --- /dev/null +++ b/src/comfyui/.github/workflows/test-ci.yml @@ -0,0 +1,95 @@ +# This is the GitHub Workflow that drives automatic full-GPU-enabled tests of all new commits to the master branch of ComfyUI +# Results are reported as checkmarks on the commits, as well as onto https://ci.comfy.org/ +name: Full Comfy CI Workflow Runs +on: + push: + branches: + - master + paths-ignore: + - 'app/**' + - 'input/**' + - 'output/**' + - 'notebooks/**' + - 'script_examples/**' + - '.github/**' + - 'web/**' + workflow_dispatch: + +jobs: + test-stable: + strategy: + fail-fast: false + matrix: + os: [macos, linux, windows] + python_version: ["3.9", "3.10", "3.11", "3.12"] + cuda_version: ["12.1"] + torch_version: ["stable"] + include: + - os: macos + runner_label: [self-hosted, macOS] + flags: "--use-pytorch-cross-attention" + - os: linux + runner_label: [self-hosted, Linux] + flags: "" + - os: windows + runner_label: [self-hosted, Windows] + flags: "" + runs-on: ${{ matrix.runner_label }} + steps: + - name: Test Workflows + uses: comfy-org/comfy-action@main + with: + os: ${{ matrix.os }} + python_version: ${{ matrix.python_version }} + torch_version: ${{ matrix.torch_version }} + google_credentials: ${{ secrets.GCS_SERVICE_ACCOUNT_JSON }} + comfyui_flags: ${{ matrix.flags }} + + test-win-nightly: + strategy: + fail-fast: true + matrix: + os: [windows] + python_version: ["3.9", "3.10", "3.11", "3.12"] + cuda_version: ["12.1"] + torch_version: ["nightly"] + include: + - os: windows + runner_label: [self-hosted, Windows] + flags: "" + runs-on: ${{ matrix.runner_label }} + steps: + - name: Test Workflows + uses: comfy-org/comfy-action@main + with: + os: ${{ matrix.os }} + python_version: ${{ matrix.python_version }} + torch_version: ${{ matrix.torch_version }} + google_credentials: ${{ secrets.GCS_SERVICE_ACCOUNT_JSON }} + comfyui_flags: ${{ matrix.flags }} + + test-unix-nightly: + strategy: + fail-fast: false + matrix: + os: [macos, linux] + python_version: ["3.11"] + cuda_version: ["12.1"] + torch_version: ["nightly"] + include: + - os: macos + runner_label: [self-hosted, macOS] + flags: "--use-pytorch-cross-attention" + - os: linux + runner_label: [self-hosted, Linux] + flags: "" + runs-on: ${{ matrix.runner_label }} + steps: + - name: Test Workflows + uses: comfy-org/comfy-action@main + with: + os: ${{ matrix.os }} + python_version: ${{ matrix.python_version }} + torch_version: ${{ matrix.torch_version }} + google_credentials: ${{ secrets.GCS_SERVICE_ACCOUNT_JSON }} + comfyui_flags: ${{ matrix.flags }} diff --git a/src/comfyui/.github/workflows/test-launch.yml b/src/comfyui/.github/workflows/test-launch.yml new file mode 100644 index 0000000000000000000000000000000000000000..42f1dbe9941c8895204ad2a1854d43f70dd2c080 --- /dev/null +++ 
b/src/comfyui/.github/workflows/test-launch.yml @@ -0,0 +1,45 @@ +name: Test server launches without errors + +on: + push: + branches: [ main, master ] + pull_request: + branches: [ main, master ] + +jobs: + test: + runs-on: ubuntu-latest + steps: + - name: Checkout ComfyUI + uses: actions/checkout@v4 + with: + repository: "comfyanonymous/ComfyUI" + path: "ComfyUI" + - uses: actions/setup-python@v4 + with: + python-version: '3.8' + - name: Install requirements + run: | + python -m pip install --upgrade pip + pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu + pip install -r requirements.txt + pip install wait-for-it + working-directory: ComfyUI + - name: Start ComfyUI server + run: | + python main.py --cpu 2>&1 | tee console_output.log & + wait-for-it --service 127.0.0.1:8188 -t 600 + working-directory: ComfyUI + - name: Check for unhandled exceptions in server log + run: | + if grep -qE "Exception|Error" console_output.log; then + echo "Unhandled exception/error found in server log." + exit 1 + fi + working-directory: ComfyUI + - uses: actions/upload-artifact@v4 + if: always() + with: + name: console-output + path: ComfyUI/console_output.log + retention-days: 30 diff --git a/src/comfyui/.github/workflows/test-unit.yml b/src/comfyui/.github/workflows/test-unit.yml new file mode 100644 index 0000000000000000000000000000000000000000..b3a4b4ea0c668f8f3d37c4284510939143253b89 --- /dev/null +++ b/src/comfyui/.github/workflows/test-unit.yml @@ -0,0 +1,30 @@ +name: Unit Tests + +on: + push: + branches: [ main, master ] + pull_request: + branches: [ main, master ] + +jobs: + test: + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + runs-on: ${{ matrix.os }} + continue-on-error: true + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.10' + - name: Install requirements + run: | + python -m pip install --upgrade pip + pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu + pip install -r requirements.txt + - name: Run Unit Tests + run: | + pip install -r tests-unit/requirements.txt + python -m pytest tests-unit diff --git a/src/comfyui/.github/workflows/windows_release_dependencies.yml b/src/comfyui/.github/workflows/windows_release_dependencies.yml new file mode 100644 index 0000000000000000000000000000000000000000..85e6a52fd9a3531dad4a05a828ba06c000a4b8c8 --- /dev/null +++ b/src/comfyui/.github/workflows/windows_release_dependencies.yml @@ -0,0 +1,71 @@ +name: "Windows Release dependencies" + +on: + workflow_dispatch: + inputs: + xformers: + description: 'xformers version' + required: false + type: string + default: "" + extra_dependencies: + description: 'extra dependencies' + required: false + type: string + default: "" + cu: + description: 'cuda version' + required: true + type: string + default: "124" + + python_minor: + description: 'python minor version' + required: true + type: string + default: "12" + + python_patch: + description: 'python patch version' + required: true + type: string + default: "7" +# push: +# branches: +# - master + +jobs: + build_dependencies: + runs-on: windows-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: 3.${{ inputs.python_minor }}.${{ inputs.python_patch }} + + - shell: bash + run: | + echo "@echo off + call update_comfyui.bat nopause + echo - + echo This will try to update pytorch and all python dependencies. 
+ echo - + echo If you just want to update normally, close this and run update_comfyui.bat instead. + echo - + pause + ..\python_embeded\python.exe -s -m pip install --upgrade torch torchvision torchaudio ${{ inputs.xformers }} --extra-index-url https://download.pytorch.org/whl/cu${{ inputs.cu }} -r ../ComfyUI/requirements.txt pygit2 + pause" > update_comfyui_and_python_dependencies.bat + + python -m pip wheel --no-cache-dir torch torchvision torchaudio ${{ inputs.xformers }} ${{ inputs.extra_dependencies }} --extra-index-url https://download.pytorch.org/whl/cu${{ inputs.cu }} -r requirements.txt pygit2 -w ./temp_wheel_dir + python -m pip install --no-cache-dir ./temp_wheel_dir/* + echo installed basic + ls -lah temp_wheel_dir + mv temp_wheel_dir cu${{ inputs.cu }}_python_deps + tar cf cu${{ inputs.cu }}_python_deps.tar cu${{ inputs.cu }}_python_deps + + - uses: actions/cache/save@v4 + with: + path: | + cu${{ inputs.cu }}_python_deps.tar + update_comfyui_and_python_dependencies.bat + key: ${{ runner.os }}-build-cu${{ inputs.cu }}-${{ inputs.python_minor }} diff --git a/src/comfyui/.github/workflows/windows_release_nightly_pytorch.yml b/src/comfyui/.github/workflows/windows_release_nightly_pytorch.yml new file mode 100644 index 0000000000000000000000000000000000000000..07f52e0b22d7a977c4d0c8c30daa9a4c72acb999 --- /dev/null +++ b/src/comfyui/.github/workflows/windows_release_nightly_pytorch.yml @@ -0,0 +1,91 @@ +name: "Windows Release Nightly pytorch" + +on: + workflow_dispatch: + inputs: + cu: + description: 'cuda version' + required: true + type: string + default: "124" + + python_minor: + description: 'python minor version' + required: true + type: string + default: "12" + + python_patch: + description: 'python patch version' + required: true + type: string + default: "4" +# push: +# branches: +# - master + +jobs: + build: + permissions: + contents: "write" + packages: "write" + pull-requests: "read" + runs-on: windows-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + persist-credentials: false + - uses: actions/setup-python@v5 + with: + python-version: 3.${{ inputs.python_minor }}.${{ inputs.python_patch }} + - shell: bash + run: | + cd .. + cp -r ComfyUI ComfyUI_copy + curl https://www.python.org/ftp/python/3.${{ inputs.python_minor }}.${{ inputs.python_patch }}/python-3.${{ inputs.python_minor }}.${{ inputs.python_patch }}-embed-amd64.zip -o python_embeded.zip + unzip python_embeded.zip -d python_embeded + cd python_embeded + echo 'import site' >> ./python3${{ inputs.python_minor }}._pth + curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py + ./python.exe get-pip.py + python -m pip wheel torch torchvision torchaudio --pre --extra-index-url https://download.pytorch.org/whl/nightly/cu${{ inputs.cu }} -r ../ComfyUI/requirements.txt pygit2 -w ../temp_wheel_dir + ls ../temp_wheel_dir + ./python.exe -s -m pip install --pre ../temp_wheel_dir/* + sed -i '1i../ComfyUI' ./python3${{ inputs.python_minor }}._pth + cd .. 
+ + git clone --depth 1 https://github.com/comfyanonymous/taesd + cp taesd/*.pth ./ComfyUI_copy/models/vae_approx/ + + mkdir ComfyUI_windows_portable_nightly_pytorch + mv python_embeded ComfyUI_windows_portable_nightly_pytorch + mv ComfyUI_copy ComfyUI_windows_portable_nightly_pytorch/ComfyUI + + cd ComfyUI_windows_portable_nightly_pytorch + + mkdir update + cp -r ComfyUI/.ci/update_windows/* ./update/ + cp -r ComfyUI/.ci/windows_base_files/* ./ + cp -r ComfyUI/.ci/windows_nightly_base_files/* ./ + + echo "call update_comfyui.bat nopause + ..\python_embeded\python.exe -s -m pip install --upgrade --pre torch torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/nightly/cu${{ inputs.cu }} -r ../ComfyUI/requirements.txt pygit2 + pause" > ./update/update_comfyui_and_python_dependencies.bat + cd .. + + "C:\Program Files\7-Zip\7z.exe" a -t7z -m0=lzma2 -mx=8 -mfb=64 -md=32m -ms=on -mf=BCJ2 ComfyUI_windows_portable_nightly_pytorch.7z ComfyUI_windows_portable_nightly_pytorch + mv ComfyUI_windows_portable_nightly_pytorch.7z ComfyUI/ComfyUI_windows_portable_nvidia_or_cpu_nightly_pytorch.7z + + cd ComfyUI_windows_portable_nightly_pytorch + python_embeded/python.exe -s ComfyUI/main.py --quick-test-for-ci --cpu + + ls + + - name: Upload binaries to release + uses: svenstaro/upload-release-action@v2 + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + file: ComfyUI_windows_portable_nvidia_or_cpu_nightly_pytorch.7z + tag: "latest" + overwrite: true diff --git a/src/comfyui/.github/workflows/windows_release_package.yml b/src/comfyui/.github/workflows/windows_release_package.yml new file mode 100644 index 0000000000000000000000000000000000000000..11e724ba7b2d643fa4b63d3d88f66d88353e4deb --- /dev/null +++ b/src/comfyui/.github/workflows/windows_release_package.yml @@ -0,0 +1,100 @@ +name: "Windows Release packaging" + +on: + workflow_dispatch: + inputs: + cu: + description: 'cuda version' + required: true + type: string + default: "124" + + python_minor: + description: 'python minor version' + required: true + type: string + default: "12" + + python_patch: + description: 'python patch version' + required: true + type: string + default: "7" +# push: +# branches: +# - master + +jobs: + package_comfyui: + permissions: + contents: "write" + packages: "write" + pull-requests: "read" + runs-on: windows-latest + steps: + - uses: actions/cache/restore@v4 + id: cache + with: + path: | + cu${{ inputs.cu }}_python_deps.tar + update_comfyui_and_python_dependencies.bat + key: ${{ runner.os }}-build-cu${{ inputs.cu }}-${{ inputs.python_minor }} + - shell: bash + run: | + mv cu${{ inputs.cu }}_python_deps.tar ../ + mv update_comfyui_and_python_dependencies.bat ../ + cd .. + tar xf cu${{ inputs.cu }}_python_deps.tar + pwd + ls + + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + persist-credentials: false + - shell: bash + run: | + cd .. + cp -r ComfyUI ComfyUI_copy + curl https://www.python.org/ftp/python/3.${{ inputs.python_minor }}.${{ inputs.python_patch }}/python-3.${{ inputs.python_minor }}.${{ inputs.python_patch }}-embed-amd64.zip -o python_embeded.zip + unzip python_embeded.zip -d python_embeded + cd python_embeded + echo 'import site' >> ./python3${{ inputs.python_minor }}._pth + curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py + ./python.exe get-pip.py + ./python.exe -s -m pip install ../cu${{ inputs.cu }}_python_deps/* + sed -i '1i../ComfyUI' ./python3${{ inputs.python_minor }}._pth + cd .. 
+ + git clone --depth 1 https://github.com/comfyanonymous/taesd + cp taesd/*.pth ./ComfyUI_copy/models/vae_approx/ + + mkdir ComfyUI_windows_portable + mv python_embeded ComfyUI_windows_portable + mv ComfyUI_copy ComfyUI_windows_portable/ComfyUI + + cd ComfyUI_windows_portable + + mkdir update + cp -r ComfyUI/.ci/update_windows/* ./update/ + cp -r ComfyUI/.ci/windows_base_files/* ./ + cp ../update_comfyui_and_python_dependencies.bat ./update/ + + cd .. + + "C:\Program Files\7-Zip\7z.exe" a -t7z -m0=lzma2 -mx=8 -mfb=64 -md=32m -ms=on -mf=BCJ2 ComfyUI_windows_portable.7z ComfyUI_windows_portable + mv ComfyUI_windows_portable.7z ComfyUI/new_ComfyUI_windows_portable_nvidia_cu${{ inputs.cu }}_or_cpu.7z + + cd ComfyUI_windows_portable + python_embeded/python.exe -s ComfyUI/main.py --quick-test-for-ci --cpu + + ls + + - name: Upload binaries to release + uses: svenstaro/upload-release-action@v2 + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + file: new_ComfyUI_windows_portable_nvidia_cu${{ inputs.cu }}_or_cpu.7z + tag: "latest" + overwrite: true + diff --git a/src/comfyui/.gitignore b/src/comfyui/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..61881b8a4f3eb751c0343b99778348ba0d08d1ac --- /dev/null +++ b/src/comfyui/.gitignore @@ -0,0 +1,23 @@ +__pycache__/ +*.py[cod] +/output/ +/input/ +!/input/example.png +/models/ +/temp/ +/custom_nodes/ +!custom_nodes/example_node.py.example +extra_model_paths.yaml +/.vs +.vscode/ +.idea/ +venv/ +.venv/ +/web/extensions/* +!/web/extensions/logging.js.example +!/web/extensions/core/ +/tests-ui/data/object_info.json +/user/ +*.log +web_custom_versions/ +.DS_Store diff --git a/src/comfyui/.pylintrc b/src/comfyui/.pylintrc new file mode 100644 index 0000000000000000000000000000000000000000..a5da56e57ca44931ade3ea1b307fd3f955a17977 --- /dev/null +++ b/src/comfyui/.pylintrc @@ -0,0 +1,3 @@ +[MESSAGES CONTROL] +disable=all +enable=eval-used diff --git a/src/comfyui/CODEOWNERS b/src/comfyui/CODEOWNERS new file mode 100644 index 0000000000000000000000000000000000000000..7c7c3e19eaaff42625016ced81fbc3fb74586761 --- /dev/null +++ b/src/comfyui/CODEOWNERS @@ -0,0 +1 @@ +* @comfyanonymous diff --git a/src/comfyui/CONTRIBUTING.md b/src/comfyui/CONTRIBUTING.md new file mode 100644 index 0000000000000000000000000000000000000000..048f127e72ddc351451a9612d4c715f603393a84 --- /dev/null +++ b/src/comfyui/CONTRIBUTING.md @@ -0,0 +1,41 @@ +# Contributing to ComfyUI + +Welcome, and thank you for your interest in contributing to ComfyUI! + +There are several ways in which you can contribute, beyond writing code. The goal of this document is to provide a high-level overview of how you can get involved. + +## Asking Questions + +Have a question? Instead of opening an issue, please ask on [Discord](https://comfy.org/discord) or [Matrix](https://app.element.io/#/room/%23comfyui_space%3Amatrix.org) channels. Our team and the community will help you. + +## Providing Feedback + +Your comments and feedback are welcome, and the development team is available via a handful of different channels. + +See the `#bug-report`, `#feature-request` and `#feedback` channels on Discord. + +## Reporting Issues + +Have you identified a reproducible problem in ComfyUI? Do you have a feature request? We want to hear about it! Here's how you can report your issue as effectively as possible. 
+ + +### Look For an Existing Issue + +Before you create a new issue, please do a search in [open issues](https://github.com/comfyanonymous/ComfyUI/issues) to see if the issue or feature request has already been filed. + +If you find your issue already exists, make relevant comments and add your [reaction](https://github.com/blog/2119-add-reactions-to-pull-requests-issues-and-comments). Use a reaction in place of a "+1" comment: + +* 👍 - upvote +* 👎 - downvote + +If you cannot find an existing issue that describes your bug or feature, create a new issue. We have an issue template in place to organize new issues. + + +### Creating Pull Requests + +* Please refer to the article on [creating pull requests](https://github.com/comfyanonymous/ComfyUI/wiki/How-to-Contribute-Code) and contributing to this project. + + +## Thank You + +Your contributions to open source, large or small, make great projects like this possible. Thank you for taking the time to contribute. diff --git a/src/comfyui/LICENSE b/src/comfyui/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f288702d2fa16d3cdf0035b15a9fcbc552cd88e7 --- /dev/null +++ b/src/comfyui/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. 
For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. 
+ + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 
+ + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year> <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program> Copyright (C) <year> <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/src/comfyui/README.md b/src/comfyui/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..8e5cef450db05524ef4da14b7e5df309dcee3134
--- /dev/null
+++ b/src/comfyui/README.md
@@ -0,0 +1,299 @@
+
+ +# ComfyUI +**The most powerful and modular diffusion model GUI and backend.** + + +[![Website][website-shield]][website-url] +[![Dynamic JSON Badge][discord-shield]][discord-url] +[![Matrix][matrix-shield]][matrix-url] +
+[![][github-release-shield]][github-release-link] +[![][github-release-date-shield]][github-release-link] +[![][github-downloads-shield]][github-downloads-link] +[![][github-downloads-latest-shield]][github-downloads-link] + +[matrix-shield]: https://img.shields.io/badge/Matrix-000000?style=flat&logo=matrix&logoColor=white +[matrix-url]: https://app.element.io/#/room/%23comfyui_space%3Amatrix.org +[website-shield]: https://img.shields.io/badge/ComfyOrg-4285F4?style=flat +[website-url]: https://www.comfy.org/ + +[discord-shield]: https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fdiscord.com%2Fapi%2Finvites%2Fcomfyorg%3Fwith_counts%3Dtrue&query=%24.approximate_member_count&logo=discord&logoColor=white&label=Discord&color=green&suffix=%20total +[discord-url]: https://www.comfy.org/discord + +[github-release-shield]: https://img.shields.io/github/v/release/comfyanonymous/ComfyUI?style=flat&sort=semver +[github-release-link]: https://github.com/comfyanonymous/ComfyUI/releases +[github-release-date-shield]: https://img.shields.io/github/release-date/comfyanonymous/ComfyUI?style=flat +[github-downloads-shield]: https://img.shields.io/github/downloads/comfyanonymous/ComfyUI/total?style=flat +[github-downloads-latest-shield]: https://img.shields.io/github/downloads/comfyanonymous/ComfyUI/latest/total?style=flat&label=downloads%40latest +[github-downloads-link]: https://github.com/comfyanonymous/ComfyUI/releases + +![ComfyUI Screenshot](comfyui_screenshot.png) +
+
+This UI lets you design and execute advanced stable diffusion pipelines using a graph/nodes/flowchart-based interface. For some workflow examples, and to see what ComfyUI can do, check out:
+### [ComfyUI Examples](https://comfyanonymous.github.io/ComfyUI_examples/)
+
+### [Installing ComfyUI](#installing)
+
+## Features
+- Nodes/graph/flowchart interface to experiment and create complex Stable Diffusion workflows without needing to code anything.
+- Fully supports SD1.x, SD2.x, [SDXL](https://comfyanonymous.github.io/ComfyUI_examples/sdxl/), [Stable Video Diffusion](https://comfyanonymous.github.io/ComfyUI_examples/video/), [Stable Cascade](https://comfyanonymous.github.io/ComfyUI_examples/stable_cascade/), [SD3](https://comfyanonymous.github.io/ComfyUI_examples/sd3/) and [Stable Audio](https://comfyanonymous.github.io/ComfyUI_examples/audio/)
+- [Flux](https://comfyanonymous.github.io/ComfyUI_examples/flux/)
+- Asynchronous queue system (see the API sketch after this list)
+- Many optimizations: only re-executes the parts of the workflow that change between executions.
+- Smart memory management: can automatically run models on GPUs with as little as 1GB of VRAM.
+- Works even if you don't have a GPU, with: ```--cpu``` (slow)
+- Can load ckpt, safetensors and diffusers models/checkpoints. Standalone VAEs and CLIP models.
+- Embeddings/Textual inversion
+- [Loras (regular, locon and loha)](https://comfyanonymous.github.io/ComfyUI_examples/lora/)
+- [Hypernetworks](https://comfyanonymous.github.io/ComfyUI_examples/hypernetworks/)
+- Loading full workflows (with seeds) from generated PNG, WebP and FLAC files.
+- Saving/loading workflows as JSON files.
+- The nodes interface can be used to create complex workflows, like one for [Hires fix](https://comfyanonymous.github.io/ComfyUI_examples/2_pass_txt2img/) or much more advanced ones.
+- [Area Composition](https://comfyanonymous.github.io/ComfyUI_examples/area_composition/)
+- [Inpainting](https://comfyanonymous.github.io/ComfyUI_examples/inpaint/) with both regular and inpainting models.
+- [ControlNet and T2I-Adapter](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/)
+- [Upscale Models (ESRGAN, ESRGAN variants, SwinIR, Swin2SR, etc...)](https://comfyanonymous.github.io/ComfyUI_examples/upscale_models/)
+- [unCLIP Models](https://comfyanonymous.github.io/ComfyUI_examples/unclip/)
+- [GLIGEN](https://comfyanonymous.github.io/ComfyUI_examples/gligen/)
+- [Model Merging](https://comfyanonymous.github.io/ComfyUI_examples/model_merging/)
+- [LCM models and Loras](https://comfyanonymous.github.io/ComfyUI_examples/lcm/)
+- [SDXL Turbo](https://comfyanonymous.github.io/ComfyUI_examples/sdturbo/)
+- [AuraFlow](https://comfyanonymous.github.io/ComfyUI_examples/aura_flow/)
+- [HunyuanDiT](https://comfyanonymous.github.io/ComfyUI_examples/hunyuan_dit/)
+- Latent previews with [TAESD](#how-to-show-high-quality-previews)
+- Starts up very fast.
+- Works fully offline: will never download anything.
+- [Config file](extra_model_paths.yaml.example) to set the search paths for models.
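+
+The queue can also be driven over plain HTTP. As an illustrative sketch only (it assumes the default listen address `127.0.0.1:8188` and a workflow exported in the API JSON format; the file name is hypothetical), queueing a workflow from a script can look like this:
+
+```python
+import json
+import urllib.request
+
+# "workflow_api.json" is a placeholder name; export a workflow in API format from the UI.
+with open("workflow_api.json") as f:
+    workflow = json.load(f)
+
+req = urllib.request.Request(
+    "http://127.0.0.1:8188/prompt",  # default server address assumed
+    data=json.dumps({"prompt": workflow}).encode(),
+    headers={"Content-Type": "application/json"},
+)
+print(urllib.request.urlopen(req).read().decode())  # response includes the queued prompt id
+```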
+
+Workflow examples can be found on the [Examples page](https://comfyanonymous.github.io/ComfyUI_examples/)
+
+## Shortcuts
+
+| Keybind | Explanation |
+|------------------------------------|--------------------------------------------------------------------------------------------------------------------|
+| Ctrl + Enter | Queue up current graph for generation |
+| Ctrl + Shift + Enter | Queue up current graph as first for generation |
+| Ctrl + Alt + Enter | Cancel current generation |
+| Ctrl + Z/Ctrl + Y | Undo/Redo |
+| Ctrl + S | Save workflow |
+| Ctrl + O | Load workflow |
+| Ctrl + A | Select all nodes |
+| Alt + C | Collapse/uncollapse selected nodes |
+| Ctrl + M | Mute/unmute selected nodes |
+| Ctrl + B | Bypass selected nodes (acts like the node was removed from the graph and the wires reconnected through) |
+| Delete/Backspace | Delete selected nodes |
+| Ctrl + Backspace | Delete the current graph |
+| Space | Move the canvas around when held and moving the cursor |
+| Ctrl/Shift + Click | Add clicked node to selection |
+| Ctrl + C/Ctrl + V | Copy and paste selected nodes (without maintaining connections to outputs of unselected nodes) |
+| Ctrl + C/Ctrl + Shift + V | Copy and paste selected nodes (maintaining connections from outputs of unselected nodes to inputs of pasted nodes) |
+| Shift + Drag | Move multiple selected nodes at the same time |
+| Ctrl + D | Load default graph |
+| Alt + `+` | Canvas Zoom in |
+| Alt + `-` | Canvas Zoom out |
+| Ctrl + Shift + LMB + Vertical drag | Canvas Zoom in/out |
+| P | Pin/Unpin selected nodes |
+| Ctrl + G | Group selected nodes |
+| Q | Toggle visibility of the queue |
+| H | Toggle visibility of history |
+| R | Refresh graph |
+| Double-Click LMB | Open node quick search palette |
+| Shift + Drag | Move multiple wires at once |
+| Ctrl + Alt + LMB | Disconnect all wires from clicked slot |
+
+For macOS users, Ctrl can be replaced with Cmd.
+
+# Installing
+
+## Windows
+
+A portable standalone build for Windows, which should work for running on Nvidia GPUs or on your CPU only, is available on the [releases page](https://github.com/comfyanonymous/ComfyUI/releases).
+
+### [Direct link to download](https://github.com/comfyanonymous/ComfyUI/releases/latest/download/ComfyUI_windows_portable_nvidia.7z)
+
+Simply download, extract with [7-Zip](https://7-zip.org) and run. Make sure you put your Stable Diffusion checkpoints/models (the huge ckpt/safetensors files) in: ComfyUI\models\checkpoints
+
+If you have trouble extracting it, right-click the file -> Properties -> Unblock
+
+#### How do I share models between another UI and ComfyUI?
+
+See the [Config file](extra_model_paths.yaml.example) to set the search paths for models. In the standalone Windows build you can find this file in the ComfyUI directory. Rename this file to extra_model_paths.yaml and edit it with your favorite text editor.
+
+## Jupyter Notebook
+
+To run it on services like Paperspace, Kaggle or Colab you can use my [Jupyter Notebook](notebooks/comfyui_colab.ipynb)
+
+## Manual Install (Windows, Linux)
+
+Note that some dependencies do not yet support Python 3.13, so using 3.12 is recommended.
+
+Git clone this repo.
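+
+For example (the URL is the repository this README belongs to):
+
+```
+git clone https://github.com/comfyanonymous/ComfyUI.git
+cd ComfyUI
+```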
+
+Put your SD checkpoints (the huge ckpt/safetensors files) in: models/checkpoints
+
+Put your VAE in: models/vae
+
+
+### AMD GPUs (Linux only)
+AMD users can install ROCm and PyTorch with pip. If you don't have them installed already, this is the command to install the stable version:
+
+```pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/rocm6.1```
+
+This is the command to install the nightly with ROCm 6.2, which might have some performance improvements:
+
+```pip install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/rocm6.2```
+
+### NVIDIA
+
+Nvidia users should install stable PyTorch using this command:
+
+```pip install torch torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/cu124```
+
+This is the command to install PyTorch nightly instead, which might have performance improvements:
+
+```pip install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/cu124```
+
+#### Troubleshooting
+
+If you get the "Torch not compiled with CUDA enabled" error, uninstall torch with:
+
+```pip uninstall torch```
+
+And install it again with the command above.
+
+### Dependencies
+
+Install the dependencies by opening your terminal inside the ComfyUI folder and running:
+
+```pip install -r requirements.txt```
+
+After this you should have everything installed and can proceed to running ComfyUI.
+
+### Others:
+
+#### Intel GPUs
+
+Intel GPU support is available for all Intel GPUs supported by Intel's Extension for PyTorch (IPEX), with the support requirements listed on the [Installation](https://intel.github.io/intel-extension-for-pytorch/index.html#installation?platform=gpu) page. Choose your platform and method of install and follow the instructions. The steps are as follows:
+
+1. Start by installing, if needed, the drivers or kernel listed (or newer) on the IPEX Installation page linked above for Windows and Linux.
+1. Follow the instructions to install [Intel's oneAPI Basekit](https://www.intel.com/content/www/us/en/developer/tools/oneapi/base-toolkit-download.html) for your platform.
+1. Install the packages for IPEX using the instructions provided in the Installation page for your platform.
+1. Follow the [ComfyUI manual installation](#manual-install-windows-linux) instructions for Windows and Linux and run ComfyUI normally as described above after everything is installed.
+
+Additional discussion and help can be found [here](https://github.com/comfyanonymous/ComfyUI/discussions/476).
+
+#### Apple Mac silicon
+
+You can install ComfyUI on Apple silicon Macs (M1 or M2) with any recent macOS version.
+
+1. Install PyTorch nightly. For instructions, read the [Accelerated PyTorch training on Mac](https://developer.apple.com/metal/pytorch/) Apple Developer guide (make sure to install the latest PyTorch nightly).
+1. Follow the [ComfyUI manual installation](#manual-install-windows-linux) instructions for Windows and Linux.
+1. Install the ComfyUI [dependencies](#dependencies). If you have another Stable Diffusion UI [you might be able to reuse the dependencies](#i-already-have-another-ui-for-stable-diffusion-installed-do-i-really-have-to-install-all-of-these-dependencies).
+1. Launch ComfyUI by running `python main.py`
+
+> **Note**: Remember to add your models, VAE, LoRAs etc. to the corresponding Comfy folders, as discussed in [ComfyUI manual installation](#manual-install-windows-linux).
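+
+Whichever backend you installed, a quick way to confirm that PyTorch can see your accelerator before launching ComfyUI is a short check like the following (a minimal sketch; it only assumes the `torch` package installed above):
+
+```python
+import torch
+
+print("torch", torch.__version__)
+# True for both CUDA and ROCm builds of PyTorch
+print("cuda available:", torch.cuda.is_available())
+if torch.cuda.is_available():
+    print("device:", torch.cuda.get_device_name(0))
+# Apple silicon (Metal) backend
+if hasattr(torch.backends, "mps"):
+    print("mps available:", torch.backends.mps.is_available())
+```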
+
+#### DirectML (AMD Cards on Windows)
+
+```pip install torch-directml``` Then you can launch ComfyUI with: ```python main.py --directml```
+
+# Running
+
+```python main.py```
+
+### For AMD cards not officially supported by ROCm
+
+Try running it with this command if you have issues:
+
+For 6700, 6600 and maybe other RDNA2 or older: ```HSA_OVERRIDE_GFX_VERSION=10.3.0 python main.py```
+
+For AMD 7600 and maybe other RDNA3 cards: ```HSA_OVERRIDE_GFX_VERSION=11.0.0 python main.py```
+
+# Notes
+
+Only parts of the graph that have an output with all the correct inputs will be executed.
+
+Only parts of the graph that change from one execution to the next will be executed; if you submit the same graph twice, only the first run executes. If you change the last part of the graph, only the part you changed and the parts that depend on it will be executed.
+
+Dragging a generated PNG onto the webpage or loading one will give you the full workflow, including the seeds that were used to create it.
+
+You can use () to change the emphasis of a word or phrase, like: (good code:1.2) or (bad code:0.8). The default emphasis for () is 1.1. To use () characters in your actual prompt, escape them like \\( or \\).
+
+You can use {day|night} for wildcard/dynamic prompts. With this syntax "{wild|card|test}" will be randomly replaced by either "wild", "card" or "test" by the frontend every time you queue the prompt. To use {} characters in your actual prompt, escape them like: \\{ or \\}.
+
+Dynamic prompts also support C-style comments, like `// comment` or `/* comment */`.
+
+To use textual inversion concepts/embeddings in a text prompt, put them in the models/embeddings directory and use them in the CLIPTextEncode node like this (you can omit the .pt extension):
+
+```embedding:embedding_filename.pt```
+
+
+## How to show high-quality previews?
+
+Use ```--preview-method auto``` to enable previews.
+
+The default installation includes a fast latent preview method that's low-resolution. To enable higher-quality previews with [TAESD](https://github.com/madebyollin/taesd), download the [taesd_decoder.pth, taesdxl_decoder.pth, taesd3_decoder.pth and taef1_decoder.pth](https://github.com/madebyollin/taesd/) and place them in the `models/vae_approx` folder. Once they're installed, restart ComfyUI and launch it with `--preview-method taesd` to enable high-quality previews.
+
+## How to use TLS/SSL?
+Generate a self-signed certificate (not appropriate for shared/production use) and key by running the command: `openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -sha256 -days 3650 -nodes -subj "/C=XX/ST=StateName/L=CityName/O=CompanyName/OU=CompanySectionName/CN=CommonNameOrHostname"`
+
+Use `--tls-keyfile key.pem --tls-certfile cert.pem` to enable TLS/SSL; the app will then be accessible with `https://...` instead of `http://...`.
+
+> Note: Windows users can use [alexisrolland/docker-openssl](https://github.com/alexisrolland/docker-openssl) or one of the [3rd party binary distributions](https://wiki.openssl.org/index.php/Binaries) to run the command example above.

If you use a container, note that the volume mount `-v` can be a relative path, so `... -v ".\:/openssl-certs" ...` would create the key & cert files in the current directory of your command prompt or PowerShell terminal.
+
+## Support and dev channel
+
+[Matrix space: #comfyui_space:matrix.org](https://app.element.io/#/room/%23comfyui_space%3Amatrix.org) (like Discord, but open source).
+
+See also: [https://www.comfy.org/](https://www.comfy.org/)
+
+## Frontend Development
+
+As of August 15, 2024, we have transitioned to a new frontend, which is now hosted in a separate repository: [ComfyUI Frontend](https://github.com/Comfy-Org/ComfyUI_frontend). The main ComfyUI repository now ships the compiled JS (built from TS/Vue) under the `web/` directory.
+
+### Reporting Issues and Requesting Features
+
+For any bugs, issues, or feature requests related to the frontend, please use the [ComfyUI Frontend repository](https://github.com/Comfy-Org/ComfyUI_frontend). This will help us manage and address frontend-specific concerns more efficiently.
+
+### Using the Latest Frontend
+
+The new frontend is now the default for ComfyUI. However, please note:
+
+1. The frontend in the main ComfyUI repository is updated weekly.
+2. Daily releases are available in the separate frontend repository.
+
+To use the most up-to-date frontend version:
+
+1. For the latest daily release, launch ComfyUI with this command line argument:
+
+   ```
+   --front-end-version Comfy-Org/ComfyUI_frontend@latest
+   ```
+
+2. For a specific version, replace `latest` with the desired version number:
+
+   ```
+   --front-end-version Comfy-Org/ComfyUI_frontend@1.2.2
+   ```
+
+This approach allows you to easily switch between the stable weekly release and the cutting-edge daily updates, or even specific versions for testing purposes.
+
+### Accessing the Legacy Frontend
+
+If you need to use the legacy frontend for any reason, you can access it using the following command line argument:
+
+```
+--front-end-version Comfy-Org/ComfyUI_legacy_frontend@latest
+```
+
+This will use a snapshot of the legacy frontend preserved in the [ComfyUI Legacy Frontend repository](https://github.com/Comfy-Org/ComfyUI_legacy_frontend).
+
+# QA
+
+### Which GPU should I buy for this?
+ +[See this page for some recommendations](https://github.com/comfyanonymous/ComfyUI/wiki/Which-GPU-should-I-buy-for-ComfyUI) + diff --git a/src/comfyui/__pycache__/cuda_malloc.cpython-310.pyc b/src/comfyui/__pycache__/cuda_malloc.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d208089c91e30ccf0bd9998e744316fdb210636e Binary files /dev/null and b/src/comfyui/__pycache__/cuda_malloc.cpython-310.pyc differ diff --git a/src/comfyui/__pycache__/cuda_malloc.cpython-38.pyc b/src/comfyui/__pycache__/cuda_malloc.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..180b0cfdb4b5953b06f2b4613055ddc027afec94 Binary files /dev/null and b/src/comfyui/__pycache__/cuda_malloc.cpython-38.pyc differ diff --git a/src/comfyui/__pycache__/execution.cpython-310.pyc b/src/comfyui/__pycache__/execution.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bba4cdf6828a1938cb083a6fda6b0bf5b77677eb Binary files /dev/null and b/src/comfyui/__pycache__/execution.cpython-310.pyc differ diff --git a/src/comfyui/__pycache__/execution.cpython-38.pyc b/src/comfyui/__pycache__/execution.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..87f4a199d9e9b907b4467a9a7a06d8989973adcd Binary files /dev/null and b/src/comfyui/__pycache__/execution.cpython-38.pyc differ diff --git a/src/comfyui/__pycache__/folder_paths.cpython-310.pyc b/src/comfyui/__pycache__/folder_paths.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9caddc926b426d15d84ac01a157857a1947e981e Binary files /dev/null and b/src/comfyui/__pycache__/folder_paths.cpython-310.pyc differ diff --git a/src/comfyui/__pycache__/folder_paths.cpython-38.pyc b/src/comfyui/__pycache__/folder_paths.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a7e931a66982ca7e9bbcd38d0607050c4bc45c43 Binary files /dev/null and b/src/comfyui/__pycache__/folder_paths.cpython-38.pyc differ diff --git a/src/comfyui/__pycache__/latent_preview.cpython-310.pyc b/src/comfyui/__pycache__/latent_preview.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..95f9840e5730e722f0b4afe25923a391932f37e4 Binary files /dev/null and b/src/comfyui/__pycache__/latent_preview.cpython-310.pyc differ diff --git a/src/comfyui/__pycache__/node_helpers.cpython-310.pyc b/src/comfyui/__pycache__/node_helpers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a7f4068aee9536a532bcd9e590b30ebc59a04dbf Binary files /dev/null and b/src/comfyui/__pycache__/node_helpers.cpython-310.pyc differ diff --git a/src/comfyui/__pycache__/nodes.cpython-310.pyc b/src/comfyui/__pycache__/nodes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b0ed97776e5713f9c17bbe8cb565dcf12a73cdbd Binary files /dev/null and b/src/comfyui/__pycache__/nodes.cpython-310.pyc differ diff --git a/src/comfyui/__pycache__/nodes.cpython-38.pyc b/src/comfyui/__pycache__/nodes.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..84131cfdad842aac3cc020cbe54fe80a12583658 Binary files /dev/null and b/src/comfyui/__pycache__/nodes.cpython-38.pyc differ diff --git a/src/comfyui/__pycache__/server.cpython-310.pyc b/src/comfyui/__pycache__/server.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eb62ea77596abe68036b7b9d27d954c43d5cb510 Binary files /dev/null and b/src/comfyui/__pycache__/server.cpython-310.pyc 
differ
diff --git a/src/comfyui/api_server/__init__.py b/src/comfyui/api_server/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/comfyui/api_server/__pycache__/__init__.cpython-310.pyc b/src/comfyui/api_server/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..96dc528051ecde313a2ea9408f7d2fedccbcbcae
Binary files /dev/null and b/src/comfyui/api_server/__pycache__/__init__.cpython-310.pyc differ
diff --git a/src/comfyui/api_server/routes/__init__.py b/src/comfyui/api_server/routes/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/comfyui/api_server/routes/__pycache__/__init__.cpython-310.pyc b/src/comfyui/api_server/routes/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b8e936175189d050bd111b9c411a383c4d151cb7
Binary files /dev/null and b/src/comfyui/api_server/routes/__pycache__/__init__.cpython-310.pyc differ
diff --git a/src/comfyui/api_server/routes/internal/README.md b/src/comfyui/api_server/routes/internal/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..35330c36f83962385b4afe4653c5967f2bdb73c1
--- /dev/null
+++ b/src/comfyui/api_server/routes/internal/README.md
@@ -0,0 +1,3 @@
+# ComfyUI Internal Routes
+
+All routes under the `/internal` path are designated for **internal use by ComfyUI only**. These routes are not intended for use by external applications and may change at any time without notice.
diff --git a/src/comfyui/api_server/routes/internal/__init__.py b/src/comfyui/api_server/routes/internal/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/comfyui/api_server/routes/internal/__pycache__/__init__.cpython-310.pyc b/src/comfyui/api_server/routes/internal/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c498ab27aa50ad721d70c90eac74a189a1610234
Binary files /dev/null and b/src/comfyui/api_server/routes/internal/__pycache__/__init__.cpython-310.pyc differ
diff --git a/src/comfyui/api_server/routes/internal/__pycache__/internal_routes.cpython-310.pyc b/src/comfyui/api_server/routes/internal/__pycache__/internal_routes.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..19d571f10381638a5fdcaee661f32d56d886b230
Binary files /dev/null and b/src/comfyui/api_server/routes/internal/__pycache__/internal_routes.cpython-310.pyc differ
diff --git a/src/comfyui/api_server/routes/internal/internal_routes.py b/src/comfyui/api_server/routes/internal/internal_routes.py
new file mode 100644
index 0000000000000000000000000000000000000000..63704f13a6dc69274b2a057993a9874a0ada33e0
--- /dev/null
+++ b/src/comfyui/api_server/routes/internal/internal_routes.py
@@ -0,0 +1,51 @@
+from aiohttp import web
+from typing import Optional
+from folder_paths import models_dir, user_directory, output_directory, folder_names_and_paths
+from api_server.services.file_service import FileService
+import app.logger
+
+class InternalRoutes:
+    '''
+    The top level web router for internal routes: /internal/*
+    The endpoints here should NOT be depended upon. It is for ComfyUI frontend use only.
+    Check README.md for more information.
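+    For illustration only (not a stable API): a frontend build might call
+    GET /internal/files?directory=output to list files under the output
+    directory, or GET /internal/logs to read the captured log buffer.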
+ + ''' + def __init__(self): + self.routes: web.RouteTableDef = web.RouteTableDef() + self._app: Optional[web.Application] = None + self.file_service = FileService({ + "models": models_dir, + "user": user_directory, + "output": output_directory + }) + + def setup_routes(self): + @self.routes.get('/files') + async def list_files(request): + directory_key = request.query.get('directory', '') + try: + file_list = self.file_service.list_files(directory_key) + return web.json_response({"files": file_list}) + except ValueError as e: + return web.json_response({"error": str(e)}, status=400) + except Exception as e: + return web.json_response({"error": str(e)}, status=500) + + @self.routes.get('/logs') + async def get_logs(request): + return web.json_response(app.logger.get_logs()) + + @self.routes.get('/folder_paths') + async def get_folder_paths(request): + response = {} + for key in folder_names_and_paths: + response[key] = folder_names_and_paths[key][0] + return web.json_response(response) + + def get_app(self): + if self._app is None: + self._app = web.Application() + self.setup_routes() + self._app.add_routes(self.routes) + return self._app diff --git a/src/comfyui/api_server/services/__init__.py b/src/comfyui/api_server/services/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/comfyui/api_server/services/__pycache__/__init__.cpython-310.pyc b/src/comfyui/api_server/services/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ccd1e565df19015dd05a17c04116403c301d65e8 Binary files /dev/null and b/src/comfyui/api_server/services/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/api_server/services/__pycache__/file_service.cpython-310.pyc b/src/comfyui/api_server/services/__pycache__/file_service.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d9c7dbf20322ba32f96d76c6063d402d17c39f47 Binary files /dev/null and b/src/comfyui/api_server/services/__pycache__/file_service.cpython-310.pyc differ diff --git a/src/comfyui/api_server/services/file_service.py b/src/comfyui/api_server/services/file_service.py new file mode 100644 index 0000000000000000000000000000000000000000..394571084e97909f9aa2544500c6ebb7697c962c --- /dev/null +++ b/src/comfyui/api_server/services/file_service.py @@ -0,0 +1,13 @@ +from typing import Dict, List, Optional +from api_server.utils.file_operations import FileSystemOperations, FileSystemItem + +class FileService: + def __init__(self, allowed_directories: Dict[str, str], file_system_ops: Optional[FileSystemOperations] = None): + self.allowed_directories: Dict[str, str] = allowed_directories + self.file_system_ops: FileSystemOperations = file_system_ops or FileSystemOperations() + + def list_files(self, directory_key: str) -> List[FileSystemItem]: + if directory_key not in self.allowed_directories: + raise ValueError("Invalid directory key") + directory_path: str = self.allowed_directories[directory_key] + return self.file_system_ops.walk_directory(directory_path) \ No newline at end of file diff --git a/src/comfyui/api_server/utils/__pycache__/file_operations.cpython-310.pyc b/src/comfyui/api_server/utils/__pycache__/file_operations.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..304eb1b0c86b8b2928f5d9257047db4722c4fa8d Binary files /dev/null and b/src/comfyui/api_server/utils/__pycache__/file_operations.cpython-310.pyc differ diff --git 
a/src/comfyui/api_server/utils/file_operations.py b/src/comfyui/api_server/utils/file_operations.py new file mode 100644 index 0000000000000000000000000000000000000000..ef1bf999e52871ad2d338cde8bdb8b48d4747b00 --- /dev/null +++ b/src/comfyui/api_server/utils/file_operations.py @@ -0,0 +1,42 @@ +import os +from typing import List, Union, TypedDict, Literal +from typing_extensions import TypeGuard +class FileInfo(TypedDict): + name: str + path: str + type: Literal["file"] + size: int + +class DirectoryInfo(TypedDict): + name: str + path: str + type: Literal["directory"] + +FileSystemItem = Union[FileInfo, DirectoryInfo] + +def is_file_info(item: FileSystemItem) -> TypeGuard[FileInfo]: + return item["type"] == "file" + +class FileSystemOperations: + @staticmethod + def walk_directory(directory: str) -> List[FileSystemItem]: + file_list: List[FileSystemItem] = [] + for root, dirs, files in os.walk(directory): + for name in files: + file_path = os.path.join(root, name) + relative_path = os.path.relpath(file_path, directory) + file_list.append({ + "name": name, + "path": relative_path, + "type": "file", + "size": os.path.getsize(file_path) + }) + for name in dirs: + dir_path = os.path.join(root, name) + relative_path = os.path.relpath(dir_path, directory) + file_list.append({ + "name": name, + "path": relative_path, + "type": "directory" + }) + return file_list \ No newline at end of file diff --git a/src/comfyui/app/__init__.py b/src/comfyui/app/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/comfyui/app/__pycache__/__init__.cpython-310.pyc b/src/comfyui/app/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d2b11682843b915a84c9aec6ebed0af35f162b65 Binary files /dev/null and b/src/comfyui/app/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/app/__pycache__/__init__.cpython-38.pyc b/src/comfyui/app/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b7124b99bccad9927aa98cd6c0ea3b645b077322 Binary files /dev/null and b/src/comfyui/app/__pycache__/__init__.cpython-38.pyc differ diff --git a/src/comfyui/app/__pycache__/app_settings.cpython-310.pyc b/src/comfyui/app/__pycache__/app_settings.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f6db903ec73a22184d6314fbe8f4ab96a20107d1 Binary files /dev/null and b/src/comfyui/app/__pycache__/app_settings.cpython-310.pyc differ diff --git a/src/comfyui/app/__pycache__/frontend_management.cpython-310.pyc b/src/comfyui/app/__pycache__/frontend_management.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a879b9260d66bac6eda35e6d4c9cd97e189106c5 Binary files /dev/null and b/src/comfyui/app/__pycache__/frontend_management.cpython-310.pyc differ diff --git a/src/comfyui/app/__pycache__/logger.cpython-310.pyc b/src/comfyui/app/__pycache__/logger.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4f5da1fddd31a3f82e0c57ee91b60ac3c7a2718b Binary files /dev/null and b/src/comfyui/app/__pycache__/logger.cpython-310.pyc differ diff --git a/src/comfyui/app/__pycache__/logger.cpython-38.pyc b/src/comfyui/app/__pycache__/logger.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0fa3daa7fd218a4fb3eaf3e1c72adebb44cc9bbd Binary files /dev/null and b/src/comfyui/app/__pycache__/logger.cpython-38.pyc differ diff --git 
a/src/comfyui/app/__pycache__/user_manager.cpython-310.pyc b/src/comfyui/app/__pycache__/user_manager.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dc2a80e550992bccaf97c09089d04b4ed93b9fb3 Binary files /dev/null and b/src/comfyui/app/__pycache__/user_manager.cpython-310.pyc differ diff --git a/src/comfyui/app/app_settings.py b/src/comfyui/app/app_settings.py new file mode 100644 index 0000000000000000000000000000000000000000..8c6edc56c1d68cca481e7c78487454e278f6b326 --- /dev/null +++ b/src/comfyui/app/app_settings.py @@ -0,0 +1,54 @@ +import os +import json +from aiohttp import web + + +class AppSettings(): + def __init__(self, user_manager): + self.user_manager = user_manager + + def get_settings(self, request): + file = self.user_manager.get_request_user_filepath( + request, "comfy.settings.json") + if os.path.isfile(file): + with open(file) as f: + return json.load(f) + else: + return {} + + def save_settings(self, request, settings): + file = self.user_manager.get_request_user_filepath( + request, "comfy.settings.json") + with open(file, "w") as f: + f.write(json.dumps(settings, indent=4)) + + def add_routes(self, routes): + @routes.get("/settings") + async def get_settings(request): + return web.json_response(self.get_settings(request)) + + @routes.get("/settings/{id}") + async def get_setting(request): + value = None + settings = self.get_settings(request) + setting_id = request.match_info.get("id", None) + if setting_id and setting_id in settings: + value = settings[setting_id] + return web.json_response(value) + + @routes.post("/settings") + async def post_settings(request): + settings = self.get_settings(request) + new_settings = await request.json() + self.save_settings(request, {**settings, **new_settings}) + return web.Response(status=200) + + @routes.post("/settings/{id}") + async def post_setting(request): + setting_id = request.match_info.get("id", None) + if not setting_id: + return web.Response(status=400) + settings = self.get_settings(request) + settings[setting_id] = await request.json() + self.save_settings(request, settings) + return web.Response(status=200) \ No newline at end of file diff --git a/src/comfyui/app/frontend_management.py b/src/comfyui/app/frontend_management.py new file mode 100644 index 0000000000000000000000000000000000000000..6f20e439c306fe4717f33b6dc66296e090dbcd57 --- /dev/null +++ b/src/comfyui/app/frontend_management.py @@ -0,0 +1,204 @@ +from __future__ import annotations +import argparse +import logging +import os +import re +import tempfile +import zipfile +from dataclasses import dataclass +from functools import cached_property +from pathlib import Path +from typing import TypedDict, Optional + +import requests +from typing_extensions import NotRequired +from comfy.cli_args import DEFAULT_VERSION_STRING + + +REQUEST_TIMEOUT = 10 # seconds + + +class Asset(TypedDict): + url: str + + +class Release(TypedDict): + id: int + tag_name: str + name: str + prerelease: bool + created_at: str + published_at: str + body: str + assets: NotRequired[list[Asset]] + + +@dataclass +class FrontEndProvider: + owner: str + repo: str + + @property + def folder_name(self) -> str: + return f"{self.owner}_{self.repo}" + + @property + def release_url(self) -> str: + return f"https://api.github.com/repos/{self.owner}/{self.repo}/releases" + + @cached_property + def all_releases(self) -> list[Release]: + releases = [] + api_url = self.release_url + while api_url: + response = requests.get(api_url, timeout=REQUEST_TIMEOUT) + 
response.raise_for_status() # Raises an HTTPError if the response was an error + releases.extend(response.json()) + # GitHub uses the Link header to provide pagination links. Check if it exists and update api_url accordingly. + if "next" in response.links: + api_url = response.links["next"]["url"] + else: + api_url = None + return releases + + @cached_property + def latest_release(self) -> Release: + latest_release_url = f"{self.release_url}/latest" + response = requests.get(latest_release_url, timeout=REQUEST_TIMEOUT) + response.raise_for_status() # Raises an HTTPError if the response was an error + return response.json() + + def get_release(self, version: str) -> Release: + if version == "latest": + return self.latest_release + else: + for release in self.all_releases: + if release["tag_name"] in [version, f"v{version}"]: + return release + raise ValueError(f"Version {version} not found in releases") + + +def download_release_asset_zip(release: Release, destination_path: str) -> None: + """Download dist.zip from a GitHub release.""" + asset_url = None + for asset in release.get("assets", []): + if asset["name"] == "dist.zip": + asset_url = asset["url"] + break + + if not asset_url: + raise ValueError("dist.zip not found in the release assets") + + # Use a temporary file to download the zip content + with tempfile.TemporaryFile() as tmp_file: + headers = {"Accept": "application/octet-stream"} + response = requests.get( + asset_url, headers=headers, allow_redirects=True, timeout=REQUEST_TIMEOUT + ) + response.raise_for_status() # Ensure we got a successful response + + # Write the content to the temporary file + tmp_file.write(response.content) + + # Go back to the beginning of the temporary file + tmp_file.seek(0) + + # Extract the zip file content to the destination path + with zipfile.ZipFile(tmp_file, "r") as zip_ref: + zip_ref.extractall(destination_path) + + +class FrontendManager: + DEFAULT_FRONTEND_PATH = str(Path(__file__).parents[1] / "web") + CUSTOM_FRONTENDS_ROOT = str(Path(__file__).parents[1] / "web_custom_versions") + + @classmethod + def parse_version_string(cls, value: str) -> tuple[str, str, str]: + """ + Args: + value (str): The version string to parse. + + Returns: + tuple[str, str, str]: A tuple of repository owner, repository name, and version. + + Raises: + argparse.ArgumentTypeError: If the version string is invalid. + """ + VERSION_PATTERN = r"^([a-zA-Z0-9][a-zA-Z0-9-]{0,38})/([a-zA-Z0-9_.-]+)@(v?\d+\.\d+\.\d+|latest)$" + match_result = re.match(VERSION_PATTERN, value) + if match_result is None: + raise argparse.ArgumentTypeError(f"Invalid version string: {value}") + + return match_result.group(1), match_result.group(2), match_result.group(3) + + @classmethod + def init_frontend_unsafe(cls, version_string: str, provider: Optional[FrontEndProvider] = None) -> str: + """ + Initializes the frontend for the specified version. + + Args: + version_string (str): The version string. + provider (FrontEndProvider, optional): The provider to use. Defaults to None. + + Returns: + str: The path to the initialized frontend. + + Raises: + Exception: If there is an error during the initialization process; the + main error sources are request timeouts and invalid URLs.
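+ + Note: resolving any version other than the built-in default requires access to api.github.com.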
+ """ + if version_string == DEFAULT_VERSION_STRING: + return cls.DEFAULT_FRONTEND_PATH + + repo_owner, repo_name, version = cls.parse_version_string(version_string) + + if version.startswith("v"): + expected_path = str(Path(cls.CUSTOM_FRONTENDS_ROOT) / f"{repo_owner}_{repo_name}" / version.lstrip("v")) + if os.path.exists(expected_path): + logging.info(f"Using existing copy of specific frontend version tag: {repo_owner}/{repo_name}@{version}") + return expected_path + + logging.info(f"Initializing frontend: {repo_owner}/{repo_name}@{version}, requesting version details from GitHub...") + + provider = provider or FrontEndProvider(repo_owner, repo_name) + release = provider.get_release(version) + + semantic_version = release["tag_name"].lstrip("v") + web_root = str( + Path(cls.CUSTOM_FRONTENDS_ROOT) / provider.folder_name / semantic_version + ) + if not os.path.exists(web_root): + try: + os.makedirs(web_root, exist_ok=True) + logging.info( + "Downloading frontend(%s) version(%s) to (%s)", + provider.folder_name, + semantic_version, + web_root, + ) + logging.debug(release) + download_release_asset_zip(release, destination_path=web_root) + finally: + # Clean up the directory if it is empty, i.e. the download failed + if not os.listdir(web_root): + os.rmdir(web_root) + + return web_root + + @classmethod + def init_frontend(cls, version_string: str) -> str: + """ + Initializes the frontend with the specified version string. + + Args: + version_string (str): The version string to initialize the frontend with. + + Returns: + str: The path of the initialized frontend. + """ + try: + return cls.init_frontend_unsafe(version_string) + except Exception as e: + logging.error("Failed to initialize frontend: %s", e) + logging.info("Falling back to the default frontend.") + return cls.DEFAULT_FRONTEND_PATH diff --git a/src/comfyui/app/logger.py b/src/comfyui/app/logger.py new file mode 100644 index 0000000000000000000000000000000000000000..4ca0ea88e45048cb27ced80edcb19ef5e077ed9d --- /dev/null +++ b/src/comfyui/app/logger.py @@ -0,0 +1,31 @@ +import logging +from logging.handlers import MemoryHandler +from collections import deque + +logs = None +formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") + + +def get_logs(): + return "\n".join([formatter.format(x) for x in logs]) + + +def setup_logger(log_level: str = 'INFO', capacity: int = 300): + global logs + if logs: + return + + # Setup default global logger + logger = logging.getLogger() + logger.setLevel(log_level) + + stream_handler = logging.StreamHandler() + stream_handler.setFormatter(logging.Formatter("%(message)s")) + logger.addHandler(stream_handler) + + # Create a memory handler with a deque as its buffer + logs = deque(maxlen=capacity) + memory_handler = MemoryHandler(capacity, flushLevel=logging.INFO) + memory_handler.buffer = logs + memory_handler.setFormatter(formatter) + logger.addHandler(memory_handler) diff --git a/src/comfyui/app/user_manager.py b/src/comfyui/app/user_manager.py new file mode 100644 index 0000000000000000000000000000000000000000..20817844406c7c1c37054432abf0c59ffea88842 --- /dev/null +++ b/src/comfyui/app/user_manager.py @@ -0,0 +1,255 @@ +import json +import os +import re +import uuid +import glob +import shutil +from aiohttp import web +from urllib import parse +from comfy.cli_args import args +import folder_paths +from .app_settings import AppSettings + +default_user = "default" + + +class UserManager(): + def __init__(self): + user_directory = folder_paths.get_user_directory() + + 
self.settings = AppSettings(self) + if not os.path.exists(user_directory): + os.mkdir(user_directory) + if not args.multi_user: + print("****** User settings have been changed to be stored on the server instead of browser storage. ******") + print("****** For multi-user setups add the --multi-user CLI argument to enable multiple user profiles. ******") + + if args.multi_user: + if os.path.isfile(self.get_users_file()): + with open(self.get_users_file()) as f: + self.users = json.load(f) + else: + self.users = {} + else: + self.users = {"default": "default"} + + def get_users_file(self): + return os.path.join(folder_paths.get_user_directory(), "users.json") + + def get_request_user_id(self, request): + user = "default" + if args.multi_user and "comfy-user" in request.headers: + user = request.headers["comfy-user"] + + if user not in self.users: + raise KeyError("Unknown user: " + user) + + return user + + def get_request_user_filepath(self, request, file, type="userdata", create_dir=True): + user_directory = folder_paths.get_user_directory() + + if type == "userdata": + root_dir = user_directory + else: + raise KeyError("Unknown filepath type:" + type) + + user = self.get_request_user_id(request) + path = user_root = os.path.abspath(os.path.join(root_dir, user)) + + # prevent leaving /{type} + if os.path.commonpath((root_dir, user_root)) != root_dir: + return None + + if file is not None: + # Check if filename is url encoded + if "%" in file: + file = parse.unquote(file) + + # prevent leaving /{type}/{user} + path = os.path.abspath(os.path.join(user_root, file)) + if os.path.commonpath((user_root, path)) != user_root: + return None + + parent = os.path.split(path)[0] + + if create_dir and not os.path.exists(parent): + os.makedirs(parent, exist_ok=True) + + return path + + def add_user(self, name): + name = name.strip() + if not name: + raise ValueError("username not provided") + user_id = re.sub("[^a-zA-Z0-9-_]+", '-', name) + user_id = user_id + "_" + str(uuid.uuid4()) + + self.users[user_id] = name + + with open(self.get_users_file(), "w") as f: + json.dump(self.users, f) + + return user_id + + def add_routes(self, routes): + self.settings.add_routes(routes) + + @routes.get("/users") + async def get_users(request): + if args.multi_user: + return web.json_response({"storage": "server", "users": self.users}) + else: + user_dir = self.get_request_user_filepath(request, None, create_dir=False) + return web.json_response({ + "storage": "server", + "migrated": os.path.exists(user_dir) + }) + + @routes.post("/users") + async def post_users(request): + body = await request.json() + username = body["username"] + if username in self.users.values(): + return web.json_response({"error": "Duplicate username."}, status=400) + + user_id = self.add_user(username) + return web.json_response(user_id) + + @routes.get("/userdata") + async def listuserdata(request): + """ + List user data files in a specified directory. + + This endpoint allows listing files in a user's data directory, with options for recursion, + full file information, and path splitting. + + Query Parameters: + - dir (required): The directory to list files from. + - recurse (optional): If "true", recursively list files in subdirectories. + - full_info (optional): If "true", return detailed file information (path, size, modified time). + - split (optional): If "true", split file paths into components (only applies when full_info is false). + + Returns: + - 400: If 'dir' parameter is missing. + - 403: If the requested path is not allowed. 
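+ (e.g. a path that resolves outside the per-user root).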
+ - 404: If the requested directory does not exist. + - 200: JSON response with the list of files or file information. + + The response format depends on the query parameters: + - Default: List of relative file paths. + - full_info=true: List of dictionaries with file details. + - split=true (and full_info=false): List of lists, each containing path components. + """ + directory = request.rel_url.query.get('dir', '') + if not directory: + return web.Response(status=400, text="Directory not provided") + + path = self.get_request_user_filepath(request, directory) + if not path: + return web.Response(status=403, text="Invalid directory") + + if not os.path.exists(path): + return web.Response(status=404, text="Directory not found") + + recurse = request.rel_url.query.get('recurse', '').lower() == "true" + full_info = request.rel_url.query.get('full_info', '').lower() == "true" + + # Use different patterns based on whether we're recursing or not + if recurse: + pattern = os.path.join(glob.escape(path), '**', '*') + else: + pattern = os.path.join(glob.escape(path), '*') + + results = glob.glob(pattern, recursive=recurse) + + if full_info: + results = [ + { + 'path': os.path.relpath(x, path).replace(os.sep, '/'), + 'size': os.path.getsize(x), + 'modified': os.path.getmtime(x) + } for x in results if os.path.isfile(x) + ] + else: + results = [ + os.path.relpath(x, path).replace(os.sep, '/') + for x in results + if os.path.isfile(x) + ] + + split_path = request.rel_url.query.get('split', '').lower() == "true" + if split_path and not full_info: + results = [[x] + x.split('/') for x in results] + + return web.json_response(results) + + def get_user_data_path(request, check_exists=False, param="file"): + file = request.match_info.get(param, None) + if not file: + return web.Response(status=400) + + path = self.get_request_user_filepath(request, file) + if not path: + return web.Response(status=403) + + if check_exists and not os.path.exists(path): + return web.Response(status=404) + + return path + + @routes.get("/userdata/{file}") + async def getuserdata(request): + path = get_user_data_path(request, check_exists=True) + if not isinstance(path, str): + return path + + return web.FileResponse(path) + + @routes.post("/userdata/{file}") + async def post_userdata(request): + path = get_user_data_path(request) + if not isinstance(path, str): + return path + + overwrite = request.query.get("overwrite") != "false" + if not overwrite and os.path.exists(path): + return web.Response(status=409) + + body = await request.read() + + with open(path, "wb") as f: + f.write(body) + + resp = os.path.relpath(path, self.get_request_user_filepath(request, None)) + return web.json_response(resp) + + @routes.delete("/userdata/{file}") + async def delete_userdata(request): + path = get_user_data_path(request, check_exists=True) + if not isinstance(path, str): + return path + + os.remove(path) + + return web.Response(status=204) + + @routes.post("/userdata/{file}/move/{dest}") + async def move_userdata(request): + source = get_user_data_path(request, check_exists=True) + if not isinstance(source, str): + return source + + dest = get_user_data_path(request, check_exists=False, param="dest") + if not isinstance(dest, str): + return dest + + overwrite = request.query.get("overwrite") != "false" + if not overwrite and os.path.exists(dest): + return web.Response(status=409) + + print(f"moving '{source}' -> '{dest}'") + shutil.move(source, dest) + + resp = os.path.relpath(dest, self.get_request_user_filepath(request, None)) + return
web.json_response(resp) diff --git a/src/comfyui/comfy/__pycache__/checkpoint_pickle.cpython-310.pyc b/src/comfyui/comfy/__pycache__/checkpoint_pickle.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9f26bcab720034006a591660a47ed22991640b6f Binary files /dev/null and b/src/comfyui/comfy/__pycache__/checkpoint_pickle.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/checkpoint_pickle.cpython-38.pyc b/src/comfyui/comfy/__pycache__/checkpoint_pickle.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..392885ee34ac1c213da1fed40310f19e966d0f1c Binary files /dev/null and b/src/comfyui/comfy/__pycache__/checkpoint_pickle.cpython-38.pyc differ diff --git a/src/comfyui/comfy/__pycache__/cli_args.cpython-310.pyc b/src/comfyui/comfy/__pycache__/cli_args.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..200fced92f1137eac9571d94d285e66dd33eb7c4 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/cli_args.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/cli_args.cpython-38.pyc b/src/comfyui/comfy/__pycache__/cli_args.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a17af054be071fa8537e9c7a757d3d1e839d8d1c Binary files /dev/null and b/src/comfyui/comfy/__pycache__/cli_args.cpython-38.pyc differ diff --git a/src/comfyui/comfy/__pycache__/clip_model.cpython-310.pyc b/src/comfyui/comfy/__pycache__/clip_model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..333f0996b9b629872d27c0c50c98913a8b0d8cb2 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/clip_model.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/clip_vision.cpython-310.pyc b/src/comfyui/comfy/__pycache__/clip_vision.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2e793e10fbd7f7a240b5fbbe8651c25966a7ecf2 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/clip_vision.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/comfy_types.cpython-310.pyc b/src/comfyui/comfy/__pycache__/comfy_types.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e24bb2d502ffce9fdb2b64e43755505cbac7a261 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/comfy_types.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/conds.cpython-310.pyc b/src/comfyui/comfy/__pycache__/conds.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a16092a72958359dcaeb14e36a725b36ab312458 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/conds.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/controlnet.cpython-310.pyc b/src/comfyui/comfy/__pycache__/controlnet.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5fb0928c37ce2d788d29f59975fc77f4bca5e393 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/controlnet.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/diffusers_convert.cpython-310.pyc b/src/comfyui/comfy/__pycache__/diffusers_convert.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f4d650c903e47ac1f14b7b4a2d5f475d3552ed4b Binary files /dev/null and b/src/comfyui/comfy/__pycache__/diffusers_convert.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/diffusers_load.cpython-310.pyc b/src/comfyui/comfy/__pycache__/diffusers_load.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..1dfc8126248ae40709ebc294c591bce73e35dd3e Binary files /dev/null and b/src/comfyui/comfy/__pycache__/diffusers_load.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/diffusers_load.cpython-38.pyc b/src/comfyui/comfy/__pycache__/diffusers_load.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..702018ad732c5e472808972165d7a533fddcafd3 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/diffusers_load.cpython-38.pyc differ diff --git a/src/comfyui/comfy/__pycache__/float.cpython-310.pyc b/src/comfyui/comfy/__pycache__/float.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bff62e4b42852178e5fdc673bc9e954dd13dbac5 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/float.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/float.cpython-38.pyc b/src/comfyui/comfy/__pycache__/float.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..82da72f22064f0f8f402e08570df088f57bf4fc5 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/float.cpython-38.pyc differ diff --git a/src/comfyui/comfy/__pycache__/gligen.cpython-310.pyc b/src/comfyui/comfy/__pycache__/gligen.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..398ce0fd741147e923b2c432b9354a5566525c86 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/gligen.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/latent_formats.cpython-310.pyc b/src/comfyui/comfy/__pycache__/latent_formats.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c02f9c59bfafd195adbef0f7a7f98ed88ecb3724 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/latent_formats.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/lora.cpython-310.pyc b/src/comfyui/comfy/__pycache__/lora.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2ff171c29511ead9960051f227195f1fb1e7a063 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/lora.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/model_base.cpython-310.pyc b/src/comfyui/comfy/__pycache__/model_base.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5e59131c055a187f0e0022d69410482ceb39dcdd Binary files /dev/null and b/src/comfyui/comfy/__pycache__/model_base.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/model_detection.cpython-310.pyc b/src/comfyui/comfy/__pycache__/model_detection.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eb5264291900315468fb055b290b611d03820ead Binary files /dev/null and b/src/comfyui/comfy/__pycache__/model_detection.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/model_management.cpython-310.pyc b/src/comfyui/comfy/__pycache__/model_management.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c33b51d1fe66c30934da58a1b46c4a5f20cef323 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/model_management.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/model_management.cpython-38.pyc b/src/comfyui/comfy/__pycache__/model_management.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f1e1f8146d2f2c0c3b8e4a9a587dd53fcddd9cc0 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/model_management.cpython-38.pyc differ diff --git 
a/src/comfyui/comfy/__pycache__/model_patcher.cpython-310.pyc b/src/comfyui/comfy/__pycache__/model_patcher.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e42c8b144d5768cf7fa50508a743dab31892cb90 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/model_patcher.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/model_sampling.cpython-310.pyc b/src/comfyui/comfy/__pycache__/model_sampling.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c8560c6e787faa87093d41b2e295a3af89eb7a0e Binary files /dev/null and b/src/comfyui/comfy/__pycache__/model_sampling.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/ops.cpython-310.pyc b/src/comfyui/comfy/__pycache__/ops.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..38aa65f67b27bcef8f9232533bca0f7727567f13 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/ops.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/ops.cpython-38.pyc b/src/comfyui/comfy/__pycache__/ops.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..70b747ab5a1845f93a8689a2d4acfb8e51665e3a Binary files /dev/null and b/src/comfyui/comfy/__pycache__/ops.cpython-38.pyc differ diff --git a/src/comfyui/comfy/__pycache__/options.cpython-310.pyc b/src/comfyui/comfy/__pycache__/options.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ea983cbf3fbf71db50d197c80a6a9470addb229e Binary files /dev/null and b/src/comfyui/comfy/__pycache__/options.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/options.cpython-38.pyc b/src/comfyui/comfy/__pycache__/options.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d0e5a8dd6d4d35333dfc76f82d9177d871b278f4 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/options.cpython-38.pyc differ diff --git a/src/comfyui/comfy/__pycache__/sample.cpython-310.pyc b/src/comfyui/comfy/__pycache__/sample.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f6c92d19103e8735b066578ab8f1a37b8392ac6c Binary files /dev/null and b/src/comfyui/comfy/__pycache__/sample.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/sampler_helpers.cpython-310.pyc b/src/comfyui/comfy/__pycache__/sampler_helpers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..55e9233082d1ab78e7ca0e9fb076877b752a4ebd Binary files /dev/null and b/src/comfyui/comfy/__pycache__/sampler_helpers.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/samplers.cpython-310.pyc b/src/comfyui/comfy/__pycache__/samplers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1ca61bf00e9c05ff55cc7696cecee7bc12824fb6 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/samplers.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/sd.cpython-310.pyc b/src/comfyui/comfy/__pycache__/sd.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e71e2192579bc9acc36d2ca835bc4c1954bf3834 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/sd.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/sd.cpython-38.pyc b/src/comfyui/comfy/__pycache__/sd.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e33f3e048262d6f57ccd13a7dea31ebee1dc5deb Binary files /dev/null and b/src/comfyui/comfy/__pycache__/sd.cpython-38.pyc differ diff --git 
a/src/comfyui/comfy/__pycache__/sd1_clip.cpython-310.pyc b/src/comfyui/comfy/__pycache__/sd1_clip.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2653e9a446a4ee3aa8adbb50007bc44229da093e Binary files /dev/null and b/src/comfyui/comfy/__pycache__/sd1_clip.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/sdxl_clip.cpython-310.pyc b/src/comfyui/comfy/__pycache__/sdxl_clip.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..149d795e49ad4b5aa9624f8851d21e4763d7929a Binary files /dev/null and b/src/comfyui/comfy/__pycache__/sdxl_clip.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/supported_models.cpython-310.pyc b/src/comfyui/comfy/__pycache__/supported_models.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d6056d08ac15d9ab9cf2cec0f3293a13f278ecc9 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/supported_models.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/supported_models_base.cpython-310.pyc b/src/comfyui/comfy/__pycache__/supported_models_base.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..72a94752786f17fd5c7ba4e8774b847e85055335 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/supported_models_base.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/utils.cpython-310.pyc b/src/comfyui/comfy/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e19633169bcd5d4d9732dc2015b6c6f2606f0008 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/utils.cpython-310.pyc differ diff --git a/src/comfyui/comfy/__pycache__/utils.cpython-38.pyc b/src/comfyui/comfy/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..799ef6500e00989e8bf8bf661ab0f5d8f19128c8 Binary files /dev/null and b/src/comfyui/comfy/__pycache__/utils.cpython-38.pyc differ diff --git a/src/comfyui/comfy/checkpoint_pickle.py b/src/comfyui/comfy/checkpoint_pickle.py new file mode 100644 index 0000000000000000000000000000000000000000..206551d3c1cf0d654c907534629a800196ba138b --- /dev/null +++ b/src/comfyui/comfy/checkpoint_pickle.py @@ -0,0 +1,13 @@ +import pickle + +load = pickle.load + +class Empty: + pass + +class Unpickler(pickle.Unpickler): + def find_class(self, module, name): + #TODO: safe unpickle + if module.startswith("pytorch_lightning"): + return Empty + return super().find_class(module, name) diff --git a/src/comfyui/comfy/cldm/__pycache__/cldm.cpython-310.pyc b/src/comfyui/comfy/cldm/__pycache__/cldm.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..503ef00462ad14ce80aa0f72f651ce3f5785401a Binary files /dev/null and b/src/comfyui/comfy/cldm/__pycache__/cldm.cpython-310.pyc differ diff --git a/src/comfyui/comfy/cldm/__pycache__/control_types.cpython-310.pyc b/src/comfyui/comfy/cldm/__pycache__/control_types.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0829120c11b678cd5c3c014ed2e85a93a499943b Binary files /dev/null and b/src/comfyui/comfy/cldm/__pycache__/control_types.cpython-310.pyc differ diff --git a/src/comfyui/comfy/cldm/__pycache__/mmdit.cpython-310.pyc b/src/comfyui/comfy/cldm/__pycache__/mmdit.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..89af3f3494cad7f7e2080994e55c13e8d615eb39 Binary files /dev/null and b/src/comfyui/comfy/cldm/__pycache__/mmdit.cpython-310.pyc differ diff 
--git a/src/comfyui/comfy/cldm/cldm.py b/src/comfyui/comfy/cldm/cldm.py new file mode 100644 index 0000000000000000000000000000000000000000..9ec64a22751d10718af7483c3370e0dc4ce578a7 --- /dev/null +++ b/src/comfyui/comfy/cldm/cldm.py @@ -0,0 +1,437 @@ +#taken from: https://github.com/lllyasviel/ControlNet +#and modified + +import torch +import torch as th +import torch.nn as nn + +from ..ldm.modules.diffusionmodules.util import ( + zero_module, + timestep_embedding, +) + +from ..ldm.modules.attention import SpatialTransformer +from ..ldm.modules.diffusionmodules.openaimodel import UNetModel, TimestepEmbedSequential, ResBlock, Downsample +from ..ldm.util import exists +from .control_types import UNION_CONTROLNET_TYPES +from collections import OrderedDict +import comfy.ops +from comfy.ldm.modules.attention import optimized_attention + +class OptimizedAttention(nn.Module): + def __init__(self, c, nhead, dropout=0.0, dtype=None, device=None, operations=None): + super().__init__() + self.heads = nhead + self.c = c + + self.in_proj = operations.Linear(c, c * 3, bias=True, dtype=dtype, device=device) + self.out_proj = operations.Linear(c, c, bias=True, dtype=dtype, device=device) + + def forward(self, x): + x = self.in_proj(x) + q, k, v = x.split(self.c, dim=2) + out = optimized_attention(q, k, v, self.heads) + return self.out_proj(out) + +class QuickGELU(nn.Module): + def forward(self, x: torch.Tensor): + return x * torch.sigmoid(1.702 * x) + +class ResBlockUnionControlnet(nn.Module): + def __init__(self, dim, nhead, dtype=None, device=None, operations=None): + super().__init__() + self.attn = OptimizedAttention(dim, nhead, dtype=dtype, device=device, operations=operations) + self.ln_1 = operations.LayerNorm(dim, dtype=dtype, device=device) + self.mlp = nn.Sequential( + OrderedDict([("c_fc", operations.Linear(dim, dim * 4, dtype=dtype, device=device)), ("gelu", QuickGELU()), + ("c_proj", operations.Linear(dim * 4, dim, dtype=dtype, device=device))])) + self.ln_2 = operations.LayerNorm(dim, dtype=dtype, device=device) + + def attention(self, x: torch.Tensor): + return self.attn(x) + + def forward(self, x: torch.Tensor): + x = x + self.attention(self.ln_1(x)) + x = x + self.mlp(self.ln_2(x)) + return x + +class ControlledUnetModel(UNetModel): + #implemented in the ldm unet + pass + +class ControlNet(nn.Module): + def __init__( + self, + image_size, + in_channels, + model_channels, + hint_channels, + num_res_blocks, + dropout=0, + channel_mult=(1, 2, 4, 8), + conv_resample=True, + dims=2, + num_classes=None, + use_checkpoint=False, + dtype=torch.float32, + num_heads=-1, + num_head_channels=-1, + num_heads_upsample=-1, + use_scale_shift_norm=False, + resblock_updown=False, + use_new_attention_order=False, + use_spatial_transformer=False, # custom transformer support + transformer_depth=1, # custom transformer support + context_dim=None, # custom transformer support + n_embed=None, # custom support for prediction of discrete ids into codebook of first stage vq model + legacy=True, + disable_self_attentions=None, + num_attention_blocks=None, + disable_middle_self_attn=False, + use_linear_in_transformer=False, + adm_in_channels=None, + transformer_depth_middle=None, + transformer_depth_output=None, + attn_precision=None, + union_controlnet_num_control_type=None, + device=None, + operations=comfy.ops.disable_weight_init, + **kwargs, + ): + super().__init__() + assert use_spatial_transformer == True, "use_spatial_transformer has to be true" + if use_spatial_transformer: + assert context_dim is not None, 
'Fool!! You forgot to include the dimension of your cross-attention conditioning...' + + if context_dim is not None: + assert use_spatial_transformer, 'Fool!! You forgot to use the spatial transformer for your cross-attention conditioning...' + # from omegaconf.listconfig import ListConfig + # if type(context_dim) == ListConfig: + # context_dim = list(context_dim) + + if num_heads_upsample == -1: + num_heads_upsample = num_heads + + if num_heads == -1: + assert num_head_channels != -1, 'Either num_heads or num_head_channels has to be set' + + if num_head_channels == -1: + assert num_heads != -1, 'Either num_heads or num_head_channels has to be set' + + self.dims = dims + self.image_size = image_size + self.in_channels = in_channels + self.model_channels = model_channels + + if isinstance(num_res_blocks, int): + self.num_res_blocks = len(channel_mult) * [num_res_blocks] + else: + if len(num_res_blocks) != len(channel_mult): + raise ValueError("provide num_res_blocks either as an int (globally constant) or " + "as a list/tuple (per-level) with the same length as channel_mult") + self.num_res_blocks = num_res_blocks + + if disable_self_attentions is not None: + # should be a list of booleans, indicating whether to disable self-attention in TransformerBlocks or not + assert len(disable_self_attentions) == len(channel_mult) + if num_attention_blocks is not None: + assert len(num_attention_blocks) == len(self.num_res_blocks) + assert all(map(lambda i: self.num_res_blocks[i] >= num_attention_blocks[i], range(len(num_attention_blocks)))) + + transformer_depth = transformer_depth[:] + + self.dropout = dropout + self.channel_mult = channel_mult + self.conv_resample = conv_resample + self.num_classes = num_classes + self.use_checkpoint = use_checkpoint + self.dtype = dtype + self.num_heads = num_heads + self.num_head_channels = num_head_channels + self.num_heads_upsample = num_heads_upsample + self.predict_codebook_ids = n_embed is not None + + time_embed_dim = model_channels * 4 + self.time_embed = nn.Sequential( + operations.Linear(model_channels, time_embed_dim, dtype=self.dtype, device=device), + nn.SiLU(), + operations.Linear(time_embed_dim, time_embed_dim, dtype=self.dtype, device=device), + ) + + if self.num_classes is not None: + if isinstance(self.num_classes, int): + self.label_emb = nn.Embedding(num_classes, time_embed_dim) + elif self.num_classes == "continuous": + print("setting up linear c_adm embedding layer") + self.label_emb = nn.Linear(1, time_embed_dim) + elif self.num_classes == "sequential": + assert adm_in_channels is not None + self.label_emb = nn.Sequential( + nn.Sequential( + operations.Linear(adm_in_channels, time_embed_dim, dtype=self.dtype, device=device), + nn.SiLU(), + operations.Linear(time_embed_dim, time_embed_dim, dtype=self.dtype, device=device), + ) + ) + else: + raise ValueError() + + self.input_blocks = nn.ModuleList( + [ + TimestepEmbedSequential( + operations.conv_nd(dims, in_channels, model_channels, 3, padding=1, dtype=self.dtype, device=device) + ) + ] + ) + self.zero_convs = nn.ModuleList([self.make_zero_conv(model_channels, operations=operations, dtype=self.dtype, device=device)]) + + self.input_hint_block = TimestepEmbedSequential( + operations.conv_nd(dims, hint_channels, 16, 3, padding=1, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 16, 16, 3, padding=1, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 16, 32, 3, padding=1, stride=2, dtype=self.dtype, device=device), + nn.SiLU(), + 
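# Hint stem: three stride-2 convs downsample the conditioning image 8x overall while widening channels 16 -> 32 -> 96 -> 256 -> model_channels. +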
operations.conv_nd(dims, 32, 32, 3, padding=1, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 32, 96, 3, padding=1, stride=2, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 96, 96, 3, padding=1, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 96, 256, 3, padding=1, stride=2, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, 256, model_channels, 3, padding=1, dtype=self.dtype, device=device) + ) + + self._feature_size = model_channels + input_block_chans = [model_channels] + ch = model_channels + ds = 1 + for level, mult in enumerate(channel_mult): + for nr in range(self.num_res_blocks[level]): + layers = [ + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=mult * model_channels, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + dtype=self.dtype, + device=device, + operations=operations, + ) + ] + ch = mult * model_channels + num_transformers = transformer_depth.pop(0) + if num_transformers > 0: + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + #num_heads = 1 + dim_head = ch // num_heads if use_spatial_transformer else num_head_channels + if exists(disable_self_attentions): + disabled_sa = disable_self_attentions[level] + else: + disabled_sa = False + + if not exists(num_attention_blocks) or nr < num_attention_blocks[level]: + layers.append( + SpatialTransformer( + ch, num_heads, dim_head, depth=num_transformers, context_dim=context_dim, + disable_self_attn=disabled_sa, use_linear=use_linear_in_transformer, + use_checkpoint=use_checkpoint, attn_precision=attn_precision, dtype=self.dtype, device=device, operations=operations + ) + ) + self.input_blocks.append(TimestepEmbedSequential(*layers)) + self.zero_convs.append(self.make_zero_conv(ch, operations=operations, dtype=self.dtype, device=device)) + self._feature_size += ch + input_block_chans.append(ch) + if level != len(channel_mult) - 1: + out_ch = ch + self.input_blocks.append( + TimestepEmbedSequential( + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + down=True, + dtype=self.dtype, + device=device, + operations=operations + ) + if resblock_updown + else Downsample( + ch, conv_resample, dims=dims, out_channels=out_ch, dtype=self.dtype, device=device, operations=operations + ) + ) + ) + ch = out_ch + input_block_chans.append(ch) + self.zero_convs.append(self.make_zero_conv(ch, operations=operations, dtype=self.dtype, device=device)) + ds *= 2 + self._feature_size += ch + + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + #num_heads = 1 + dim_head = ch // num_heads if use_spatial_transformer else num_head_channels + mid_block = [ + ResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + dtype=self.dtype, + device=device, + operations=operations + )] + if transformer_depth_middle >= 0: + mid_block += [SpatialTransformer( # always uses a self-attn + ch, num_heads, dim_head, depth=transformer_depth_middle, context_dim=context_dim, + disable_self_attn=disable_middle_self_attn, use_linear=use_linear_in_transformer, + use_checkpoint=use_checkpoint, attn_precision=attn_precision, dtype=self.dtype, 
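# operations (comfy.ops.disable_weight_init by default) supplies the Linear/Conv/Norm classes, so every layer is created with the dtype and device given here.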
device=device, operations=operations + ), + ResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + dtype=self.dtype, + device=device, + operations=operations + )] + self.middle_block = TimestepEmbedSequential(*mid_block) + self.middle_block_out = self.make_zero_conv(ch, operations=operations, dtype=self.dtype, device=device) + self._feature_size += ch + + if union_controlnet_num_control_type is not None: + self.num_control_type = union_controlnet_num_control_type + num_trans_channel = 320 + num_trans_head = 8 + num_trans_layer = 1 + num_proj_channel = 320 + # task_scale_factor = num_trans_channel ** 0.5 + self.task_embedding = nn.Parameter(torch.empty(self.num_control_type, num_trans_channel, dtype=self.dtype, device=device)) + + self.transformer_layes = nn.Sequential(*[ResBlockUnionControlnet(num_trans_channel, num_trans_head, dtype=self.dtype, device=device, operations=operations) for _ in range(num_trans_layer)]) + self.spatial_ch_projs = operations.Linear(num_trans_channel, num_proj_channel, dtype=self.dtype, device=device) + #----------------------------------------------------------------------------------------------------- + + control_add_embed_dim = 256 + class ControlAddEmbedding(nn.Module): + def __init__(self, in_dim, out_dim, num_control_type, dtype=None, device=None, operations=None): + super().__init__() + self.num_control_type = num_control_type + self.in_dim = in_dim + self.linear_1 = operations.Linear(in_dim * num_control_type, out_dim, dtype=dtype, device=device) + self.linear_2 = operations.Linear(out_dim, out_dim, dtype=dtype, device=device) + def forward(self, control_type, dtype, device): + c_type = torch.zeros((self.num_control_type,), device=device) + c_type[control_type] = 1.0 + c_type = timestep_embedding(c_type.flatten(), self.in_dim, repeat_only=False).to(dtype).reshape((-1, self.num_control_type * self.in_dim)) + return self.linear_2(torch.nn.functional.silu(self.linear_1(c_type))) + + self.control_add_embedding = ControlAddEmbedding(control_add_embed_dim, time_embed_dim, self.num_control_type, dtype=self.dtype, device=device, operations=operations) + else: + self.task_embedding = None + self.control_add_embedding = None + + def union_controlnet_merge(self, hint, control_type, emb, context): + # Equivalent to: https://github.com/xinsir6/ControlNetPlus/tree/main + inputs = [] + condition_list = [] + + for idx in range(min(1, len(control_type))): + controlnet_cond = self.input_hint_block(hint[idx], emb, context) + feat_seq = torch.mean(controlnet_cond, dim=(2, 3)) + if idx < len(control_type): + feat_seq += self.task_embedding[control_type[idx]].to(dtype=feat_seq.dtype, device=feat_seq.device) + + inputs.append(feat_seq.unsqueeze(1)) + condition_list.append(controlnet_cond) + + x = torch.cat(inputs, dim=1) + x = self.transformer_layes(x) + controlnet_cond_fuser = None + for idx in range(len(control_type)): + alpha = self.spatial_ch_projs(x[:, idx]) + alpha = alpha.unsqueeze(-1).unsqueeze(-1) + o = condition_list[idx] + alpha + if controlnet_cond_fuser is None: + controlnet_cond_fuser = o + else: + controlnet_cond_fuser += o + return controlnet_cond_fuser + + def make_zero_conv(self, channels, operations=None, dtype=None, device=None): + return TimestepEmbedSequential(operations.conv_nd(self.dims, channels, channels, 1, padding=0, dtype=dtype, device=device)) + + def forward(self, x, hint, timesteps, context, y=None, **kwargs): + t_emb = timestep_embedding(timesteps, 
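# sinusoidal timestep embedding of width model_channels; self.time_embed then projects it to time_embed_dim (4 * model_channels).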
self.model_channels, repeat_only=False).to(x.dtype) + emb = self.time_embed(t_emb) + + guided_hint = None + if self.control_add_embedding is not None: #Union Controlnet + control_type = kwargs.get("control_type", []) + + if any([c >= self.num_control_type for c in control_type]): + max_type = max(control_type) + max_type_name = { + v: k for k, v in UNION_CONTROLNET_TYPES.items() + }[max_type] + raise ValueError( + f"Control type {max_type_name}({max_type}) is out of range for the number of control types" + + f"({self.num_control_type}) supported.\n" + + "Please consider using the ProMax ControlNet Union model.\n" + + "https://huggingface.co/xinsir/controlnet-union-sdxl-1.0/tree/main" + ) + + emb += self.control_add_embedding(control_type, emb.dtype, emb.device) + if len(control_type) > 0: + if len(hint.shape) < 5: + hint = hint.unsqueeze(dim=0) + guided_hint = self.union_controlnet_merge(hint, control_type, emb, context) + + if guided_hint is None: + guided_hint = self.input_hint_block(hint, emb, context) + + out_output = [] + out_middle = [] + + hs = [] + if self.num_classes is not None: + assert y.shape[0] == x.shape[0] + emb = emb + self.label_emb(y) + + h = x + for module, zero_conv in zip(self.input_blocks, self.zero_convs): + if guided_hint is not None: + h = module(h, emb, context) + h += guided_hint + guided_hint = None + else: + h = module(h, emb, context) + out_output.append(zero_conv(h, emb, context)) + + h = self.middle_block(h, emb, context) + out_middle.append(self.middle_block_out(h, emb, context)) + + return {"middle": out_middle, "output": out_output} + diff --git a/src/comfyui/comfy/cldm/control_types.py b/src/comfyui/comfy/cldm/control_types.py new file mode 100644 index 0000000000000000000000000000000000000000..4128631a305a13d65c3c37ced17179d23fbbdcff --- /dev/null +++ b/src/comfyui/comfy/cldm/control_types.py @@ -0,0 +1,10 @@ +UNION_CONTROLNET_TYPES = { + "openpose": 0, + "depth": 1, + "hed/pidi/scribble/ted": 2, + "canny/lineart/anime_lineart/mlsd": 3, + "normal": 4, + "segment": 5, + "tile": 6, + "repaint": 7, +} diff --git a/src/comfyui/comfy/cldm/mmdit.py b/src/comfyui/comfy/cldm/mmdit.py new file mode 100644 index 0000000000000000000000000000000000000000..54a58ab835a30d20a7b6a2a34572a66fbc1c95c0 --- /dev/null +++ b/src/comfyui/comfy/cldm/mmdit.py @@ -0,0 +1,81 @@ +import torch +from typing import Dict, Optional +import comfy.ldm.modules.diffusionmodules.mmdit + +class ControlNet(comfy.ldm.modules.diffusionmodules.mmdit.MMDiT): + def __init__( + self, + num_blocks = None, + control_latent_channels = None, + dtype = None, + device = None, + operations = None, + **kwargs, + ): + super().__init__(dtype=dtype, device=device, operations=operations, final_layer=False, num_blocks=num_blocks, **kwargs) + # controlnet_blocks + self.controlnet_blocks = torch.nn.ModuleList([]) + for _ in range(len(self.joint_blocks)): + self.controlnet_blocks.append(operations.Linear(self.hidden_size, self.hidden_size, device=device, dtype=dtype)) + + if control_latent_channels is None: + control_latent_channels = self.in_channels + + self.pos_embed_input = comfy.ldm.modules.diffusionmodules.mmdit.PatchEmbed( + None, + self.patch_size, + control_latent_channels, + self.hidden_size, + bias=True, + strict_img_size=False, + dtype=dtype, + device=device, + operations=operations + ) + + def forward( + self, + x: torch.Tensor, + timesteps: torch.Tensor, + y: Optional[torch.Tensor] = None, + context: Optional[torch.Tensor] = None, + hint = None, + ) -> torch.Tensor: + + #weird sd3 controlnet 
specific stuff + y = torch.zeros_like(y) + + if self.context_processor is not None: + context = self.context_processor(context) + + hw = x.shape[-2:] + x = self.x_embedder(x) + self.cropped_pos_embed(hw, device=x.device).to(dtype=x.dtype, device=x.device) + x += self.pos_embed_input(hint) + + c = self.t_embedder(timesteps, dtype=x.dtype) + if y is not None and self.y_embedder is not None: + y = self.y_embedder(y) + c = c + y + + if context is not None: + context = self.context_embedder(context) + + output = [] + + blocks = len(self.joint_blocks) + for i in range(blocks): + context, x = self.joint_blocks[i]( + context, + x, + c=c, + use_checkpoint=self.use_checkpoint, + ) + + out = self.controlnet_blocks[i](x) + count = self.depth // blocks + if i == blocks - 1: + count -= 1 + for j in range(count): + output.append(out) + + return {"output": output} diff --git a/src/comfyui/comfy/cli_args.py b/src/comfyui/comfy/cli_args.py new file mode 100644 index 0000000000000000000000000000000000000000..20b9f4749c464be983a778015258e0290dc689b3 --- /dev/null +++ b/src/comfyui/comfy/cli_args.py @@ -0,0 +1,185 @@ +import argparse +import enum +import os +from typing import Optional +import comfy.options + + +class EnumAction(argparse.Action): + """ + Argparse action for handling Enums + """ + def __init__(self, **kwargs): + # Pop off the type value + enum_type = kwargs.pop("type", None) + + # Ensure an Enum subclass is provided + if enum_type is None: + raise ValueError("type must be assigned an Enum when using EnumAction") + if not issubclass(enum_type, enum.Enum): + raise TypeError("type must be an Enum when using EnumAction") + + # Generate choices from the Enum + choices = tuple(e.value for e in enum_type) + kwargs.setdefault("choices", choices) + kwargs.setdefault("metavar", f"[{','.join(list(choices))}]") + + super(EnumAction, self).__init__(**kwargs) + + self._enum = enum_type + + def __call__(self, parser, namespace, values, option_string=None): + # Convert value back into an Enum + value = self._enum(values) + setattr(namespace, self.dest, value) + + +parser = argparse.ArgumentParser() + +parser.add_argument("--listen", type=str, default="127.0.0.1", metavar="IP", nargs="?", const="0.0.0.0,::", help="Specify the IP address to listen on (default: 127.0.0.1). You can give a list of ip addresses by separating them with a comma like: 127.2.2.2,127.3.3.3 If --listen is provided without an argument, it defaults to 0.0.0.0,:: (listens on all ipv4 and ipv6)") +parser.add_argument("--port", type=int, default=8188, help="Set the listen port.") +parser.add_argument("--tls-keyfile", type=str, help="Path to TLS (SSL) key file. Enables TLS, makes app accessible at https://... requires --tls-certfile to function") +parser.add_argument("--tls-certfile", type=str, help="Path to TLS (SSL) certificate file. Enables TLS, makes app accessible at https://... 
requires --tls-keyfile to function") +parser.add_argument("--enable-cors-header", type=str, default=None, metavar="ORIGIN", nargs="?", const="*", help="Enable CORS (Cross-Origin Resource Sharing) with optional origin or allow all with default '*'.") +parser.add_argument("--max-upload-size", type=float, default=100, help="Set the maximum upload size in MB.") + +parser.add_argument("--extra-model-paths-config", type=str, default=None, metavar="PATH", nargs='+', action='append', help="Load one or more extra_model_paths.yaml files.") +parser.add_argument("--output-directory", type=str, default=None, help="Set the ComfyUI output directory.") +parser.add_argument("--temp-directory", type=str, default=None, help="Set the ComfyUI temp directory (default is in the ComfyUI directory).") +parser.add_argument("--input-directory", type=str, default=None, help="Set the ComfyUI input directory.") +parser.add_argument("--auto-launch", action="store_true", help="Automatically launch ComfyUI in the default browser.") +parser.add_argument("--disable-auto-launch", action="store_true", help="Disable auto launching the browser.") +parser.add_argument("--cuda-device", type=int, default=None, metavar="DEVICE_ID", help="Set the id of the cuda device this instance will use.") +cm_group = parser.add_mutually_exclusive_group() +cm_group.add_argument("--cuda-malloc", action="store_true", help="Enable cudaMallocAsync (enabled by default for torch 2.0 and up).") +cm_group.add_argument("--disable-cuda-malloc", action="store_true", help="Disable cudaMallocAsync.") + + +fp_group = parser.add_mutually_exclusive_group() +fp_group.add_argument("--force-fp32", action="store_true", help="Force fp32 (If this makes your GPU work better please report it).") +fp_group.add_argument("--force-fp16", action="store_true", help="Force fp16.") + +fpunet_group = parser.add_mutually_exclusive_group() +fpunet_group.add_argument("--bf16-unet", action="store_true", help="Run the UNET in bf16. 
This should only be used for testing stuff.") +fpunet_group.add_argument("--fp16-unet", action="store_true", help="Store unet weights in fp16.") +fpunet_group.add_argument("--fp8_e4m3fn-unet", action="store_true", help="Store unet weights in fp8_e4m3fn.") +fpunet_group.add_argument("--fp8_e5m2-unet", action="store_true", help="Store unet weights in fp8_e5m2.") + +fpvae_group = parser.add_mutually_exclusive_group() +fpvae_group.add_argument("--fp16-vae", action="store_true", help="Run the VAE in fp16, which might cause black images.") +fpvae_group.add_argument("--fp32-vae", action="store_true", help="Run the VAE in full precision fp32.") +fpvae_group.add_argument("--bf16-vae", action="store_true", help="Run the VAE in bf16.") + +parser.add_argument("--cpu-vae", action="store_true", help="Run the VAE on the CPU.") + +fpte_group = parser.add_mutually_exclusive_group() +fpte_group.add_argument("--fp8_e4m3fn-text-enc", action="store_true", help="Store text encoder weights in fp8 (e4m3fn variant).") +fpte_group.add_argument("--fp8_e5m2-text-enc", action="store_true", help="Store text encoder weights in fp8 (e5m2 variant).") +fpte_group.add_argument("--fp16-text-enc", action="store_true", help="Store text encoder weights in fp16.") +fpte_group.add_argument("--fp32-text-enc", action="store_true", help="Store text encoder weights in fp32.") + +parser.add_argument("--force-channels-last", action="store_true", help="Force channels last format when running inference on the models.") + +parser.add_argument("--directml", type=int, nargs="?", metavar="DIRECTML_DEVICE", const=-1, help="Use torch-directml.") + +parser.add_argument("--disable-ipex-optimize", action="store_true", help="Disables ipex.optimize when loading models with Intel GPUs.") + +class LatentPreviewMethod(enum.Enum): + NoPreviews = "none" + Auto = "auto" + Latent2RGB = "latent2rgb" + TAESD = "taesd" + +parser.add_argument("--preview-method", type=LatentPreviewMethod, default=LatentPreviewMethod.NoPreviews, help="Default preview method for sampler nodes.", action=EnumAction) + +parser.add_argument("--preview-size", type=int, default=512, help="Sets the maximum preview size for sampler nodes.") + +cache_group = parser.add_mutually_exclusive_group() +cache_group.add_argument("--cache-classic", action="store_true", help="Use the old style (aggressive) caching.") +cache_group.add_argument("--cache-lru", type=int, default=0, help="Use LRU caching with a maximum of N node results cached. May use more RAM/VRAM.") + +attn_group = parser.add_mutually_exclusive_group() +attn_group.add_argument("--use-split-cross-attention", action="store_true", help="Use the split cross attention optimization. Ignored when xformers is used.") +attn_group.add_argument("--use-quad-cross-attention", action="store_true", help="Use the sub-quadratic cross attention optimization. Ignored when xformers is used.") +attn_group.add_argument("--use-pytorch-cross-attention", action="store_true", help="Use the new pytorch 2.0 cross attention function.") + +parser.add_argument("--disable-xformers", action="store_true", help="Disable xformers.") + +upcast = parser.add_mutually_exclusive_group() +upcast.add_argument("--force-upcast-attention", action="store_true", help="Force enable attention upcasting; please report if it fixes black images.") +upcast.add_argument("--dont-upcast-attention", action="store_true", help="Disable all upcasting of attention.
Should be unnecessary except for debugging.") + + +vram_group = parser.add_mutually_exclusive_group() +vram_group.add_argument("--gpu-only", action="store_true", help="Store and run everything (text encoders/CLIP models, etc.) on the GPU.") +vram_group.add_argument("--highvram", action="store_true", help="By default models will be unloaded to CPU memory after being used. This option keeps them in GPU memory.") +vram_group.add_argument("--normalvram", action="store_true", help="Used to force normal vram use if lowvram gets automatically enabled.") +vram_group.add_argument("--lowvram", action="store_true", help="Split the unet in parts to use less vram.") +vram_group.add_argument("--novram", action="store_true", help="When lowvram isn't enough.") +vram_group.add_argument("--cpu", action="store_true", help="To use the CPU for everything (slow).") + +parser.add_argument("--reserve-vram", type=float, default=None, help="Set the amount of vram in GB you want to reserve for use by your OS/other software. By default some amount is reserved depending on your OS.") + + +parser.add_argument("--default-hashing-function", type=str, choices=['md5', 'sha1', 'sha256', 'sha512'], default='sha256', help="Allows you to choose the hash function to use for duplicate filename / contents comparison. Default is sha256.") + +parser.add_argument("--disable-smart-memory", action="store_true", help="Force ComfyUI to aggressively offload to regular ram instead of keeping models in vram when it can.") +parser.add_argument("--deterministic", action="store_true", help="Make pytorch use slower deterministic algorithms when it can. Note that this might not make images deterministic in all cases.") +parser.add_argument("--fast", action="store_true", help="Enable some untested and potentially quality-deteriorating optimizations.") + +parser.add_argument("--dont-print-server", action="store_true", help="Don't print server output.") +parser.add_argument("--quick-test-for-ci", action="store_true", help="Quick test for CI.") +parser.add_argument("--windows-standalone-build", action="store_true", help="Windows standalone build: Enable convenient things that most people using the standalone windows build will probably enjoy (like auto opening the page on startup).") + +parser.add_argument("--disable-metadata", action="store_true", help="Disable saving prompt metadata in files.") +parser.add_argument("--disable-all-custom-nodes", action="store_true", help="Disable loading all custom nodes.") + +parser.add_argument("--multi-user", action="store_true", help="Enables per-user storage.") + +parser.add_argument("--verbose", default='INFO', const='DEBUG', nargs="?", choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], help='Set the logging level.') + +# The default built-in provider hosted under web/ +DEFAULT_VERSION_STRING = "comfyanonymous/ComfyUI@latest" + +parser.add_argument( + "--front-end-version", + type=str, + default=DEFAULT_VERSION_STRING, + help=""" + Specifies the version of the frontend to be used. This option needs internet connectivity to query and + download available frontend implementations from GitHub releases. + + The version string should be in the format of: + [repoOwner]/[repoName]@[version] + where version is one of: "latest" or a valid version number (e.g.
"1.0.0") + """, +) + +def is_valid_directory(path: Optional[str]) -> Optional[str]: + """Validate if the given path is a directory.""" + if path is None: + return None + + if not os.path.isdir(path): + raise argparse.ArgumentTypeError(f"{path} is not a valid directory.") + return path + +parser.add_argument( + "--front-end-root", + type=is_valid_directory, + default=None, + help="The local filesystem path to the directory where the frontend is located. Overrides --front-end-version.", +) + +parser.add_argument("--user-directory", type=is_valid_directory, default=None, help="Set the ComfyUI user directory with an absolute path.") + +if comfy.options.args_parsing: + args = parser.parse_args() +else: + args = parser.parse_args([]) + +if args.windows_standalone_build: + args.auto_launch = True + +if args.disable_auto_launch: + args.auto_launch = False diff --git a/src/comfyui/comfy/clip_config_bigg.json b/src/comfyui/comfy/clip_config_bigg.json new file mode 100644 index 0000000000000000000000000000000000000000..35261deef14a68fcc6c5b1fc32914b5c102781a9 --- /dev/null +++ b/src/comfyui/comfy/clip_config_bigg.json @@ -0,0 +1,23 @@ +{ + "architectures": [ + "CLIPTextModel" + ], + "attention_dropout": 0.0, + "bos_token_id": 0, + "dropout": 0.0, + "eos_token_id": 49407, + "hidden_act": "gelu", + "hidden_size": 1280, + "initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 5120, + "layer_norm_eps": 1e-05, + "max_position_embeddings": 77, + "model_type": "clip_text_model", + "num_attention_heads": 20, + "num_hidden_layers": 32, + "pad_token_id": 1, + "projection_dim": 1280, + "torch_dtype": "float32", + "vocab_size": 49408 +} diff --git a/src/comfyui/comfy/clip_model.py b/src/comfyui/comfy/clip_model.py new file mode 100644 index 0000000000000000000000000000000000000000..42cdc4f6e7d84663c67c23805da432bf8727be42 --- /dev/null +++ b/src/comfyui/comfy/clip_model.py @@ -0,0 +1,196 @@ +import torch +from comfy.ldm.modules.attention import optimized_attention_for_device +import comfy.ops + +class CLIPAttention(torch.nn.Module): + def __init__(self, embed_dim, heads, dtype, device, operations): + super().__init__() + + self.heads = heads + self.q_proj = operations.Linear(embed_dim, embed_dim, bias=True, dtype=dtype, device=device) + self.k_proj = operations.Linear(embed_dim, embed_dim, bias=True, dtype=dtype, device=device) + self.v_proj = operations.Linear(embed_dim, embed_dim, bias=True, dtype=dtype, device=device) + + self.out_proj = operations.Linear(embed_dim, embed_dim, bias=True, dtype=dtype, device=device) + + def forward(self, x, mask=None, optimized_attention=None): + q = self.q_proj(x) + k = self.k_proj(x) + v = self.v_proj(x) + + out = optimized_attention(q, k, v, self.heads, mask) + return self.out_proj(out) + +ACTIVATIONS = {"quick_gelu": lambda a: a * torch.sigmoid(1.702 * a), + "gelu": torch.nn.functional.gelu, +} + +class CLIPMLP(torch.nn.Module): + def __init__(self, embed_dim, intermediate_size, activation, dtype, device, operations): + super().__init__() + self.fc1 = operations.Linear(embed_dim, intermediate_size, bias=True, dtype=dtype, device=device) + self.activation = ACTIVATIONS[activation] + self.fc2 = operations.Linear(intermediate_size, embed_dim, bias=True, dtype=dtype, device=device) + + def forward(self, x): + x = self.fc1(x) + x = self.activation(x) + x = self.fc2(x) + return x + +class CLIPLayer(torch.nn.Module): + def __init__(self, embed_dim, heads, intermediate_size, intermediate_activation, dtype, device, operations): + super().__init__() + 
self.layer_norm1 = operations.LayerNorm(embed_dim, dtype=dtype, device=device) + self.self_attn = CLIPAttention(embed_dim, heads, dtype, device, operations) + self.layer_norm2 = operations.LayerNorm(embed_dim, dtype=dtype, device=device) + self.mlp = CLIPMLP(embed_dim, intermediate_size, intermediate_activation, dtype, device, operations) + + def forward(self, x, mask=None, optimized_attention=None): + x += self.self_attn(self.layer_norm1(x), mask, optimized_attention) + x += self.mlp(self.layer_norm2(x)) + return x + + +class CLIPEncoder(torch.nn.Module): + def __init__(self, num_layers, embed_dim, heads, intermediate_size, intermediate_activation, dtype, device, operations): + super().__init__() + self.layers = torch.nn.ModuleList([CLIPLayer(embed_dim, heads, intermediate_size, intermediate_activation, dtype, device, operations) for i in range(num_layers)]) + + def forward(self, x, mask=None, intermediate_output=None): + optimized_attention = optimized_attention_for_device(x.device, mask=mask is not None, small_input=True) + + if intermediate_output is not None: + if intermediate_output < 0: + intermediate_output = len(self.layers) + intermediate_output + + intermediate = None + for i, l in enumerate(self.layers): + x = l(x, mask, optimized_attention) + if i == intermediate_output: + intermediate = x.clone() + return x, intermediate + +class CLIPEmbeddings(torch.nn.Module): + def __init__(self, embed_dim, vocab_size=49408, num_positions=77, dtype=None, device=None, operations=None): + super().__init__() + self.token_embedding = operations.Embedding(vocab_size, embed_dim, dtype=dtype, device=device) + self.position_embedding = operations.Embedding(num_positions, embed_dim, dtype=dtype, device=device) + + def forward(self, input_tokens, dtype=torch.float32): + return self.token_embedding(input_tokens, out_dtype=dtype) + comfy.ops.cast_to(self.position_embedding.weight, dtype=dtype, device=input_tokens.device) + + +class CLIPTextModel_(torch.nn.Module): + def __init__(self, config_dict, dtype, device, operations): + num_layers = config_dict["num_hidden_layers"] + embed_dim = config_dict["hidden_size"] + heads = config_dict["num_attention_heads"] + intermediate_size = config_dict["intermediate_size"] + intermediate_activation = config_dict["hidden_act"] + num_positions = config_dict["max_position_embeddings"] + self.eos_token_id = config_dict["eos_token_id"] + + super().__init__() + self.embeddings = CLIPEmbeddings(embed_dim, num_positions=num_positions, dtype=dtype, device=device, operations=operations) + self.encoder = CLIPEncoder(num_layers, embed_dim, heads, intermediate_size, intermediate_activation, dtype, device, operations) + self.final_layer_norm = operations.LayerNorm(embed_dim, dtype=dtype, device=device) + + def forward(self, input_tokens, attention_mask=None, intermediate_output=None, final_layer_norm_intermediate=True, dtype=torch.float32): + x = self.embeddings(input_tokens, dtype=dtype) + mask = None + if attention_mask is not None: + mask = 1.0 - attention_mask.to(x.dtype).reshape((attention_mask.shape[0], 1, -1, attention_mask.shape[-1])).expand(attention_mask.shape[0], 1, attention_mask.shape[-1], attention_mask.shape[-1]) + mask = mask.masked_fill(mask.to(torch.bool), float("-inf")) + + causal_mask = torch.empty(x.shape[1], x.shape[1], dtype=x.dtype, device=x.device).fill_(float("-inf")).triu_(1) + if mask is not None: + mask += causal_mask + else: + mask = causal_mask + + x, i = self.encoder(x, mask=mask, intermediate_output=intermediate_output) + x = 
self.final_layer_norm(x) + if i is not None and final_layer_norm_intermediate: + i = self.final_layer_norm(i) + + pooled_output = x[torch.arange(x.shape[0], device=x.device), (torch.round(input_tokens).to(dtype=torch.int, device=x.device) == self.eos_token_id).int().argmax(dim=-1),] + return x, i, pooled_output + +class CLIPTextModel(torch.nn.Module): + def __init__(self, config_dict, dtype, device, operations): + super().__init__() + self.num_layers = config_dict["num_hidden_layers"] + self.text_model = CLIPTextModel_(config_dict, dtype, device, operations) + embed_dim = config_dict["hidden_size"] + self.text_projection = operations.Linear(embed_dim, embed_dim, bias=False, dtype=dtype, device=device) + self.dtype = dtype + + def get_input_embeddings(self): + return self.text_model.embeddings.token_embedding + + def set_input_embeddings(self, embeddings): + self.text_model.embeddings.token_embedding = embeddings + + def forward(self, *args, **kwargs): + x = self.text_model(*args, **kwargs) + out = self.text_projection(x[2]) + return (x[0], x[1], out, x[2]) + + +class CLIPVisionEmbeddings(torch.nn.Module): + def __init__(self, embed_dim, num_channels=3, patch_size=14, image_size=224, dtype=None, device=None, operations=None): + super().__init__() + self.class_embedding = torch.nn.Parameter(torch.empty(embed_dim, dtype=dtype, device=device)) + + self.patch_embedding = operations.Conv2d( + in_channels=num_channels, + out_channels=embed_dim, + kernel_size=patch_size, + stride=patch_size, + bias=False, + dtype=dtype, + device=device + ) + + num_patches = (image_size // patch_size) ** 2 + num_positions = num_patches + 1 + self.position_embedding = operations.Embedding(num_positions, embed_dim, dtype=dtype, device=device) + + def forward(self, pixel_values): + embeds = self.patch_embedding(pixel_values).flatten(2).transpose(1, 2) + return torch.cat([comfy.ops.cast_to_input(self.class_embedding, embeds).expand(pixel_values.shape[0], 1, -1), embeds], dim=1) + comfy.ops.cast_to_input(self.position_embedding.weight, embeds) + + +class CLIPVision(torch.nn.Module): + def __init__(self, config_dict, dtype, device, operations): + super().__init__() + num_layers = config_dict["num_hidden_layers"] + embed_dim = config_dict["hidden_size"] + heads = config_dict["num_attention_heads"] + intermediate_size = config_dict["intermediate_size"] + intermediate_activation = config_dict["hidden_act"] + + self.embeddings = CLIPVisionEmbeddings(embed_dim, config_dict["num_channels"], config_dict["patch_size"], config_dict["image_size"], dtype=dtype, device=device, operations=operations) + self.pre_layrnorm = operations.LayerNorm(embed_dim) + self.encoder = CLIPEncoder(num_layers, embed_dim, heads, intermediate_size, intermediate_activation, dtype, device, operations) + self.post_layernorm = operations.LayerNorm(embed_dim) + + def forward(self, pixel_values, attention_mask=None, intermediate_output=None): + x = self.embeddings(pixel_values) + x = self.pre_layrnorm(x) + #TODO: attention_mask? 
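+ # editor note on the TODO above: the attention_mask argument is accepted but currently unused; the encoder call below always passes mask=None, so the vision tower runs fully unmasked.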
+ x, i = self.encoder(x, mask=None, intermediate_output=intermediate_output) + pooled_output = self.post_layernorm(x[:, 0, :]) + return x, i, pooled_output + +class CLIPVisionModelProjection(torch.nn.Module): + def __init__(self, config_dict, dtype, device, operations): + super().__init__() + self.vision_model = CLIPVision(config_dict, dtype, device, operations) + self.visual_projection = operations.Linear(config_dict["hidden_size"], config_dict["projection_dim"], bias=False) + + def forward(self, *args, **kwargs): + x = self.vision_model(*args, **kwargs) + out = self.visual_projection(x[2]) + return (x[0], x[1], out) diff --git a/src/comfyui/comfy/clip_vision.py b/src/comfyui/comfy/clip_vision.py new file mode 100644 index 0000000000000000000000000000000000000000..64392e2704fe9a069a703e4baab3814452bf3aaf --- /dev/null +++ b/src/comfyui/comfy/clip_vision.py @@ -0,0 +1,120 @@ +from .utils import load_torch_file, transformers_convert, state_dict_prefix_replace +import os +import torch +import json +import logging + +import comfy.ops +import comfy.model_patcher +import comfy.model_management +import comfy.utils +import comfy.clip_model + +class Output: + def __getitem__(self, key): + return getattr(self, key) + def __setitem__(self, key, item): + setattr(self, key, item) + +def clip_preprocess(image, size=224): + mean = torch.tensor([ 0.48145466,0.4578275,0.40821073], device=image.device, dtype=image.dtype) + std = torch.tensor([0.26862954,0.26130258,0.27577711], device=image.device, dtype=image.dtype) + image = image.movedim(-1, 1) + if not (image.shape[2] == size and image.shape[3] == size): + scale = (size / min(image.shape[2], image.shape[3])) + image = torch.nn.functional.interpolate(image, size=(round(scale * image.shape[2]), round(scale * image.shape[3])), mode="bicubic", antialias=True) + h = (image.shape[2] - size)//2 + w = (image.shape[3] - size)//2 + image = image[:,:,h:h+size,w:w+size] + image = torch.clip((255. 
* image), 0, 255).round() / 255.0 + return (image - mean.view([3,1,1])) / std.view([3,1,1]) + +class ClipVisionModel(): + def __init__(self, json_config): + with open(json_config) as f: + config = json.load(f) + + self.image_size = config.get("image_size", 224) + self.load_device = comfy.model_management.text_encoder_device() + offload_device = comfy.model_management.text_encoder_offload_device() + self.dtype = comfy.model_management.text_encoder_dtype(self.load_device) + self.model = comfy.clip_model.CLIPVisionModelProjection(config, self.dtype, offload_device, comfy.ops.manual_cast) + self.model.eval() + + self.patcher = comfy.model_patcher.ModelPatcher(self.model, load_device=self.load_device, offload_device=offload_device) + + def load_sd(self, sd): + return self.model.load_state_dict(sd, strict=False) + + def get_sd(self): + return self.model.state_dict() + + def encode_image(self, image): + comfy.model_management.load_model_gpu(self.patcher) + pixel_values = clip_preprocess(image.to(self.load_device), size=self.image_size).float() + out = self.model(pixel_values=pixel_values, intermediate_output=-2) + + outputs = Output() + outputs["last_hidden_state"] = out[0].to(comfy.model_management.intermediate_device()) + outputs["image_embeds"] = out[2].to(comfy.model_management.intermediate_device()) + outputs["penultimate_hidden_states"] = out[1].to(comfy.model_management.intermediate_device()) + return outputs + +def convert_to_transformers(sd, prefix): + sd_k = sd.keys() + if "{}transformer.resblocks.0.attn.in_proj_weight".format(prefix) in sd_k: + keys_to_replace = { + "{}class_embedding".format(prefix): "vision_model.embeddings.class_embedding", + "{}conv1.weight".format(prefix): "vision_model.embeddings.patch_embedding.weight", + "{}positional_embedding".format(prefix): "vision_model.embeddings.position_embedding.weight", + "{}ln_post.bias".format(prefix): "vision_model.post_layernorm.bias", + "{}ln_post.weight".format(prefix): "vision_model.post_layernorm.weight", + "{}ln_pre.bias".format(prefix): "vision_model.pre_layrnorm.bias", + "{}ln_pre.weight".format(prefix): "vision_model.pre_layrnorm.weight", + } + + for x in keys_to_replace: + if x in sd_k: + sd[keys_to_replace[x]] = sd.pop(x) + + if "{}proj".format(prefix) in sd_k: + sd['visual_projection.weight'] = sd.pop("{}proj".format(prefix)).transpose(0, 1) + + sd = transformers_convert(sd, prefix, "vision_model.", 48) + else: + replace_prefix = {prefix: ""} + sd = state_dict_prefix_replace(sd, replace_prefix) + return sd + +def load_clipvision_from_sd(sd, prefix="", convert_keys=False): + if convert_keys: + sd = convert_to_transformers(sd, prefix) + if "vision_model.encoder.layers.47.layer_norm1.weight" in sd: + json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "clip_vision_config_g.json") + elif "vision_model.encoder.layers.30.layer_norm1.weight" in sd: + json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "clip_vision_config_h.json") + elif "vision_model.encoder.layers.22.layer_norm1.weight" in sd: + if sd["vision_model.embeddings.position_embedding.weight"].shape[0] == 577: + json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "clip_vision_config_vitl_336.json") + else: + json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "clip_vision_config_vitl.json") + else: + return None + + clip = ClipVisionModel(json_config) + m, u = clip.load_sd(sd) + if len(m) > 0: + logging.warning("missing clip vision: {}".format(m)) + u = set(u) + keys = list(sd.keys()) + 
for k in keys: + if k not in u: + sd.pop(k) + return clip + +def load(ckpt_path): + sd = load_torch_file(ckpt_path) + if "visual.transformer.resblocks.0.attn.in_proj_weight" in sd: + return load_clipvision_from_sd(sd, prefix="visual.", convert_keys=True) + else: + return load_clipvision_from_sd(sd) diff --git a/src/comfyui/comfy/clip_vision_config_g.json b/src/comfyui/comfy/clip_vision_config_g.json new file mode 100644 index 0000000000000000000000000000000000000000..708e7e21ac3513a719d6a49e88e756f5ef7e2c8d --- /dev/null +++ b/src/comfyui/comfy/clip_vision_config_g.json @@ -0,0 +1,18 @@ +{ + "attention_dropout": 0.0, + "dropout": 0.0, + "hidden_act": "gelu", + "hidden_size": 1664, + "image_size": 224, + "initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 8192, + "layer_norm_eps": 1e-05, + "model_type": "clip_vision_model", + "num_attention_heads": 16, + "num_channels": 3, + "num_hidden_layers": 48, + "patch_size": 14, + "projection_dim": 1280, + "torch_dtype": "float32" +} diff --git a/src/comfyui/comfy/clip_vision_config_h.json b/src/comfyui/comfy/clip_vision_config_h.json new file mode 100644 index 0000000000000000000000000000000000000000..bb71be419a4be0ad5c8c157850de032a65593cb9 --- /dev/null +++ b/src/comfyui/comfy/clip_vision_config_h.json @@ -0,0 +1,18 @@ +{ + "attention_dropout": 0.0, + "dropout": 0.0, + "hidden_act": "gelu", + "hidden_size": 1280, + "image_size": 224, + "initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 5120, + "layer_norm_eps": 1e-05, + "model_type": "clip_vision_model", + "num_attention_heads": 16, + "num_channels": 3, + "num_hidden_layers": 32, + "patch_size": 14, + "projection_dim": 1024, + "torch_dtype": "float32" +} diff --git a/src/comfyui/comfy/clip_vision_config_vitl.json b/src/comfyui/comfy/clip_vision_config_vitl.json new file mode 100644 index 0000000000000000000000000000000000000000..c59b8ed5a4c1f41fbcc9e6811d2c7dfe44273de7 --- /dev/null +++ b/src/comfyui/comfy/clip_vision_config_vitl.json @@ -0,0 +1,18 @@ +{ + "attention_dropout": 0.0, + "dropout": 0.0, + "hidden_act": "quick_gelu", + "hidden_size": 1024, + "image_size": 224, + "initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 4096, + "layer_norm_eps": 1e-05, + "model_type": "clip_vision_model", + "num_attention_heads": 16, + "num_channels": 3, + "num_hidden_layers": 24, + "patch_size": 14, + "projection_dim": 768, + "torch_dtype": "float32" +} diff --git a/src/comfyui/comfy/clip_vision_config_vitl_336.json b/src/comfyui/comfy/clip_vision_config_vitl_336.json new file mode 100644 index 0000000000000000000000000000000000000000..f26945273d99e88f207d64dcec78feee63b4b625 --- /dev/null +++ b/src/comfyui/comfy/clip_vision_config_vitl_336.json @@ -0,0 +1,18 @@ +{ + "attention_dropout": 0.0, + "dropout": 0.0, + "hidden_act": "quick_gelu", + "hidden_size": 1024, + "image_size": 336, + "initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 4096, + "layer_norm_eps": 1e-5, + "model_type": "clip_vision_model", + "num_attention_heads": 16, + "num_channels": 3, + "num_hidden_layers": 24, + "patch_size": 14, + "projection_dim": 768, + "torch_dtype": "float32" +} diff --git a/src/comfyui/comfy/comfy_types.py b/src/comfyui/comfy/comfy_types.py new file mode 100644 index 0000000000000000000000000000000000000000..70cf4b158e5f969192c0c11d9bd461964aaea5b5 --- /dev/null +++ b/src/comfyui/comfy/comfy_types.py @@ -0,0 +1,32 @@ +import torch +from typing import Callable, Protocol, TypedDict, Optional, List + + +class 
UnetApplyFunction(Protocol): + """Function signature protocol for comfy.model_base.BaseModel.apply_model""" + + def __call__(self, x: torch.Tensor, t: torch.Tensor, **kwargs) -> torch.Tensor: + pass + + +class UnetApplyConds(TypedDict): + """Optional conditions for unet apply function.""" + + c_concat: Optional[torch.Tensor] + c_crossattn: Optional[torch.Tensor] + control: Optional[torch.Tensor] + transformer_options: Optional[dict] + + +class UnetParams(TypedDict): + # Tensor of shape [B, C, H, W] + input: torch.Tensor + # Tensor of shape [B] + timestep: torch.Tensor + c: UnetApplyConds + # List of [0, 1], [0], [1], ... + # 0 means conditional, 1 means unconditional + cond_or_uncond: List[int] + + +UnetWrapperFunction = Callable[[UnetApplyFunction, UnetParams], torch.Tensor] diff --git a/src/comfyui/comfy/conds.py b/src/comfyui/comfy/conds.py new file mode 100644 index 0000000000000000000000000000000000000000..660690af8425209e6cc8d8b3e17185065e269a47 --- /dev/null +++ b/src/comfyui/comfy/conds.py @@ -0,0 +1,83 @@ +import torch +import math +import comfy.utils + + +def lcm(a, b): #TODO: eventually replace by math.lcm (added in python3.9) + return abs(a*b) // math.gcd(a, b) + +class CONDRegular: + def __init__(self, cond): + self.cond = cond + + def _copy_with(self, cond): + return self.__class__(cond) + + def process_cond(self, batch_size, device, **kwargs): + return self._copy_with(comfy.utils.repeat_to_batch_size(self.cond, batch_size).to(device)) + + def can_concat(self, other): + if self.cond.shape != other.cond.shape: + return False + return True + + def concat(self, others): + conds = [self.cond] + for x in others: + conds.append(x.cond) + return torch.cat(conds) + +class CONDNoiseShape(CONDRegular): + def process_cond(self, batch_size, device, area, **kwargs): + data = self.cond + if area is not None: + dims = len(area) // 2 + for i in range(dims): + data = data.narrow(i + 2, area[i + dims], area[i]) + + return self._copy_with(comfy.utils.repeat_to_batch_size(data, batch_size).to(device)) + + +class CONDCrossAttn(CONDRegular): + def can_concat(self, other): + s1 = self.cond.shape + s2 = other.cond.shape + if s1 != s2: + if s1[0] != s2[0] or s1[2] != s2[2]: #these 2 cases should not happen + return False + + mult_min = lcm(s1[1], s2[1]) + diff = mult_min // min(s1[1], s2[1]) + if diff > 4: #arbitrary limit on the padding because it's probably going to impact performance negatively if it's too much + return False + return True + + def concat(self, others): + conds = [self.cond] + crossattn_max_len = self.cond.shape[1] + for x in others: + c = x.cond + crossattn_max_len = lcm(crossattn_max_len, c.shape[1]) + conds.append(c) + + out = [] + for c in conds: + if c.shape[1] < crossattn_max_len: + c = c.repeat(1, crossattn_max_len // c.shape[1], 1) #padding with repeat doesn't change result + out.append(c) + return torch.cat(out) + +class CONDConstant(CONDRegular): + def __init__(self, cond): + self.cond = cond + + def process_cond(self, batch_size, device, **kwargs): + return self._copy_with(self.cond) + + def can_concat(self, other): + if self.cond != other.cond: + return False + return True + + def concat(self, others): + return self.cond diff --git a/src/comfyui/comfy/controlnet.py b/src/comfyui/comfy/controlnet.py new file mode 100644 index 0000000000000000000000000000000000000000..d2744e427da458825b1650149db93a488401a404 --- /dev/null +++ b/src/comfyui/comfy/controlnet.py @@ -0,0 +1,766 @@ +""" + This file is part of ComfyUI.
+ Copyright (C) 2024 Comfy + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. +""" + + +import torch +from enum import Enum +import math +import os +import logging +import comfy.utils +import comfy.model_management +import comfy.model_detection +import comfy.model_patcher +import comfy.ops +import comfy.latent_formats + +import comfy.cldm.cldm +import comfy.t2i_adapter.adapter +import comfy.ldm.cascade.controlnet +import comfy.cldm.mmdit +import comfy.ldm.hydit.controlnet +import comfy.ldm.flux.controlnet + + +def broadcast_image_to(tensor, target_batch_size, batched_number): + current_batch_size = tensor.shape[0] + #print(current_batch_size, target_batch_size) + if current_batch_size == 1: + return tensor + + per_batch = target_batch_size // batched_number + tensor = tensor[:per_batch] + + if per_batch > tensor.shape[0]: + tensor = torch.cat([tensor] * (per_batch // tensor.shape[0]) + [tensor[:(per_batch % tensor.shape[0])]], dim=0) + + current_batch_size = tensor.shape[0] + if current_batch_size == target_batch_size: + return tensor + else: + return torch.cat([tensor] * batched_number, dim=0) + +class StrengthType(Enum): + CONSTANT = 1 + LINEAR_UP = 2 + +class ControlBase: + def __init__(self): + self.cond_hint_original = None + self.cond_hint = None + self.strength = 1.0 + self.timestep_percent_range = (0.0, 1.0) + self.latent_format = None + self.vae = None + self.global_average_pooling = False + self.timestep_range = None + self.compression_ratio = 8 + self.upscale_algorithm = 'nearest-exact' + self.extra_args = {} + self.previous_controlnet = None + self.extra_conds = [] + self.strength_type = StrengthType.CONSTANT + self.concat_mask = False + self.extra_concat_orig = [] + self.extra_concat = None + + def set_cond_hint(self, cond_hint, strength=1.0, timestep_percent_range=(0.0, 1.0), vae=None, extra_concat=[]): + self.cond_hint_original = cond_hint + self.strength = strength + self.timestep_percent_range = timestep_percent_range + if self.latent_format is not None: + if vae is None: + logging.warning("WARNING: no VAE provided to the controlnet apply node when this controlnet requires one.") + self.vae = vae + self.extra_concat_orig = extra_concat.copy() + if self.concat_mask and len(self.extra_concat_orig) == 0: + self.extra_concat_orig.append(torch.tensor([[[[1.0]]]])) + return self + + def pre_run(self, model, percent_to_timestep_function): + self.timestep_range = (percent_to_timestep_function(self.timestep_percent_range[0]), percent_to_timestep_function(self.timestep_percent_range[1])) + if self.previous_controlnet is not None: + self.previous_controlnet.pre_run(model, percent_to_timestep_function) + + def set_previous_controlnet(self, controlnet): + self.previous_controlnet = controlnet + return self + + def cleanup(self): + if self.previous_controlnet is not None: + self.previous_controlnet.cleanup() + + self.cond_hint = None + self.extra_concat = None + self.timestep_range = None + + def get_models(self): + out = [] + if
self.previous_controlnet is not None: + out += self.previous_controlnet.get_models() + return out + + def copy_to(self, c): + c.cond_hint_original = self.cond_hint_original + c.strength = self.strength + c.timestep_percent_range = self.timestep_percent_range + c.global_average_pooling = self.global_average_pooling + c.compression_ratio = self.compression_ratio + c.upscale_algorithm = self.upscale_algorithm + c.latent_format = self.latent_format + c.extra_args = self.extra_args.copy() + c.vae = self.vae + c.extra_conds = self.extra_conds.copy() + c.strength_type = self.strength_type + c.concat_mask = self.concat_mask + c.extra_concat_orig = self.extra_concat_orig.copy() + + def inference_memory_requirements(self, dtype): + if self.previous_controlnet is not None: + return self.previous_controlnet.inference_memory_requirements(dtype) + return 0 + + def control_merge(self, control, control_prev, output_dtype): + out = {'input':[], 'middle':[], 'output': []} + + for key in control: + control_output = control[key] + applied_to = set() + for i in range(len(control_output)): + x = control_output[i] + if x is not None: + if self.global_average_pooling: + x = torch.mean(x, dim=(2, 3), keepdim=True).repeat(1, 1, x.shape[2], x.shape[3]) + + if x not in applied_to: #memory saving strategy, allow shared tensors and only apply strength to shared tensors once + applied_to.add(x) + if self.strength_type == StrengthType.CONSTANT: + x *= self.strength + elif self.strength_type == StrengthType.LINEAR_UP: + x *= (self.strength ** float(len(control_output) - i)) + + if output_dtype is not None and x.dtype != output_dtype: + x = x.to(output_dtype) + + out[key].append(x) + + if control_prev is not None: + for x in ['input', 'middle', 'output']: + o = out[x] + for i in range(len(control_prev[x])): + prev_val = control_prev[x][i] + if i >= len(o): + o.append(prev_val) + elif prev_val is not None: + if o[i] is None: + o[i] = prev_val + else: + if o[i].shape[0] < prev_val.shape[0]: + o[i] = prev_val + o[i] + else: + o[i] = prev_val + o[i] #TODO: change back to inplace add if shared tensors stop being an issue + return out + + def set_extra_arg(self, argument, value=None): + self.extra_args[argument] = value + + +class ControlNet(ControlBase): + def __init__(self, control_model=None, global_average_pooling=False, compression_ratio=8, latent_format=None, load_device=None, manual_cast_dtype=None, extra_conds=["y"], strength_type=StrengthType.CONSTANT, concat_mask=False): + super().__init__() + self.control_model = control_model + self.load_device = load_device + if control_model is not None: + self.control_model_wrapped = comfy.model_patcher.ModelPatcher(self.control_model, load_device=load_device, offload_device=comfy.model_management.unet_offload_device()) + + self.compression_ratio = compression_ratio + self.global_average_pooling = global_average_pooling + self.model_sampling_current = None + self.manual_cast_dtype = manual_cast_dtype + self.latent_format = latent_format + self.extra_conds += extra_conds + self.strength_type = strength_type + self.concat_mask = concat_mask + + def get_control(self, x_noisy, t, cond, batched_number): + control_prev = None + if self.previous_controlnet is not None: + control_prev = self.previous_controlnet.get_control(x_noisy, t, cond, batched_number) + + if self.timestep_range is not None: + if t[0] > self.timestep_range[0] or t[0] < self.timestep_range[1]: + if control_prev is not None: + return control_prev + else: + return None + + dtype = self.control_model.dtype + if 
self.manual_cast_dtype is not None: + dtype = self.manual_cast_dtype + + if self.cond_hint is None or x_noisy.shape[2] * self.compression_ratio != self.cond_hint.shape[2] or x_noisy.shape[3] * self.compression_ratio != self.cond_hint.shape[3]: + if self.cond_hint is not None: + del self.cond_hint + self.cond_hint = None + compression_ratio = self.compression_ratio + if self.vae is not None: + compression_ratio *= self.vae.downscale_ratio + else: + if self.latent_format is not None: + raise ValueError("This Controlnet needs a VAE but none was provided, please use a ControlNetApply node with a VAE input and connect it.") + self.cond_hint = comfy.utils.common_upscale(self.cond_hint_original, x_noisy.shape[3] * compression_ratio, x_noisy.shape[2] * compression_ratio, self.upscale_algorithm, "center") + if self.vae is not None: + loaded_models = comfy.model_management.loaded_models(only_currently_used=True) + self.cond_hint = self.vae.encode(self.cond_hint.movedim(1, -1)) + comfy.model_management.load_models_gpu(loaded_models) + if self.latent_format is not None: + self.cond_hint = self.latent_format.process_in(self.cond_hint) + if len(self.extra_concat_orig) > 0: + to_concat = [] + for c in self.extra_concat_orig: + c = c.to(self.cond_hint.device) + c = comfy.utils.common_upscale(c, self.cond_hint.shape[3], self.cond_hint.shape[2], self.upscale_algorithm, "center") + to_concat.append(comfy.utils.repeat_to_batch_size(c, self.cond_hint.shape[0])) + self.cond_hint = torch.cat([self.cond_hint] + to_concat, dim=1) + + self.cond_hint = self.cond_hint.to(device=x_noisy.device, dtype=dtype) + if x_noisy.shape[0] != self.cond_hint.shape[0]: + self.cond_hint = broadcast_image_to(self.cond_hint, x_noisy.shape[0], batched_number) + + context = cond.get('crossattn_controlnet', cond['c_crossattn']) + extra = self.extra_args.copy() + for c in self.extra_conds: + temp = cond.get(c, None) + if temp is not None: + extra[c] = temp.to(dtype) + + timestep = self.model_sampling_current.timestep(t) + x_noisy = self.model_sampling_current.calculate_input(t, x_noisy) + + control = self.control_model(x=x_noisy.to(dtype), hint=self.cond_hint, timesteps=timestep.to(dtype), context=context.to(dtype), **extra) + return self.control_merge(control, control_prev, output_dtype=None) + + def copy(self): + c = ControlNet(None, global_average_pooling=self.global_average_pooling, load_device=self.load_device, manual_cast_dtype=self.manual_cast_dtype) + c.control_model = self.control_model + c.control_model_wrapped = self.control_model_wrapped + self.copy_to(c) + return c + + def get_models(self): + out = super().get_models() + out.append(self.control_model_wrapped) + return out + + def pre_run(self, model, percent_to_timestep_function): + super().pre_run(model, percent_to_timestep_function) + self.model_sampling_current = model.model_sampling + + def cleanup(self): + self.model_sampling_current = None + super().cleanup() + +class ControlLoraOps: + class Linear(torch.nn.Module, comfy.ops.CastWeightBiasOp): + def __init__(self, in_features: int, out_features: int, bias: bool = True, + device=None, dtype=None) -> None: + factory_kwargs = {'device': device, 'dtype': dtype} + super().__init__() + self.in_features = in_features + self.out_features = out_features + self.weight = None + self.up = None + self.down = None + self.bias = None + + def forward(self, input): + weight, bias = comfy.ops.cast_bias_weight(self, input) + if self.up is not None: + return torch.nn.functional.linear(input, weight + 
(torch.mm(self.up.flatten(start_dim=1), self.down.flatten(start_dim=1))).reshape(self.weight.shape).type(input.dtype), bias) + else: + return torch.nn.functional.linear(input, weight, bias) + + class Conv2d(torch.nn.Module, comfy.ops.CastWeightBiasOp): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + padding=0, + dilation=1, + groups=1, + bias=True, + padding_mode='zeros', + device=None, + dtype=None + ): + super().__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = kernel_size + self.stride = stride + self.padding = padding + self.dilation = dilation + self.transposed = False + self.output_padding = 0 + self.groups = groups + self.padding_mode = padding_mode + + self.weight = None + self.bias = None + self.up = None + self.down = None + + + def forward(self, input): + weight, bias = comfy.ops.cast_bias_weight(self, input) + if self.up is not None: + return torch.nn.functional.conv2d(input, weight + (torch.mm(self.up.flatten(start_dim=1), self.down.flatten(start_dim=1))).reshape(self.weight.shape).type(input.dtype), bias, self.stride, self.padding, self.dilation, self.groups) + else: + return torch.nn.functional.conv2d(input, weight, bias, self.stride, self.padding, self.dilation, self.groups) + + +class ControlLora(ControlNet): + def __init__(self, control_weights, global_average_pooling=False, model_options={}): #TODO? model_options + ControlBase.__init__(self) + self.control_weights = control_weights + self.global_average_pooling = global_average_pooling + self.extra_conds += ["y"] + + def pre_run(self, model, percent_to_timestep_function): + super().pre_run(model, percent_to_timestep_function) + controlnet_config = model.model_config.unet_config.copy() + controlnet_config.pop("out_channels") + controlnet_config["hint_channels"] = self.control_weights["input_hint_block.0.weight"].shape[1] + self.manual_cast_dtype = model.manual_cast_dtype + dtype = model.get_dtype() + if self.manual_cast_dtype is None: + class control_lora_ops(ControlLoraOps, comfy.ops.disable_weight_init): + pass + else: + class control_lora_ops(ControlLoraOps, comfy.ops.manual_cast): + pass + dtype = self.manual_cast_dtype + + controlnet_config["operations"] = control_lora_ops + controlnet_config["dtype"] = dtype + self.control_model = comfy.cldm.cldm.ControlNet(**controlnet_config) + self.control_model.to(comfy.model_management.get_torch_device()) + diffusion_model = model.diffusion_model + sd = diffusion_model.state_dict() + cm = self.control_model.state_dict() + + for k in sd: + weight = sd[k] + try: + comfy.utils.set_attr_param(self.control_model, k, weight) + except: + pass + + for k in self.control_weights: + if k not in {"lora_controlnet"}: + comfy.utils.set_attr_param(self.control_model, k, self.control_weights[k].to(dtype).to(comfy.model_management.get_torch_device())) + + def copy(self): + c = ControlLora(self.control_weights, global_average_pooling=self.global_average_pooling) + self.copy_to(c) + return c + + def cleanup(self): + del self.control_model + self.control_model = None + super().cleanup() + + def get_models(self): + out = ControlBase.get_models(self) + return out + + def inference_memory_requirements(self, dtype): + return comfy.utils.calculate_parameters(self.control_weights) * comfy.model_management.dtype_size(dtype) + ControlBase.inference_memory_requirements(self, dtype) + +def controlnet_config(sd, model_options={}): + model_config = comfy.model_detection.model_config_from_unet(sd, "", True) + + unet_dtype = 
model_options.get("dtype", None) + if unet_dtype is None: + weight_dtype = comfy.utils.weight_dtype(sd) + + supported_inference_dtypes = list(model_config.supported_inference_dtypes) + if weight_dtype is not None: + supported_inference_dtypes.append(weight_dtype) + + unet_dtype = comfy.model_management.unet_dtype(model_params=-1, supported_dtypes=supported_inference_dtypes) + + load_device = comfy.model_management.get_torch_device() + manual_cast_dtype = comfy.model_management.unet_manual_cast(unet_dtype, load_device) + + operations = model_options.get("custom_operations", None) + if operations is None: + operations = comfy.ops.pick_operations(unet_dtype, manual_cast_dtype, disable_fast_fp8=True) + + offload_device = comfy.model_management.unet_offload_device() + return model_config, operations, load_device, unet_dtype, manual_cast_dtype, offload_device + +def controlnet_load_state_dict(control_model, sd): + missing, unexpected = control_model.load_state_dict(sd, strict=False) + + if len(missing) > 0: + logging.warning("missing controlnet keys: {}".format(missing)) + + if len(unexpected) > 0: + logging.debug("unexpected controlnet keys: {}".format(unexpected)) + return control_model + +def load_controlnet_mmdit(sd, model_options={}): + new_sd = comfy.model_detection.convert_diffusers_mmdit(sd, "") + model_config, operations, load_device, unet_dtype, manual_cast_dtype, offload_device = controlnet_config(new_sd, model_options=model_options) + num_blocks = comfy.model_detection.count_blocks(new_sd, 'joint_blocks.{}.') + for k in sd: + new_sd[k] = sd[k] + + concat_mask = False + control_latent_channels = new_sd.get("pos_embed_input.proj.weight").shape[1] + if control_latent_channels == 17: #inpaint controlnet + concat_mask = True + + control_model = comfy.cldm.mmdit.ControlNet(num_blocks=num_blocks, control_latent_channels=control_latent_channels, operations=operations, device=offload_device, dtype=unet_dtype, **model_config.unet_config) + control_model = controlnet_load_state_dict(control_model, new_sd) + + latent_format = comfy.latent_formats.SD3() + latent_format.shift_factor = 0 #SD3 controlnet weirdness + control = ControlNet(control_model, compression_ratio=1, latent_format=latent_format, concat_mask=concat_mask, load_device=load_device, manual_cast_dtype=manual_cast_dtype) + return control + + +def load_controlnet_hunyuandit(controlnet_data, model_options={}): + model_config, operations, load_device, unet_dtype, manual_cast_dtype, offload_device = controlnet_config(controlnet_data, model_options=model_options) + + control_model = comfy.ldm.hydit.controlnet.HunYuanControlNet(operations=operations, device=offload_device, dtype=unet_dtype) + control_model = controlnet_load_state_dict(control_model, controlnet_data) + + latent_format = comfy.latent_formats.SDXL() + extra_conds = ['text_embedding_mask', 'encoder_hidden_states_t5', 'text_embedding_mask_t5', 'image_meta_size', 'style', 'cos_cis_img', 'sin_cis_img'] + control = ControlNet(control_model, compression_ratio=1, latent_format=latent_format, load_device=load_device, manual_cast_dtype=manual_cast_dtype, extra_conds=extra_conds, strength_type=StrengthType.CONSTANT) + return control + +def load_controlnet_flux_xlabs_mistoline(sd, mistoline=False, model_options={}): + model_config, operations, load_device, unet_dtype, manual_cast_dtype, offload_device = controlnet_config(sd, model_options=model_options) + control_model = comfy.ldm.flux.controlnet.ControlNetFlux(mistoline=mistoline, operations=operations, device=offload_device, 
dtype=unet_dtype, **model_config.unet_config) + control_model = controlnet_load_state_dict(control_model, sd) + extra_conds = ['y', 'guidance'] + control = ControlNet(control_model, load_device=load_device, manual_cast_dtype=manual_cast_dtype, extra_conds=extra_conds) + return control + +def load_controlnet_flux_instantx(sd, model_options={}): + new_sd = comfy.model_detection.convert_diffusers_mmdit(sd, "") + model_config, operations, load_device, unet_dtype, manual_cast_dtype, offload_device = controlnet_config(new_sd, model_options=model_options) + for k in sd: + new_sd[k] = sd[k] + + num_union_modes = 0 + union_cnet = "controlnet_mode_embedder.weight" + if union_cnet in new_sd: + num_union_modes = new_sd[union_cnet].shape[0] + + control_latent_channels = new_sd.get("pos_embed_input.weight").shape[1] // 4 + concat_mask = False + if control_latent_channels == 17: + concat_mask = True + + control_model = comfy.ldm.flux.controlnet.ControlNetFlux(latent_input=True, num_union_modes=num_union_modes, control_latent_channels=control_latent_channels, operations=operations, device=offload_device, dtype=unet_dtype, **model_config.unet_config) + control_model = controlnet_load_state_dict(control_model, new_sd) + + latent_format = comfy.latent_formats.Flux() + extra_conds = ['y', 'guidance'] + control = ControlNet(control_model, compression_ratio=1, latent_format=latent_format, concat_mask=concat_mask, load_device=load_device, manual_cast_dtype=manual_cast_dtype, extra_conds=extra_conds) + return control + +def convert_mistoline(sd): + return comfy.utils.state_dict_prefix_replace(sd, {"single_controlnet_blocks.": "controlnet_single_blocks."}) + + +def load_controlnet_state_dict(state_dict, model=None, model_options={}): + controlnet_data = state_dict + if 'after_proj_list.18.bias' in controlnet_data.keys(): #Hunyuan DiT + return load_controlnet_hunyuandit(controlnet_data, model_options=model_options) + + if "lora_controlnet" in controlnet_data: + return ControlLora(controlnet_data, model_options=model_options) + + controlnet_config = None + supported_inference_dtypes = None + + if "controlnet_cond_embedding.conv_in.weight" in controlnet_data: #diffusers format + controlnet_config = comfy.model_detection.unet_config_from_diffusers_unet(controlnet_data) + diffusers_keys = comfy.utils.unet_to_diffusers(controlnet_config) + diffusers_keys["controlnet_mid_block.weight"] = "middle_block_out.0.weight" + diffusers_keys["controlnet_mid_block.bias"] = "middle_block_out.0.bias" + + count = 0 + loop = True + while loop: + suffix = [".weight", ".bias"] + for s in suffix: + k_in = "controlnet_down_blocks.{}{}".format(count, s) + k_out = "zero_convs.{}.0{}".format(count, s) + if k_in not in controlnet_data: + loop = False + break + diffusers_keys[k_in] = k_out + count += 1 + + count = 0 + loop = True + while loop: + suffix = [".weight", ".bias"] + for s in suffix: + if count == 0: + k_in = "controlnet_cond_embedding.conv_in{}".format(s) + else: + k_in = "controlnet_cond_embedding.blocks.{}{}".format(count - 1, s) + k_out = "input_hint_block.{}{}".format(count * 2, s) + if k_in not in controlnet_data: + k_in = "controlnet_cond_embedding.conv_out{}".format(s) + loop = False + diffusers_keys[k_in] = k_out + count += 1 + + new_sd = {} + for k in diffusers_keys: + if k in controlnet_data: + new_sd[diffusers_keys[k]] = controlnet_data.pop(k) + + if "control_add_embedding.linear_1.bias" in controlnet_data: #Union Controlnet + controlnet_config["union_controlnet_num_control_type"] = 
controlnet_data["task_embedding"].shape[0] + for k in list(controlnet_data.keys()): + new_k = k.replace('.attn.in_proj_', '.attn.in_proj.') + new_sd[new_k] = controlnet_data.pop(k) + + leftover_keys = controlnet_data.keys() + if len(leftover_keys) > 0: + logging.warning("leftover keys: {}".format(leftover_keys)) + controlnet_data = new_sd + elif "controlnet_blocks.0.weight" in controlnet_data: + if "double_blocks.0.img_attn.norm.key_norm.scale" in controlnet_data: + return load_controlnet_flux_xlabs_mistoline(controlnet_data, model_options=model_options) + elif "pos_embed_input.proj.weight" in controlnet_data: + return load_controlnet_mmdit(controlnet_data, model_options=model_options) #SD3 diffusers controlnet + elif "controlnet_x_embedder.weight" in controlnet_data: + return load_controlnet_flux_instantx(controlnet_data, model_options=model_options) + elif "controlnet_blocks.0.linear.weight" in controlnet_data: #mistoline flux + return load_controlnet_flux_xlabs_mistoline(convert_mistoline(controlnet_data), mistoline=True, model_options=model_options) + + pth_key = 'control_model.zero_convs.0.0.weight' + pth = False + key = 'zero_convs.0.0.weight' + if pth_key in controlnet_data: + pth = True + key = pth_key + prefix = "control_model." + elif key in controlnet_data: + prefix = "" + else: + net = load_t2i_adapter(controlnet_data, model_options=model_options) + if net is None: + logging.error("error could not detect control model type.") + return net + + if controlnet_config is None: + model_config = comfy.model_detection.model_config_from_unet(controlnet_data, prefix, True) + supported_inference_dtypes = list(model_config.supported_inference_dtypes) + controlnet_config = model_config.unet_config + + unet_dtype = model_options.get("dtype", None) + if unet_dtype is None: + weight_dtype = comfy.utils.weight_dtype(controlnet_data) + + if supported_inference_dtypes is None: + supported_inference_dtypes = [comfy.model_management.unet_dtype()] + + if weight_dtype is not None: + supported_inference_dtypes.append(weight_dtype) + + unet_dtype = comfy.model_management.unet_dtype(model_params=-1, supported_dtypes=supported_inference_dtypes) + + load_device = comfy.model_management.get_torch_device() + + manual_cast_dtype = comfy.model_management.unet_manual_cast(unet_dtype, load_device) + operations = model_options.get("custom_operations", None) + if operations is None: + operations = comfy.ops.pick_operations(unet_dtype, manual_cast_dtype) + + controlnet_config["operations"] = operations + controlnet_config["dtype"] = unet_dtype + controlnet_config["device"] = comfy.model_management.unet_offload_device() + controlnet_config.pop("out_channels") + controlnet_config["hint_channels"] = controlnet_data["{}input_hint_block.0.weight".format(prefix)].shape[1] + control_model = comfy.cldm.cldm.ControlNet(**controlnet_config) + + if pth: + if 'difference' in controlnet_data: + if model is not None: + comfy.model_management.load_models_gpu([model]) + model_sd = model.model_state_dict() + for x in controlnet_data: + c_m = "control_model." + if x.startswith(c_m): + sd_key = "diffusion_model.{}".format(x[len(c_m):]) + if sd_key in model_sd: + cd = controlnet_data[x] + cd += model_sd[sd_key].type(cd.dtype).to(cd.device) + else: + logging.warning("WARNING: Loaded a diff controlnet without a model. 
It will very likely not work.") + + class WeightsLoader(torch.nn.Module): + pass + w = WeightsLoader() + w.control_model = control_model + missing, unexpected = w.load_state_dict(controlnet_data, strict=False) + else: + missing, unexpected = control_model.load_state_dict(controlnet_data, strict=False) + + if len(missing) > 0: + logging.warning("missing controlnet keys: {}".format(missing)) + + if len(unexpected) > 0: + logging.debug("unexpected controlnet keys: {}".format(unexpected)) + + global_average_pooling = model_options.get("global_average_pooling", False) + control = ControlNet(control_model, global_average_pooling=global_average_pooling, load_device=load_device, manual_cast_dtype=manual_cast_dtype) + return control + +def load_controlnet(ckpt_path, model=None, model_options={}): + if "global_average_pooling" not in model_options: + filename = os.path.splitext(ckpt_path)[0] + if filename.endswith("_shuffle") or filename.endswith("_shuffle_fp16"): #TODO: smarter way of enabling global_average_pooling + model_options["global_average_pooling"] = True + + cnet = load_controlnet_state_dict(comfy.utils.load_torch_file(ckpt_path, safe_load=True), model=model, model_options=model_options) + if cnet is None: + logging.error("error checkpoint does not contain controlnet or t2i adapter data {}".format(ckpt_path)) + return cnet + +class T2IAdapter(ControlBase): + def __init__(self, t2i_model, channels_in, compression_ratio, upscale_algorithm, device=None): + super().__init__() + self.t2i_model = t2i_model + self.channels_in = channels_in + self.control_input = None + self.compression_ratio = compression_ratio + self.upscale_algorithm = upscale_algorithm + if device is None: + device = comfy.model_management.get_torch_device() + self.device = device + + def scale_image_to(self, width, height): + unshuffle_amount = self.t2i_model.unshuffle_amount + width = math.ceil(width / unshuffle_amount) * unshuffle_amount + height = math.ceil(height / unshuffle_amount) * unshuffle_amount + return width, height + + def get_control(self, x_noisy, t, cond, batched_number): + control_prev = None + if self.previous_controlnet is not None: + control_prev = self.previous_controlnet.get_control(x_noisy, t, cond, batched_number) + + if self.timestep_range is not None: + if t[0] > self.timestep_range[0] or t[0] < self.timestep_range[1]: + if control_prev is not None: + return control_prev + else: + return None + + if self.cond_hint is None or x_noisy.shape[2] * self.compression_ratio != self.cond_hint.shape[2] or x_noisy.shape[3] * self.compression_ratio != self.cond_hint.shape[3]: + if self.cond_hint is not None: + del self.cond_hint + self.control_input = None + self.cond_hint = None + width, height = self.scale_image_to(x_noisy.shape[3] * self.compression_ratio, x_noisy.shape[2] * self.compression_ratio) + self.cond_hint = comfy.utils.common_upscale(self.cond_hint_original, width, height, self.upscale_algorithm, "center").float().to(self.device) + if self.channels_in == 1 and self.cond_hint.shape[1] > 1: + self.cond_hint = torch.mean(self.cond_hint, 1, keepdim=True) + if x_noisy.shape[0] != self.cond_hint.shape[0]: + self.cond_hint = broadcast_image_to(self.cond_hint, x_noisy.shape[0], batched_number) + if self.control_input is None: + self.t2i_model.to(x_noisy.dtype) + self.t2i_model.to(self.device) + self.control_input = self.t2i_model(self.cond_hint.to(x_noisy.dtype)) + self.t2i_model.cpu() + + control_input = {} + for k in self.control_input: + control_input[k] = list(map(lambda a: None if a is None else 
a.clone(), self.control_input[k])) + + return self.control_merge(control_input, control_prev, x_noisy.dtype) + + def copy(self): + c = T2IAdapter(self.t2i_model, self.channels_in, self.compression_ratio, self.upscale_algorithm) + self.copy_to(c) + return c + +def load_t2i_adapter(t2i_data, model_options={}): #TODO: model_options + compression_ratio = 8 + upscale_algorithm = 'nearest-exact' + + if 'adapter' in t2i_data: + t2i_data = t2i_data['adapter'] + if 'adapter.body.0.resnets.0.block1.weight' in t2i_data: #diffusers format + prefix_replace = {} + for i in range(4): + for j in range(2): + prefix_replace["adapter.body.{}.resnets.{}.".format(i, j)] = "body.{}.".format(i * 2 + j) + prefix_replace["adapter.body.{}.".format(i, j)] = "body.{}.".format(i * 2) + prefix_replace["adapter."] = "" + t2i_data = comfy.utils.state_dict_prefix_replace(t2i_data, prefix_replace) + keys = t2i_data.keys() + + if "body.0.in_conv.weight" in keys: + cin = t2i_data['body.0.in_conv.weight'].shape[1] + model_ad = comfy.t2i_adapter.adapter.Adapter_light(cin=cin, channels=[320, 640, 1280, 1280], nums_rb=4) + elif 'conv_in.weight' in keys: + cin = t2i_data['conv_in.weight'].shape[1] + channel = t2i_data['conv_in.weight'].shape[0] + ksize = t2i_data['body.0.block2.weight'].shape[2] + use_conv = False + down_opts = list(filter(lambda a: a.endswith("down_opt.op.weight"), keys)) + if len(down_opts) > 0: + use_conv = True + xl = False + if cin == 256 or cin == 768: + xl = True + model_ad = comfy.t2i_adapter.adapter.Adapter(cin=cin, channels=[channel, channel*2, channel*4, channel*4][:4], nums_rb=2, ksize=ksize, sk=True, use_conv=use_conv, xl=xl) + elif "backbone.0.0.weight" in keys: + model_ad = comfy.ldm.cascade.controlnet.ControlNet(c_in=t2i_data['backbone.0.0.weight'].shape[1], proj_blocks=[0, 4, 8, 12, 51, 55, 59, 63]) + compression_ratio = 32 + upscale_algorithm = 'bilinear' + elif "backbone.10.blocks.0.weight" in keys: + model_ad = comfy.ldm.cascade.controlnet.ControlNet(c_in=t2i_data['backbone.0.weight'].shape[1], bottleneck_mode="large", proj_blocks=[0, 4, 8, 12, 51, 55, 59, 63]) + compression_ratio = 1 + upscale_algorithm = 'nearest-exact' + else: + return None + + missing, unexpected = model_ad.load_state_dict(t2i_data) + if len(missing) > 0: + logging.warning("t2i missing {}".format(missing)) + + if len(unexpected) > 0: + logging.debug("t2i unexpected {}".format(unexpected)) + + return T2IAdapter(model_ad, model_ad.input_channels, compression_ratio, upscale_algorithm) diff --git a/src/comfyui/comfy/diffusers_convert.py b/src/comfyui/comfy/diffusers_convert.py new file mode 100644 index 0000000000000000000000000000000000000000..ed2a45fea586284c7b881a2a7ab46983cd4baafb --- /dev/null +++ b/src/comfyui/comfy/diffusers_convert.py @@ -0,0 +1,281 @@ +import re +import torch +import logging + +# conversion code from https://github.com/huggingface/diffusers/blob/main/scripts/convert_diffusers_to_original_stable_diffusion.py + +# =================# +# UNet Conversion # +# =================# + +unet_conversion_map = [ + # (stable-diffusion, HF Diffusers) + ("time_embed.0.weight", "time_embedding.linear_1.weight"), + ("time_embed.0.bias", "time_embedding.linear_1.bias"), + ("time_embed.2.weight", "time_embedding.linear_2.weight"), + ("time_embed.2.bias", "time_embedding.linear_2.bias"), + ("input_blocks.0.0.weight", "conv_in.weight"), + ("input_blocks.0.0.bias", "conv_in.bias"), + ("out.0.weight", "conv_norm_out.weight"), + ("out.0.bias", "conv_norm_out.bias"), + ("out.2.weight", "conv_out.weight"), + ("out.2.bias", 
"conv_out.bias"), +] + +unet_conversion_map_resnet = [ + # (stable-diffusion, HF Diffusers) + ("in_layers.0", "norm1"), + ("in_layers.2", "conv1"), + ("out_layers.0", "norm2"), + ("out_layers.3", "conv2"), + ("emb_layers.1", "time_emb_proj"), + ("skip_connection", "conv_shortcut"), +] + +unet_conversion_map_layer = [] +# hardcoded number of downblocks and resnets/attentions... +# would need smarter logic for other networks. +for i in range(4): + # loop over downblocks/upblocks + + for j in range(2): + # loop over resnets/attentions for downblocks + hf_down_res_prefix = f"down_blocks.{i}.resnets.{j}." + sd_down_res_prefix = f"input_blocks.{3 * i + j + 1}.0." + unet_conversion_map_layer.append((sd_down_res_prefix, hf_down_res_prefix)) + + if i < 3: + # no attention layers in down_blocks.3 + hf_down_atn_prefix = f"down_blocks.{i}.attentions.{j}." + sd_down_atn_prefix = f"input_blocks.{3 * i + j + 1}.1." + unet_conversion_map_layer.append((sd_down_atn_prefix, hf_down_atn_prefix)) + + for j in range(3): + # loop over resnets/attentions for upblocks + hf_up_res_prefix = f"up_blocks.{i}.resnets.{j}." + sd_up_res_prefix = f"output_blocks.{3 * i + j}.0." + unet_conversion_map_layer.append((sd_up_res_prefix, hf_up_res_prefix)) + + if i > 0: + # no attention layers in up_blocks.0 + hf_up_atn_prefix = f"up_blocks.{i}.attentions.{j}." + sd_up_atn_prefix = f"output_blocks.{3 * i + j}.1." + unet_conversion_map_layer.append((sd_up_atn_prefix, hf_up_atn_prefix)) + + if i < 3: + # no downsample in down_blocks.3 + hf_downsample_prefix = f"down_blocks.{i}.downsamplers.0.conv." + sd_downsample_prefix = f"input_blocks.{3 * (i + 1)}.0.op." + unet_conversion_map_layer.append((sd_downsample_prefix, hf_downsample_prefix)) + + # no upsample in up_blocks.3 + hf_upsample_prefix = f"up_blocks.{i}.upsamplers.0." + sd_upsample_prefix = f"output_blocks.{3 * i + 2}.{1 if i == 0 else 2}." + unet_conversion_map_layer.append((sd_upsample_prefix, hf_upsample_prefix)) + +hf_mid_atn_prefix = "mid_block.attentions.0." +sd_mid_atn_prefix = "middle_block.1." +unet_conversion_map_layer.append((sd_mid_atn_prefix, hf_mid_atn_prefix)) + +for j in range(2): + hf_mid_res_prefix = f"mid_block.resnets.{j}." + sd_mid_res_prefix = f"middle_block.{2 * j}." + unet_conversion_map_layer.append((sd_mid_res_prefix, hf_mid_res_prefix)) + + +def convert_unet_state_dict(unet_state_dict): + # buyer beware: this is a *brittle* function, + # and correct output requires that all of these pieces interact in + # the exact order in which I have arranged them. + mapping = {k: k for k in unet_state_dict.keys()} + for sd_name, hf_name in unet_conversion_map: + mapping[hf_name] = sd_name + for k, v in mapping.items(): + if "resnets" in k: + for sd_part, hf_part in unet_conversion_map_resnet: + v = v.replace(hf_part, sd_part) + mapping[k] = v + for k, v in mapping.items(): + for sd_part, hf_part in unet_conversion_map_layer: + v = v.replace(hf_part, sd_part) + mapping[k] = v + new_state_dict = {v: unet_state_dict[k] for k, v in mapping.items()} + return new_state_dict + + +# ================# +# VAE Conversion # +# ================# + +vae_conversion_map = [ + # (stable-diffusion, HF Diffusers) + ("nin_shortcut", "conv_shortcut"), + ("norm_out", "conv_norm_out"), + ("mid.attn_1.", "mid_block.attentions.0."), +] + +for i in range(4): + # down_blocks have two resnets + for j in range(2): + hf_down_prefix = f"encoder.down_blocks.{i}.resnets.{j}." + sd_down_prefix = f"encoder.down.{i}.block.{j}." 
+ vae_conversion_map.append((sd_down_prefix, hf_down_prefix)) + + if i < 3: + hf_downsample_prefix = f"down_blocks.{i}.downsamplers.0." + sd_downsample_prefix = f"down.{i}.downsample." + vae_conversion_map.append((sd_downsample_prefix, hf_downsample_prefix)) + + hf_upsample_prefix = f"up_blocks.{i}.upsamplers.0." + sd_upsample_prefix = f"up.{3 - i}.upsample." + vae_conversion_map.append((sd_upsample_prefix, hf_upsample_prefix)) + + # up_blocks have three resnets + # also, up blocks in hf are numbered in reverse from sd + for j in range(3): + hf_up_prefix = f"decoder.up_blocks.{i}.resnets.{j}." + sd_up_prefix = f"decoder.up.{3 - i}.block.{j}." + vae_conversion_map.append((sd_up_prefix, hf_up_prefix)) + +# this part accounts for mid blocks in both the encoder and the decoder +for i in range(2): + hf_mid_res_prefix = f"mid_block.resnets.{i}." + sd_mid_res_prefix = f"mid.block_{i + 1}." + vae_conversion_map.append((sd_mid_res_prefix, hf_mid_res_prefix)) + +vae_conversion_map_attn = [ + # (stable-diffusion, HF Diffusers) + ("norm.", "group_norm."), + ("q.", "query."), + ("k.", "key."), + ("v.", "value."), + ("q.", "to_q."), + ("k.", "to_k."), + ("v.", "to_v."), + ("proj_out.", "to_out.0."), + ("proj_out.", "proj_attn."), +] + + +def reshape_weight_for_sd(w): + # convert HF linear weights to SD conv2d weights + return w.reshape(*w.shape, 1, 1) + + +def convert_vae_state_dict(vae_state_dict): + mapping = {k: k for k in vae_state_dict.keys()} + for k, v in mapping.items(): + for sd_part, hf_part in vae_conversion_map: + v = v.replace(hf_part, sd_part) + mapping[k] = v + for k, v in mapping.items(): + if "attentions" in k: + for sd_part, hf_part in vae_conversion_map_attn: + v = v.replace(hf_part, sd_part) + mapping[k] = v + new_state_dict = {v: vae_state_dict[k] for k, v in mapping.items()} + weights_to_convert = ["q", "k", "v", "proj_out"] + for k, v in new_state_dict.items(): + for weight_name in weights_to_convert: + if f"mid.attn_1.{weight_name}.weight" in k: + logging.debug(f"Reshaping {k} for SD format") + new_state_dict[k] = reshape_weight_for_sd(v) + return new_state_dict + + +# =========================# +# Text Encoder Conversion # +# =========================# + + +textenc_conversion_lst = [ + # (stable-diffusion, HF Diffusers) + ("resblocks.", "text_model.encoder.layers."), + ("ln_1", "layer_norm1"), + ("ln_2", "layer_norm2"), + (".c_fc.", ".fc1."), + (".c_proj.", ".fc2."), + (".attn", ".self_attn"), + ("ln_final.", "transformer.text_model.final_layer_norm."), + ("token_embedding.weight", "transformer.text_model.embeddings.token_embedding.weight"), + ("positional_embedding", "transformer.text_model.embeddings.position_embedding.weight"), +] +protected = {re.escape(x[1]): x[0] for x in textenc_conversion_lst} +textenc_pattern = re.compile("|".join(protected.keys())) + +# Ordering is from https://github.com/pytorch/pytorch/blob/master/test/cpp/api/modules.cpp +code2idx = {"q": 0, "k": 1, "v": 2} + +# This function exists because at the time of writing torch.cat can't do fp8 with cuda +def cat_tensors(tensors): + x = 0 + for t in tensors: + x += t.shape[0] + + shape = [x] + list(tensors[0].shape)[1:] + out = torch.empty(shape, device=tensors[0].device, dtype=tensors[0].dtype) + + x = 0 + for t in tensors: + out[x:x + t.shape[0]] = t + x += t.shape[0] + + return out + +def convert_text_enc_state_dict_v20(text_enc_dict, prefix=""): + new_state_dict = {} + capture_qkv_weight = {} + capture_qkv_bias = {} + for k, v in text_enc_dict.items(): + if not k.startswith(prefix): + continue + if ( + 
k.endswith(".self_attn.q_proj.weight") + or k.endswith(".self_attn.k_proj.weight") + or k.endswith(".self_attn.v_proj.weight") + ): + k_pre = k[: -len(".q_proj.weight")] + k_code = k[-len("q_proj.weight")] + if k_pre not in capture_qkv_weight: + capture_qkv_weight[k_pre] = [None, None, None] + capture_qkv_weight[k_pre][code2idx[k_code]] = v + continue + + if ( + k.endswith(".self_attn.q_proj.bias") + or k.endswith(".self_attn.k_proj.bias") + or k.endswith(".self_attn.v_proj.bias") + ): + k_pre = k[: -len(".q_proj.bias")] + k_code = k[-len("q_proj.bias")] + if k_pre not in capture_qkv_bias: + capture_qkv_bias[k_pre] = [None, None, None] + capture_qkv_bias[k_pre][code2idx[k_code]] = v + continue + + text_proj = "transformer.text_projection.weight" + if k.endswith(text_proj): + new_state_dict[k.replace(text_proj, "text_projection")] = v.transpose(0, 1).contiguous() + else: + relabelled_key = textenc_pattern.sub(lambda m: protected[re.escape(m.group(0))], k) + new_state_dict[relabelled_key] = v + + for k_pre, tensors in capture_qkv_weight.items(): + if None in tensors: + raise Exception("CORRUPTED MODEL: one of the q-k-v values for the text encoder was missing") + relabelled_key = textenc_pattern.sub(lambda m: protected[re.escape(m.group(0))], k_pre) + new_state_dict[relabelled_key + ".in_proj_weight"] = cat_tensors(tensors) + + for k_pre, tensors in capture_qkv_bias.items(): + if None in tensors: + raise Exception("CORRUPTED MODEL: one of the q-k-v values for the text encoder was missing") + relabelled_key = textenc_pattern.sub(lambda m: protected[re.escape(m.group(0))], k_pre) + new_state_dict[relabelled_key + ".in_proj_bias"] = cat_tensors(tensors) + + return new_state_dict + + +def convert_text_enc_state_dict(text_enc_dict): + return text_enc_dict + + diff --git a/src/comfyui/comfy/diffusers_load.py b/src/comfyui/comfy/diffusers_load.py new file mode 100644 index 0000000000000000000000000000000000000000..56e63a7565f083eb4e3bc484a3a9f90103306a2f --- /dev/null +++ b/src/comfyui/comfy/diffusers_load.py @@ -0,0 +1,36 @@ +import os + +import comfy.sd + +def first_file(path, filenames): + for f in filenames: + p = os.path.join(path, f) + if os.path.exists(p): + return p + return None + +def load_diffusers(model_path, output_vae=True, output_clip=True, embedding_directory=None): + diffusion_model_names = ["diffusion_pytorch_model.fp16.safetensors", "diffusion_pytorch_model.safetensors", "diffusion_pytorch_model.fp16.bin", "diffusion_pytorch_model.bin"] + unet_path = first_file(os.path.join(model_path, "unet"), diffusion_model_names) + vae_path = first_file(os.path.join(model_path, "vae"), diffusion_model_names) + + text_encoder_model_names = ["model.fp16.safetensors", "model.safetensors", "pytorch_model.fp16.bin", "pytorch_model.bin"] + text_encoder1_path = first_file(os.path.join(model_path, "text_encoder"), text_encoder_model_names) + text_encoder2_path = first_file(os.path.join(model_path, "text_encoder_2"), text_encoder_model_names) + + text_encoder_paths = [text_encoder1_path] + if text_encoder2_path is not None: + text_encoder_paths.append(text_encoder2_path) + + unet = comfy.sd.load_diffusion_model(unet_path) + + clip = None + if output_clip: + clip = comfy.sd.load_clip(text_encoder_paths, embedding_directory=embedding_directory) + + vae = None + if output_vae: + sd = comfy.utils.load_torch_file(vae_path) + vae = comfy.sd.VAE(sd=sd) + + return (unet, clip, vae) diff --git a/src/comfyui/comfy/extra_samplers/__pycache__/uni_pc.cpython-310.pyc 
b/src/comfyui/comfy/extra_samplers/__pycache__/uni_pc.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e7284eba3138c54f2b71abf67e5a70d1e846d879 Binary files /dev/null and b/src/comfyui/comfy/extra_samplers/__pycache__/uni_pc.cpython-310.pyc differ diff --git a/src/comfyui/comfy/extra_samplers/uni_pc.py b/src/comfyui/comfy/extra_samplers/uni_pc.py new file mode 100644 index 0000000000000000000000000000000000000000..3ab42c6a940f2639b3ccbfefc8e0721fd85a456b --- /dev/null +++ b/src/comfyui/comfy/extra_samplers/uni_pc.py @@ -0,0 +1,875 @@ +#code taken from: https://github.com/wl-zhao/UniPC and modified + +import torch +import torch.nn.functional as F +import math + +from tqdm.auto import trange, tqdm + + +class NoiseScheduleVP: + def __init__( + self, + schedule='discrete', + betas=None, + alphas_cumprod=None, + continuous_beta_0=0.1, + continuous_beta_1=20., + ): + r"""Create a wrapper class for the forward SDE (VP type). + + *** + Update: We support discrete-time diffusion models by implementing a picewise linear interpolation for log_alpha_t. + We recommend to use schedule='discrete' for the discrete-time diffusion models, especially for high-resolution images. + *** + + The forward SDE ensures that the condition distribution q_{t|0}(x_t | x_0) = N ( alpha_t * x_0, sigma_t^2 * I ). + We further define lambda_t = log(alpha_t) - log(sigma_t), which is the half-logSNR (described in the DPM-Solver paper). + Therefore, we implement the functions for computing alpha_t, sigma_t and lambda_t. For t in [0, T], we have: + + log_alpha_t = self.marginal_log_mean_coeff(t) + sigma_t = self.marginal_std(t) + lambda_t = self.marginal_lambda(t) + + Moreover, as lambda(t) is an invertible function, we also support its inverse function: + + t = self.inverse_lambda(lambda_t) + + =============================================================== + + We support both discrete-time DPMs (trained on n = 0, 1, ..., N-1) and continuous-time DPMs (trained on t in [t_0, T]). + + 1. For discrete-time DPMs: + + For discrete-time DPMs trained on n = 0, 1, ..., N-1, we convert the discrete steps to continuous time steps by: + t_i = (i + 1) / N + e.g. for N = 1000, we have t_0 = 1e-3 and T = t_{N-1} = 1. + We solve the corresponding diffusion ODE from time T = 1 to time t_0 = 1e-3. + + Args: + betas: A `torch.Tensor`. The beta array for the discrete-time DPM. (See the original DDPM paper for details) + alphas_cumprod: A `torch.Tensor`. The cumprod alphas for the discrete-time DPM. (See the original DDPM paper for details) + + Note that we always have alphas_cumprod = cumprod(betas). Therefore, we only need to set one of `betas` and `alphas_cumprod`. + + **Important**: Please pay special attention for the args for `alphas_cumprod`: + The `alphas_cumprod` is the \hat{alpha_n} arrays in the notations of DDPM. Specifically, DDPMs assume that + q_{t_n | 0}(x_{t_n} | x_0) = N ( \sqrt{\hat{alpha_n}} * x_0, (1 - \hat{alpha_n}) * I ). + Therefore, the notation \hat{alpha_n} is different from the notation alpha_t in DPM-Solver. In fact, we have + alpha_{t_n} = \sqrt{\hat{alpha_n}}, + and + log(alpha_{t_n}) = 0.5 * log(\hat{alpha_n}). + + + 2. For continuous-time DPMs: + + We support two types of VPSDEs: linear (DDPM) and cosine (improved-DDPM). The hyperparameters for the noise + schedule are the default settings in DDPM and improved-DDPM: + + Args: + beta_min: A `float` number. The smallest beta for the linear schedule. + beta_max: A `float` number. The largest beta for the linear schedule. 
+ cosine_s: A `float` number. The hyperparameter in the cosine schedule. + cosine_beta_max: A `float` number. The hyperparameter in the cosine schedule. + T: A `float` number. The ending time of the forward process. + + =============================================================== + + Args: + schedule: A `str`. The noise schedule of the forward SDE. 'discrete' for discrete-time DPMs, + 'linear' or 'cosine' for continuous-time DPMs. + Returns: + A wrapper object of the forward SDE (VP type). + + =============================================================== + + Example: + + # For discrete-time DPMs, given betas (the beta array for n = 0, 1, ..., N - 1): + >>> ns = NoiseScheduleVP('discrete', betas=betas) + + # For discrete-time DPMs, given alphas_cumprod (the \hat{alpha_n} array for n = 0, 1, ..., N - 1): + >>> ns = NoiseScheduleVP('discrete', alphas_cumprod=alphas_cumprod) + + # For continuous-time DPMs (VPSDE), linear schedule: + >>> ns = NoiseScheduleVP('linear', continuous_beta_0=0.1, continuous_beta_1=20.) + + """ + + if schedule not in ['discrete', 'linear', 'cosine']: + raise ValueError("Unsupported noise schedule {}. The schedule needs to be 'discrete' or 'linear' or 'cosine'".format(schedule)) + + self.schedule = schedule + if schedule == 'discrete': + if betas is not None: + log_alphas = 0.5 * torch.log(1 - betas).cumsum(dim=0) + else: + assert alphas_cumprod is not None + log_alphas = 0.5 * torch.log(alphas_cumprod) + self.total_N = len(log_alphas) + self.T = 1. + self.t_array = torch.linspace(0., 1., self.total_N + 1)[1:].reshape((1, -1)) + self.log_alpha_array = log_alphas.reshape((1, -1,)) + else: + self.total_N = 1000 + self.beta_0 = continuous_beta_0 + self.beta_1 = continuous_beta_1 + self.cosine_s = 0.008 + self.cosine_beta_max = 999. + self.cosine_t_max = math.atan(self.cosine_beta_max * (1. + self.cosine_s) / math.pi) * 2. * (1. + self.cosine_s) / math.pi - self.cosine_s + self.cosine_log_alpha_0 = math.log(math.cos(self.cosine_s / (1. + self.cosine_s) * math.pi / 2.)) + self.schedule = schedule + if schedule == 'cosine': + # For the cosine schedule, T = 1 will have numerical issues. So we manually set the ending time T. + # Note that T = 0.9946 may be not the optimal setting. However, we find it works well. + self.T = 0.9946 + else: + self.T = 1. + + def marginal_log_mean_coeff(self, t): + """ + Compute log(alpha_t) of a given continuous-time label t in [0, T]. + """ + if self.schedule == 'discrete': + return interpolate_fn(t.reshape((-1, 1)), self.t_array.to(t.device), self.log_alpha_array.to(t.device)).reshape((-1)) + elif self.schedule == 'linear': + return -0.25 * t ** 2 * (self.beta_1 - self.beta_0) - 0.5 * t * self.beta_0 + elif self.schedule == 'cosine': + log_alpha_fn = lambda s: torch.log(torch.cos((s + self.cosine_s) / (1. + self.cosine_s) * math.pi / 2.)) + log_alpha_t = log_alpha_fn(t) - self.cosine_log_alpha_0 + return log_alpha_t + + def marginal_alpha(self, t): + """ + Compute alpha_t of a given continuous-time label t in [0, T]. + """ + return torch.exp(self.marginal_log_mean_coeff(t)) + + def marginal_std(self, t): + """ + Compute sigma_t of a given continuous-time label t in [0, T]. + """ + return torch.sqrt(1. - torch.exp(2. * self.marginal_log_mean_coeff(t))) + + def marginal_lambda(self, t): + """ + Compute lambda_t = log(alpha_t) - log(sigma_t) of a given continuous-time label t in [0, T]. + """ + log_mean_coeff = self.marginal_log_mean_coeff(t) + log_std = 0.5 * torch.log(1. - torch.exp(2. 
* log_mean_coeff)) + return log_mean_coeff - log_std + + def inverse_lambda(self, lamb): + """ + Compute the continuous-time label t in [0, T] of a given half-logSNR lambda_t. + """ + if self.schedule == 'linear': + tmp = 2. * (self.beta_1 - self.beta_0) * torch.logaddexp(-2. * lamb, torch.zeros((1,)).to(lamb)) + Delta = self.beta_0**2 + tmp + return tmp / (torch.sqrt(Delta) + self.beta_0) / (self.beta_1 - self.beta_0) + elif self.schedule == 'discrete': + log_alpha = -0.5 * torch.logaddexp(torch.zeros((1,)).to(lamb.device), -2. * lamb) + t = interpolate_fn(log_alpha.reshape((-1, 1)), torch.flip(self.log_alpha_array.to(lamb.device), [1]), torch.flip(self.t_array.to(lamb.device), [1])) + return t.reshape((-1,)) + else: + log_alpha = -0.5 * torch.logaddexp(-2. * lamb, torch.zeros((1,)).to(lamb)) + t_fn = lambda log_alpha_t: torch.arccos(torch.exp(log_alpha_t + self.cosine_log_alpha_0)) * 2. * (1. + self.cosine_s) / math.pi - self.cosine_s + t = t_fn(log_alpha) + return t + + +def model_wrapper( + model, + noise_schedule, + model_type="noise", + model_kwargs={}, + guidance_type="uncond", + condition=None, + unconditional_condition=None, + guidance_scale=1., + classifier_fn=None, + classifier_kwargs={}, +): + """Create a wrapper function for the noise prediction model. + + DPM-Solver needs to solve the continuous-time diffusion ODEs. For DPMs trained on discrete-time labels, we need to + firstly wrap the model function to a noise prediction model that accepts the continuous time as the input. + + We support four types of the diffusion model by setting `model_type`: + + 1. "noise": noise prediction model. (Trained by predicting noise). + + 2. "x_start": data prediction model. (Trained by predicting the data x_0 at time 0). + + 3. "v": velocity prediction model. (Trained by predicting the velocity). + The "v" prediction is derivation detailed in Appendix D of [1], and is used in Imagen-Video [2]. + + [1] Salimans, Tim, and Jonathan Ho. "Progressive distillation for fast sampling of diffusion models." + arXiv preprint arXiv:2202.00512 (2022). + [2] Ho, Jonathan, et al. "Imagen Video: High Definition Video Generation with Diffusion Models." + arXiv preprint arXiv:2210.02303 (2022). + + 4. "score": marginal score function. (Trained by denoising score matching). + Note that the score function and the noise prediction model follows a simple relationship: + ``` + noise(x_t, t) = -sigma_t * score(x_t, t) + ``` + + We support three types of guided sampling by DPMs by setting `guidance_type`: + 1. "uncond": unconditional sampling by DPMs. + The input `model` has the following format: + `` + model(x, t_input, **model_kwargs) -> noise | x_start | v | score + `` + + 2. "classifier": classifier guidance sampling [3] by DPMs and another classifier. + The input `model` has the following format: + `` + model(x, t_input, **model_kwargs) -> noise | x_start | v | score + `` + + The input `classifier_fn` has the following format: + `` + classifier_fn(x, t_input, cond, **classifier_kwargs) -> logits(x, t_input, cond) + `` + + [3] P. Dhariwal and A. Q. Nichol, "Diffusion models beat GANs on image synthesis," + in Advances in Neural Information Processing Systems, vol. 34, 2021, pp. 8780-8794. + + 3. "classifier-free": classifier-free guidance sampling by conditional DPMs. + The input `model` has the following format: + `` + model(x, t_input, cond, **model_kwargs) -> noise | x_start | v | score + `` + And if cond == `unconditional_condition`, the model output is the unconditional DPM output. 
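[Review note, not part of the diff] For the classifier-free branch described above, the wrapper batches the conditional and unconditional passes and recombines them further down in `model_fn`. A minimal sketch of that recombination on dummy tensors; the `guidance_scale` value is illustrative:

```python
import torch

# Sketch only: the classifier-free guidance combination applied after
# chunking the batched prediction into unconditional/conditional halves.
guidance_scale = 7.5
noise_uncond = torch.zeros(1, 4, 8, 8)
noise_cond = torch.ones(1, 4, 8, 8)
guided = noise_uncond + guidance_scale * (noise_cond - noise_uncond)
assert torch.allclose(guided, torch.full((1, 4, 8, 8), 7.5))
```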
+ + [4] Ho, Jonathan, and Tim Salimans. "Classifier-free diffusion guidance." + arXiv preprint arXiv:2207.12598 (2022). + + + The `t_input` is the time label of the model, which may be discrete-time labels (i.e. 0 to 999) + or continuous-time labels (i.e. epsilon to T). + + We wrap the model function to accept only `x` and `t_continuous` as inputs, and outputs the predicted noise: + `` + def model_fn(x, t_continuous) -> noise: + t_input = get_model_input_time(t_continuous) + return noise_pred(model, x, t_input, **model_kwargs) + `` + where `t_continuous` is the continuous time labels (i.e. epsilon to T). And we use `model_fn` for DPM-Solver. + + =============================================================== + + Args: + model: A diffusion model with the corresponding format described above. + noise_schedule: A noise schedule object, such as NoiseScheduleVP. + model_type: A `str`. The parameterization type of the diffusion model. + "noise" or "x_start" or "v" or "score". + model_kwargs: A `dict`. A dict for the other inputs of the model function. + guidance_type: A `str`. The type of the guidance for sampling. + "uncond" or "classifier" or "classifier-free". + condition: A pytorch tensor. The condition for the guided sampling. + Only used for "classifier" or "classifier-free" guidance type. + unconditional_condition: A pytorch tensor. The condition for the unconditional sampling. + Only used for "classifier-free" guidance type. + guidance_scale: A `float`. The scale for the guided sampling. + classifier_fn: A classifier function. Only used for the classifier guidance. + classifier_kwargs: A `dict`. A dict for the other inputs of the classifier function. + Returns: + A noise prediction model that accepts the noised data and the continuous time as the inputs. + """ + + def get_model_input_time(t_continuous): + """ + Convert the continuous-time `t_continuous` (in [epsilon, T]) to the model input time. + For discrete-time DPMs, we convert `t_continuous` in [1 / N, 1] to `t_input` in [0, 1000 * (N - 1) / N]. + For continuous-time DPMs, we just use `t_continuous`. + """ + if noise_schedule.schedule == 'discrete': + return (t_continuous - 1. / noise_schedule.total_N) * 1000. + else: + return t_continuous + + def noise_pred_fn(x, t_continuous, cond=None): + if t_continuous.reshape((-1,)).shape[0] == 1: + t_continuous = t_continuous.expand((x.shape[0])) + t_input = get_model_input_time(t_continuous) + output = model(x, t_input, **model_kwargs) + if model_type == "noise": + return output + elif model_type == "x_start": + alpha_t, sigma_t = noise_schedule.marginal_alpha(t_continuous), noise_schedule.marginal_std(t_continuous) + dims = x.dim() + return (x - expand_dims(alpha_t, dims) * output) / expand_dims(sigma_t, dims) + elif model_type == "v": + alpha_t, sigma_t = noise_schedule.marginal_alpha(t_continuous), noise_schedule.marginal_std(t_continuous) + dims = x.dim() + return expand_dims(alpha_t, dims) * output + expand_dims(sigma_t, dims) * x + elif model_type == "score": + sigma_t = noise_schedule.marginal_std(t_continuous) + dims = x.dim() + return -expand_dims(sigma_t, dims) * output + + def cond_grad_fn(x, t_input): + """ + Compute the gradient of the classifier, i.e. nabla_{x} log p_t(cond | x_t). 
+ """ + with torch.enable_grad(): + x_in = x.detach().requires_grad_(True) + log_prob = classifier_fn(x_in, t_input, condition, **classifier_kwargs) + return torch.autograd.grad(log_prob.sum(), x_in)[0] + + def model_fn(x, t_continuous): + """ + The noise predicition model function that is used for DPM-Solver. + """ + if t_continuous.reshape((-1,)).shape[0] == 1: + t_continuous = t_continuous.expand((x.shape[0])) + if guidance_type == "uncond": + return noise_pred_fn(x, t_continuous) + elif guidance_type == "classifier": + assert classifier_fn is not None + t_input = get_model_input_time(t_continuous) + cond_grad = cond_grad_fn(x, t_input) + sigma_t = noise_schedule.marginal_std(t_continuous) + noise = noise_pred_fn(x, t_continuous) + return noise - guidance_scale * expand_dims(sigma_t, dims=cond_grad.dim()) * cond_grad + elif guidance_type == "classifier-free": + if guidance_scale == 1. or unconditional_condition is None: + return noise_pred_fn(x, t_continuous, cond=condition) + else: + x_in = torch.cat([x] * 2) + t_in = torch.cat([t_continuous] * 2) + c_in = torch.cat([unconditional_condition, condition]) + noise_uncond, noise = noise_pred_fn(x_in, t_in, cond=c_in).chunk(2) + return noise_uncond + guidance_scale * (noise - noise_uncond) + + assert model_type in ["noise", "x_start", "v"] + assert guidance_type in ["uncond", "classifier", "classifier-free"] + return model_fn + + +class UniPC: + def __init__( + self, + model_fn, + noise_schedule, + predict_x0=True, + thresholding=False, + max_val=1., + variant='bh1', + ): + """Construct a UniPC. + + We support both data_prediction and noise_prediction. + """ + self.model = model_fn + self.noise_schedule = noise_schedule + self.variant = variant + self.predict_x0 = predict_x0 + self.thresholding = thresholding + self.max_val = max_val + + def dynamic_thresholding_fn(self, x0, t=None): + """ + The dynamic thresholding method. + """ + dims = x0.dim() + p = self.dynamic_thresholding_ratio + s = torch.quantile(torch.abs(x0).reshape((x0.shape[0], -1)), p, dim=1) + s = expand_dims(torch.maximum(s, self.thresholding_max_val * torch.ones_like(s).to(s.device)), dims) + x0 = torch.clamp(x0, -s, s) / s + return x0 + + def noise_prediction_fn(self, x, t): + """ + Return the noise prediction model. + """ + return self.model(x, t) + + def data_prediction_fn(self, x, t): + """ + Return the data prediction model (with thresholding). + """ + noise = self.noise_prediction_fn(x, t) + dims = x.dim() + alpha_t, sigma_t = self.noise_schedule.marginal_alpha(t), self.noise_schedule.marginal_std(t) + x0 = (x - expand_dims(sigma_t, dims) * noise) / expand_dims(alpha_t, dims) + if self.thresholding: + p = 0.995 # A hyperparameter in the paper of "Imagen" [1]. + s = torch.quantile(torch.abs(x0).reshape((x0.shape[0], -1)), p, dim=1) + s = expand_dims(torch.maximum(s, self.max_val * torch.ones_like(s).to(s.device)), dims) + x0 = torch.clamp(x0, -s, s) / s + return x0 + + def model_fn(self, x, t): + """ + Convert the model to the noise prediction model or the data prediction model. + """ + if self.predict_x0: + return self.data_prediction_fn(x, t) + else: + return self.noise_prediction_fn(x, t) + + def get_time_steps(self, skip_type, t_T, t_0, N, device): + """Compute the intermediate time steps for sampling. 
+ """ + if skip_type == 'logSNR': + lambda_T = self.noise_schedule.marginal_lambda(torch.tensor(t_T).to(device)) + lambda_0 = self.noise_schedule.marginal_lambda(torch.tensor(t_0).to(device)) + logSNR_steps = torch.linspace(lambda_T.cpu().item(), lambda_0.cpu().item(), N + 1).to(device) + return self.noise_schedule.inverse_lambda(logSNR_steps) + elif skip_type == 'time_uniform': + return torch.linspace(t_T, t_0, N + 1).to(device) + elif skip_type == 'time_quadratic': + t_order = 2 + t = torch.linspace(t_T**(1. / t_order), t_0**(1. / t_order), N + 1).pow(t_order).to(device) + return t + else: + raise ValueError("Unsupported skip_type {}, need to be 'logSNR' or 'time_uniform' or 'time_quadratic'".format(skip_type)) + + def get_orders_and_timesteps_for_singlestep_solver(self, steps, order, skip_type, t_T, t_0, device): + """ + Get the order of each step for sampling by the singlestep DPM-Solver. + """ + if order == 3: + K = steps // 3 + 1 + if steps % 3 == 0: + orders = [3,] * (K - 2) + [2, 1] + elif steps % 3 == 1: + orders = [3,] * (K - 1) + [1] + else: + orders = [3,] * (K - 1) + [2] + elif order == 2: + if steps % 2 == 0: + K = steps // 2 + orders = [2,] * K + else: + K = steps // 2 + 1 + orders = [2,] * (K - 1) + [1] + elif order == 1: + K = steps + orders = [1,] * steps + else: + raise ValueError("'order' must be '1' or '2' or '3'.") + if skip_type == 'logSNR': + # To reproduce the results in DPM-Solver paper + timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, K, device) + else: + timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, steps, device)[torch.cumsum(torch.tensor([0,] + orders), 0).to(device)] + return timesteps_outer, orders + + def denoise_to_zero_fn(self, x, s): + """ + Denoise at the final step, which is equivalent to solve the ODE from lambda_s to infty by first-order discretization. + """ + return self.data_prediction_fn(x, s) + + def multistep_uni_pc_update(self, x, model_prev_list, t_prev_list, t, order, **kwargs): + if len(t.shape) == 0: + t = t.view(-1) + if 'bh' in self.variant: + return self.multistep_uni_pc_bh_update(x, model_prev_list, t_prev_list, t, order, **kwargs) + else: + assert self.variant == 'vary_coeff' + return self.multistep_uni_pc_vary_update(x, model_prev_list, t_prev_list, t, order, **kwargs) + + def multistep_uni_pc_vary_update(self, x, model_prev_list, t_prev_list, t, order, use_corrector=True): + print(f'using unified predictor-corrector with order {order} (solver type: vary coeff)') + ns = self.noise_schedule + assert order <= len(model_prev_list) + + # first compute rks + t_prev_0 = t_prev_list[-1] + lambda_prev_0 = ns.marginal_lambda(t_prev_0) + lambda_t = ns.marginal_lambda(t) + model_prev_0 = model_prev_list[-1] + sigma_prev_0, sigma_t = ns.marginal_std(t_prev_0), ns.marginal_std(t) + log_alpha_t = ns.marginal_log_mean_coeff(t) + alpha_t = torch.exp(log_alpha_t) + + h = lambda_t - lambda_prev_0 + + rks = [] + D1s = [] + for i in range(1, order): + t_prev_i = t_prev_list[-(i + 1)] + model_prev_i = model_prev_list[-(i + 1)] + lambda_prev_i = ns.marginal_lambda(t_prev_i) + rk = (lambda_prev_i - lambda_prev_0) / h + rks.append(rk) + D1s.append((model_prev_i - model_prev_0) / rk) + + rks.append(1.) 
+ rks = torch.tensor(rks, device=x.device) + + K = len(rks) + # build C matrix + C = [] + + col = torch.ones_like(rks) + for k in range(1, K + 1): + C.append(col) + col = col * rks / (k + 1) + C = torch.stack(C, dim=1) + + if len(D1s) > 0: + D1s = torch.stack(D1s, dim=1) # (B, K) + C_inv_p = torch.linalg.inv(C[:-1, :-1]) + A_p = C_inv_p + + if use_corrector: + print('using corrector') + C_inv = torch.linalg.inv(C) + A_c = C_inv + + hh = -h if self.predict_x0 else h + h_phi_1 = torch.expm1(hh) + h_phi_ks = [] + factorial_k = 1 + h_phi_k = h_phi_1 + for k in range(1, K + 2): + h_phi_ks.append(h_phi_k) + h_phi_k = h_phi_k / hh - 1 / factorial_k + factorial_k *= (k + 1) + + model_t = None + if self.predict_x0: + x_t_ = ( + sigma_t / sigma_prev_0 * x + - alpha_t * h_phi_1 * model_prev_0 + ) + # now predictor + x_t = x_t_ + if len(D1s) > 0: + # compute the residuals for predictor + for k in range(K - 1): + x_t = x_t - alpha_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_p[k]) + # now corrector + if use_corrector: + model_t = self.model_fn(x_t, t) + D1_t = (model_t - model_prev_0) + x_t = x_t_ + k = 0 + for k in range(K - 1): + x_t = x_t - alpha_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_c[k][:-1]) + x_t = x_t - alpha_t * h_phi_ks[K] * (D1_t * A_c[k][-1]) + else: + log_alpha_prev_0, log_alpha_t = ns.marginal_log_mean_coeff(t_prev_0), ns.marginal_log_mean_coeff(t) + x_t_ = ( + (torch.exp(log_alpha_t - log_alpha_prev_0)) * x + - (sigma_t * h_phi_1) * model_prev_0 + ) + # now predictor + x_t = x_t_ + if len(D1s) > 0: + # compute the residuals for predictor + for k in range(K - 1): + x_t = x_t - sigma_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_p[k]) + # now corrector + if use_corrector: + model_t = self.model_fn(x_t, t) + D1_t = (model_t - model_prev_0) + x_t = x_t_ + k = 0 + for k in range(K - 1): + x_t = x_t - sigma_t * h_phi_ks[k + 1] * torch.einsum('bkchw,k->bchw', D1s, A_c[k][:-1]) + x_t = x_t - sigma_t * h_phi_ks[K] * (D1_t * A_c[k][-1]) + return x_t, model_t + + def multistep_uni_pc_bh_update(self, x, model_prev_list, t_prev_list, t, order, x_t=None, use_corrector=True): + # print(f'using unified predictor-corrector with order {order} (solver type: B(h))') + ns = self.noise_schedule + assert order <= len(model_prev_list) + dims = x.dim() + + # first compute rks + t_prev_0 = t_prev_list[-1] + lambda_prev_0 = ns.marginal_lambda(t_prev_0) + lambda_t = ns.marginal_lambda(t) + model_prev_0 = model_prev_list[-1] + sigma_prev_0, sigma_t = ns.marginal_std(t_prev_0), ns.marginal_std(t) + log_alpha_prev_0, log_alpha_t = ns.marginal_log_mean_coeff(t_prev_0), ns.marginal_log_mean_coeff(t) + alpha_t = torch.exp(log_alpha_t) + + h = lambda_t - lambda_prev_0 + + rks = [] + D1s = [] + for i in range(1, order): + t_prev_i = t_prev_list[-(i + 1)] + model_prev_i = model_prev_list[-(i + 1)] + lambda_prev_i = ns.marginal_lambda(t_prev_i) + rk = ((lambda_prev_i - lambda_prev_0) / h)[0] + rks.append(rk) + D1s.append((model_prev_i - model_prev_0) / rk) + + rks.append(1.) 
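[Review note, not part of the diff] Both update paths build `h_phi_ks` with the same recurrence: starting from h*phi_1(h) = e^h - 1, each next term is the previous one divided by h, minus 1/k!. A quick numerical check against the closed form for h*phi_2(h) = (e^h - 1 - h)/h:

```python
import math

# Sketch only: verify the h_phi recurrence used above for k = 2.
h = 0.3
h_phi_1 = math.expm1(h)        # h * phi_1(h) = e^h - 1
h_phi_2 = h_phi_1 / h - 1.0    # recurrence step with 1/1! = 1
assert abs(h_phi_2 - (math.expm1(h) - h) / h) < 1e-12  # closed form
```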
+ rks = torch.tensor(rks, device=x.device) + + R = [] + b = [] + + hh = -h[0] if self.predict_x0 else h[0] + h_phi_1 = torch.expm1(hh) # h\phi_1(h) = e^h - 1 + h_phi_k = h_phi_1 / hh - 1 + + factorial_i = 1 + + if self.variant == 'bh1': + B_h = hh + elif self.variant == 'bh2': + B_h = torch.expm1(hh) + else: + raise NotImplementedError() + + for i in range(1, order + 1): + R.append(torch.pow(rks, i - 1)) + b.append(h_phi_k * factorial_i / B_h) + factorial_i *= (i + 1) + h_phi_k = h_phi_k / hh - 1 / factorial_i + + R = torch.stack(R) + b = torch.tensor(b, device=x.device) + + # now predictor + use_predictor = len(D1s) > 0 and x_t is None + if len(D1s) > 0: + D1s = torch.stack(D1s, dim=1) # (B, K) + if x_t is None: + # for order 2, we use a simplified version + if order == 2: + rhos_p = torch.tensor([0.5], device=b.device) + else: + rhos_p = torch.linalg.solve(R[:-1, :-1], b[:-1]) + else: + D1s = None + + if use_corrector: + # print('using corrector') + # for order 1, we use a simplified version + if order == 1: + rhos_c = torch.tensor([0.5], device=b.device) + else: + rhos_c = torch.linalg.solve(R, b) + + model_t = None + if self.predict_x0: + x_t_ = ( + expand_dims(sigma_t / sigma_prev_0, dims) * x + - expand_dims(alpha_t * h_phi_1, dims)* model_prev_0 + ) + + if x_t is None: + if use_predictor: + pred_res = torch.einsum('k,bkchw->bchw', rhos_p, D1s) + else: + pred_res = 0 + x_t = x_t_ - expand_dims(alpha_t * B_h, dims) * pred_res + + if use_corrector: + model_t = self.model_fn(x_t, t) + if D1s is not None: + corr_res = torch.einsum('k,bkchw->bchw', rhos_c[:-1], D1s) + else: + corr_res = 0 + D1_t = (model_t - model_prev_0) + x_t = x_t_ - expand_dims(alpha_t * B_h, dims) * (corr_res + rhos_c[-1] * D1_t) + else: + x_t_ = ( + expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x + - expand_dims(sigma_t * h_phi_1, dims) * model_prev_0 + ) + if x_t is None: + if use_predictor: + pred_res = torch.einsum('k,bkchw->bchw', rhos_p, D1s) + else: + pred_res = 0 + x_t = x_t_ - expand_dims(sigma_t * B_h, dims) * pred_res + + if use_corrector: + model_t = self.model_fn(x_t, t) + if D1s is not None: + corr_res = torch.einsum('k,bkchw->bchw', rhos_c[:-1], D1s) + else: + corr_res = 0 + D1_t = (model_t - model_prev_0) + x_t = x_t_ - expand_dims(sigma_t * B_h, dims) * (corr_res + rhos_c[-1] * D1_t) + return x_t, model_t + + + def sample(self, x, timesteps, t_start=None, t_end=None, order=3, skip_type='time_uniform', + method='singlestep', lower_order_final=True, denoise_to_zero=False, solver_type='dpm_solver', + atol=0.0078, rtol=0.05, corrector=False, callback=None, disable_pbar=False + ): + # t_0 = 1. / self.noise_schedule.total_N if t_end is None else t_end + # t_T = self.noise_schedule.T if t_start is None else t_start + device = x.device + steps = len(timesteps) - 1 + if method == 'multistep': + assert steps >= order + # timesteps = self.get_time_steps(skip_type=skip_type, t_T=t_T, t_0=t_0, N=steps, device=device) + assert timesteps.shape[0] - 1 == steps + # with torch.no_grad(): + for step_index in trange(steps, disable=disable_pbar): + if step_index == 0: + vec_t = timesteps[0].expand((x.shape[0])) + model_prev_list = [self.model_fn(x, vec_t)] + t_prev_list = [vec_t] + elif step_index < order: + init_order = step_index + # Init the first `order` values by lower order multistep DPM-Solver. 
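[Review note, not part of the diff] The predictor/corrector weights in the B(h) update above come from a small Vandermonde-style solve, R @ rhos = b, with hardcoded shortcuts at low order ([0.5] for the order-2 predictor and order-1 corrector). R's rows are successive powers of the rks ratios, so the first row is all ones; the numbers below are made up for illustration (r1 = 0.5):

```python
import torch

# Sketch only: the shape of the linear solve behind rhos_p / rhos_c.
R = torch.tensor([[1.0, 1.0],
                  [0.5, 1.0]])
b = torch.tensor([0.9, 0.4])
rhos = torch.linalg.solve(R, b)
assert torch.allclose(R @ rhos, b)
```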
+ # for init_order in range(1, order): + vec_t = timesteps[init_order].expand(x.shape[0]) + x, model_x = self.multistep_uni_pc_update(x, model_prev_list, t_prev_list, vec_t, init_order, use_corrector=True) + if model_x is None: + model_x = self.model_fn(x, vec_t) + model_prev_list.append(model_x) + t_prev_list.append(vec_t) + else: + extra_final_step = 0 + if step_index == (steps - 1): + extra_final_step = 1 + for step in range(step_index, step_index + 1 + extra_final_step): + vec_t = timesteps[step].expand(x.shape[0]) + if lower_order_final: + step_order = min(order, steps + 1 - step) + else: + step_order = order + # print('this step order:', step_order) + if step == steps: + # print('do not run corrector at the last step') + use_corrector = False + else: + use_corrector = True + x, model_x = self.multistep_uni_pc_update(x, model_prev_list, t_prev_list, vec_t, step_order, use_corrector=use_corrector) + for i in range(order - 1): + t_prev_list[i] = t_prev_list[i + 1] + model_prev_list[i] = model_prev_list[i + 1] + t_prev_list[-1] = vec_t + # We do not need to evaluate the final model value. + if step < steps: + if model_x is None: + model_x = self.model_fn(x, vec_t) + model_prev_list[-1] = model_x + if callback is not None: + callback({'x': x, 'i': step_index, 'denoised': model_prev_list[-1]}) + else: + raise NotImplementedError() + # if denoise_to_zero: + # x = self.denoise_to_zero_fn(x, torch.ones((x.shape[0],)).to(device) * t_0) + return x + + +############################################################# +# other utility functions +############################################################# + +def interpolate_fn(x, xp, yp): + """ + A piecewise linear function y = f(x), using xp and yp as keypoints. + We implement f(x) in a differentiable way (i.e. applicable for autograd). + The function f(x) is well-defined for all x-axis. (For x beyond the bounds of xp, we use the outmost points of xp to define the linear function.) + + Args: + x: PyTorch tensor with shape [N, C], where N is the batch size, C is the number of channels (we use C = 1 for DPM-Solver). + xp: PyTorch tensor with shape [C, K], where K is the number of keypoints. + yp: PyTorch tensor with shape [C, K]. + Returns: + The function values f(x), with shape [N, C]. 
+ """ + N, K = x.shape[0], xp.shape[1] + all_x = torch.cat([x.unsqueeze(2), xp.unsqueeze(0).repeat((N, 1, 1))], dim=2) + sorted_all_x, x_indices = torch.sort(all_x, dim=2) + x_idx = torch.argmin(x_indices, dim=2) + cand_start_idx = x_idx - 1 + start_idx = torch.where( + torch.eq(x_idx, 0), + torch.tensor(1, device=x.device), + torch.where( + torch.eq(x_idx, K), torch.tensor(K - 2, device=x.device), cand_start_idx, + ), + ) + end_idx = torch.where(torch.eq(start_idx, cand_start_idx), start_idx + 2, start_idx + 1) + start_x = torch.gather(sorted_all_x, dim=2, index=start_idx.unsqueeze(2)).squeeze(2) + end_x = torch.gather(sorted_all_x, dim=2, index=end_idx.unsqueeze(2)).squeeze(2) + start_idx2 = torch.where( + torch.eq(x_idx, 0), + torch.tensor(0, device=x.device), + torch.where( + torch.eq(x_idx, K), torch.tensor(K - 2, device=x.device), cand_start_idx, + ), + ) + y_positions_expanded = yp.unsqueeze(0).expand(N, -1, -1) + start_y = torch.gather(y_positions_expanded, dim=2, index=start_idx2.unsqueeze(2)).squeeze(2) + end_y = torch.gather(y_positions_expanded, dim=2, index=(start_idx2 + 1).unsqueeze(2)).squeeze(2) + cand = start_y + (x - start_x) * (end_y - start_y) / (end_x - start_x) + return cand + + +def expand_dims(v, dims): + """ + Expand the tensor `v` to the dim `dims`. + + Args: + `v`: a PyTorch tensor with shape [N]. + `dim`: a `int`. + Returns: + a PyTorch tensor with shape [N, 1, 1, ..., 1] and the total dimension is `dims`. + """ + return v[(...,) + (None,)*(dims - 1)] + + +class SigmaConvert: + schedule = "" + def marginal_log_mean_coeff(self, sigma): + return 0.5 * torch.log(1 / ((sigma * sigma) + 1)) + + def marginal_alpha(self, t): + return torch.exp(self.marginal_log_mean_coeff(t)) + + def marginal_std(self, t): + return torch.sqrt(1. - torch.exp(2. * self.marginal_log_mean_coeff(t))) + + def marginal_lambda(self, t): + """ + Compute lambda_t = log(alpha_t) - log(sigma_t) of a given continuous-time label t in [0, T]. + """ + log_mean_coeff = self.marginal_log_mean_coeff(t) + log_std = 0.5 * torch.log(1. - torch.exp(2. 
* log_mean_coeff)) + return log_mean_coeff - log_std + +def predict_eps_sigma(model, input, sigma_in, **kwargs): + sigma = sigma_in.view(sigma_in.shape[:1] + (1,) * (input.ndim - 1)) + input = input * ((sigma ** 2 + 1.0) ** 0.5) + return (input - model(input, sigma_in, **kwargs)) / sigma + + +def sample_unipc(model, noise, sigmas, extra_args=None, callback=None, disable=False, variant='bh1'): + timesteps = sigmas.clone() + if sigmas[-1] == 0: + timesteps = sigmas[:] + timesteps[-1] = 0.001 + else: + timesteps = sigmas.clone() + ns = SigmaConvert() + + noise = noise / torch.sqrt(1.0 + timesteps[0] ** 2.0) + model_type = "noise" + + model_fn = model_wrapper( + lambda input, sigma, **kwargs: predict_eps_sigma(model, input, sigma, **kwargs), + ns, + model_type=model_type, + guidance_type="uncond", + model_kwargs=extra_args, + ) + + order = min(3, len(timesteps) - 2) + uni_pc = UniPC(model_fn, ns, predict_x0=True, thresholding=False, variant=variant) + x = uni_pc.sample(noise, timesteps=timesteps, skip_type="time_uniform", method="multistep", order=order, lower_order_final=True, callback=callback, disable_pbar=disable) + x /= ns.marginal_alpha(timesteps[-1]) + return x + +def sample_unipc_bh2(model, noise, sigmas, extra_args=None, callback=None, disable=False): + return sample_unipc(model, noise, sigmas, extra_args, callback, disable, variant='bh2') \ No newline at end of file diff --git a/src/comfyui/comfy/float.py b/src/comfyui/comfy/float.py new file mode 100644 index 0000000000000000000000000000000000000000..521316fd2facaab90583da8487029a365aefd9e7 --- /dev/null +++ b/src/comfyui/comfy/float.py @@ -0,0 +1,67 @@ +import torch + +def calc_mantissa(abs_x, exponent, normal_mask, MANTISSA_BITS, EXPONENT_BIAS, generator=None): + mantissa_scaled = torch.where( + normal_mask, + (abs_x / (2.0 ** (exponent - EXPONENT_BIAS)) - 1.0) * (2**MANTISSA_BITS), + (abs_x / (2.0 ** (-EXPONENT_BIAS + 1 - MANTISSA_BITS))) + ) + + mantissa_scaled += torch.rand(mantissa_scaled.size(), dtype=mantissa_scaled.dtype, layout=mantissa_scaled.layout, device=mantissa_scaled.device, generator=generator) + return mantissa_scaled.floor() / (2**MANTISSA_BITS) + +#Not 100% sure about this +def manual_stochastic_round_to_float8(x, dtype, generator=None): + if dtype == torch.float8_e4m3fn: + EXPONENT_BITS, MANTISSA_BITS, EXPONENT_BIAS = 4, 3, 7 + elif dtype == torch.float8_e5m2: + EXPONENT_BITS, MANTISSA_BITS, EXPONENT_BIAS = 5, 2, 15 + else: + raise ValueError("Unsupported dtype") + + x = x.half() + sign = torch.sign(x) + abs_x = x.abs() + sign = torch.where(abs_x == 0, 0, sign) + + # Combine exponent calculation and clamping + exponent = torch.clamp( + torch.floor(torch.log2(abs_x)) + EXPONENT_BIAS, + 0, 2**EXPONENT_BITS - 1 + ) + + # Combine mantissa calculation and rounding + normal_mask = ~(exponent == 0) + + abs_x[:] = calc_mantissa(abs_x, exponent, normal_mask, MANTISSA_BITS, EXPONENT_BIAS, generator=generator) + + sign *= torch.where( + normal_mask, + (2.0 ** (exponent - EXPONENT_BIAS)) * (1.0 + abs_x), + (2.0 ** (-EXPONENT_BIAS + 1)) * abs_x + ) + + inf = torch.finfo(dtype) + torch.clamp(sign, min=inf.min, max=inf.max, out=sign) + return sign + + + +def stochastic_rounding(value, dtype, seed=0): + if dtype == torch.float32: + return value.to(dtype=torch.float32) + if dtype == torch.float16: + return value.to(dtype=torch.float16) + if dtype == torch.bfloat16: + return value.to(dtype=torch.bfloat16) + if dtype == torch.float8_e4m3fn or dtype == torch.float8_e5m2: + generator = torch.Generator(device=value.device) + 
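[Review note, not part of the diff] Why `calc_mantissa` above adds uniform noise before flooring: stochastic rounding keeps the expected value of the rounded tensor unbiased, unlike round-to-nearest, which matters when quantizing weights down to fp8. A toy sketch of the principle on a {0, 1} grid, not the fp8 path itself:

```python
import torch

# Sketch only: stochastic rounding of 0.3 onto the grid {0.0, 1.0}.
# Rounding up with probability equal to the fractional part keeps the
# mean unbiased; nearest-rounding would collapse everything to 0.0.
g = torch.Generator().manual_seed(0)
x = torch.full((100_000,), 0.3)
rounded = (torch.rand(x.shape, generator=g) < x).float()
assert abs(rounded.mean().item() - 0.3) < 0.01
```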
generator.manual_seed(seed) + output = torch.empty_like(value, dtype=dtype) + num_slices = max(1, (value.numel() / (4096 * 4096))) + slice_size = max(1, round(value.shape[0] / num_slices)) + for i in range(0, value.shape[0], slice_size): + output[i:i+slice_size].copy_(manual_stochastic_round_to_float8(value[i:i+slice_size], dtype, generator=generator)) + return output + + return value.to(dtype=dtype) diff --git a/src/comfyui/comfy/gligen.py b/src/comfyui/comfy/gligen.py new file mode 100644 index 0000000000000000000000000000000000000000..592522767e98bbe11b6e5e9411b1f734cbf92b9b --- /dev/null +++ b/src/comfyui/comfy/gligen.py @@ -0,0 +1,343 @@ +import torch +from torch import nn +from .ldm.modules.attention import CrossAttention +from inspect import isfunction +import comfy.ops +ops = comfy.ops.manual_cast + +def exists(val): + return val is not None + + +def uniq(arr): + return{el: True for el in arr}.keys() + + +def default(val, d): + if exists(val): + return val + return d() if isfunction(d) else d + + +# feedforward +class GEGLU(nn.Module): + def __init__(self, dim_in, dim_out): + super().__init__() + self.proj = ops.Linear(dim_in, dim_out * 2) + + def forward(self, x): + x, gate = self.proj(x).chunk(2, dim=-1) + return x * torch.nn.functional.gelu(gate) + + +class FeedForward(nn.Module): + def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.): + super().__init__() + inner_dim = int(dim * mult) + dim_out = default(dim_out, dim) + project_in = nn.Sequential( + ops.Linear(dim, inner_dim), + nn.GELU() + ) if not glu else GEGLU(dim, inner_dim) + + self.net = nn.Sequential( + project_in, + nn.Dropout(dropout), + ops.Linear(inner_dim, dim_out) + ) + + def forward(self, x): + return self.net(x) + + +class GatedCrossAttentionDense(nn.Module): + def __init__(self, query_dim, context_dim, n_heads, d_head): + super().__init__() + + self.attn = CrossAttention( + query_dim=query_dim, + context_dim=context_dim, + heads=n_heads, + dim_head=d_head, + operations=ops) + self.ff = FeedForward(query_dim, glu=True) + + self.norm1 = ops.LayerNorm(query_dim) + self.norm2 = ops.LayerNorm(query_dim) + + self.register_parameter('alpha_attn', nn.Parameter(torch.tensor(0.))) + self.register_parameter('alpha_dense', nn.Parameter(torch.tensor(0.))) + + # this can be useful: we can externally change magnitude of tanh(alpha) + # for example, when it is set to 0, then the entire model is same as + # original one + self.scale = 1 + + def forward(self, x, objs): + + x = x + self.scale * \ + torch.tanh(self.alpha_attn) * self.attn(self.norm1(x), objs, objs) + x = x + self.scale * \ + torch.tanh(self.alpha_dense) * self.ff(self.norm2(x)) + + return x + + +class GatedSelfAttentionDense(nn.Module): + def __init__(self, query_dim, context_dim, n_heads, d_head): + super().__init__() + + # we need a linear projection since we need cat visual feature and obj + # feature + self.linear = ops.Linear(context_dim, query_dim) + + self.attn = CrossAttention( + query_dim=query_dim, + context_dim=query_dim, + heads=n_heads, + dim_head=d_head, + operations=ops) + self.ff = FeedForward(query_dim, glu=True) + + self.norm1 = ops.LayerNorm(query_dim) + self.norm2 = ops.LayerNorm(query_dim) + + self.register_parameter('alpha_attn', nn.Parameter(torch.tensor(0.))) + self.register_parameter('alpha_dense', nn.Parameter(torch.tensor(0.))) + + # this can be useful: we can externally change magnitude of tanh(alpha) + # for example, when it is set to 0, then the entire model is same as + # original one + self.scale = 1 + + def 
forward(self, x, objs): + + N_visual = x.shape[1] + objs = self.linear(objs) + + x = x + self.scale * torch.tanh(self.alpha_attn) * self.attn( + self.norm1(torch.cat([x, objs], dim=1)))[:, 0:N_visual, :] + x = x + self.scale * \ + torch.tanh(self.alpha_dense) * self.ff(self.norm2(x)) + + return x + + +class GatedSelfAttentionDense2(nn.Module): + def __init__(self, query_dim, context_dim, n_heads, d_head): + super().__init__() + + # we need a linear projection since we need cat visual feature and obj + # feature + self.linear = ops.Linear(context_dim, query_dim) + + self.attn = CrossAttention( + query_dim=query_dim, context_dim=query_dim, dim_head=d_head, operations=ops) + self.ff = FeedForward(query_dim, glu=True) + + self.norm1 = ops.LayerNorm(query_dim) + self.norm2 = ops.LayerNorm(query_dim) + + self.register_parameter('alpha_attn', nn.Parameter(torch.tensor(0.))) + self.register_parameter('alpha_dense', nn.Parameter(torch.tensor(0.))) + + # this can be useful: we can externally change magnitude of tanh(alpha) + # for example, when it is set to 0, then the entire model is same as + # original one + self.scale = 1 + + def forward(self, x, objs): + + B, N_visual, _ = x.shape + B, N_ground, _ = objs.shape + + objs = self.linear(objs) + + # sanity check + size_v = math.sqrt(N_visual) + size_g = math.sqrt(N_ground) + assert int(size_v) == size_v, "Visual tokens must be square rootable" + assert int(size_g) == size_g, "Grounding tokens must be square rootable" + size_v = int(size_v) + size_g = int(size_g) + + # select grounding token and resize it to visual token size as residual + out = self.attn(self.norm1(torch.cat([x, objs], dim=1)))[ + :, N_visual:, :] + out = out.permute(0, 2, 1).reshape(B, -1, size_g, size_g) + out = torch.nn.functional.interpolate( + out, (size_v, size_v), mode='bicubic') + residual = out.reshape(B, -1, N_visual).permute(0, 2, 1) + + # add residual to visual feature + x = x + self.scale * torch.tanh(self.alpha_attn) * residual + x = x + self.scale * \ + torch.tanh(self.alpha_dense) * self.ff(self.norm2(x)) + + return x + + +class FourierEmbedder(): + def __init__(self, num_freqs=64, temperature=100): + + self.num_freqs = num_freqs + self.temperature = temperature + self.freq_bands = temperature ** (torch.arange(num_freqs) / num_freqs) + + @torch.no_grad() + def __call__(self, x, cat_dim=-1): + "x: arbitrary shape of tensor. 
dim: cat dim" + out = [] + for freq in self.freq_bands: + out.append(torch.sin(freq * x)) + out.append(torch.cos(freq * x)) + return torch.cat(out, cat_dim) + + +class PositionNet(nn.Module): + def __init__(self, in_dim, out_dim, fourier_freqs=8): + super().__init__() + self.in_dim = in_dim + self.out_dim = out_dim + + self.fourier_embedder = FourierEmbedder(num_freqs=fourier_freqs) + self.position_dim = fourier_freqs * 2 * 4 # 2 is sin&cos, 4 is xyxy + + self.linears = nn.Sequential( + ops.Linear(self.in_dim + self.position_dim, 512), + nn.SiLU(), + ops.Linear(512, 512), + nn.SiLU(), + ops.Linear(512, out_dim), + ) + + self.null_positive_feature = torch.nn.Parameter( + torch.zeros([self.in_dim])) + self.null_position_feature = torch.nn.Parameter( + torch.zeros([self.position_dim])) + + def forward(self, boxes, masks, positive_embeddings): + B, N, _ = boxes.shape + masks = masks.unsqueeze(-1) + positive_embeddings = positive_embeddings + + # embedding position (it may includes padding as placeholder) + xyxy_embedding = self.fourier_embedder(boxes) # B*N*4 --> B*N*C + + # learnable null embedding + positive_null = self.null_positive_feature.to(device=boxes.device, dtype=boxes.dtype).view(1, 1, -1) + xyxy_null = self.null_position_feature.to(device=boxes.device, dtype=boxes.dtype).view(1, 1, -1) + + # replace padding with learnable null embedding + positive_embeddings = positive_embeddings * \ + masks + (1 - masks) * positive_null + xyxy_embedding = xyxy_embedding * masks + (1 - masks) * xyxy_null + + objs = self.linears( + torch.cat([positive_embeddings, xyxy_embedding], dim=-1)) + assert objs.shape == torch.Size([B, N, self.out_dim]) + return objs + + +class Gligen(nn.Module): + def __init__(self, modules, position_net, key_dim): + super().__init__() + self.module_list = nn.ModuleList(modules) + self.position_net = position_net + self.key_dim = key_dim + self.max_objs = 30 + self.current_device = torch.device("cpu") + + def _set_position(self, boxes, masks, positive_embeddings): + objs = self.position_net(boxes, masks, positive_embeddings) + def func(x, extra_options): + key = extra_options["transformer_index"] + module = self.module_list[key] + return module(x, objs.to(device=x.device, dtype=x.dtype)) + return func + + def set_position(self, latent_image_shape, position_params, device): + batch, c, h, w = latent_image_shape + masks = torch.zeros([self.max_objs], device="cpu") + boxes = [] + positive_embeddings = [] + for p in position_params: + x1 = (p[4]) / w + y1 = (p[3]) / h + x2 = (p[4] + p[2]) / w + y2 = (p[3] + p[1]) / h + masks[len(boxes)] = 1.0 + boxes += [torch.tensor((x1, y1, x2, y2)).unsqueeze(0)] + positive_embeddings += [p[0]] + append_boxes = [] + append_conds = [] + if len(boxes) < self.max_objs: + append_boxes = [torch.zeros( + [self.max_objs - len(boxes), 4], device="cpu")] + append_conds = [torch.zeros( + [self.max_objs - len(boxes), self.key_dim], device="cpu")] + + box_out = torch.cat( + boxes + append_boxes).unsqueeze(0).repeat(batch, 1, 1) + masks = masks.unsqueeze(0).repeat(batch, 1) + conds = torch.cat(positive_embeddings + + append_conds).unsqueeze(0).repeat(batch, 1, 1) + return self._set_position( + box_out.to(device), + masks.to(device), + conds.to(device)) + + def set_empty(self, latent_image_shape, device): + batch, c, h, w = latent_image_shape + masks = torch.zeros([self.max_objs], device="cpu").repeat(batch, 1) + box_out = torch.zeros([self.max_objs, 4], + device="cpu").repeat(batch, 1, 1) + conds = torch.zeros([self.max_objs, self.key_dim], + 
device="cpu").repeat(batch, 1, 1) + return self._set_position( + box_out.to(device), + masks.to(device), + conds.to(device)) + + +def load_gligen(sd): + sd_k = sd.keys() + output_list = [] + key_dim = 768 + for a in ["input_blocks", "middle_block", "output_blocks"]: + for b in range(20): + k_temp = filter(lambda k: "{}.{}.".format(a, b) + in k and ".fuser." in k, sd_k) + k_temp = map(lambda k: (k, k.split(".fuser.")[-1]), k_temp) + + n_sd = {} + for k in k_temp: + n_sd[k[1]] = sd[k[0]] + if len(n_sd) > 0: + query_dim = n_sd["linear.weight"].shape[0] + key_dim = n_sd["linear.weight"].shape[1] + + if key_dim == 768: # SD1.x + n_heads = 8 + d_head = query_dim // n_heads + else: + d_head = 64 + n_heads = query_dim // d_head + + gated = GatedSelfAttentionDense( + query_dim, key_dim, n_heads, d_head) + gated.load_state_dict(n_sd, strict=False) + output_list.append(gated) + + if "position_net.null_positive_feature" in sd_k: + in_dim = sd["position_net.null_positive_feature"].shape[0] + out_dim = sd["position_net.linears.4.weight"].shape[0] + + class WeightsLoader(torch.nn.Module): + pass + w = WeightsLoader() + w.position_net = PositionNet(in_dim, out_dim) + w.load_state_dict(sd, strict=False) + + gligen = Gligen(output_list, w.position_net, key_dim) + return gligen diff --git a/src/comfyui/comfy/k_diffusion/__pycache__/deis.cpython-310.pyc b/src/comfyui/comfy/k_diffusion/__pycache__/deis.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..70c7b007c994e2bed09ef5bc60d21f5c3e7f8fcf Binary files /dev/null and b/src/comfyui/comfy/k_diffusion/__pycache__/deis.cpython-310.pyc differ diff --git a/src/comfyui/comfy/k_diffusion/__pycache__/sampling.cpython-310.pyc b/src/comfyui/comfy/k_diffusion/__pycache__/sampling.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..de573c094b46baa39a56c8fe528b62078ab90788 Binary files /dev/null and b/src/comfyui/comfy/k_diffusion/__pycache__/sampling.cpython-310.pyc differ diff --git a/src/comfyui/comfy/k_diffusion/__pycache__/utils.cpython-310.pyc b/src/comfyui/comfy/k_diffusion/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aa88604acc010ee6d7d4b9c9aa556760a2bd81c5 Binary files /dev/null and b/src/comfyui/comfy/k_diffusion/__pycache__/utils.cpython-310.pyc differ diff --git a/src/comfyui/comfy/k_diffusion/deis.py b/src/comfyui/comfy/k_diffusion/deis.py new file mode 100644 index 0000000000000000000000000000000000000000..6074106566288434bdfc06e34e77732903c4b81b --- /dev/null +++ b/src/comfyui/comfy/k_diffusion/deis.py @@ -0,0 +1,121 @@ +#Taken from: https://github.com/zju-pi/diff-sampler/blob/main/gits-main/solver_utils.py +#under Apache 2 license +import torch +import numpy as np + +# A pytorch reimplementation of DEIS (https://github.com/qsh-zh/deis). +############################# +### Utils for DEIS solver ### +############################# +#---------------------------------------------------------------------------- +# Transfer from the input time (sigma) used in EDM to that (t) used in DEIS. 
+ +def edm2t(edm_steps, epsilon_s=1e-3, sigma_min=0.002, sigma_max=80): + vp_sigma = lambda beta_d, beta_min: lambda t: (np.e ** (0.5 * beta_d * (t ** 2) + beta_min * t) - 1) ** 0.5 + vp_sigma_inv = lambda beta_d, beta_min: lambda sigma: ((beta_min ** 2 + 2 * beta_d * (sigma ** 2 + 1).log()).sqrt() - beta_min) / beta_d + vp_beta_d = 2 * (np.log(torch.tensor(sigma_min).cpu() ** 2 + 1) / epsilon_s - np.log(torch.tensor(sigma_max).cpu() ** 2 + 1)) / (epsilon_s - 1) + vp_beta_min = np.log(torch.tensor(sigma_max).cpu() ** 2 + 1) - 0.5 * vp_beta_d + t_steps = vp_sigma_inv(vp_beta_d.clone().detach().cpu(), vp_beta_min.clone().detach().cpu())(edm_steps.clone().detach().cpu()) + return t_steps, vp_beta_min, vp_beta_d + vp_beta_min + +#---------------------------------------------------------------------------- + +def cal_poly(prev_t, j, taus): + poly = 1 + for k in range(prev_t.shape[0]): + if k == j: + continue + poly *= (taus - prev_t[k]) / (prev_t[j] - prev_t[k]) + return poly + +#---------------------------------------------------------------------------- +# Transfer from t to alpha_t. + +def t2alpha_fn(beta_0, beta_1, t): + return torch.exp(-0.5 * t ** 2 * (beta_1 - beta_0) - t * beta_0) + +#---------------------------------------------------------------------------- + +def cal_intergrand(beta_0, beta_1, taus): + with torch.inference_mode(mode=False): + taus = taus.clone() + beta_0 = beta_0.clone() + beta_1 = beta_1.clone() + with torch.enable_grad(): + taus.requires_grad_(True) + alpha = t2alpha_fn(beta_0, beta_1, taus) + log_alpha = alpha.log() + log_alpha.sum().backward() + d_log_alpha_dtau = taus.grad + integrand = -0.5 * d_log_alpha_dtau / torch.sqrt(alpha * (1 - alpha)) + return integrand + +#---------------------------------------------------------------------------- + +def get_deis_coeff_list(t_steps, max_order, N=10000, deis_mode='tab'): + """ + Get the coefficient list for DEIS sampling. + + Args: + t_steps: A pytorch tensor. The time steps for sampling. + max_order: A `int`. Maximum order of the solver. 1 <= max_order <= 4 + N: A `int`. Use how many points to perform the numerical integration when deis_mode=='tab'. + deis_mode: A `str`. Select between 'tab' and 'rhoab'. Type of DEIS. + Returns: + A pytorch tensor. A batch of generated samples or sampling trajectories if return_inters=True. 
+ """ + if deis_mode == 'tab': + t_steps, beta_0, beta_1 = edm2t(t_steps) + C = [] + for i, (t_cur, t_next) in enumerate(zip(t_steps[:-1], t_steps[1:])): + order = min(i+1, max_order) + if order == 1: + C.append([]) + else: + taus = torch.linspace(t_cur, t_next, N) # split the interval for integral appximation + dtau = (t_next - t_cur) / N + prev_t = t_steps[[i - k for k in range(order)]] + coeff_temp = [] + integrand = cal_intergrand(beta_0, beta_1, taus) + for j in range(order): + poly = cal_poly(prev_t, j, taus) + coeff_temp.append(torch.sum(integrand * poly) * dtau) + C.append(coeff_temp) + + elif deis_mode == 'rhoab': + # Analytical solution, second order + def get_def_intergral_2(a, b, start, end, c): + coeff = (end**3 - start**3) / 3 - (end**2 - start**2) * (a + b) / 2 + (end - start) * a * b + return coeff / ((c - a) * (c - b)) + + # Analytical solution, third order + def get_def_intergral_3(a, b, c, start, end, d): + coeff = (end**4 - start**4) / 4 - (end**3 - start**3) * (a + b + c) / 3 \ + + (end**2 - start**2) * (a*b + a*c + b*c) / 2 - (end - start) * a * b * c + return coeff / ((d - a) * (d - b) * (d - c)) + + C = [] + for i, (t_cur, t_next) in enumerate(zip(t_steps[:-1], t_steps[1:])): + order = min(i, max_order) + if order == 0: + C.append([]) + else: + prev_t = t_steps[[i - k for k in range(order+1)]] + if order == 1: + coeff_cur = ((t_next - prev_t[1])**2 - (t_cur - prev_t[1])**2) / (2 * (t_cur - prev_t[1])) + coeff_prev1 = (t_next - t_cur)**2 / (2 * (prev_t[1] - t_cur)) + coeff_temp = [coeff_cur, coeff_prev1] + elif order == 2: + coeff_cur = get_def_intergral_2(prev_t[1], prev_t[2], t_cur, t_next, t_cur) + coeff_prev1 = get_def_intergral_2(t_cur, prev_t[2], t_cur, t_next, prev_t[1]) + coeff_prev2 = get_def_intergral_2(t_cur, prev_t[1], t_cur, t_next, prev_t[2]) + coeff_temp = [coeff_cur, coeff_prev1, coeff_prev2] + elif order == 3: + coeff_cur = get_def_intergral_3(prev_t[1], prev_t[2], prev_t[3], t_cur, t_next, t_cur) + coeff_prev1 = get_def_intergral_3(t_cur, prev_t[2], prev_t[3], t_cur, t_next, prev_t[1]) + coeff_prev2 = get_def_intergral_3(t_cur, prev_t[1], prev_t[3], t_cur, t_next, prev_t[2]) + coeff_prev3 = get_def_intergral_3(t_cur, prev_t[1], prev_t[2], t_cur, t_next, prev_t[3]) + coeff_temp = [coeff_cur, coeff_prev1, coeff_prev2, coeff_prev3] + C.append(coeff_temp) + return C + diff --git a/src/comfyui/comfy/k_diffusion/sampling.py b/src/comfyui/comfy/k_diffusion/sampling.py new file mode 100644 index 0000000000000000000000000000000000000000..ffdd888ee14bd770048a8c2f890355eaa40082d0 --- /dev/null +++ b/src/comfyui/comfy/k_diffusion/sampling.py @@ -0,0 +1,1211 @@ +import math + +from scipy import integrate +import torch +from torch import nn +import torchsde +from tqdm.auto import trange, tqdm + +from . import utils +from . import deis +import comfy.model_patcher +import comfy.model_sampling + +def append_zero(x): + return torch.cat([x, x.new_zeros([1])]) + + +def get_sigmas_karras(n, sigma_min, sigma_max, rho=7., device='cpu'): + """Constructs the noise schedule of Karras et al. 
(2022).""" + ramp = torch.linspace(0, 1, n, device=device) + min_inv_rho = sigma_min ** (1 / rho) + max_inv_rho = sigma_max ** (1 / rho) + sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** rho + return append_zero(sigmas).to(device) + + +def get_sigmas_exponential(n, sigma_min, sigma_max, device='cpu'): + """Constructs an exponential noise schedule.""" + sigmas = torch.linspace(math.log(sigma_max), math.log(sigma_min), n, device=device).exp() + return append_zero(sigmas) + + +def get_sigmas_polyexponential(n, sigma_min, sigma_max, rho=1., device='cpu'): + """Constructs an polynomial in log sigma noise schedule.""" + ramp = torch.linspace(1, 0, n, device=device) ** rho + sigmas = torch.exp(ramp * (math.log(sigma_max) - math.log(sigma_min)) + math.log(sigma_min)) + return append_zero(sigmas) + + +def get_sigmas_vp(n, beta_d=19.9, beta_min=0.1, eps_s=1e-3, device='cpu'): + """Constructs a continuous VP noise schedule.""" + t = torch.linspace(1, eps_s, n, device=device) + sigmas = torch.sqrt(torch.exp(beta_d * t ** 2 / 2 + beta_min * t) - 1) + return append_zero(sigmas) + + +def get_sigmas_laplace(n, sigma_min, sigma_max, mu=0., beta=0.5, device='cpu'): + """Constructs the noise schedule proposed by Tiankai et al. (2024). """ + epsilon = 1e-5 # avoid log(0) + x = torch.linspace(0, 1, n, device=device) + clamp = lambda x: torch.clamp(x, min=sigma_min, max=sigma_max) + lmb = mu - beta * torch.sign(0.5-x) * torch.log(1 - 2 * torch.abs(0.5-x) + epsilon) + sigmas = clamp(torch.exp(lmb)) + return sigmas + + + +def to_d(x, sigma, denoised): + """Converts a denoiser output to a Karras ODE derivative.""" + return (x - denoised) / utils.append_dims(sigma, x.ndim) + + +def get_ancestral_step(sigma_from, sigma_to, eta=1.): + """Calculates the noise level (sigma_down) to step down to and the amount + of noise to add (sigma_up) when doing an ancestral sampling step.""" + if not eta: + return sigma_to, 0. + sigma_up = min(sigma_to, eta * (sigma_to ** 2 * (sigma_from ** 2 - sigma_to ** 2) / sigma_from ** 2) ** 0.5) + sigma_down = (sigma_to ** 2 - sigma_up ** 2) ** 0.5 + return sigma_down, sigma_up + + +def default_noise_sampler(x): + return lambda sigma, sigma_next: torch.randn_like(x) + + +class BatchedBrownianTree: + """A wrapper around torchsde.BrownianTree that enables batches of entropy.""" + + def __init__(self, x, t0, t1, seed=None, **kwargs): + self.cpu_tree = True + if "cpu" in kwargs: + self.cpu_tree = kwargs.pop("cpu") + t0, t1, self.sign = self.sort(t0, t1) + w0 = kwargs.get('w0', torch.zeros_like(x)) + if seed is None: + seed = torch.randint(0, 2 ** 63 - 1, []).item() + self.batched = True + try: + assert len(seed) == x.shape[0] + w0 = w0[0] + except TypeError: + seed = [seed] + self.batched = False + if self.cpu_tree: + self.trees = [torchsde.BrownianTree(t0.cpu(), w0.cpu(), t1.cpu(), entropy=s, **kwargs) for s in seed] + else: + self.trees = [torchsde.BrownianTree(t0, w0, t1, entropy=s, **kwargs) for s in seed] + + @staticmethod + def sort(a, b): + return (a, b, 1) if a < b else (b, a, -1) + + def __call__(self, t0, t1): + t0, t1, sign = self.sort(t0, t1) + if self.cpu_tree: + w = torch.stack([tree(t0.cpu().float(), t1.cpu().float()).to(t0.dtype).to(t0.device) for tree in self.trees]) * (self.sign * sign) + else: + w = torch.stack([tree(t0, t1) for tree in self.trees]) * (self.sign * sign) + + return w if self.batched else w[0] + + +class BrownianTreeNoiseSampler: + """A noise sampler backed by a torchsde.BrownianTree. 
+ + Args: + x (Tensor): The tensor whose shape, device and dtype to use to generate + random samples. + sigma_min (float): The low end of the valid interval. + sigma_max (float): The high end of the valid interval. + seed (int or List[int]): The random seed. If a list of seeds is + supplied instead of a single integer, then the noise sampler will + use one BrownianTree per batch item, each with its own seed. + transform (callable): A function that maps sigma to the sampler's + internal timestep. + """ + + def __init__(self, x, sigma_min, sigma_max, seed=None, transform=lambda x: x, cpu=False): + self.transform = transform + t0, t1 = self.transform(torch.as_tensor(sigma_min)), self.transform(torch.as_tensor(sigma_max)) + self.tree = BatchedBrownianTree(x, t0, t1, seed, cpu=cpu) + + def __call__(self, sigma, sigma_next): + t0, t1 = self.transform(torch.as_tensor(sigma)), self.transform(torch.as_tensor(sigma_next)) + return self.tree(t0, t1) / (t1 - t0).abs().sqrt() + + +@torch.no_grad() +def sample_euler(model, x, sigmas, extra_args=None, callback=None, disable=None, s_churn=0., s_tmin=0., s_tmax=float('inf'), s_noise=1.): + """Implements Algorithm 2 (Euler steps) from Karras et al. (2022).""" + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + for i in trange(len(sigmas) - 1, disable=disable): + if s_churn > 0: + gamma = min(s_churn / (len(sigmas) - 1), 2 ** 0.5 - 1) if s_tmin <= sigmas[i] <= s_tmax else 0. + sigma_hat = sigmas[i] * (gamma + 1) + else: + gamma = 0 + sigma_hat = sigmas[i] + + if gamma > 0: + eps = torch.randn_like(x) * s_noise + x = x + eps * (sigma_hat ** 2 - sigmas[i] ** 2) ** 0.5 + denoised = model(x, sigma_hat * s_in, **extra_args) + d = to_d(x, sigma_hat, denoised) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigma_hat, 'denoised': denoised}) + dt = sigmas[i + 1] - sigma_hat + # Euler method + x = x + d * dt + return x + + +@torch.no_grad() +def sample_euler_ancestral(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None): + if isinstance(model.inner_model.inner_model.model_sampling, comfy.model_sampling.CONST): + return sample_euler_ancestral_RF(model, x, sigmas, extra_args, callback, disable, eta, s_noise, noise_sampler) + """Ancestral sampling with Euler method steps.""" + extra_args = {} if extra_args is None else extra_args + noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler + s_in = x.new_ones([x.shape[0]]) + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + sigma_down, sigma_up = get_ancestral_step(sigmas[i], sigmas[i + 1], eta=eta) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + d = to_d(x, sigmas[i], denoised) + # Euler method + dt = sigma_down - sigmas[i] + x = x + d * dt + if sigmas[i + 1] > 0: + x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * s_noise * sigma_up + return x + +@torch.no_grad() +def sample_euler_ancestral_RF(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1.0, s_noise=1., noise_sampler=None): + """Ancestral sampling with Euler method steps.""" + extra_args = {} if extra_args is None else extra_args + noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler + s_in = x.new_ones([x.shape[0]]) + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + # 
sigma_down, sigma_up = get_ancestral_step(sigmas[i], sigmas[i + 1], eta=eta)
+        downstep_ratio = 1 + (sigmas[i+1]/sigmas[i] - 1) * eta
+        sigma_down = sigmas[i+1] * downstep_ratio
+        alpha_ip1 = 1 - sigmas[i+1]
+        alpha_down = 1 - sigma_down
+        renoise_coeff = (sigmas[i+1]**2 - sigma_down**2*alpha_ip1**2/alpha_down**2)**0.5
+        if callback is not None:
+            callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised})
+
+        # Euler method
+        sigma_down_i_ratio = sigma_down / sigmas[i]
+        x = sigma_down_i_ratio * x + (1 - sigma_down_i_ratio) * denoised
+        if sigmas[i + 1] > 0 and eta > 0:
+            x = (alpha_ip1/alpha_down) * x + noise_sampler(sigmas[i], sigmas[i + 1]) * s_noise * renoise_coeff
+    return x
+
+@torch.no_grad()
+def sample_heun(model, x, sigmas, extra_args=None, callback=None, disable=None, s_churn=0., s_tmin=0., s_tmax=float('inf'), s_noise=1.):
+    """Implements Algorithm 2 (Heun steps) from Karras et al. (2022)."""
+    extra_args = {} if extra_args is None else extra_args
+    s_in = x.new_ones([x.shape[0]])
+    for i in trange(len(sigmas) - 1, disable=disable):
+        if s_churn > 0:
+            gamma = min(s_churn / (len(sigmas) - 1), 2 ** 0.5 - 1) if s_tmin <= sigmas[i] <= s_tmax else 0.
+            sigma_hat = sigmas[i] * (gamma + 1)
+        else:
+            gamma = 0
+            sigma_hat = sigmas[i]
+
+        if gamma > 0:
+            eps = torch.randn_like(x) * s_noise
+            x = x + eps * (sigma_hat ** 2 - sigmas[i] ** 2) ** 0.5
+        denoised = model(x, sigma_hat * s_in, **extra_args)
+        d = to_d(x, sigma_hat, denoised)
+        if callback is not None:
+            callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigma_hat, 'denoised': denoised})
+        dt = sigmas[i + 1] - sigma_hat
+        if sigmas[i + 1] == 0:
+            # Euler method
+            x = x + d * dt
+        else:
+            # Heun's method
+            x_2 = x + d * dt
+            denoised_2 = model(x_2, sigmas[i + 1] * s_in, **extra_args)
+            d_2 = to_d(x_2, sigmas[i + 1], denoised_2)
+            d_prime = (d + d_2) / 2
+            x = x + d_prime * dt
+    return x
+
+
+@torch.no_grad()
+def sample_dpm_2(model, x, sigmas, extra_args=None, callback=None, disable=None, s_churn=0., s_tmin=0., s_tmax=float('inf'), s_noise=1.):
+    """A sampler inspired by DPM-Solver-2 and Algorithm 2 from Karras et al. (2022)."""
+    extra_args = {} if extra_args is None else extra_args
+    s_in = x.new_ones([x.shape[0]])
+    for i in trange(len(sigmas) - 1, disable=disable):
+        if s_churn > 0:
+            gamma = min(s_churn / (len(sigmas) - 1), 2 ** 0.5 - 1) if s_tmin <= sigmas[i] <= s_tmax else 0.
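+            # Worked example of the churn mechanism shared by sample_euler,
+            # sample_heun and this sampler: with s_churn=80. and 21 sigmas
+            # (20 steps), gamma = min(80 / 20, 2 ** 0.5 - 1) = 0.4142..., so
+            # sigma_hat = 1.4142 * sigmas[i]. The step first raises the noise
+            # level by ~41% (fresh noise is added just below), then integrates
+            # back down from sigma_hat rather than from sigmas[i].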
+ sigma_hat = sigmas[i] * (gamma + 1) + else: + gamma = 0 + sigma_hat = sigmas[i] + + if gamma > 0: + eps = torch.randn_like(x) * s_noise + x = x + eps * (sigma_hat ** 2 - sigmas[i] ** 2) ** 0.5 + denoised = model(x, sigma_hat * s_in, **extra_args) + d = to_d(x, sigma_hat, denoised) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigma_hat, 'denoised': denoised}) + if sigmas[i + 1] == 0: + # Euler method + dt = sigmas[i + 1] - sigma_hat + x = x + d * dt + else: + # DPM-Solver-2 + sigma_mid = sigma_hat.log().lerp(sigmas[i + 1].log(), 0.5).exp() + dt_1 = sigma_mid - sigma_hat + dt_2 = sigmas[i + 1] - sigma_hat + x_2 = x + d * dt_1 + denoised_2 = model(x_2, sigma_mid * s_in, **extra_args) + d_2 = to_d(x_2, sigma_mid, denoised_2) + x = x + d_2 * dt_2 + return x + + +@torch.no_grad() +def sample_dpm_2_ancestral(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None): + """Ancestral sampling with DPM-Solver second-order steps.""" + extra_args = {} if extra_args is None else extra_args + noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler + s_in = x.new_ones([x.shape[0]]) + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + sigma_down, sigma_up = get_ancestral_step(sigmas[i], sigmas[i + 1], eta=eta) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + d = to_d(x, sigmas[i], denoised) + if sigma_down == 0: + # Euler method + dt = sigma_down - sigmas[i] + x = x + d * dt + else: + # DPM-Solver-2 + sigma_mid = sigmas[i].log().lerp(sigma_down.log(), 0.5).exp() + dt_1 = sigma_mid - sigmas[i] + dt_2 = sigma_down - sigmas[i] + x_2 = x + d * dt_1 + denoised_2 = model(x_2, sigma_mid * s_in, **extra_args) + d_2 = to_d(x_2, sigma_mid, denoised_2) + x = x + d_2 * dt_2 + x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * s_noise * sigma_up + return x + + +def linear_multistep_coeff(order, t, i, j): + if order - 1 > i: + raise ValueError(f'Order {order} too high for step {i}') + def fn(tau): + prod = 1. 
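+        # fn is the j-th Lagrange basis polynomial over the last `order`
+        # sigmas; integrating it from sigmas[i] to sigmas[i + 1] (below, via
+        # integrate.quad) gives the Adams-Bashforth-style weight for history
+        # point j. E.g. on a uniform grid with step h = sigmas[i+1] - sigmas[i],
+        # the two order-2 weights reduce to 3h/2 (current derivative) and
+        # -h/2 (previous one), the classical two-step Adams-Bashforth rule.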
+ for k in range(order): + if j == k: + continue + prod *= (tau - t[i - k]) / (t[i - j] - t[i - k]) + return prod + return integrate.quad(fn, t[i], t[i + 1], epsrel=1e-4)[0] + + +@torch.no_grad() +def sample_lms(model, x, sigmas, extra_args=None, callback=None, disable=None, order=4): + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + sigmas_cpu = sigmas.detach().cpu().numpy() + ds = [] + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + d = to_d(x, sigmas[i], denoised) + ds.append(d) + if len(ds) > order: + ds.pop(0) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + cur_order = min(i + 1, order) + coeffs = [linear_multistep_coeff(cur_order, sigmas_cpu, i, j) for j in range(cur_order)] + x = x + sum(coeff * d for coeff, d in zip(coeffs, reversed(ds))) + return x + + +class PIDStepSizeController: + """A PID controller for ODE adaptive step size control.""" + def __init__(self, h, pcoeff, icoeff, dcoeff, order=1, accept_safety=0.81, eps=1e-8): + self.h = h + self.b1 = (pcoeff + icoeff + dcoeff) / order + self.b2 = -(pcoeff + 2 * dcoeff) / order + self.b3 = dcoeff / order + self.accept_safety = accept_safety + self.eps = eps + self.errs = [] + + def limiter(self, x): + return 1 + math.atan(x - 1) + + def propose_step(self, error): + inv_error = 1 / (float(error) + self.eps) + if not self.errs: + self.errs = [inv_error, inv_error, inv_error] + self.errs[0] = inv_error + factor = self.errs[0] ** self.b1 * self.errs[1] ** self.b2 * self.errs[2] ** self.b3 + factor = self.limiter(factor) + accept = factor >= self.accept_safety + if accept: + self.errs[2] = self.errs[1] + self.errs[1] = self.errs[0] + self.h *= factor + return accept + + +class DPMSolver(nn.Module): + """DPM-Solver. 
See https://arxiv.org/abs/2206.00927.""" + + def __init__(self, model, extra_args=None, eps_callback=None, info_callback=None): + super().__init__() + self.model = model + self.extra_args = {} if extra_args is None else extra_args + self.eps_callback = eps_callback + self.info_callback = info_callback + + def t(self, sigma): + return -sigma.log() + + def sigma(self, t): + return t.neg().exp() + + def eps(self, eps_cache, key, x, t, *args, **kwargs): + if key in eps_cache: + return eps_cache[key], eps_cache + sigma = self.sigma(t) * x.new_ones([x.shape[0]]) + eps = (x - self.model(x, sigma, *args, **self.extra_args, **kwargs)) / self.sigma(t) + if self.eps_callback is not None: + self.eps_callback() + return eps, {key: eps, **eps_cache} + + def dpm_solver_1_step(self, x, t, t_next, eps_cache=None): + eps_cache = {} if eps_cache is None else eps_cache + h = t_next - t + eps, eps_cache = self.eps(eps_cache, 'eps', x, t) + x_1 = x - self.sigma(t_next) * h.expm1() * eps + return x_1, eps_cache + + def dpm_solver_2_step(self, x, t, t_next, r1=1 / 2, eps_cache=None): + eps_cache = {} if eps_cache is None else eps_cache + h = t_next - t + eps, eps_cache = self.eps(eps_cache, 'eps', x, t) + s1 = t + r1 * h + u1 = x - self.sigma(s1) * (r1 * h).expm1() * eps + eps_r1, eps_cache = self.eps(eps_cache, 'eps_r1', u1, s1) + x_2 = x - self.sigma(t_next) * h.expm1() * eps - self.sigma(t_next) / (2 * r1) * h.expm1() * (eps_r1 - eps) + return x_2, eps_cache + + def dpm_solver_3_step(self, x, t, t_next, r1=1 / 3, r2=2 / 3, eps_cache=None): + eps_cache = {} if eps_cache is None else eps_cache + h = t_next - t + eps, eps_cache = self.eps(eps_cache, 'eps', x, t) + s1 = t + r1 * h + s2 = t + r2 * h + u1 = x - self.sigma(s1) * (r1 * h).expm1() * eps + eps_r1, eps_cache = self.eps(eps_cache, 'eps_r1', u1, s1) + u2 = x - self.sigma(s2) * (r2 * h).expm1() * eps - self.sigma(s2) * (r2 / r1) * ((r2 * h).expm1() / (r2 * h) - 1) * (eps_r1 - eps) + eps_r2, eps_cache = self.eps(eps_cache, 'eps_r2', u2, s2) + x_3 = x - self.sigma(t_next) * h.expm1() * eps - self.sigma(t_next) / r2 * (h.expm1() / h - 1) * (eps_r2 - eps) + return x_3, eps_cache + + def dpm_solver_fast(self, x, t_start, t_end, nfe, eta=0., s_noise=1., noise_sampler=None): + noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler + if not t_end > t_start and eta: + raise ValueError('eta must be 0 for reverse sampling') + + m = math.floor(nfe / 3) + 1 + ts = torch.linspace(t_start, t_end, m + 1, device=x.device) + + if nfe % 3 == 0: + orders = [3] * (m - 2) + [2, 1] + else: + orders = [3] * (m - 1) + [nfe % 3] + + for i in range(len(orders)): + eps_cache = {} + t, t_next = ts[i], ts[i + 1] + if eta: + sd, su = get_ancestral_step(self.sigma(t), self.sigma(t_next), eta) + t_next_ = torch.minimum(t_end, self.t(sd)) + su = (self.sigma(t_next) ** 2 - self.sigma(t_next_) ** 2) ** 0.5 + else: + t_next_, su = t_next, 0. 
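+            # With eta > 0 the step is ancestral: the deterministic solver only
+            # travels to the reduced noise level sigma(t_next_) = sd, and the
+            # removed variance su is re-injected as fresh noise after the
+            # solver update at the bottom of this loop.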
+ + eps, eps_cache = self.eps(eps_cache, 'eps', x, t) + denoised = x - self.sigma(t) * eps + if self.info_callback is not None: + self.info_callback({'x': x, 'i': i, 't': ts[i], 't_up': t, 'denoised': denoised}) + + if orders[i] == 1: + x, eps_cache = self.dpm_solver_1_step(x, t, t_next_, eps_cache=eps_cache) + elif orders[i] == 2: + x, eps_cache = self.dpm_solver_2_step(x, t, t_next_, eps_cache=eps_cache) + else: + x, eps_cache = self.dpm_solver_3_step(x, t, t_next_, eps_cache=eps_cache) + + x = x + su * s_noise * noise_sampler(self.sigma(t), self.sigma(t_next)) + + return x + + def dpm_solver_adaptive(self, x, t_start, t_end, order=3, rtol=0.05, atol=0.0078, h_init=0.05, pcoeff=0., icoeff=1., dcoeff=0., accept_safety=0.81, eta=0., s_noise=1., noise_sampler=None): + noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler + if order not in {2, 3}: + raise ValueError('order should be 2 or 3') + forward = t_end > t_start + if not forward and eta: + raise ValueError('eta must be 0 for reverse sampling') + h_init = abs(h_init) * (1 if forward else -1) + atol = torch.tensor(atol) + rtol = torch.tensor(rtol) + s = t_start + x_prev = x + accept = True + pid = PIDStepSizeController(h_init, pcoeff, icoeff, dcoeff, 1.5 if eta else order, accept_safety) + info = {'steps': 0, 'nfe': 0, 'n_accept': 0, 'n_reject': 0} + + while s < t_end - 1e-5 if forward else s > t_end + 1e-5: + eps_cache = {} + t = torch.minimum(t_end, s + pid.h) if forward else torch.maximum(t_end, s + pid.h) + if eta: + sd, su = get_ancestral_step(self.sigma(s), self.sigma(t), eta) + t_ = torch.minimum(t_end, self.t(sd)) + su = (self.sigma(t) ** 2 - self.sigma(t_) ** 2) ** 0.5 + else: + t_, su = t, 0. + + eps, eps_cache = self.eps(eps_cache, 'eps', x, s) + denoised = x - self.sigma(s) * eps + + if order == 2: + x_low, eps_cache = self.dpm_solver_1_step(x, s, t_, eps_cache=eps_cache) + x_high, eps_cache = self.dpm_solver_2_step(x, s, t_, eps_cache=eps_cache) + else: + x_low, eps_cache = self.dpm_solver_2_step(x, s, t_, r1=1 / 3, eps_cache=eps_cache) + x_high, eps_cache = self.dpm_solver_3_step(x, s, t_, eps_cache=eps_cache) + delta = torch.maximum(atol, rtol * torch.maximum(x_low.abs(), x_prev.abs())) + error = torch.linalg.norm((x_low - x_high) / delta) / x.numel() ** 0.5 + accept = pid.propose_step(error) + if accept: + x_prev = x_low + x = x_high + su * s_noise * noise_sampler(self.sigma(s), self.sigma(t)) + s = t + info['n_accept'] += 1 + else: + info['n_reject'] += 1 + info['nfe'] += order + info['steps'] += 1 + + if self.info_callback is not None: + self.info_callback({'x': x, 'i': info['steps'] - 1, 't': s, 't_up': s, 'denoised': denoised, 'error': error, 'h': pid.h, **info}) + + return x, info + + +@torch.no_grad() +def sample_dpm_fast(model, x, sigma_min, sigma_max, n, extra_args=None, callback=None, disable=None, eta=0., s_noise=1., noise_sampler=None): + """DPM-Solver-Fast (fixed step size). 
See https://arxiv.org/abs/2206.00927.""" + if sigma_min <= 0 or sigma_max <= 0: + raise ValueError('sigma_min and sigma_max must not be 0') + with tqdm(total=n, disable=disable) as pbar: + dpm_solver = DPMSolver(model, extra_args, eps_callback=pbar.update) + if callback is not None: + dpm_solver.info_callback = lambda info: callback({'sigma': dpm_solver.sigma(info['t']), 'sigma_hat': dpm_solver.sigma(info['t_up']), **info}) + return dpm_solver.dpm_solver_fast(x, dpm_solver.t(torch.tensor(sigma_max)), dpm_solver.t(torch.tensor(sigma_min)), n, eta, s_noise, noise_sampler) + + +@torch.no_grad() +def sample_dpm_adaptive(model, x, sigma_min, sigma_max, extra_args=None, callback=None, disable=None, order=3, rtol=0.05, atol=0.0078, h_init=0.05, pcoeff=0., icoeff=1., dcoeff=0., accept_safety=0.81, eta=0., s_noise=1., noise_sampler=None, return_info=False): + """DPM-Solver-12 and 23 (adaptive step size). See https://arxiv.org/abs/2206.00927.""" + if sigma_min <= 0 or sigma_max <= 0: + raise ValueError('sigma_min and sigma_max must not be 0') + with tqdm(disable=disable) as pbar: + dpm_solver = DPMSolver(model, extra_args, eps_callback=pbar.update) + if callback is not None: + dpm_solver.info_callback = lambda info: callback({'sigma': dpm_solver.sigma(info['t']), 'sigma_hat': dpm_solver.sigma(info['t_up']), **info}) + x, info = dpm_solver.dpm_solver_adaptive(x, dpm_solver.t(torch.tensor(sigma_max)), dpm_solver.t(torch.tensor(sigma_min)), order, rtol, atol, h_init, pcoeff, icoeff, dcoeff, accept_safety, eta, s_noise, noise_sampler) + if return_info: + return x, info + return x + + +@torch.no_grad() +def sample_dpmpp_2s_ancestral(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None): + if isinstance(model.inner_model.inner_model.model_sampling, comfy.model_sampling.CONST): + return sample_dpmpp_2s_ancestral_RF(model, x, sigmas, extra_args, callback, disable, eta, s_noise, noise_sampler) + + """Ancestral sampling with DPM-Solver++(2S) second-order steps.""" + extra_args = {} if extra_args is None else extra_args + noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler + s_in = x.new_ones([x.shape[0]]) + sigma_fn = lambda t: t.neg().exp() + t_fn = lambda sigma: sigma.log().neg() + + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + sigma_down, sigma_up = get_ancestral_step(sigmas[i], sigmas[i + 1], eta=eta) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + if sigma_down == 0: + # Euler method + d = to_d(x, sigmas[i], denoised) + dt = sigma_down - sigmas[i] + x = x + d * dt + else: + # DPM-Solver++(2S) + t, t_next = t_fn(sigmas[i]), t_fn(sigma_down) + r = 1 / 2 + h = t_next - t + s = t + r * h + x_2 = (sigma_fn(s) / sigma_fn(t)) * x - (-h * r).expm1() * denoised + denoised_2 = model(x_2, sigma_fn(s) * s_in, **extra_args) + x = (sigma_fn(t_next) / sigma_fn(t)) * x - (-h).expm1() * denoised_2 + # Noise addition + if sigmas[i + 1] > 0: + x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * s_noise * sigma_up + return x + + +@torch.no_grad() +def sample_dpmpp_2s_ancestral_RF(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None): + """Ancestral sampling with DPM-Solver++(2S) second-order steps.""" + extra_args = {} if extra_args is None else extra_args + noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler + s_in = 
x.new_ones([x.shape[0]]) + sigma_fn = lambda lbda: (lbda.exp() + 1) ** -1 + lambda_fn = lambda sigma: ((1-sigma)/sigma).log() + + # logged_x = x.unsqueeze(0) + + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + downstep_ratio = 1 + (sigmas[i+1]/sigmas[i] - 1) * eta + sigma_down = sigmas[i+1] * downstep_ratio + alpha_ip1 = 1 - sigmas[i+1] + alpha_down = 1 - sigma_down + renoise_coeff = (sigmas[i+1]**2 - sigma_down**2*alpha_ip1**2/alpha_down**2)**0.5 + # sigma_down, sigma_up = get_ancestral_step(sigmas[i], sigmas[i + 1], eta=eta) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + if sigmas[i + 1] == 0: + # Euler method + d = to_d(x, sigmas[i], denoised) + dt = sigma_down - sigmas[i] + x = x + d * dt + else: + # DPM-Solver++(2S) + if sigmas[i] == 1.0: + sigma_s = 0.9999 + else: + t_i, t_down = lambda_fn(sigmas[i]), lambda_fn(sigma_down) + r = 1 / 2 + h = t_down - t_i + s = t_i + r * h + sigma_s = sigma_fn(s) + # sigma_s = sigmas[i+1] + sigma_s_i_ratio = sigma_s / sigmas[i] + u = sigma_s_i_ratio * x + (1 - sigma_s_i_ratio) * denoised + D_i = model(u, sigma_s * s_in, **extra_args) + sigma_down_i_ratio = sigma_down / sigmas[i] + x = sigma_down_i_ratio * x + (1 - sigma_down_i_ratio) * D_i + # print("sigma_i", sigmas[i], "sigma_ip1", sigmas[i+1],"sigma_down", sigma_down, "sigma_down_i_ratio", sigma_down_i_ratio, "sigma_s_i_ratio", sigma_s_i_ratio, "renoise_coeff", renoise_coeff) + # Noise addition + if sigmas[i + 1] > 0 and eta > 0: + x = (alpha_ip1/alpha_down) * x + noise_sampler(sigmas[i], sigmas[i + 1]) * s_noise * renoise_coeff + # logged_x = torch.cat((logged_x, x.unsqueeze(0)), dim=0) + return x + +@torch.no_grad() +def sample_dpmpp_sde(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None, r=1 / 2): + """DPM-Solver++ (stochastic).""" + if len(sigmas) <= 1: + return x + + sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max() + seed = extra_args.get("seed", None) + noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=seed, cpu=True) if noise_sampler is None else noise_sampler + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + sigma_fn = lambda t: t.neg().exp() + t_fn = lambda sigma: sigma.log().neg() + + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + if sigmas[i + 1] == 0: + # Euler method + d = to_d(x, sigmas[i], denoised) + dt = sigmas[i + 1] - sigmas[i] + x = x + d * dt + else: + # DPM-Solver++ + t, t_next = t_fn(sigmas[i]), t_fn(sigmas[i + 1]) + h = t_next - t + s = t + h * r + fac = 1 / (2 * r) + + # Step 1 + sd, su = get_ancestral_step(sigma_fn(t), sigma_fn(s), eta) + s_ = t_fn(sd) + x_2 = (sigma_fn(s_) / sigma_fn(t)) * x - (t - s_).expm1() * denoised + x_2 = x_2 + noise_sampler(sigma_fn(t), sigma_fn(s)) * s_noise * su + denoised_2 = model(x_2, sigma_fn(s) * s_in, **extra_args) + + # Step 2 + sd, su = get_ancestral_step(sigma_fn(t), sigma_fn(t_next), eta) + t_next_ = t_fn(sd) + denoised_d = (1 - fac) * denoised + fac * denoised_2 + x = (sigma_fn(t_next_) / sigma_fn(t)) * x - (t - t_next_).expm1() * denoised_d + x = x + noise_sampler(sigma_fn(t), sigma_fn(t_next)) * s_noise * su + return x + + +@torch.no_grad() +def sample_dpmpp_2m(model, x, sigmas, extra_args=None, 
callback=None, disable=None): + """DPM-Solver++(2M).""" + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + sigma_fn = lambda t: t.neg().exp() + t_fn = lambda sigma: sigma.log().neg() + old_denoised = None + + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + t, t_next = t_fn(sigmas[i]), t_fn(sigmas[i + 1]) + h = t_next - t + if old_denoised is None or sigmas[i + 1] == 0: + x = (sigma_fn(t_next) / sigma_fn(t)) * x - (-h).expm1() * denoised + else: + h_last = t - t_fn(sigmas[i - 1]) + r = h_last / h + denoised_d = (1 + 1 / (2 * r)) * denoised - (1 / (2 * r)) * old_denoised + x = (sigma_fn(t_next) / sigma_fn(t)) * x - (-h).expm1() * denoised_d + old_denoised = denoised + return x + +@torch.no_grad() +def sample_dpmpp_2m_sde(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None, solver_type='midpoint'): + """DPM-Solver++(2M) SDE.""" + if len(sigmas) <= 1: + return x + + if solver_type not in {'heun', 'midpoint'}: + raise ValueError('solver_type must be \'heun\' or \'midpoint\'') + + seed = extra_args.get("seed", None) + sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max() + noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=seed, cpu=True) if noise_sampler is None else noise_sampler + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + + old_denoised = None + h_last = None + h = None + + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + if sigmas[i + 1] == 0: + # Denoising step + x = denoised + else: + # DPM-Solver++(2M) SDE + t, s = -sigmas[i].log(), -sigmas[i + 1].log() + h = s - t + eta_h = eta * h + + x = sigmas[i + 1] / sigmas[i] * (-eta_h).exp() * x + (-h - eta_h).expm1().neg() * denoised + + if old_denoised is not None: + r = h_last / h + if solver_type == 'heun': + x = x + ((-h - eta_h).expm1().neg() / (-h - eta_h) + 1) * (1 / r) * (denoised - old_denoised) + elif solver_type == 'midpoint': + x = x + 0.5 * (-h - eta_h).expm1().neg() * (1 / r) * (denoised - old_denoised) + + if eta: + x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * sigmas[i + 1] * (-2 * eta_h).expm1().neg().sqrt() * s_noise + + old_denoised = denoised + h_last = h + return x + +@torch.no_grad() +def sample_dpmpp_3m_sde(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None): + """DPM-Solver++(3M) SDE.""" + + if len(sigmas) <= 1: + return x + + seed = extra_args.get("seed", None) + sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max() + noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=seed, cpu=True) if noise_sampler is None else noise_sampler + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + + denoised_1, denoised_2 = None, None + h, h_1, h_2 = None, None, None + + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + if sigmas[i + 1] == 0: + # Denoising step + x = denoised + else: + t, s = -sigmas[i].log(), -sigmas[i + 
1].log() + h = s - t + h_eta = h * (eta + 1) + + x = torch.exp(-h_eta) * x + (-h_eta).expm1().neg() * denoised + + if h_2 is not None: + r0 = h_1 / h + r1 = h_2 / h + d1_0 = (denoised - denoised_1) / r0 + d1_1 = (denoised_1 - denoised_2) / r1 + d1 = d1_0 + (d1_0 - d1_1) * r0 / (r0 + r1) + d2 = (d1_0 - d1_1) / (r0 + r1) + phi_2 = h_eta.neg().expm1() / h_eta + 1 + phi_3 = phi_2 / h_eta - 0.5 + x = x + phi_2 * d1 - phi_3 * d2 + elif h_1 is not None: + r = h_1 / h + d = (denoised - denoised_1) / r + phi_2 = h_eta.neg().expm1() / h_eta + 1 + x = x + phi_2 * d + + if eta: + x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * sigmas[i + 1] * (-2 * h * eta).expm1().neg().sqrt() * s_noise + + denoised_1, denoised_2 = denoised, denoised_1 + h_1, h_2 = h, h_1 + return x + +@torch.no_grad() +def sample_dpmpp_3m_sde_gpu(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None): + if len(sigmas) <= 1: + return x + + sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max() + noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=extra_args.get("seed", None), cpu=False) if noise_sampler is None else noise_sampler + return sample_dpmpp_3m_sde(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, eta=eta, s_noise=s_noise, noise_sampler=noise_sampler) + +@torch.no_grad() +def sample_dpmpp_2m_sde_gpu(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None, solver_type='midpoint'): + if len(sigmas) <= 1: + return x + + sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max() + noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=extra_args.get("seed", None), cpu=False) if noise_sampler is None else noise_sampler + return sample_dpmpp_2m_sde(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, eta=eta, s_noise=s_noise, noise_sampler=noise_sampler, solver_type=solver_type) + +@torch.no_grad() +def sample_dpmpp_sde_gpu(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None, r=1 / 2): + if len(sigmas) <= 1: + return x + + sigma_min, sigma_max = sigmas[sigmas > 0].min(), sigmas.max() + noise_sampler = BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=extra_args.get("seed", None), cpu=False) if noise_sampler is None else noise_sampler + return sample_dpmpp_sde(model, x, sigmas, extra_args=extra_args, callback=callback, disable=disable, eta=eta, s_noise=s_noise, noise_sampler=noise_sampler, r=r) + + +def DDPMSampler_step(x, sigma, sigma_prev, noise, noise_sampler): + alpha_cumprod = 1 / ((sigma * sigma) + 1) + alpha_cumprod_prev = 1 / ((sigma_prev * sigma_prev) + 1) + alpha = (alpha_cumprod / alpha_cumprod_prev) + + mu = (1.0 / alpha).sqrt() * (x - (1 - alpha) * noise / (1 - alpha_cumprod).sqrt()) + if sigma_prev > 0: + mu += ((1 - alpha) * (1. - alpha_cumprod_prev) / (1. 
- alpha_cumprod)).sqrt() * noise_sampler(sigma, sigma_prev) + return mu + +def generic_step_sampler(model, x, sigmas, extra_args=None, callback=None, disable=None, noise_sampler=None, step_function=None): + extra_args = {} if extra_args is None else extra_args + noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler + s_in = x.new_ones([x.shape[0]]) + + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + x = step_function(x / torch.sqrt(1.0 + sigmas[i] ** 2.0), sigmas[i], sigmas[i + 1], (x - denoised) / sigmas[i], noise_sampler) + if sigmas[i + 1] != 0: + x *= torch.sqrt(1.0 + sigmas[i + 1] ** 2.0) + return x + + +@torch.no_grad() +def sample_ddpm(model, x, sigmas, extra_args=None, callback=None, disable=None, noise_sampler=None): + return generic_step_sampler(model, x, sigmas, extra_args, callback, disable, noise_sampler, DDPMSampler_step) + +@torch.no_grad() +def sample_lcm(model, x, sigmas, extra_args=None, callback=None, disable=None, noise_sampler=None): + extra_args = {} if extra_args is None else extra_args + noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler + s_in = x.new_ones([x.shape[0]]) + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + + x = denoised + if sigmas[i + 1] > 0: + x = model.inner_model.inner_model.model_sampling.noise_scaling(sigmas[i + 1], noise_sampler(sigmas[i], sigmas[i + 1]), x) + return x + + + +@torch.no_grad() +def sample_heunpp2(model, x, sigmas, extra_args=None, callback=None, disable=None, s_churn=0., s_tmin=0., s_tmax=float('inf'), s_noise=1.): + # From MIT licensed: https://github.com/Carzit/sd-webui-samplers-scheduler/ + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + s_end = sigmas[-1] + for i in trange(len(sigmas) - 1, disable=disable): + gamma = min(s_churn / (len(sigmas) - 1), 2 ** 0.5 - 1) if s_tmin <= sigmas[i] <= s_tmax else 0. 
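+        # The branch below degrades gracefully near the end of the schedule:
+        # plain Euler on the final step, a sigma-weighted Heun step when only
+        # one step remains after this one, and otherwise "Heun++", which adds
+        # a third model evaluation at sigmas[i + 2] and blends the three
+        # slopes d, d_2, d_3 with weights derived from their noise levels
+        # relative to w = 2 * sigmas[0] (two slopes) or 3 * sigmas[0] (three).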
+ eps = torch.randn_like(x) * s_noise + sigma_hat = sigmas[i] * (gamma + 1) + if gamma > 0: + x = x + eps * (sigma_hat ** 2 - sigmas[i] ** 2) ** 0.5 + denoised = model(x, sigma_hat * s_in, **extra_args) + d = to_d(x, sigma_hat, denoised) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigma_hat, 'denoised': denoised}) + dt = sigmas[i + 1] - sigma_hat + if sigmas[i + 1] == s_end: + # Euler method + x = x + d * dt + elif sigmas[i + 2] == s_end: + + # Heun's method + x_2 = x + d * dt + denoised_2 = model(x_2, sigmas[i + 1] * s_in, **extra_args) + d_2 = to_d(x_2, sigmas[i + 1], denoised_2) + + w = 2 * sigmas[0] + w2 = sigmas[i+1]/w + w1 = 1 - w2 + + d_prime = d * w1 + d_2 * w2 + + + x = x + d_prime * dt + + else: + # Heun++ + x_2 = x + d * dt + denoised_2 = model(x_2, sigmas[i + 1] * s_in, **extra_args) + d_2 = to_d(x_2, sigmas[i + 1], denoised_2) + dt_2 = sigmas[i + 2] - sigmas[i + 1] + + x_3 = x_2 + d_2 * dt_2 + denoised_3 = model(x_3, sigmas[i + 2] * s_in, **extra_args) + d_3 = to_d(x_3, sigmas[i + 2], denoised_3) + + w = 3 * sigmas[0] + w2 = sigmas[i + 1] / w + w3 = sigmas[i + 2] / w + w1 = 1 - w2 - w3 + + d_prime = w1 * d + w2 * d_2 + w3 * d_3 + x = x + d_prime * dt + return x + + +#From https://github.com/zju-pi/diff-sampler/blob/main/diff-solvers-main/solvers.py +#under Apache 2 license +def sample_ipndm(model, x, sigmas, extra_args=None, callback=None, disable=None, max_order=4): + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + + x_next = x + + buffer_model = [] + for i in trange(len(sigmas) - 1, disable=disable): + t_cur = sigmas[i] + t_next = sigmas[i + 1] + + x_cur = x_next + + denoised = model(x_cur, t_cur * s_in, **extra_args) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + + d_cur = (x_cur - denoised) / t_cur + + order = min(max_order, i+1) + if order == 1: # First Euler step. + x_next = x_cur + (t_next - t_cur) * d_cur + elif order == 2: # Use one history point. + x_next = x_cur + (t_next - t_cur) * (3 * d_cur - buffer_model[-1]) / 2 + elif order == 3: # Use two history points. + x_next = x_cur + (t_next - t_cur) * (23 * d_cur - 16 * buffer_model[-1] + 5 * buffer_model[-2]) / 12 + elif order == 4: # Use three history points. + x_next = x_cur + (t_next - t_cur) * (55 * d_cur - 59 * buffer_model[-1] + 37 * buffer_model[-2] - 9 * buffer_model[-3]) / 24 + + if len(buffer_model) == max_order - 1: + for k in range(max_order - 2): + buffer_model[k] = buffer_model[k+1] + buffer_model[-1] = d_cur + else: + buffer_model.append(d_cur) + + return x_next + +#From https://github.com/zju-pi/diff-sampler/blob/main/diff-solvers-main/solvers.py +#under Apache 2 license +def sample_ipndm_v(model, x, sigmas, extra_args=None, callback=None, disable=None, max_order=4): + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + + x_next = x + t_steps = sigmas + + buffer_model = [] + for i in trange(len(sigmas) - 1, disable=disable): + t_cur = sigmas[i] + t_next = sigmas[i + 1] + + x_cur = x_next + + denoised = model(x_cur, t_cur * s_in, **extra_args) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + + d_cur = (x_cur - denoised) / t_cur + + order = min(max_order, i+1) + if order == 1: # First Euler step. + x_next = x_cur + (t_next - t_cur) * d_cur + elif order == 2: # Use one history point. 
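+            # Variable-step Adams-Bashforth: with unequal steps the weights
+            # depend on the ratio h_n / h_n_1. Sanity check: for equal steps
+            # (h_n == h_n_1), coeff1 = (2 + 1) / 2 = 3/2 and coeff2 = -1/2,
+            # recovering the fixed-step order-2 rule used by sample_ipndm above.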
+ h_n = (t_next - t_cur) + h_n_1 = (t_cur - t_steps[i-1]) + coeff1 = (2 + (h_n / h_n_1)) / 2 + coeff2 = -(h_n / h_n_1) / 2 + x_next = x_cur + (t_next - t_cur) * (coeff1 * d_cur + coeff2 * buffer_model[-1]) + elif order == 3: # Use two history points. + h_n = (t_next - t_cur) + h_n_1 = (t_cur - t_steps[i-1]) + h_n_2 = (t_steps[i-1] - t_steps[i-2]) + temp = (1 - h_n / (3 * (h_n + h_n_1)) * (h_n * (h_n + h_n_1)) / (h_n_1 * (h_n_1 + h_n_2))) / 2 + coeff1 = (2 + (h_n / h_n_1)) / 2 + temp + coeff2 = -(h_n / h_n_1) / 2 - (1 + h_n_1 / h_n_2) * temp + coeff3 = temp * h_n_1 / h_n_2 + x_next = x_cur + (t_next - t_cur) * (coeff1 * d_cur + coeff2 * buffer_model[-1] + coeff3 * buffer_model[-2]) + elif order == 4: # Use three history points. + h_n = (t_next - t_cur) + h_n_1 = (t_cur - t_steps[i-1]) + h_n_2 = (t_steps[i-1] - t_steps[i-2]) + h_n_3 = (t_steps[i-2] - t_steps[i-3]) + temp1 = (1 - h_n / (3 * (h_n + h_n_1)) * (h_n * (h_n + h_n_1)) / (h_n_1 * (h_n_1 + h_n_2))) / 2 + temp2 = ((1 - h_n / (3 * (h_n + h_n_1))) / 2 + (1 - h_n / (2 * (h_n + h_n_1))) * h_n / (6 * (h_n + h_n_1 + h_n_2))) \ + * (h_n * (h_n + h_n_1) * (h_n + h_n_1 + h_n_2)) / (h_n_1 * (h_n_1 + h_n_2) * (h_n_1 + h_n_2 + h_n_3)) + coeff1 = (2 + (h_n / h_n_1)) / 2 + temp1 + temp2 + coeff2 = -(h_n / h_n_1) / 2 - (1 + h_n_1 / h_n_2) * temp1 - (1 + (h_n_1 / h_n_2) + (h_n_1 * (h_n_1 + h_n_2) / (h_n_2 * (h_n_2 + h_n_3)))) * temp2 + coeff3 = temp1 * h_n_1 / h_n_2 + ((h_n_1 / h_n_2) + (h_n_1 * (h_n_1 + h_n_2) / (h_n_2 * (h_n_2 + h_n_3))) * (1 + h_n_2 / h_n_3)) * temp2 + coeff4 = -temp2 * (h_n_1 * (h_n_1 + h_n_2) / (h_n_2 * (h_n_2 + h_n_3))) * h_n_1 / h_n_2 + x_next = x_cur + (t_next - t_cur) * (coeff1 * d_cur + coeff2 * buffer_model[-1] + coeff3 * buffer_model[-2] + coeff4 * buffer_model[-3]) + + if len(buffer_model) == max_order - 1: + for k in range(max_order - 2): + buffer_model[k] = buffer_model[k+1] + buffer_model[-1] = d_cur.detach() + else: + buffer_model.append(d_cur.detach()) + + return x_next + +#From https://github.com/zju-pi/diff-sampler/blob/main/diff-solvers-main/solvers.py +#under Apache 2 license +@torch.no_grad() +def sample_deis(model, x, sigmas, extra_args=None, callback=None, disable=None, max_order=3, deis_mode='tab'): + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + + x_next = x + t_steps = sigmas + + coeff_list = deis.get_deis_coeff_list(t_steps, max_order, deis_mode=deis_mode) + + buffer_model = [] + for i in trange(len(sigmas) - 1, disable=disable): + t_cur = sigmas[i] + t_next = sigmas[i + 1] + + x_cur = x_next + + denoised = model(x_cur, t_cur * s_in, **extra_args) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + + d_cur = (x_cur - denoised) / t_cur + + order = min(max_order, i+1) + if t_next <= 0: + order = 1 + + if order == 1: # First Euler step. + x_next = x_cur + (t_next - t_cur) * d_cur + elif order == 2: # Use one history point. + coeff_cur, coeff_prev1 = coeff_list[i] + x_next = x_cur + coeff_cur * d_cur + coeff_prev1 * buffer_model[-1] + elif order == 3: # Use two history points. + coeff_cur, coeff_prev1, coeff_prev2 = coeff_list[i] + x_next = x_cur + coeff_cur * d_cur + coeff_prev1 * buffer_model[-1] + coeff_prev2 * buffer_model[-2] + elif order == 4: # Use three history points. 
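+            # All DEIS weights were precomputed by deis.get_deis_coeff_list
+            # above: 'tab' mode approximates each coefficient by summation over
+            # N points, while 'rhoab' integrates the Lagrange polynomials in
+            # closed form. coeff_list[0] is empty, so the first step always
+            # takes the first-order (Euler) branch.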
+ coeff_cur, coeff_prev1, coeff_prev2, coeff_prev3 = coeff_list[i] + x_next = x_cur + coeff_cur * d_cur + coeff_prev1 * buffer_model[-1] + coeff_prev2 * buffer_model[-2] + coeff_prev3 * buffer_model[-3] + + if len(buffer_model) == max_order - 1: + for k in range(max_order - 2): + buffer_model[k] = buffer_model[k+1] + buffer_model[-1] = d_cur.detach() + else: + buffer_model.append(d_cur.detach()) + + return x_next + +@torch.no_grad() +def sample_euler_cfg_pp(model, x, sigmas, extra_args=None, callback=None, disable=None): + extra_args = {} if extra_args is None else extra_args + + temp = [0] + def post_cfg_function(args): + temp[0] = args["uncond_denoised"] + return args["denoised"] + + model_options = extra_args.get("model_options", {}).copy() + extra_args["model_options"] = comfy.model_patcher.set_model_options_post_cfg_function(model_options, post_cfg_function, disable_cfg1_optimization=True) + + s_in = x.new_ones([x.shape[0]]) + for i in trange(len(sigmas) - 1, disable=disable): + sigma_hat = sigmas[i] + denoised = model(x, sigma_hat * s_in, **extra_args) + d = to_d(x, sigma_hat, temp[0]) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigma_hat, 'denoised': denoised}) + # Euler method + x = denoised + d * sigmas[i + 1] + return x + +@torch.no_grad() +def sample_euler_ancestral_cfg_pp(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None): + """Ancestral sampling with Euler method steps.""" + extra_args = {} if extra_args is None else extra_args + noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler + + temp = [0] + def post_cfg_function(args): + temp[0] = args["uncond_denoised"] + return args["denoised"] + + model_options = extra_args.get("model_options", {}).copy() + extra_args["model_options"] = comfy.model_patcher.set_model_options_post_cfg_function(model_options, post_cfg_function, disable_cfg1_optimization=True) + + s_in = x.new_ones([x.shape[0]]) + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + sigma_down, sigma_up = get_ancestral_step(sigmas[i], sigmas[i + 1], eta=eta) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + d = to_d(x, sigmas[i], temp[0]) + # Euler method + x = denoised + d * sigma_down + if sigmas[i + 1] > 0: + x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * s_noise * sigma_up + return x +@torch.no_grad() +def sample_dpmpp_2s_ancestral_cfg_pp(model, x, sigmas, extra_args=None, callback=None, disable=None, eta=1., s_noise=1., noise_sampler=None): + """Ancestral sampling with DPM-Solver++(2S) second-order steps.""" + extra_args = {} if extra_args is None else extra_args + noise_sampler = default_noise_sampler(x) if noise_sampler is None else noise_sampler + + temp = [0] + def post_cfg_function(args): + temp[0] = args["uncond_denoised"] + return args["denoised"] + + model_options = extra_args.get("model_options", {}).copy() + extra_args["model_options"] = comfy.model_patcher.set_model_options_post_cfg_function(model_options, post_cfg_function, disable_cfg1_optimization=True) + + s_in = x.new_ones([x.shape[0]]) + sigma_fn = lambda t: t.neg().exp() + t_fn = lambda sigma: sigma.log().neg() + + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + sigma_down, sigma_up = get_ancestral_step(sigmas[i], sigmas[i + 1], eta=eta) + if callback is not None: + callback({'x': x, 
'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + if sigma_down == 0: + # Euler method + d = to_d(x, sigmas[i], temp[0]) + x = denoised + d * sigma_down + else: + # DPM-Solver++(2S) + t, t_next = t_fn(sigmas[i]), t_fn(sigma_down) + # r = torch.sinh(1 + (2 - eta) * (t_next - t) / (t - t_fn(sigma_up))) works only on non-cfgpp, weird + r = 1 / 2 + h = t_next - t + s = t + r * h + x_2 = (sigma_fn(s) / sigma_fn(t)) * (x + (denoised - temp[0])) - (-h * r).expm1() * denoised + denoised_2 = model(x_2, sigma_fn(s) * s_in, **extra_args) + x = (sigma_fn(t_next) / sigma_fn(t)) * (x + (denoised - temp[0])) - (-h).expm1() * denoised_2 + # Noise addition + if sigmas[i + 1] > 0: + x = x + noise_sampler(sigmas[i], sigmas[i + 1]) * s_noise * sigma_up + return x + +@torch.no_grad() +def sample_dpmpp_2m_cfg_pp(model, x, sigmas, extra_args=None, callback=None, disable=None): + """DPM-Solver++(2M).""" + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + t_fn = lambda sigma: sigma.log().neg() + + old_uncond_denoised = None + uncond_denoised = None + def post_cfg_function(args): + nonlocal uncond_denoised + uncond_denoised = args["uncond_denoised"] + return args["denoised"] + + model_options = extra_args.get("model_options", {}).copy() + extra_args["model_options"] = comfy.model_patcher.set_model_options_post_cfg_function(model_options, post_cfg_function, disable_cfg1_optimization=True) + + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + t, t_next = t_fn(sigmas[i]), t_fn(sigmas[i + 1]) + h = t_next - t + if old_uncond_denoised is None or sigmas[i + 1] == 0: + denoised_mix = -torch.exp(-h) * uncond_denoised + else: + h_last = t - t_fn(sigmas[i - 1]) + r = h_last / h + denoised_mix = -torch.exp(-h) * uncond_denoised - torch.expm1(-h) * (1 / (2 * r)) * (denoised - old_uncond_denoised) + x = denoised + denoised_mix + torch.exp(-h) * x + old_uncond_denoised = uncond_denoised + return x diff --git a/src/comfyui/comfy/k_diffusion/utils.py b/src/comfyui/comfy/k_diffusion/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..a644df2f3cf82b32ac6e9bf2cb7bfc70c95e05f9 --- /dev/null +++ b/src/comfyui/comfy/k_diffusion/utils.py @@ -0,0 +1,313 @@ +from contextlib import contextmanager +import hashlib +import math +from pathlib import Path +import shutil +import urllib +import warnings + +from PIL import Image +import torch +from torch import nn, optim +from torch.utils import data + + +def hf_datasets_augs_helper(examples, transform, image_key, mode='RGB'): + """Apply passed in transforms for HuggingFace Datasets.""" + images = [transform(image.convert(mode)) for image in examples[image_key]] + return {image_key: images} + + +def append_dims(x, target_dims): + """Appends dimensions to the end of a tensor until it has target_dims dimensions.""" + dims_to_append = target_dims - x.ndim + if dims_to_append < 0: + raise ValueError(f'input has {x.ndim} dims but target_dims is {target_dims}, which is less') + expanded = x[(...,) + (None,) * dims_to_append] + # MPS will get inf values if it tries to index into the new axes, but detaching fixes this. 
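+    # Example: append_dims(sigma, 4) turns a per-batch sigma of shape [B] into
+    # shape [B, 1, 1, 1] so it broadcasts against latents of shape [B, C, H, W];
+    # to_d() relies on this to divide (x - denoised) by sigma elementwise.
+    # MPS workaround reference: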
+ # https://github.com/pytorch/pytorch/issues/84364 + return expanded.detach().clone() if expanded.device.type == 'mps' else expanded + + +def n_params(module): + """Returns the number of trainable parameters in a module.""" + return sum(p.numel() for p in module.parameters()) + + +def download_file(path, url, digest=None): + """Downloads a file if it does not exist, optionally checking its SHA-256 hash.""" + path = Path(path) + path.parent.mkdir(parents=True, exist_ok=True) + if not path.exists(): + with urllib.request.urlopen(url) as response, open(path, 'wb') as f: + shutil.copyfileobj(response, f) + if digest is not None: + file_digest = hashlib.sha256(open(path, 'rb').read()).hexdigest() + if digest != file_digest: + raise OSError(f'hash of {path} (url: {url}) failed to validate') + return path + + +@contextmanager +def train_mode(model, mode=True): + """A context manager that places a model into training mode and restores + the previous mode on exit.""" + modes = [module.training for module in model.modules()] + try: + yield model.train(mode) + finally: + for i, module in enumerate(model.modules()): + module.training = modes[i] + + +def eval_mode(model): + """A context manager that places a model into evaluation mode and restores + the previous mode on exit.""" + return train_mode(model, False) + + +@torch.no_grad() +def ema_update(model, averaged_model, decay): + """Incorporates updated model parameters into an exponential moving averaged + version of a model. It should be called after each optimizer step.""" + model_params = dict(model.named_parameters()) + averaged_params = dict(averaged_model.named_parameters()) + assert model_params.keys() == averaged_params.keys() + + for name, param in model_params.items(): + averaged_params[name].mul_(decay).add_(param, alpha=1 - decay) + + model_buffers = dict(model.named_buffers()) + averaged_buffers = dict(averaged_model.named_buffers()) + assert model_buffers.keys() == averaged_buffers.keys() + + for name, buf in model_buffers.items(): + averaged_buffers[name].copy_(buf) + + +class EMAWarmup: + """Implements an EMA warmup using an inverse decay schedule. + If inv_gamma=1 and power=1, implements a simple average. inv_gamma=1, power=2/3 are + good values for models you plan to train for a million or more steps (reaches decay + factor 0.999 at 31.6K steps, 0.9999 at 1M steps), inv_gamma=1, power=3/4 for models + you plan to train for less (reaches decay factor 0.999 at 10K steps, 0.9999 at + 215.4k steps). + Args: + inv_gamma (float): Inverse multiplicative factor of EMA warmup. Default: 1. + power (float): Exponential factor of EMA warmup. Default: 1. + min_value (float): The minimum EMA decay rate. Default: 0. + max_value (float): The maximum EMA decay rate. Default: 1. + start_at (int): The epoch to start averaging at. Default: 0. + last_epoch (int): The index of last epoch. Default: 0. + """ + + def __init__(self, inv_gamma=1., power=1., min_value=0., max_value=1., start_at=0, + last_epoch=0): + self.inv_gamma = inv_gamma + self.power = power + self.min_value = min_value + self.max_value = max_value + self.start_at = start_at + self.last_epoch = last_epoch + + def state_dict(self): + """Returns the state of the class as a :class:`dict`.""" + return dict(self.__dict__.items()) + + def load_state_dict(self, state_dict): + """Loads the class's state. + Args: + state_dict (dict): scaler state. Should be an object returned + from a call to :meth:`state_dict`. 
+ """ + self.__dict__.update(state_dict) + + def get_value(self): + """Gets the current EMA decay rate.""" + epoch = max(0, self.last_epoch - self.start_at) + value = 1 - (1 + epoch / self.inv_gamma) ** -self.power + return 0. if epoch < 0 else min(self.max_value, max(self.min_value, value)) + + def step(self): + """Updates the step count.""" + self.last_epoch += 1 + + +class InverseLR(optim.lr_scheduler._LRScheduler): + """Implements an inverse decay learning rate schedule with an optional exponential + warmup. When last_epoch=-1, sets initial lr as lr. + inv_gamma is the number of steps/epochs required for the learning rate to decay to + (1 / 2)**power of its original value. + Args: + optimizer (Optimizer): Wrapped optimizer. + inv_gamma (float): Inverse multiplicative factor of learning rate decay. Default: 1. + power (float): Exponential factor of learning rate decay. Default: 1. + warmup (float): Exponential warmup factor (0 <= warmup < 1, 0 to disable) + Default: 0. + min_lr (float): The minimum learning rate. Default: 0. + last_epoch (int): The index of last epoch. Default: -1. + verbose (bool): If ``True``, prints a message to stdout for + each update. Default: ``False``. + """ + + def __init__(self, optimizer, inv_gamma=1., power=1., warmup=0., min_lr=0., + last_epoch=-1, verbose=False): + self.inv_gamma = inv_gamma + self.power = power + if not 0. <= warmup < 1: + raise ValueError('Invalid value for warmup') + self.warmup = warmup + self.min_lr = min_lr + super().__init__(optimizer, last_epoch, verbose) + + def get_lr(self): + if not self._get_lr_called_within_step: + warnings.warn("To get the last learning rate computed by the scheduler, " + "please use `get_last_lr()`.") + + return self._get_closed_form_lr() + + def _get_closed_form_lr(self): + warmup = 1 - self.warmup ** (self.last_epoch + 1) + lr_mult = (1 + self.last_epoch / self.inv_gamma) ** -self.power + return [warmup * max(self.min_lr, base_lr * lr_mult) + for base_lr in self.base_lrs] + + +class ExponentialLR(optim.lr_scheduler._LRScheduler): + """Implements an exponential learning rate schedule with an optional exponential + warmup. When last_epoch=-1, sets initial lr as lr. Decays the learning rate + continuously by decay (default 0.5) every num_steps steps. + Args: + optimizer (Optimizer): Wrapped optimizer. + num_steps (float): The number of steps to decay the learning rate by decay in. + decay (float): The factor by which to decay the learning rate every num_steps + steps. Default: 0.5. + warmup (float): Exponential warmup factor (0 <= warmup < 1, 0 to disable) + Default: 0. + min_lr (float): The minimum learning rate. Default: 0. + last_epoch (int): The index of last epoch. Default: -1. + verbose (bool): If ``True``, prints a message to stdout for + each update. Default: ``False``. + """ + + def __init__(self, optimizer, num_steps, decay=0.5, warmup=0., min_lr=0., + last_epoch=-1, verbose=False): + self.num_steps = num_steps + self.decay = decay + if not 0. 
<= warmup < 1:
+            raise ValueError('Invalid value for warmup')
+        self.warmup = warmup
+        self.min_lr = min_lr
+        super().__init__(optimizer, last_epoch, verbose)
+
+    def get_lr(self):
+        if not self._get_lr_called_within_step:
+            warnings.warn("To get the last learning rate computed by the scheduler, "
+                          "please use `get_last_lr()`.")
+
+        return self._get_closed_form_lr()
+
+    def _get_closed_form_lr(self):
+        warmup = 1 - self.warmup ** (self.last_epoch + 1)
+        lr_mult = (self.decay ** (1 / self.num_steps)) ** self.last_epoch
+        return [warmup * max(self.min_lr, base_lr * lr_mult)
+                for base_lr in self.base_lrs]
+
+
+def rand_log_normal(shape, loc=0., scale=1., device='cpu', dtype=torch.float32):
+    """Draws samples from a lognormal distribution."""
+    return (torch.randn(shape, device=device, dtype=dtype) * scale + loc).exp()
+
+
+def rand_log_logistic(shape, loc=0., scale=1., min_value=0., max_value=float('inf'), device='cpu', dtype=torch.float32):
+    """Draws samples from an optionally truncated log-logistic distribution."""
+    min_value = torch.as_tensor(min_value, device=device, dtype=torch.float64)
+    max_value = torch.as_tensor(max_value, device=device, dtype=torch.float64)
+    min_cdf = min_value.log().sub(loc).div(scale).sigmoid()
+    max_cdf = max_value.log().sub(loc).div(scale).sigmoid()
+    u = torch.rand(shape, device=device, dtype=torch.float64) * (max_cdf - min_cdf) + min_cdf
+    return u.logit().mul(scale).add(loc).exp().to(dtype)
+
+
+def rand_log_uniform(shape, min_value, max_value, device='cpu', dtype=torch.float32):
+    """Draws samples from a log-uniform distribution."""
+    min_value = math.log(min_value)
+    max_value = math.log(max_value)
+    return (torch.rand(shape, device=device, dtype=dtype) * (max_value - min_value) + min_value).exp()
+
+
+def rand_v_diffusion(shape, sigma_data=1., min_value=0., max_value=float('inf'), device='cpu', dtype=torch.float32):
+    """Draws samples from a truncated v-diffusion training timestep distribution."""
+    min_cdf = math.atan(min_value / sigma_data) * 2 / math.pi
+    max_cdf = math.atan(max_value / sigma_data) * 2 / math.pi
+    u = torch.rand(shape, device=device, dtype=dtype) * (max_cdf - min_cdf) + min_cdf
+    return torch.tan(u * math.pi / 2) * sigma_data
+
+
+def rand_split_log_normal(shape, loc, scale_1, scale_2, device='cpu', dtype=torch.float32):
+    """Draws samples from a split lognormal distribution."""
+    n = torch.randn(shape, device=device, dtype=dtype).abs()
+    u = torch.rand(shape, device=device, dtype=dtype)
+    n_left = n * -scale_1 + loc
+    n_right = n * scale_2 + loc
+    ratio = scale_1 / (scale_1 + scale_2)
+    return torch.where(u < ratio, n_left, n_right).exp()
+
+
+class FolderOfImages(data.Dataset):
+    """Recursively finds all images in a directory.
It does not support + classes/targets.""" + + IMG_EXTENSIONS = {'.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.tif', '.tiff', '.webp'} + + def __init__(self, root, transform=None): + super().__init__() + self.root = Path(root) + self.transform = nn.Identity() if transform is None else transform + self.paths = sorted(path for path in self.root.rglob('*') if path.suffix.lower() in self.IMG_EXTENSIONS) + + def __repr__(self): + return f'FolderOfImages(root="{self.root}", len: {len(self)})' + + def __len__(self): + return len(self.paths) + + def __getitem__(self, key): + path = self.paths[key] + with open(path, 'rb') as f: + image = Image.open(f).convert('RGB') + image = self.transform(image) + return image, + + +class CSVLogger: + def __init__(self, filename, columns): + self.filename = Path(filename) + self.columns = columns + if self.filename.exists(): + self.file = open(self.filename, 'a') + else: + self.file = open(self.filename, 'w') + self.write(*self.columns) + + def write(self, *args): + print(*args, sep=',', file=self.file, flush=True) + + +@contextmanager +def tf32_mode(cudnn=None, matmul=None): + """A context manager that sets whether TF32 is allowed on cuDNN or matmul.""" + cudnn_old = torch.backends.cudnn.allow_tf32 + matmul_old = torch.backends.cuda.matmul.allow_tf32 + try: + if cudnn is not None: + torch.backends.cudnn.allow_tf32 = cudnn + if matmul is not None: + torch.backends.cuda.matmul.allow_tf32 = matmul + yield + finally: + if cudnn is not None: + torch.backends.cudnn.allow_tf32 = cudnn_old + if matmul is not None: + torch.backends.cuda.matmul.allow_tf32 = matmul_old diff --git a/src/comfyui/comfy/latent_formats.py b/src/comfyui/comfy/latent_formats.py new file mode 100644 index 0000000000000000000000000000000000000000..a48f60c74e9d0f87ee847cd2f774b6a20a9b083e --- /dev/null +++ b/src/comfyui/comfy/latent_formats.py @@ -0,0 +1,204 @@ +import torch + +class LatentFormat: + scale_factor = 1.0 + latent_channels = 4 + latent_rgb_factors = None + latent_rgb_factors_bias = None + taesd_decoder_name = None + + def process_in(self, latent): + return latent * self.scale_factor + + def process_out(self, latent): + return latent / self.scale_factor + +class SD15(LatentFormat): + def __init__(self, scale_factor=0.18215): + self.scale_factor = scale_factor + self.latent_rgb_factors = [ + # R G B + [ 0.3512, 0.2297, 0.3227], + [ 0.3250, 0.4974, 0.2350], + [-0.2829, 0.1762, 0.2721], + [-0.2120, -0.2616, -0.7177] + ] + self.taesd_decoder_name = "taesd_decoder" + +class SDXL(LatentFormat): + scale_factor = 0.13025 + + def __init__(self): + self.latent_rgb_factors = [ + # R G B + [ 0.3651, 0.4232, 0.4341], + [-0.2533, -0.0042, 0.1068], + [ 0.1076, 0.1111, -0.0362], + [-0.3165, -0.2492, -0.2188] + ] + self.latent_rgb_factors_bias = [ 0.1084, -0.0175, -0.0011] + + self.taesd_decoder_name = "taesdxl_decoder" + +class SDXL_Playground_2_5(LatentFormat): + def __init__(self): + self.scale_factor = 0.5 + self.latents_mean = torch.tensor([-1.6574, 1.886, -1.383, 2.5155]).view(1, 4, 1, 1) + self.latents_std = torch.tensor([8.4927, 5.9022, 6.5498, 5.2299]).view(1, 4, 1, 1) + + self.latent_rgb_factors = [ + # R G B + [ 0.3920, 0.4054, 0.4549], + [-0.2634, -0.0196, 0.0653], + [ 0.0568, 0.1687, -0.0755], + [-0.3112, -0.2359, -0.2076] + ] + self.taesd_decoder_name = "taesdxl_decoder" + + def process_in(self, latent): + latents_mean = self.latents_mean.to(latent.device, latent.dtype) + latents_std = self.latents_std.to(latent.device, latent.dtype) + return (latent - latents_mean) * self.scale_factor 
/ latents_std + + def process_out(self, latent): + latents_mean = self.latents_mean.to(latent.device, latent.dtype) + latents_std = self.latents_std.to(latent.device, latent.dtype) + return latent * latents_std / self.scale_factor + latents_mean + + +class SD_X4(LatentFormat): + def __init__(self): + self.scale_factor = 0.08333 + self.latent_rgb_factors = [ + [-0.2340, -0.3863, -0.3257], + [ 0.0994, 0.0885, -0.0908], + [-0.2833, -0.2349, -0.3741], + [ 0.2523, -0.0055, -0.1651] + ] + +class SC_Prior(LatentFormat): + latent_channels = 16 + def __init__(self): + self.scale_factor = 1.0 + self.latent_rgb_factors = [ + [-0.0326, -0.0204, -0.0127], + [-0.1592, -0.0427, 0.0216], + [ 0.0873, 0.0638, -0.0020], + [-0.0602, 0.0442, 0.1304], + [ 0.0800, -0.0313, -0.1796], + [-0.0810, -0.0638, -0.1581], + [ 0.1791, 0.1180, 0.0967], + [ 0.0740, 0.1416, 0.0432], + [-0.1745, -0.1888, -0.1373], + [ 0.2412, 0.1577, 0.0928], + [ 0.1908, 0.0998, 0.0682], + [ 0.0209, 0.0365, -0.0092], + [ 0.0448, -0.0650, -0.1728], + [-0.1658, -0.1045, -0.1308], + [ 0.0542, 0.1545, 0.1325], + [-0.0352, -0.1672, -0.2541] + ] + +class SC_B(LatentFormat): + def __init__(self): + self.scale_factor = 1.0 / 0.43 + self.latent_rgb_factors = [ + [ 0.1121, 0.2006, 0.1023], + [-0.2093, -0.0222, -0.0195], + [-0.3087, -0.1535, 0.0366], + [ 0.0290, -0.1574, -0.4078] + ] + +class SD3(LatentFormat): + latent_channels = 16 + def __init__(self): + self.scale_factor = 1.5305 + self.shift_factor = 0.0609 + self.latent_rgb_factors = [ + [-0.0922, -0.0175, 0.0749], + [ 0.0311, 0.0633, 0.0954], + [ 0.1994, 0.0927, 0.0458], + [ 0.0856, 0.0339, 0.0902], + [ 0.0587, 0.0272, -0.0496], + [-0.0006, 0.1104, 0.0309], + [ 0.0978, 0.0306, 0.0427], + [-0.0042, 0.1038, 0.1358], + [-0.0194, 0.0020, 0.0669], + [-0.0488, 0.0130, -0.0268], + [ 0.0922, 0.0988, 0.0951], + [-0.0278, 0.0524, -0.0542], + [ 0.0332, 0.0456, 0.0895], + [-0.0069, -0.0030, -0.0810], + [-0.0596, -0.0465, -0.0293], + [-0.1448, -0.1463, -0.1189] + ] + self.latent_rgb_factors_bias = [0.2394, 0.2135, 0.1925] + self.taesd_decoder_name = "taesd3_decoder" + + def process_in(self, latent): + return (latent - self.shift_factor) * self.scale_factor + + def process_out(self, latent): + return (latent / self.scale_factor) + self.shift_factor + +class StableAudio1(LatentFormat): + latent_channels = 64 + +class Flux(SD3): + latent_channels = 16 + def __init__(self): + self.scale_factor = 0.3611 + self.shift_factor = 0.1159 + self.latent_rgb_factors =[ + [-0.0346, 0.0244, 0.0681], + [ 0.0034, 0.0210, 0.0687], + [ 0.0275, -0.0668, -0.0433], + [-0.0174, 0.0160, 0.0617], + [ 0.0859, 0.0721, 0.0329], + [ 0.0004, 0.0383, 0.0115], + [ 0.0405, 0.0861, 0.0915], + [-0.0236, -0.0185, -0.0259], + [-0.0245, 0.0250, 0.1180], + [ 0.1008, 0.0755, -0.0421], + [-0.0515, 0.0201, 0.0011], + [ 0.0428, -0.0012, -0.0036], + [ 0.0817, 0.0765, 0.0749], + [-0.1264, -0.0522, -0.1103], + [-0.0280, -0.0881, -0.0499], + [-0.1262, -0.0982, -0.0778] + ] + self.latent_rgb_factors_bias = [-0.0329, -0.0718, -0.0851] + self.taesd_decoder_name = "taef1_decoder" + + def process_in(self, latent): + return (latent - self.shift_factor) * self.scale_factor + + def process_out(self, latent): + return (latent / self.scale_factor) + self.shift_factor + +class Mochi(LatentFormat): + latent_channels = 12 + + def __init__(self): + self.scale_factor = 1.0 + self.latents_mean = torch.tensor([-0.06730895953510081, -0.038011381506090416, -0.07477820912866141, + -0.05565264470995561, 0.012767231469026969, -0.04703542746246419, + 0.043896967884726704, 
-0.09346305707025976, -0.09918314763016893, + -0.008729793427399178, -0.011931556316503654, -0.0321993391887285]).view(1, self.latent_channels, 1, 1, 1) + self.latents_std = torch.tensor([0.9263795028493863, 0.9248894543193766, 0.9393059390890617, + 0.959253732819592, 0.8244560132752793, 0.917259975397747, + 0.9294154431013696, 1.3720942357788521, 0.881393668867029, + 0.9168315692124348, 0.9185249279345552, 0.9274757570805041]).view(1, self.latent_channels, 1, 1, 1) + + self.latent_rgb_factors = None #TODO + self.taesd_decoder_name = None #TODO + + def process_in(self, latent): + latents_mean = self.latents_mean.to(latent.device, latent.dtype) + latents_std = self.latents_std.to(latent.device, latent.dtype) + return (latent - latents_mean) * self.scale_factor / latents_std + + def process_out(self, latent): + latents_mean = self.latents_mean.to(latent.device, latent.dtype) + latents_std = self.latents_std.to(latent.device, latent.dtype) + return latent * latents_std / self.scale_factor + latents_mean diff --git a/src/comfyui/comfy/ldm/__pycache__/common_dit.cpython-310.pyc b/src/comfyui/comfy/ldm/__pycache__/common_dit.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0a52f921b64bb72a5ce207588fd9dc8421b07d54 Binary files /dev/null and b/src/comfyui/comfy/ldm/__pycache__/common_dit.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/__pycache__/util.cpython-310.pyc b/src/comfyui/comfy/ldm/__pycache__/util.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3495ebcccb322d60d6c76482af685190d6bc7dff Binary files /dev/null and b/src/comfyui/comfy/ldm/__pycache__/util.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/__pycache__/util.cpython-38.pyc b/src/comfyui/comfy/ldm/__pycache__/util.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bb82174b4676f088dc476216b3b89dc02c49e86e Binary files /dev/null and b/src/comfyui/comfy/ldm/__pycache__/util.cpython-38.pyc differ diff --git a/src/comfyui/comfy/ldm/audio/__pycache__/autoencoder.cpython-310.pyc b/src/comfyui/comfy/ldm/audio/__pycache__/autoencoder.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..706e901791c62a61347f618ca73fc1d15a51f950 Binary files /dev/null and b/src/comfyui/comfy/ldm/audio/__pycache__/autoencoder.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/audio/__pycache__/dit.cpython-310.pyc b/src/comfyui/comfy/ldm/audio/__pycache__/dit.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bdaff1a713336fdbc6eecef77f441193e46ab87a Binary files /dev/null and b/src/comfyui/comfy/ldm/audio/__pycache__/dit.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/audio/__pycache__/embedders.cpython-310.pyc b/src/comfyui/comfy/ldm/audio/__pycache__/embedders.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b9bc576873f0d5dbd02f7f152e940dbb991065ad Binary files /dev/null and b/src/comfyui/comfy/ldm/audio/__pycache__/embedders.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/audio/autoencoder.py b/src/comfyui/comfy/ldm/audio/autoencoder.py new file mode 100644 index 0000000000000000000000000000000000000000..8123e66a50074d63bea45591f48e44723dbe5ebf --- /dev/null +++ b/src/comfyui/comfy/ldm/audio/autoencoder.py @@ -0,0 +1,282 @@ +# code adapted from: https://github.com/Stability-AI/stable-audio-tools + +import torch +from torch import nn +from typing import Literal, Dict, Any +import math +import comfy.ops +ops = 
comfy.ops.disable_weight_init + +def vae_sample(mean, scale): + stdev = nn.functional.softplus(scale) + 1e-4 + var = stdev * stdev + logvar = torch.log(var) + latents = torch.randn_like(mean) * stdev + mean + + kl = (mean * mean + var - logvar - 1).sum(1).mean() + + return latents, kl + +class VAEBottleneck(nn.Module): + def __init__(self): + super().__init__() + self.is_discrete = False + + def encode(self, x, return_info=False, **kwargs): + info = {} + + mean, scale = x.chunk(2, dim=1) + + x, kl = vae_sample(mean, scale) + + info["kl"] = kl + + if return_info: + return x, info + else: + return x + + def decode(self, x): + return x + + +def snake_beta(x, alpha, beta): + return x + (1.0 / (beta + 0.000000001)) * pow(torch.sin(x * alpha), 2) + +# Adapted from https://github.com/NVIDIA/BigVGAN/blob/main/activations.py under MIT license +class SnakeBeta(nn.Module): + + def __init__(self, in_features, alpha=1.0, alpha_trainable=True, alpha_logscale=True): + super(SnakeBeta, self).__init__() + self.in_features = in_features + + # initialize alpha + self.alpha_logscale = alpha_logscale + if self.alpha_logscale: # log scale alphas initialized to zeros + self.alpha = nn.Parameter(torch.zeros(in_features) * alpha) + self.beta = nn.Parameter(torch.zeros(in_features) * alpha) + else: # linear scale alphas initialized to ones + self.alpha = nn.Parameter(torch.ones(in_features) * alpha) + self.beta = nn.Parameter(torch.ones(in_features) * alpha) + + # self.alpha.requires_grad = alpha_trainable + # self.beta.requires_grad = alpha_trainable + + self.no_div_by_zero = 0.000000001 + + def forward(self, x): + alpha = self.alpha.unsqueeze(0).unsqueeze(-1).to(x.device) # line up with x to [B, C, T] + beta = self.beta.unsqueeze(0).unsqueeze(-1).to(x.device) + if self.alpha_logscale: + alpha = torch.exp(alpha) + beta = torch.exp(beta) + x = snake_beta(x, alpha, beta) + + return x + +def WNConv1d(*args, **kwargs): + try: + return torch.nn.utils.parametrizations.weight_norm(ops.Conv1d(*args, **kwargs)) + except: + return torch.nn.utils.weight_norm(ops.Conv1d(*args, **kwargs)) #support pytorch 2.1 and older + +def WNConvTranspose1d(*args, **kwargs): + try: + return torch.nn.utils.parametrizations.weight_norm(ops.ConvTranspose1d(*args, **kwargs)) + except: + return torch.nn.utils.weight_norm(ops.ConvTranspose1d(*args, **kwargs)) #support pytorch 2.1 and older + +def get_activation(activation: Literal["elu", "snake", "none"], antialias=False, channels=None) -> nn.Module: + if activation == "elu": + act = torch.nn.ELU() + elif activation == "snake": + act = SnakeBeta(channels) + elif activation == "none": + act = torch.nn.Identity() + else: + raise ValueError(f"Unknown activation {activation}") + + if antialias: + act = Activation1d(act) + + return act + + +class ResidualUnit(nn.Module): + def __init__(self, in_channels, out_channels, dilation, use_snake=False, antialias_activation=False): + super().__init__() + + self.dilation = dilation + + padding = (dilation * (7-1)) // 2 + + self.layers = nn.Sequential( + get_activation("snake" if use_snake else "elu", antialias=antialias_activation, channels=out_channels), + WNConv1d(in_channels=in_channels, out_channels=out_channels, + kernel_size=7, dilation=dilation, padding=padding), + get_activation("snake" if use_snake else "elu", antialias=antialias_activation, channels=out_channels), + WNConv1d(in_channels=out_channels, out_channels=out_channels, + kernel_size=1) + ) + + def forward(self, x): + res = x + + #x = checkpoint(self.layers, x) + x = self.layers(x) + + return x 
+ res + +class EncoderBlock(nn.Module): + def __init__(self, in_channels, out_channels, stride, use_snake=False, antialias_activation=False): + super().__init__() + + self.layers = nn.Sequential( + ResidualUnit(in_channels=in_channels, + out_channels=in_channels, dilation=1, use_snake=use_snake), + ResidualUnit(in_channels=in_channels, + out_channels=in_channels, dilation=3, use_snake=use_snake), + ResidualUnit(in_channels=in_channels, + out_channels=in_channels, dilation=9, use_snake=use_snake), + get_activation("snake" if use_snake else "elu", antialias=antialias_activation, channels=in_channels), + WNConv1d(in_channels=in_channels, out_channels=out_channels, + kernel_size=2*stride, stride=stride, padding=math.ceil(stride/2)), + ) + + def forward(self, x): + return self.layers(x) + +class DecoderBlock(nn.Module): + def __init__(self, in_channels, out_channels, stride, use_snake=False, antialias_activation=False, use_nearest_upsample=False): + super().__init__() + + if use_nearest_upsample: + upsample_layer = nn.Sequential( + nn.Upsample(scale_factor=stride, mode="nearest"), + WNConv1d(in_channels=in_channels, + out_channels=out_channels, + kernel_size=2*stride, + stride=1, + bias=False, + padding='same') + ) + else: + upsample_layer = WNConvTranspose1d(in_channels=in_channels, + out_channels=out_channels, + kernel_size=2*stride, stride=stride, padding=math.ceil(stride/2)) + + self.layers = nn.Sequential( + get_activation("snake" if use_snake else "elu", antialias=antialias_activation, channels=in_channels), + upsample_layer, + ResidualUnit(in_channels=out_channels, out_channels=out_channels, + dilation=1, use_snake=use_snake), + ResidualUnit(in_channels=out_channels, out_channels=out_channels, + dilation=3, use_snake=use_snake), + ResidualUnit(in_channels=out_channels, out_channels=out_channels, + dilation=9, use_snake=use_snake), + ) + + def forward(self, x): + return self.layers(x) + +class OobleckEncoder(nn.Module): + def __init__(self, + in_channels=2, + channels=128, + latent_dim=32, + c_mults = [1, 2, 4, 8], + strides = [2, 4, 8, 8], + use_snake=False, + antialias_activation=False + ): + super().__init__() + + c_mults = [1] + c_mults + + self.depth = len(c_mults) + + layers = [ + WNConv1d(in_channels=in_channels, out_channels=c_mults[0] * channels, kernel_size=7, padding=3) + ] + + for i in range(self.depth-1): + layers += [EncoderBlock(in_channels=c_mults[i]*channels, out_channels=c_mults[i+1]*channels, stride=strides[i], use_snake=use_snake)] + + layers += [ + get_activation("snake" if use_snake else "elu", antialias=antialias_activation, channels=c_mults[-1] * channels), + WNConv1d(in_channels=c_mults[-1]*channels, out_channels=latent_dim, kernel_size=3, padding=1) + ] + + self.layers = nn.Sequential(*layers) + + def forward(self, x): + return self.layers(x) + + +class OobleckDecoder(nn.Module): + def __init__(self, + out_channels=2, + channels=128, + latent_dim=32, + c_mults = [1, 2, 4, 8], + strides = [2, 4, 8, 8], + use_snake=False, + antialias_activation=False, + use_nearest_upsample=False, + final_tanh=True): + super().__init__() + + c_mults = [1] + c_mults + + self.depth = len(c_mults) + + layers = [ + WNConv1d(in_channels=latent_dim, out_channels=c_mults[-1]*channels, kernel_size=7, padding=3), + ] + + for i in range(self.depth-1, 0, -1): + layers += [DecoderBlock( + in_channels=c_mults[i]*channels, + out_channels=c_mults[i-1]*channels, + stride=strides[i-1], + use_snake=use_snake, + antialias_activation=antialias_activation, + use_nearest_upsample=use_nearest_upsample + 
) + ] + + layers += [ + get_activation("snake" if use_snake else "elu", antialias=antialias_activation, channels=c_mults[0] * channels), + WNConv1d(in_channels=c_mults[0] * channels, out_channels=out_channels, kernel_size=7, padding=3, bias=False), + nn.Tanh() if final_tanh else nn.Identity() + ] + + self.layers = nn.Sequential(*layers) + + def forward(self, x): + return self.layers(x) + + +class AudioOobleckVAE(nn.Module): + def __init__(self, + in_channels=2, + channels=128, + latent_dim=64, + c_mults = [1, 2, 4, 8, 16], + strides = [2, 4, 4, 8, 8], + use_snake=True, + antialias_activation=False, + use_nearest_upsample=False, + final_tanh=False): + super().__init__() + self.encoder = OobleckEncoder(in_channels, channels, latent_dim * 2, c_mults, strides, use_snake, antialias_activation) + self.decoder = OobleckDecoder(in_channels, channels, latent_dim, c_mults, strides, use_snake, antialias_activation, + use_nearest_upsample=use_nearest_upsample, final_tanh=final_tanh) + self.bottleneck = VAEBottleneck() + + def encode(self, x): + return self.bottleneck.encode(self.encoder(x)) + + def decode(self, x): + return self.decoder(self.bottleneck.decode(x)) + diff --git a/src/comfyui/comfy/ldm/audio/dit.py b/src/comfyui/comfy/ldm/audio/dit.py new file mode 100644 index 0000000000000000000000000000000000000000..4d2185be83c5e23c9fea17ba4ffd382c4899daca --- /dev/null +++ b/src/comfyui/comfy/ldm/audio/dit.py @@ -0,0 +1,891 @@ +# code adapted from: https://github.com/Stability-AI/stable-audio-tools + +from comfy.ldm.modules.attention import optimized_attention +import typing as tp + +import torch + +from einops import rearrange +from torch import nn +from torch.nn import functional as F +import math +import comfy.ops + +class FourierFeatures(nn.Module): + def __init__(self, in_features, out_features, std=1., dtype=None, device=None): + super().__init__() + assert out_features % 2 == 0 + self.weight = nn.Parameter(torch.empty( + [out_features // 2, in_features], dtype=dtype, device=device)) + + def forward(self, input): + f = 2 * math.pi * input @ comfy.ops.cast_to_input(self.weight.T, input) + return torch.cat([f.cos(), f.sin()], dim=-1) + +# norms +class LayerNorm(nn.Module): + def __init__(self, dim, bias=False, fix_scale=False, dtype=None, device=None): + """ + bias-less layernorm has been shown to be more stable. 
most newer models have moved towards rmsnorm, also bias-less + """ + super().__init__() + + self.gamma = nn.Parameter(torch.empty(dim, dtype=dtype, device=device)) + + if bias: + self.beta = nn.Parameter(torch.empty(dim, dtype=dtype, device=device)) + else: + self.beta = None + + def forward(self, x): + beta = self.beta + if beta is not None: + beta = comfy.ops.cast_to_input(beta, x) + return F.layer_norm(x, x.shape[-1:], weight=comfy.ops.cast_to_input(self.gamma, x), bias=beta) + +class GLU(nn.Module): + def __init__( + self, + dim_in, + dim_out, + activation, + use_conv = False, + conv_kernel_size = 3, + dtype=None, + device=None, + operations=None, + ): + super().__init__() + self.act = activation + self.proj = operations.Linear(dim_in, dim_out * 2, dtype=dtype, device=device) if not use_conv else operations.Conv1d(dim_in, dim_out * 2, conv_kernel_size, padding = (conv_kernel_size // 2), dtype=dtype, device=device) + self.use_conv = use_conv + + def forward(self, x): + if self.use_conv: + x = rearrange(x, 'b n d -> b d n') + x = self.proj(x) + x = rearrange(x, 'b d n -> b n d') + else: + x = self.proj(x) + + x, gate = x.chunk(2, dim = -1) + return x * self.act(gate) + +class AbsolutePositionalEmbedding(nn.Module): + def __init__(self, dim, max_seq_len): + super().__init__() + self.scale = dim ** -0.5 + self.max_seq_len = max_seq_len + self.emb = nn.Embedding(max_seq_len, dim) + + def forward(self, x, pos = None, seq_start_pos = None): + seq_len, device = x.shape[1], x.device + assert seq_len <= self.max_seq_len, f'you are passing in a sequence length of {seq_len} but your absolute positional embedding has a max sequence length of {self.max_seq_len}' + + if pos is None: + pos = torch.arange(seq_len, device = device) + + if seq_start_pos is not None: + pos = (pos - seq_start_pos[..., None]).clamp(min = 0) + + pos_emb = self.emb(pos) + pos_emb = pos_emb * self.scale + return pos_emb + +class ScaledSinusoidalEmbedding(nn.Module): + def __init__(self, dim, theta = 10000): + super().__init__() + assert (dim % 2) == 0, 'dimension must be divisible by 2' + self.scale = nn.Parameter(torch.ones(1) * dim ** -0.5) + + half_dim = dim // 2 + freq_seq = torch.arange(half_dim).float() / half_dim + inv_freq = theta ** -freq_seq + self.register_buffer('inv_freq', inv_freq, persistent = False) + + def forward(self, x, pos = None, seq_start_pos = None): + seq_len, device = x.shape[1], x.device + + if pos is None: + pos = torch.arange(seq_len, device = device) + + if seq_start_pos is not None: + pos = pos - seq_start_pos[..., None] + + emb = torch.einsum('i, j -> i j', pos, self.inv_freq) + emb = torch.cat((emb.sin(), emb.cos()), dim = -1) + return emb * self.scale + +class RotaryEmbedding(nn.Module): + def __init__( + self, + dim, + use_xpos = False, + scale_base = 512, + interpolation_factor = 1., + base = 10000, + base_rescale_factor = 1., + dtype=None, + device=None, + ): + super().__init__() + # proposed by reddit user bloc97, to rescale rotary embeddings to longer sequence length without fine-tuning + # has some connection to NTK literature + # https://www.reddit.com/r/LocalLLaMA/comments/14lz7j5/ntkaware_scaled_rope_allows_llama_models_to_have/ + base *= base_rescale_factor ** (dim / (dim - 2)) + + # inv_freq = 1. / (base ** (torch.arange(0, dim, 2).float() / dim)) + self.register_buffer('inv_freq', torch.empty((dim // 2,), device=device, dtype=dtype)) + + assert interpolation_factor >= 1. 
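+        # Note (annotation, not upstream code): per the commented-out schedule
+        # above, inv_freq[i] = 1 / base**(2i/dim), so early channels rotate
+        # quickly with position and later channels slowly; the actual buffer is
+        # allocated empty here and filled from the checkpoint. An
+        # interpolation_factor > 1 divides positions in forward(), stretching a
+        # trained context window over a longer sequence (position interpolation).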
self.interpolation_factor = interpolation_factor
+
+        if not use_xpos:
+            self.register_buffer('scale', None)
+            return
+
+        scale = (torch.arange(0, dim, 2) + 0.4 * dim) / (1.4 * dim)
+
+        self.scale_base = scale_base
+        self.register_buffer('scale', scale)
+
+    def forward_from_seq_len(self, seq_len, device, dtype):
+        # device = self.inv_freq.device
+
+        t = torch.arange(seq_len, device=device, dtype=dtype)
+        return self.forward(t)
+
+    def forward(self, t):
+        # device = self.inv_freq.device
+        device = t.device
+        dtype = t.dtype
+
+        # t = t.to(torch.float32)
+
+        t = t / self.interpolation_factor
+
+        freqs = torch.einsum('i , j -> i j', t, comfy.ops.cast_to_input(self.inv_freq, t))
+        freqs = torch.cat((freqs, freqs), dim = -1)
+
+        if self.scale is None:
+            return freqs, 1.
+
+        seq_len = t.shape[-1]  # fix: seq_len was previously undefined in this xpos-only branch
+        power = (torch.arange(seq_len, device = device) - (seq_len // 2)) / self.scale_base
+        scale = comfy.ops.cast_to_input(self.scale, t) ** rearrange(power, 'n -> n 1')
+        scale = torch.cat((scale, scale), dim = -1)
+
+        return freqs, scale
+
+def rotate_half(x):
+    x = rearrange(x, '... (j d) -> ... j d', j = 2)
+    x1, x2 = x.unbind(dim = -2)
+    return torch.cat((-x2, x1), dim = -1)
+
+def apply_rotary_pos_emb(t, freqs, scale = 1):
+    out_dtype = t.dtype
+
+    # cast to float32 if necessary for numerical stability
+    dtype = t.dtype #reduce(torch.promote_types, (t.dtype, freqs.dtype, torch.float32))
+    rot_dim, seq_len = freqs.shape[-1], t.shape[-2]
+    freqs, t = freqs.to(dtype), t.to(dtype)
+    freqs = freqs[-seq_len:, :]
+
+    if t.ndim == 4 and freqs.ndim == 3:
+        freqs = rearrange(freqs, 'b n d -> b 1 n d')
+
+    # partial rotary embeddings, Wang et al. GPT-J
+    t, t_unrotated = t[..., :rot_dim], t[..., rot_dim:]
+    t = (t * freqs.cos() * scale) + (rotate_half(t) * freqs.sin() * scale)
+
+    t, t_unrotated = t.to(out_dtype), t_unrotated.to(out_dtype)
+
+    return torch.cat((t, t_unrotated), dim = -1)
+
+class FeedForward(nn.Module):
+    def __init__(
+        self,
+        dim,
+        dim_out = None,
+        mult = 4,
+        no_bias = False,
+        glu = True,
+        use_conv = False,
+        conv_kernel_size = 3,
+        zero_init_output = True,
+        dtype=None,
+        device=None,
+        operations=None,
+    ):
+        super().__init__()
+        inner_dim = int(dim * mult)
+
+        # Default to SwiGLU
+
+        activation = nn.SiLU()
+
+        dim_out = dim if dim_out is None else dim_out
+
+        if glu:
+            linear_in = GLU(dim, inner_dim, activation, dtype=dtype, device=device, operations=operations)
+        else:
+            linear_in = nn.Sequential(
+                Rearrange('b n d -> b d n') if use_conv else nn.Identity(),
+                operations.Linear(dim, inner_dim, bias = not no_bias, dtype=dtype, device=device) if not use_conv else operations.Conv1d(dim, inner_dim, conv_kernel_size, padding = (conv_kernel_size // 2), bias = not no_bias, dtype=dtype, device=device),
+                Rearrange('b n d -> b d n') if use_conv else nn.Identity(),
+                activation
+            )
+
+        linear_out = operations.Linear(inner_dim, dim_out, bias = not no_bias, dtype=dtype, device=device) if not use_conv else operations.Conv1d(inner_dim, dim_out, conv_kernel_size, padding = (conv_kernel_size // 2), bias = not no_bias, dtype=dtype, device=device)
+
+        # # init last linear layer to 0
+        # if zero_init_output:
+        #     nn.init.zeros_(linear_out.weight)
+        #     if not no_bias:
+        #         nn.init.zeros_(linear_out.bias)
+
+
+        self.ff = nn.Sequential(
+            linear_in,
+            Rearrange('b d n -> b n d') if use_conv else nn.Identity(),
+            linear_out,
+            Rearrange('b n d -> b d n') if use_conv else nn.Identity(),
+        )
+
+    def forward(self, x):
+        return self.ff(x)
+
+class Attention(nn.Module):
+    def __init__(
+        self,
+        dim,
+        dim_heads = 64,
+        dim_context =
None, + causal = False, + zero_init_output=True, + qk_norm = False, + natten_kernel_size = None, + dtype=None, + device=None, + operations=None, + ): + super().__init__() + self.dim = dim + self.dim_heads = dim_heads + self.causal = causal + + dim_kv = dim_context if dim_context is not None else dim + + self.num_heads = dim // dim_heads + self.kv_heads = dim_kv // dim_heads + + if dim_context is not None: + self.to_q = operations.Linear(dim, dim, bias=False, dtype=dtype, device=device) + self.to_kv = operations.Linear(dim_kv, dim_kv * 2, bias=False, dtype=dtype, device=device) + else: + self.to_qkv = operations.Linear(dim, dim * 3, bias=False, dtype=dtype, device=device) + + self.to_out = operations.Linear(dim, dim, bias=False, dtype=dtype, device=device) + + # if zero_init_output: + # nn.init.zeros_(self.to_out.weight) + + self.qk_norm = qk_norm + + + def forward( + self, + x, + context = None, + mask = None, + context_mask = None, + rotary_pos_emb = None, + causal = None + ): + h, kv_h, has_context = self.num_heads, self.kv_heads, context is not None + + kv_input = context if has_context else x + + if hasattr(self, 'to_q'): + # Use separate linear projections for q and k/v + q = self.to_q(x) + q = rearrange(q, 'b n (h d) -> b h n d', h = h) + + k, v = self.to_kv(kv_input).chunk(2, dim=-1) + + k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h = kv_h), (k, v)) + else: + # Use fused linear projection + q, k, v = self.to_qkv(x).chunk(3, dim=-1) + q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h = h), (q, k, v)) + + # Normalize q and k for cosine sim attention + if self.qk_norm: + q = F.normalize(q, dim=-1) + k = F.normalize(k, dim=-1) + + if rotary_pos_emb is not None and not has_context: + freqs, _ = rotary_pos_emb + + q_dtype = q.dtype + k_dtype = k.dtype + + q = q.to(torch.float32) + k = k.to(torch.float32) + freqs = freqs.to(torch.float32) + + q = apply_rotary_pos_emb(q, freqs) + k = apply_rotary_pos_emb(k, freqs) + + q = q.to(q_dtype) + k = k.to(k_dtype) + + input_mask = context_mask + + if input_mask is None and not has_context: + input_mask = mask + + # determine masking + masks = [] + final_attn_mask = None # The mask that will be applied to the attention matrix, taking all masks into account + + if input_mask is not None: + input_mask = rearrange(input_mask, 'b j -> b 1 1 j') + masks.append(~input_mask) + + # Other masks will be added here later + + if len(masks) > 0: + final_attn_mask = ~or_reduce(masks) + + n, device = q.shape[-2], q.device + + causal = self.causal if causal is None else causal + + if n == 1 and causal: + causal = False + + if h != kv_h: + # Repeat interleave kv_heads to match q_heads + heads_per_kv_head = h // kv_h + k, v = map(lambda t: t.repeat_interleave(heads_per_kv_head, dim = 1), (k, v)) + + out = optimized_attention(q, k, v, h, skip_reshape=True) + out = self.to_out(out) + + if mask is not None: + mask = rearrange(mask, 'b n -> b n 1') + out = out.masked_fill(~mask, 0.) 
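+        # Note (annotation, not upstream code): q, k and v are laid out as
+        # (batch, heads, seq, dim_heads) here; when kv_heads differs from
+        # num_heads (grouped-query attention) k and v were repeat_interleaved
+        # above to match the query heads, and optimized_attention(...,
+        # skip_reshape=True) consumes the already head-split tensors and
+        # returns (batch, seq, heads * dim_heads).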
+ + return out + +class ConformerModule(nn.Module): + def __init__( + self, + dim, + norm_kwargs = {}, + ): + + super().__init__() + + self.dim = dim + + self.in_norm = LayerNorm(dim, **norm_kwargs) + self.pointwise_conv = nn.Conv1d(dim, dim, kernel_size=1, bias=False) + self.glu = GLU(dim, dim, nn.SiLU()) + self.depthwise_conv = nn.Conv1d(dim, dim, kernel_size=17, groups=dim, padding=8, bias=False) + self.mid_norm = LayerNorm(dim, **norm_kwargs) # This is a batch norm in the original but I don't like batch norm + self.swish = nn.SiLU() + self.pointwise_conv_2 = nn.Conv1d(dim, dim, kernel_size=1, bias=False) + + def forward(self, x): + x = self.in_norm(x) + x = rearrange(x, 'b n d -> b d n') + x = self.pointwise_conv(x) + x = rearrange(x, 'b d n -> b n d') + x = self.glu(x) + x = rearrange(x, 'b n d -> b d n') + x = self.depthwise_conv(x) + x = rearrange(x, 'b d n -> b n d') + x = self.mid_norm(x) + x = self.swish(x) + x = rearrange(x, 'b n d -> b d n') + x = self.pointwise_conv_2(x) + x = rearrange(x, 'b d n -> b n d') + + return x + +class TransformerBlock(nn.Module): + def __init__( + self, + dim, + dim_heads = 64, + cross_attend = False, + dim_context = None, + global_cond_dim = None, + causal = False, + zero_init_branch_outputs = True, + conformer = False, + layer_ix = -1, + remove_norms = False, + attn_kwargs = {}, + ff_kwargs = {}, + norm_kwargs = {}, + dtype=None, + device=None, + operations=None, + ): + + super().__init__() + self.dim = dim + self.dim_heads = dim_heads + self.cross_attend = cross_attend + self.dim_context = dim_context + self.causal = causal + + self.pre_norm = LayerNorm(dim, dtype=dtype, device=device, **norm_kwargs) if not remove_norms else nn.Identity() + + self.self_attn = Attention( + dim, + dim_heads = dim_heads, + causal = causal, + zero_init_output=zero_init_branch_outputs, + dtype=dtype, + device=device, + operations=operations, + **attn_kwargs + ) + + if cross_attend: + self.cross_attend_norm = LayerNorm(dim, dtype=dtype, device=device, **norm_kwargs) if not remove_norms else nn.Identity() + self.cross_attn = Attention( + dim, + dim_heads = dim_heads, + dim_context=dim_context, + causal = causal, + zero_init_output=zero_init_branch_outputs, + dtype=dtype, + device=device, + operations=operations, + **attn_kwargs + ) + + self.ff_norm = LayerNorm(dim, dtype=dtype, device=device, **norm_kwargs) if not remove_norms else nn.Identity() + self.ff = FeedForward(dim, zero_init_output=zero_init_branch_outputs, dtype=dtype, device=device, operations=operations,**ff_kwargs) + + self.layer_ix = layer_ix + + self.conformer = ConformerModule(dim, norm_kwargs=norm_kwargs) if conformer else None + + self.global_cond_dim = global_cond_dim + + if global_cond_dim is not None: + self.to_scale_shift_gate = nn.Sequential( + nn.SiLU(), + nn.Linear(global_cond_dim, dim * 6, bias=False) + ) + + nn.init.zeros_(self.to_scale_shift_gate[1].weight) + #nn.init.zeros_(self.to_scale_shift_gate_self[1].bias) + + def forward( + self, + x, + context = None, + global_cond=None, + mask = None, + context_mask = None, + rotary_pos_emb = None + ): + if self.global_cond_dim is not None and self.global_cond_dim > 0 and global_cond is not None: + + scale_self, shift_self, gate_self, scale_ff, shift_ff, gate_ff = self.to_scale_shift_gate(global_cond).unsqueeze(1).chunk(6, dim = -1) + + # self-attention with adaLN + residual = x + x = self.pre_norm(x) + x = x * (1 + scale_self) + shift_self + x = self.self_attn(x, mask = mask, rotary_pos_emb = rotary_pos_emb) + x = x * torch.sigmoid(1 - gate_self) + 
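+            # Note (annotation, not upstream code): spelled out, this adaLN
+            # branch computes
+            #   x <- residual + sigmoid(1 - gate) * Attn(norm(x) * (1 + scale) + shift)
+            # with (shift, scale, gate) sliced from to_scale_shift_gate(global_cond);
+            # the feedforward branch below repeats the same pattern.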
x = x + residual + + if context is not None: + x = x + self.cross_attn(self.cross_attend_norm(x), context = context, context_mask = context_mask) + + if self.conformer is not None: + x = x + self.conformer(x) + + # feedforward with adaLN + residual = x + x = self.ff_norm(x) + x = x * (1 + scale_ff) + shift_ff + x = self.ff(x) + x = x * torch.sigmoid(1 - gate_ff) + x = x + residual + + else: + x = x + self.self_attn(self.pre_norm(x), mask = mask, rotary_pos_emb = rotary_pos_emb) + + if context is not None: + x = x + self.cross_attn(self.cross_attend_norm(x), context = context, context_mask = context_mask) + + if self.conformer is not None: + x = x + self.conformer(x) + + x = x + self.ff(self.ff_norm(x)) + + return x + +class ContinuousTransformer(nn.Module): + def __init__( + self, + dim, + depth, + *, + dim_in = None, + dim_out = None, + dim_heads = 64, + cross_attend=False, + cond_token_dim=None, + global_cond_dim=None, + causal=False, + rotary_pos_emb=True, + zero_init_branch_outputs=True, + conformer=False, + use_sinusoidal_emb=False, + use_abs_pos_emb=False, + abs_pos_emb_max_length=10000, + dtype=None, + device=None, + operations=None, + **kwargs + ): + + super().__init__() + + self.dim = dim + self.depth = depth + self.causal = causal + self.layers = nn.ModuleList([]) + + self.project_in = operations.Linear(dim_in, dim, bias=False, dtype=dtype, device=device) if dim_in is not None else nn.Identity() + self.project_out = operations.Linear(dim, dim_out, bias=False, dtype=dtype, device=device) if dim_out is not None else nn.Identity() + + if rotary_pos_emb: + self.rotary_pos_emb = RotaryEmbedding(max(dim_heads // 2, 32), device=device, dtype=dtype) + else: + self.rotary_pos_emb = None + + self.use_sinusoidal_emb = use_sinusoidal_emb + if use_sinusoidal_emb: + self.pos_emb = ScaledSinusoidalEmbedding(dim) + + self.use_abs_pos_emb = use_abs_pos_emb + if use_abs_pos_emb: + self.pos_emb = AbsolutePositionalEmbedding(dim, abs_pos_emb_max_length) + + for i in range(depth): + self.layers.append( + TransformerBlock( + dim, + dim_heads = dim_heads, + cross_attend = cross_attend, + dim_context = cond_token_dim, + global_cond_dim = global_cond_dim, + causal = causal, + zero_init_branch_outputs = zero_init_branch_outputs, + conformer=conformer, + layer_ix=i, + dtype=dtype, + device=device, + operations=operations, + **kwargs + ) + ) + + def forward( + self, + x, + mask = None, + prepend_embeds = None, + prepend_mask = None, + global_cond = None, + return_info = False, + **kwargs + ): + batch, seq, device = *x.shape[:2], x.device + + info = { + "hidden_states": [], + } + + x = self.project_in(x) + + if prepend_embeds is not None: + prepend_length, prepend_dim = prepend_embeds.shape[1:] + + assert prepend_dim == x.shape[-1], 'prepend dimension must match sequence dimension' + + x = torch.cat((prepend_embeds, x), dim = -2) + + if prepend_mask is not None or mask is not None: + mask = mask if mask is not None else torch.ones((batch, seq), device = device, dtype = torch.bool) + prepend_mask = prepend_mask if prepend_mask is not None else torch.ones((batch, prepend_length), device = device, dtype = torch.bool) + + mask = torch.cat((prepend_mask, mask), dim = -1) + + # Attention layers + + if self.rotary_pos_emb is not None: + rotary_pos_emb = self.rotary_pos_emb.forward_from_seq_len(x.shape[1], dtype=x.dtype, device=x.device) + else: + rotary_pos_emb = None + + if self.use_sinusoidal_emb or self.use_abs_pos_emb: + x = x + self.pos_emb(x) + + # Iterate over the transformer layers + for layer in 
self.layers: + x = layer(x, rotary_pos_emb = rotary_pos_emb, global_cond=global_cond, **kwargs) + # x = checkpoint(layer, x, rotary_pos_emb = rotary_pos_emb, global_cond=global_cond, **kwargs) + + if return_info: + info["hidden_states"].append(x) + + x = self.project_out(x) + + if return_info: + return x, info + + return x + +class AudioDiffusionTransformer(nn.Module): + def __init__(self, + io_channels=64, + patch_size=1, + embed_dim=1536, + cond_token_dim=768, + project_cond_tokens=False, + global_cond_dim=1536, + project_global_cond=True, + input_concat_dim=0, + prepend_cond_dim=0, + depth=24, + num_heads=24, + transformer_type: tp.Literal["continuous_transformer"] = "continuous_transformer", + global_cond_type: tp.Literal["prepend", "adaLN"] = "prepend", + audio_model="", + dtype=None, + device=None, + operations=None, + **kwargs): + + super().__init__() + + self.dtype = dtype + self.cond_token_dim = cond_token_dim + + # Timestep embeddings + timestep_features_dim = 256 + + self.timestep_features = FourierFeatures(1, timestep_features_dim, dtype=dtype, device=device) + + self.to_timestep_embed = nn.Sequential( + operations.Linear(timestep_features_dim, embed_dim, bias=True, dtype=dtype, device=device), + nn.SiLU(), + operations.Linear(embed_dim, embed_dim, bias=True, dtype=dtype, device=device), + ) + + if cond_token_dim > 0: + # Conditioning tokens + + cond_embed_dim = cond_token_dim if not project_cond_tokens else embed_dim + self.to_cond_embed = nn.Sequential( + operations.Linear(cond_token_dim, cond_embed_dim, bias=False, dtype=dtype, device=device), + nn.SiLU(), + operations.Linear(cond_embed_dim, cond_embed_dim, bias=False, dtype=dtype, device=device) + ) + else: + cond_embed_dim = 0 + + if global_cond_dim > 0: + # Global conditioning + global_embed_dim = global_cond_dim if not project_global_cond else embed_dim + self.to_global_embed = nn.Sequential( + operations.Linear(global_cond_dim, global_embed_dim, bias=False, dtype=dtype, device=device), + nn.SiLU(), + operations.Linear(global_embed_dim, global_embed_dim, bias=False, dtype=dtype, device=device) + ) + + if prepend_cond_dim > 0: + # Prepend conditioning + self.to_prepend_embed = nn.Sequential( + operations.Linear(prepend_cond_dim, embed_dim, bias=False, dtype=dtype, device=device), + nn.SiLU(), + operations.Linear(embed_dim, embed_dim, bias=False, dtype=dtype, device=device) + ) + + self.input_concat_dim = input_concat_dim + + dim_in = io_channels + self.input_concat_dim + + self.patch_size = patch_size + + # Transformer + + self.transformer_type = transformer_type + + self.global_cond_type = global_cond_type + + if self.transformer_type == "continuous_transformer": + + global_dim = None + + if self.global_cond_type == "adaLN": + # The global conditioning is projected to the embed_dim already at this point + global_dim = embed_dim + + self.transformer = ContinuousTransformer( + dim=embed_dim, + depth=depth, + dim_heads=embed_dim // num_heads, + dim_in=dim_in * patch_size, + dim_out=io_channels * patch_size, + cross_attend = cond_token_dim > 0, + cond_token_dim = cond_embed_dim, + global_cond_dim=global_dim, + dtype=dtype, + device=device, + operations=operations, + **kwargs + ) + else: + raise ValueError(f"Unknown transformer type: {self.transformer_type}") + + self.preprocess_conv = operations.Conv1d(dim_in, dim_in, 1, bias=False, dtype=dtype, device=device) + self.postprocess_conv = operations.Conv1d(io_channels, io_channels, 1, bias=False, dtype=dtype, device=device) + + def _forward( + self, + x, + t, + mask=None, + 
cross_attn_cond=None, + cross_attn_cond_mask=None, + input_concat_cond=None, + global_embed=None, + prepend_cond=None, + prepend_cond_mask=None, + return_info=False, + **kwargs): + + if cross_attn_cond is not None: + cross_attn_cond = self.to_cond_embed(cross_attn_cond) + + if global_embed is not None: + # Project the global conditioning to the embedding dimension + global_embed = self.to_global_embed(global_embed) + + prepend_inputs = None + prepend_mask = None + prepend_length = 0 + if prepend_cond is not None: + # Project the prepend conditioning to the embedding dimension + prepend_cond = self.to_prepend_embed(prepend_cond) + + prepend_inputs = prepend_cond + if prepend_cond_mask is not None: + prepend_mask = prepend_cond_mask + + if input_concat_cond is not None: + + # Interpolate input_concat_cond to the same length as x + if input_concat_cond.shape[2] != x.shape[2]: + input_concat_cond = F.interpolate(input_concat_cond, (x.shape[2], ), mode='nearest') + + x = torch.cat([x, input_concat_cond], dim=1) + + # Get the batch of timestep embeddings + timestep_embed = self.to_timestep_embed(self.timestep_features(t[:, None]).to(x.dtype)) # (b, embed_dim) + + # Timestep embedding is considered a global embedding. Add to the global conditioning if it exists + if global_embed is not None: + global_embed = global_embed + timestep_embed + else: + global_embed = timestep_embed + + # Add the global_embed to the prepend inputs if there is no global conditioning support in the transformer + if self.global_cond_type == "prepend": + if prepend_inputs is None: + # Prepend inputs are just the global embed, and the mask is all ones + prepend_inputs = global_embed.unsqueeze(1) + prepend_mask = torch.ones((x.shape[0], 1), device=x.device, dtype=torch.bool) + else: + # Prepend inputs are the prepend conditioning + the global embed + prepend_inputs = torch.cat([prepend_inputs, global_embed.unsqueeze(1)], dim=1) + prepend_mask = torch.cat([prepend_mask, torch.ones((x.shape[0], 1), device=x.device, dtype=torch.bool)], dim=1) + + prepend_length = prepend_inputs.shape[1] + + x = self.preprocess_conv(x) + x + + x = rearrange(x, "b c t -> b t c") + + extra_args = {} + + if self.global_cond_type == "adaLN": + extra_args["global_cond"] = global_embed + + if self.patch_size > 1: + x = rearrange(x, "b (t p) c -> b t (c p)", p=self.patch_size) + + if self.transformer_type == "x-transformers": + output = self.transformer(x, prepend_embeds=prepend_inputs, context=cross_attn_cond, context_mask=cross_attn_cond_mask, mask=mask, prepend_mask=prepend_mask, **extra_args, **kwargs) + elif self.transformer_type == "continuous_transformer": + output = self.transformer(x, prepend_embeds=prepend_inputs, context=cross_attn_cond, context_mask=cross_attn_cond_mask, mask=mask, prepend_mask=prepend_mask, return_info=return_info, **extra_args, **kwargs) + + if return_info: + output, info = output + elif self.transformer_type == "mm_transformer": + output = self.transformer(x, context=cross_attn_cond, mask=mask, context_mask=cross_attn_cond_mask, **extra_args, **kwargs) + + output = rearrange(output, "b t c -> b c t")[:,:,prepend_length:] + + if self.patch_size > 1: + output = rearrange(output, "b (c p) t -> b c (t p)", p=self.patch_size) + + output = self.postprocess_conv(output) + output + + if return_info: + return output, info + + return output + + def forward( + self, + x, + timestep, + context=None, + context_mask=None, + input_concat_cond=None, + global_embed=None, + negative_global_embed=None, + prepend_cond=None, + 
prepend_cond_mask=None, + mask=None, + return_info=False, + control=None, + transformer_options={}, + **kwargs): + return self._forward( + x, + timestep, + cross_attn_cond=context, + cross_attn_cond_mask=context_mask, + input_concat_cond=input_concat_cond, + global_embed=global_embed, + prepend_cond=prepend_cond, + prepend_cond_mask=prepend_cond_mask, + mask=mask, + return_info=return_info, + **kwargs + ) diff --git a/src/comfyui/comfy/ldm/audio/embedders.py b/src/comfyui/comfy/ldm/audio/embedders.py new file mode 100644 index 0000000000000000000000000000000000000000..82a3210c60de10b4294335cd0001cb3e72b68bd6 --- /dev/null +++ b/src/comfyui/comfy/ldm/audio/embedders.py @@ -0,0 +1,108 @@ +# code adapted from: https://github.com/Stability-AI/stable-audio-tools + +import torch +import torch.nn as nn +from torch import Tensor, einsum +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, TypeVar, Union +from einops import rearrange +import math +import comfy.ops + +class LearnedPositionalEmbedding(nn.Module): + """Used for continuous time""" + + def __init__(self, dim: int): + super().__init__() + assert (dim % 2) == 0 + half_dim = dim // 2 + self.weights = nn.Parameter(torch.empty(half_dim)) + + def forward(self, x: Tensor) -> Tensor: + x = rearrange(x, "b -> b 1") + freqs = x * rearrange(self.weights, "d -> 1 d") * 2 * math.pi + fouriered = torch.cat((freqs.sin(), freqs.cos()), dim=-1) + fouriered = torch.cat((x, fouriered), dim=-1) + return fouriered + +def TimePositionalEmbedding(dim: int, out_features: int) -> nn.Module: + return nn.Sequential( + LearnedPositionalEmbedding(dim), + comfy.ops.manual_cast.Linear(in_features=dim + 1, out_features=out_features), + ) + + +class NumberEmbedder(nn.Module): + def __init__( + self, + features: int, + dim: int = 256, + ): + super().__init__() + self.features = features + self.embedding = TimePositionalEmbedding(dim=dim, out_features=features) + + def forward(self, x: Union[List[float], Tensor]) -> Tensor: + if not torch.is_tensor(x): + device = next(self.embedding.parameters()).device + x = torch.tensor(x, device=device) + assert isinstance(x, Tensor) + shape = x.shape + x = rearrange(x, "... 
-> (...)") + embedding = self.embedding(x) + x = embedding.view(*shape, self.features) + return x # type: ignore + + +class Conditioner(nn.Module): + def __init__( + self, + dim: int, + output_dim: int, + project_out: bool = False + ): + + super().__init__() + + self.dim = dim + self.output_dim = output_dim + self.proj_out = nn.Linear(dim, output_dim) if (dim != output_dim or project_out) else nn.Identity() + + def forward(self, x): + raise NotImplementedError() + +class NumberConditioner(Conditioner): + ''' + Conditioner that takes a list of floats, normalizes them for a given range, and returns a list of embeddings + ''' + def __init__(self, + output_dim: int, + min_val: float=0, + max_val: float=1 + ): + super().__init__(output_dim, output_dim) + + self.min_val = min_val + self.max_val = max_val + + self.embedder = NumberEmbedder(features=output_dim) + + def forward(self, floats, device=None): + # Cast the inputs to floats + floats = [float(x) for x in floats] + + if device is None: + device = next(self.embedder.parameters()).device + + floats = torch.tensor(floats).to(device) + + floats = floats.clamp(self.min_val, self.max_val) + + normalized_floats = (floats - self.min_val) / (self.max_val - self.min_val) + + # Cast floats to same type as embedder + embedder_dtype = next(self.embedder.parameters()).dtype + normalized_floats = normalized_floats.to(embedder_dtype) + + float_embeds = self.embedder(normalized_floats).unsqueeze(1) + + return [float_embeds, torch.ones(float_embeds.shape[0], 1).to(device)] diff --git a/src/comfyui/comfy/ldm/aura/__pycache__/mmdit.cpython-310.pyc b/src/comfyui/comfy/ldm/aura/__pycache__/mmdit.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7241005272b221fae55c8e1e2aaf72b9f4d90d1a Binary files /dev/null and b/src/comfyui/comfy/ldm/aura/__pycache__/mmdit.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/aura/mmdit.py b/src/comfyui/comfy/ldm/aura/mmdit.py new file mode 100644 index 0000000000000000000000000000000000000000..cd9a421852c9f310818ccff33edfcb37b0e545f9 --- /dev/null +++ b/src/comfyui/comfy/ldm/aura/mmdit.py @@ -0,0 +1,478 @@ +#AuraFlow MMDiT +#Originally written by the AuraFlow Authors + +import math + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from comfy.ldm.modules.attention import optimized_attention +import comfy.ops +import comfy.ldm.common_dit + +def modulate(x, shift, scale): + return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1) + + +def find_multiple(n: int, k: int) -> int: + if n % k == 0: + return n + return n + k - (n % k) + + +class MLP(nn.Module): + def __init__(self, dim, hidden_dim=None, dtype=None, device=None, operations=None) -> None: + super().__init__() + if hidden_dim is None: + hidden_dim = 4 * dim + + n_hidden = int(2 * hidden_dim / 3) + n_hidden = find_multiple(n_hidden, 256) + + self.c_fc1 = operations.Linear(dim, n_hidden, bias=False, dtype=dtype, device=device) + self.c_fc2 = operations.Linear(dim, n_hidden, bias=False, dtype=dtype, device=device) + self.c_proj = operations.Linear(n_hidden, dim, bias=False, dtype=dtype, device=device) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + x = F.silu(self.c_fc1(x)) * self.c_fc2(x) + x = self.c_proj(x) + return x + + +class MultiHeadLayerNorm(nn.Module): + def __init__(self, hidden_size=None, eps=1e-5, dtype=None, device=None): + # Copy pasta from https://github.com/huggingface/transformers/blob/e5f71ecaae50ea476d1e12351003790273c4b2ed/src/transformers/models/cohere/modeling_cohere.py#L78 + + 
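+        # Note (annotation, not upstream code): this is a bias-free LayerNorm
+        # evaluated in float32 for numerical stability; per forward() below it
+        # returns
+        #   weight * (h - mean(h)) / sqrt(var(h) + eps)
+        # with mean and variance taken over the last dimension.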
super().__init__() + self.weight = nn.Parameter(torch.empty(hidden_size, dtype=dtype, device=device)) + self.variance_epsilon = eps + + def forward(self, hidden_states): + input_dtype = hidden_states.dtype + hidden_states = hidden_states.to(torch.float32) + mean = hidden_states.mean(-1, keepdim=True) + variance = (hidden_states - mean).pow(2).mean(-1, keepdim=True) + hidden_states = (hidden_states - mean) * torch.rsqrt( + variance + self.variance_epsilon + ) + hidden_states = self.weight.to(torch.float32) * hidden_states + return hidden_states.to(input_dtype) + +class SingleAttention(nn.Module): + def __init__(self, dim, n_heads, mh_qknorm=False, dtype=None, device=None, operations=None): + super().__init__() + + self.n_heads = n_heads + self.head_dim = dim // n_heads + + # this is for cond + self.w1q = operations.Linear(dim, dim, bias=False, dtype=dtype, device=device) + self.w1k = operations.Linear(dim, dim, bias=False, dtype=dtype, device=device) + self.w1v = operations.Linear(dim, dim, bias=False, dtype=dtype, device=device) + self.w1o = operations.Linear(dim, dim, bias=False, dtype=dtype, device=device) + + self.q_norm1 = ( + MultiHeadLayerNorm((self.n_heads, self.head_dim), dtype=dtype, device=device) + if mh_qknorm + else operations.LayerNorm(self.head_dim, elementwise_affine=False, dtype=dtype, device=device) + ) + self.k_norm1 = ( + MultiHeadLayerNorm((self.n_heads, self.head_dim), dtype=dtype, device=device) + if mh_qknorm + else operations.LayerNorm(self.head_dim, elementwise_affine=False, dtype=dtype, device=device) + ) + + #@torch.compile() + def forward(self, c): + + bsz, seqlen1, _ = c.shape + + q, k, v = self.w1q(c), self.w1k(c), self.w1v(c) + q = q.view(bsz, seqlen1, self.n_heads, self.head_dim) + k = k.view(bsz, seqlen1, self.n_heads, self.head_dim) + v = v.view(bsz, seqlen1, self.n_heads, self.head_dim) + q, k = self.q_norm1(q), self.k_norm1(k) + + output = optimized_attention(q.permute(0, 2, 1, 3), k.permute(0, 2, 1, 3), v.permute(0, 2, 1, 3), self.n_heads, skip_reshape=True) + c = self.w1o(output) + return c + + + +class DoubleAttention(nn.Module): + def __init__(self, dim, n_heads, mh_qknorm=False, dtype=None, device=None, operations=None): + super().__init__() + + self.n_heads = n_heads + self.head_dim = dim // n_heads + + # this is for cond + self.w1q = operations.Linear(dim, dim, bias=False, dtype=dtype, device=device) + self.w1k = operations.Linear(dim, dim, bias=False, dtype=dtype, device=device) + self.w1v = operations.Linear(dim, dim, bias=False, dtype=dtype, device=device) + self.w1o = operations.Linear(dim, dim, bias=False, dtype=dtype, device=device) + + # this is for x + self.w2q = operations.Linear(dim, dim, bias=False, dtype=dtype, device=device) + self.w2k = operations.Linear(dim, dim, bias=False, dtype=dtype, device=device) + self.w2v = operations.Linear(dim, dim, bias=False, dtype=dtype, device=device) + self.w2o = operations.Linear(dim, dim, bias=False, dtype=dtype, device=device) + + self.q_norm1 = ( + MultiHeadLayerNorm((self.n_heads, self.head_dim), dtype=dtype, device=device) + if mh_qknorm + else operations.LayerNorm(self.head_dim, elementwise_affine=False, dtype=dtype, device=device) + ) + self.k_norm1 = ( + MultiHeadLayerNorm((self.n_heads, self.head_dim), dtype=dtype, device=device) + if mh_qknorm + else operations.LayerNorm(self.head_dim, elementwise_affine=False, dtype=dtype, device=device) + ) + + self.q_norm2 = ( + MultiHeadLayerNorm((self.n_heads, self.head_dim), dtype=dtype, device=device) + if mh_qknorm + else 
operations.LayerNorm(self.head_dim, elementwise_affine=False, dtype=dtype, device=device) + ) + self.k_norm2 = ( + MultiHeadLayerNorm((self.n_heads, self.head_dim), dtype=dtype, device=device) + if mh_qknorm + else operations.LayerNorm(self.head_dim, elementwise_affine=False, dtype=dtype, device=device) + ) + + + #@torch.compile() + def forward(self, c, x): + + bsz, seqlen1, _ = c.shape + bsz, seqlen2, _ = x.shape + seqlen = seqlen1 + seqlen2 + + cq, ck, cv = self.w1q(c), self.w1k(c), self.w1v(c) + cq = cq.view(bsz, seqlen1, self.n_heads, self.head_dim) + ck = ck.view(bsz, seqlen1, self.n_heads, self.head_dim) + cv = cv.view(bsz, seqlen1, self.n_heads, self.head_dim) + cq, ck = self.q_norm1(cq), self.k_norm1(ck) + + xq, xk, xv = self.w2q(x), self.w2k(x), self.w2v(x) + xq = xq.view(bsz, seqlen2, self.n_heads, self.head_dim) + xk = xk.view(bsz, seqlen2, self.n_heads, self.head_dim) + xv = xv.view(bsz, seqlen2, self.n_heads, self.head_dim) + xq, xk = self.q_norm2(xq), self.k_norm2(xk) + + # concat all + q, k, v = ( + torch.cat([cq, xq], dim=1), + torch.cat([ck, xk], dim=1), + torch.cat([cv, xv], dim=1), + ) + + output = optimized_attention(q.permute(0, 2, 1, 3), k.permute(0, 2, 1, 3), v.permute(0, 2, 1, 3), self.n_heads, skip_reshape=True) + + c, x = output.split([seqlen1, seqlen2], dim=1) + c = self.w1o(c) + x = self.w2o(x) + + return c, x + + +class MMDiTBlock(nn.Module): + def __init__(self, dim, heads=8, global_conddim=1024, is_last=False, dtype=None, device=None, operations=None): + super().__init__() + + self.normC1 = operations.LayerNorm(dim, elementwise_affine=False, dtype=dtype, device=device) + self.normC2 = operations.LayerNorm(dim, elementwise_affine=False, dtype=dtype, device=device) + if not is_last: + self.mlpC = MLP(dim, hidden_dim=dim * 4, dtype=dtype, device=device, operations=operations) + self.modC = nn.Sequential( + nn.SiLU(), + operations.Linear(global_conddim, 6 * dim, bias=False, dtype=dtype, device=device), + ) + else: + self.modC = nn.Sequential( + nn.SiLU(), + operations.Linear(global_conddim, 2 * dim, bias=False, dtype=dtype, device=device), + ) + + self.normX1 = operations.LayerNorm(dim, elementwise_affine=False, dtype=dtype, device=device) + self.normX2 = operations.LayerNorm(dim, elementwise_affine=False, dtype=dtype, device=device) + self.mlpX = MLP(dim, hidden_dim=dim * 4, dtype=dtype, device=device, operations=operations) + self.modX = nn.Sequential( + nn.SiLU(), + operations.Linear(global_conddim, 6 * dim, bias=False, dtype=dtype, device=device), + ) + + self.attn = DoubleAttention(dim, heads, dtype=dtype, device=device, operations=operations) + self.is_last = is_last + + #@torch.compile() + def forward(self, c, x, global_cond, **kwargs): + + cres, xres = c, x + + cshift_msa, cscale_msa, cgate_msa, cshift_mlp, cscale_mlp, cgate_mlp = ( + self.modC(global_cond).chunk(6, dim=1) + ) + + c = modulate(self.normC1(c), cshift_msa, cscale_msa) + + # xpath + xshift_msa, xscale_msa, xgate_msa, xshift_mlp, xscale_mlp, xgate_mlp = ( + self.modX(global_cond).chunk(6, dim=1) + ) + + x = modulate(self.normX1(x), xshift_msa, xscale_msa) + + # attention + c, x = self.attn(c, x) + + + c = self.normC2(cres + cgate_msa.unsqueeze(1) * c) + c = cgate_mlp.unsqueeze(1) * self.mlpC(modulate(c, cshift_mlp, cscale_mlp)) + c = cres + c + + x = self.normX2(xres + xgate_msa.unsqueeze(1) * x) + x = xgate_mlp.unsqueeze(1) * self.mlpX(modulate(x, xshift_mlp, xscale_mlp)) + x = xres + x + + return c, x + +class DiTBlock(nn.Module): + # like MMDiTBlock, but it only has X + def __init__(self, 
dim, heads=8, global_conddim=1024, dtype=None, device=None, operations=None): + super().__init__() + + self.norm1 = operations.LayerNorm(dim, elementwise_affine=False, dtype=dtype, device=device) + self.norm2 = operations.LayerNorm(dim, elementwise_affine=False, dtype=dtype, device=device) + + self.modCX = nn.Sequential( + nn.SiLU(), + operations.Linear(global_conddim, 6 * dim, bias=False, dtype=dtype, device=device), + ) + + self.attn = SingleAttention(dim, heads, dtype=dtype, device=device, operations=operations) + self.mlp = MLP(dim, hidden_dim=dim * 4, dtype=dtype, device=device, operations=operations) + + #@torch.compile() + def forward(self, cx, global_cond, **kwargs): + cxres = cx + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.modCX( + global_cond + ).chunk(6, dim=1) + cx = modulate(self.norm1(cx), shift_msa, scale_msa) + cx = self.attn(cx) + cx = self.norm2(cxres + gate_msa.unsqueeze(1) * cx) + mlpout = self.mlp(modulate(cx, shift_mlp, scale_mlp)) + cx = gate_mlp.unsqueeze(1) * mlpout + + cx = cxres + cx + + return cx + + + +class TimestepEmbedder(nn.Module): + def __init__(self, hidden_size, frequency_embedding_size=256, dtype=None, device=None, operations=None): + super().__init__() + self.mlp = nn.Sequential( + operations.Linear(frequency_embedding_size, hidden_size, dtype=dtype, device=device), + nn.SiLU(), + operations.Linear(hidden_size, hidden_size, dtype=dtype, device=device), + ) + self.frequency_embedding_size = frequency_embedding_size + + @staticmethod + def timestep_embedding(t, dim, max_period=10000): + half = dim // 2 + freqs = 1000 * torch.exp( + -math.log(max_period) * torch.arange(start=0, end=half) / half + ).to(t.device) + args = t[:, None] * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat( + [embedding, torch.zeros_like(embedding[:, :1])], dim=-1 + ) + return embedding + + #@torch.compile() + def forward(self, t, dtype): + t_freq = self.timestep_embedding(t, self.frequency_embedding_size).to(dtype) + t_emb = self.mlp(t_freq) + return t_emb + + +class MMDiT(nn.Module): + def __init__( + self, + in_channels=4, + out_channels=4, + patch_size=2, + dim=3072, + n_layers=36, + n_double_layers=4, + n_heads=12, + global_conddim=3072, + cond_seq_dim=2048, + max_seq=32 * 32, + device=None, + dtype=None, + operations=None, + ): + super().__init__() + self.dtype = dtype + + self.t_embedder = TimestepEmbedder(global_conddim, dtype=dtype, device=device, operations=operations) + + self.cond_seq_linear = operations.Linear( + cond_seq_dim, dim, bias=False, dtype=dtype, device=device + ) # linear for something like text sequence. + self.init_x_linear = operations.Linear( + patch_size * patch_size * in_channels, dim, dtype=dtype, device=device + ) # init linear for patchified image. 
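+ # Editor's note (illustrative, not part of the original code): with the
+ # defaults above (patch_size=2, in_channels=4, dim=3072, max_seq=32*32), a
+ # 64x64 latent patchifies to (64/2)*(64/2) = 1024 tokens of 2*2*4 = 16
+ # values each, so init_x_linear maps (B, 1024, 16) -> (B, 1024, 3072) and
+ # the positional table below covers exactly that 32x32 token grid.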
+ + self.positional_encoding = nn.Parameter(torch.empty(1, max_seq, dim, dtype=dtype, device=device)) + self.register_tokens = nn.Parameter(torch.empty(1, 8, dim, dtype=dtype, device=device)) + + self.double_layers = nn.ModuleList([]) + self.single_layers = nn.ModuleList([]) + + + for idx in range(n_double_layers): + self.double_layers.append( + MMDiTBlock(dim, n_heads, global_conddim, is_last=(idx == n_layers - 1), dtype=dtype, device=device, operations=operations) + ) + + for idx in range(n_double_layers, n_layers): + self.single_layers.append( + DiTBlock(dim, n_heads, global_conddim, dtype=dtype, device=device, operations=operations) + ) + + + self.final_linear = operations.Linear( + dim, patch_size * patch_size * out_channels, bias=False, dtype=dtype, device=device + ) + + self.modF = nn.Sequential( + nn.SiLU(), + operations.Linear(global_conddim, 2 * dim, bias=False, dtype=dtype, device=device), + ) + + self.out_channels = out_channels + self.patch_size = patch_size + self.n_double_layers = n_double_layers + self.n_layers = n_layers + + self.h_max = round(max_seq**0.5) + self.w_max = round(max_seq**0.5) + + @torch.no_grad() + def extend_pe(self, init_dim=(16, 16), target_dim=(64, 64)): + # extend pe + pe_data = self.positional_encoding.data.squeeze(0)[: init_dim[0] * init_dim[1]] + + pe_as_2d = pe_data.view(init_dim[0], init_dim[1], -1).permute(2, 0, 1) + + # extend the grid to target_dim using bilinear interpolation + # (torch.nn.functional.interpolate) + pe_as_2d = F.interpolate( + pe_as_2d.unsqueeze(0), size=target_dim, mode="bilinear" + ) + pe_new = pe_as_2d.squeeze(0).permute(1, 2, 0).flatten(0, 1) + self.positional_encoding.data = pe_new.unsqueeze(0).contiguous() + self.h_max, self.w_max = target_dim + print("PE extended to", target_dim) + + def pe_selection_index_based_on_dim(self, h, w): + h_p, w_p = h // self.patch_size, w // self.patch_size + original_pe_indexes = torch.arange(self.positional_encoding.shape[1]) + original_pe_indexes = original_pe_indexes.view(self.h_max, self.w_max) + starth = self.h_max // 2 - h_p // 2 + endh = starth + h_p + startw = self.w_max // 2 - w_p // 2 + endw = startw + w_p + original_pe_indexes = original_pe_indexes[ + starth:endh, startw:endw + ] + return original_pe_indexes.flatten() + + def unpatchify(self, x, h, w): + c = self.out_channels + p = self.patch_size + + x = x.reshape(shape=(x.shape[0], h, w, p, p, c)) + x = torch.einsum("nhwpqc->nchpwq", x) + imgs = x.reshape(shape=(x.shape[0], c, h * p, w * p)) + return imgs + + def patchify(self, x): + B, C, H, W = x.size() + x = comfy.ldm.common_dit.pad_to_patch_size(x, (self.patch_size, self.patch_size)) + x = x.view( + B, + C, + (H + 1) // self.patch_size, + self.patch_size, + (W + 1) // self.patch_size, + self.patch_size, + ) + x = x.permute(0, 2, 4, 1, 3, 5).flatten(-3).flatten(1, 2) + return x + + def apply_pos_embeds(self, x, h, w): + h = (h + 1) // self.patch_size + w = (w + 1) // self.patch_size + max_dim = max(h, w) + + cur_dim = self.h_max + pos_encoding = comfy.ops.cast_to_input(self.positional_encoding.reshape(1, cur_dim, cur_dim, -1), x) + + if max_dim > cur_dim: + pos_encoding = F.interpolate(pos_encoding.movedim(-1, 1), (max_dim, max_dim), mode="bilinear").movedim(1, -1) + cur_dim = max_dim + + from_h = (cur_dim - h) // 2 + from_w = (cur_dim - w) // 2 + pos_encoding = pos_encoding[:, from_h:from_h + h, from_w:from_w + w] + return x + pos_encoding.reshape(1, -1, self.positional_encoding.shape[-1]) + + def forward(self, x, timestep, context, **kwargs): +
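+ # Editor's walkthrough (illustrative numbers, not from the original code): a
+ # 128x128 input latent becomes 64x64 = 4096 tokens after patchify; since the
+ # learned PE grid is 32x32, apply_pos_embeds bilinearly upsamples it to 64x64
+ # and center-crops it to the token grid before the double (c, x) and single
+ # (cx) blocks below run.
+ #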
patchify x, add PE + b, c, h, w = x.shape + + # pe_indexes = self.pe_selection_index_based_on_dim(h, w) + # print(pe_indexes, pe_indexes.shape) + + x = self.init_x_linear(self.patchify(x)) # B, T_x, D + x = self.apply_pos_embeds(x, h, w) + # x = x + self.positional_encoding[:, : x.size(1)].to(device=x.device, dtype=x.dtype) + # x = x + self.positional_encoding[:, pe_indexes].to(device=x.device, dtype=x.dtype) + + # process conditions for MMDiT Blocks + c_seq = context # B, T_c, D_c + t = timestep + + c = self.cond_seq_linear(c_seq) # B, T_c, D + c = torch.cat([comfy.ops.cast_to_input(self.register_tokens, c).repeat(c.size(0), 1, 1), c], dim=1) + + global_cond = self.t_embedder(t, x.dtype) # B, D + + if len(self.double_layers) > 0: + for layer in self.double_layers: + c, x = layer(c, x, global_cond, **kwargs) + + if len(self.single_layers) > 0: + c_len = c.size(1) + cx = torch.cat([c, x], dim=1) + for layer in self.single_layers: + cx = layer(cx, global_cond, **kwargs) + + x = cx[:, c_len:] + + fshift, fscale = self.modF(global_cond).chunk(2, dim=1) + + x = modulate(x, fshift, fscale) + x = self.final_linear(x) + x = self.unpatchify(x, (h + 1) // self.patch_size, (w + 1) // self.patch_size)[:,:,:h,:w] + return x diff --git a/src/comfyui/comfy/ldm/cascade/__pycache__/common.cpython-310.pyc b/src/comfyui/comfy/ldm/cascade/__pycache__/common.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e7e619913edd49dc88640c0918c6fd806e9964d1 Binary files /dev/null and b/src/comfyui/comfy/ldm/cascade/__pycache__/common.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/cascade/__pycache__/controlnet.cpython-310.pyc b/src/comfyui/comfy/ldm/cascade/__pycache__/controlnet.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7050d6ce0fadfae6453963a67b2087c6d01517fa Binary files /dev/null and b/src/comfyui/comfy/ldm/cascade/__pycache__/controlnet.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/cascade/__pycache__/stage_a.cpython-310.pyc b/src/comfyui/comfy/ldm/cascade/__pycache__/stage_a.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b9f91ec2f74d23106d4cd0f8f1363f1074ad0abf Binary files /dev/null and b/src/comfyui/comfy/ldm/cascade/__pycache__/stage_a.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/cascade/__pycache__/stage_a.cpython-38.pyc b/src/comfyui/comfy/ldm/cascade/__pycache__/stage_a.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0ed6979573efa58754658f2cda8ce45d089d5b21 Binary files /dev/null and b/src/comfyui/comfy/ldm/cascade/__pycache__/stage_a.cpython-38.pyc differ diff --git a/src/comfyui/comfy/ldm/cascade/__pycache__/stage_b.cpython-310.pyc b/src/comfyui/comfy/ldm/cascade/__pycache__/stage_b.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4e162bb13fb4f1e852429260383c9cb3a42c30fe Binary files /dev/null and b/src/comfyui/comfy/ldm/cascade/__pycache__/stage_b.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/cascade/__pycache__/stage_c.cpython-310.pyc b/src/comfyui/comfy/ldm/cascade/__pycache__/stage_c.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..13d24bdde77f81f345f991d97d5ac423a6d6944b Binary files /dev/null and b/src/comfyui/comfy/ldm/cascade/__pycache__/stage_c.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/cascade/__pycache__/stage_c_coder.cpython-310.pyc b/src/comfyui/comfy/ldm/cascade/__pycache__/stage_c_coder.cpython-310.pyc new 
file mode 100644 index 0000000000000000000000000000000000000000..0bf7a8938f368319aea69a85ec6785860270b61c Binary files /dev/null and b/src/comfyui/comfy/ldm/cascade/__pycache__/stage_c_coder.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/cascade/__pycache__/stage_c_coder.cpython-38.pyc b/src/comfyui/comfy/ldm/cascade/__pycache__/stage_c_coder.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fd758b3193208c99addcfbe6173818ab69299935 Binary files /dev/null and b/src/comfyui/comfy/ldm/cascade/__pycache__/stage_c_coder.cpython-38.pyc differ diff --git a/src/comfyui/comfy/ldm/cascade/common.py b/src/comfyui/comfy/ldm/cascade/common.py new file mode 100644 index 0000000000000000000000000000000000000000..3eaa0c821cccddbe891ac8a705d702c509c85582 --- /dev/null +++ b/src/comfyui/comfy/ldm/cascade/common.py @@ -0,0 +1,154 @@ +""" + This file is part of ComfyUI. + Copyright (C) 2024 Stability AI + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . +""" + +import torch +import torch.nn as nn +from comfy.ldm.modules.attention import optimized_attention +import comfy.ops + +class OptimizedAttention(nn.Module): + def __init__(self, c, nhead, dropout=0.0, dtype=None, device=None, operations=None): + super().__init__() + self.heads = nhead + + self.to_q = operations.Linear(c, c, bias=True, dtype=dtype, device=device) + self.to_k = operations.Linear(c, c, bias=True, dtype=dtype, device=device) + self.to_v = operations.Linear(c, c, bias=True, dtype=dtype, device=device) + + self.out_proj = operations.Linear(c, c, bias=True, dtype=dtype, device=device) + + def forward(self, q, k, v): + q = self.to_q(q) + k = self.to_k(k) + v = self.to_v(v) + + out = optimized_attention(q, k, v, self.heads) + + return self.out_proj(out) + +class Attention2D(nn.Module): + def __init__(self, c, nhead, dropout=0.0, dtype=None, device=None, operations=None): + super().__init__() + self.attn = OptimizedAttention(c, nhead, dtype=dtype, device=device, operations=operations) + # self.attn = nn.MultiheadAttention(c, nhead, dropout=dropout, bias=True, batch_first=True, dtype=dtype, device=device) + + def forward(self, x, kv, self_attn=False): + orig_shape = x.shape + x = x.view(x.size(0), x.size(1), -1).permute(0, 2, 1) # Bx4xHxW -> Bx(HxW)x4 + if self_attn: + kv = torch.cat([x, kv], dim=1) + # x = self.attn(x, kv, kv, need_weights=False)[0] + x = self.attn(x, kv, kv) + x = x.permute(0, 2, 1).view(*orig_shape) + return x + + +def LayerNorm2d_op(operations): + class LayerNorm2d(operations.LayerNorm): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def forward(self, x): + return super().forward(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) + return LayerNorm2d + +class GlobalResponseNorm(nn.Module): + "from https://github.com/facebookresearch/ConvNeXt-V2/blob/3608f67cc1dae164790c5d0aead7bf2d73d9719b/models/utils.py#L105" + def __init__(self, dim, dtype=None, device=None): + super().__init__() + self.gamma = 
nn.Parameter(torch.empty(1, 1, 1, dim, dtype=dtype, device=device)) + self.beta = nn.Parameter(torch.empty(1, 1, 1, dim, dtype=dtype, device=device)) + + def forward(self, x): + Gx = torch.norm(x, p=2, dim=(1, 2), keepdim=True) + Nx = Gx / (Gx.mean(dim=-1, keepdim=True) + 1e-6) + return comfy.ops.cast_to_input(self.gamma, x) * (x * Nx) + comfy.ops.cast_to_input(self.beta, x) + x + + +class ResBlock(nn.Module): + def __init__(self, c, c_skip=0, kernel_size=3, dropout=0.0, dtype=None, device=None, operations=None): # , num_heads=4, expansion=2): + super().__init__() + self.depthwise = operations.Conv2d(c, c, kernel_size=kernel_size, padding=kernel_size // 2, groups=c, dtype=dtype, device=device) + # self.depthwise = SAMBlock(c, num_heads, expansion) + self.norm = LayerNorm2d_op(operations)(c, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) + self.channelwise = nn.Sequential( + operations.Linear(c + c_skip, c * 4, dtype=dtype, device=device), + nn.GELU(), + GlobalResponseNorm(c * 4, dtype=dtype, device=device), + nn.Dropout(dropout), + operations.Linear(c * 4, c, dtype=dtype, device=device) + ) + + def forward(self, x, x_skip=None): + x_res = x + x = self.norm(self.depthwise(x)) + if x_skip is not None: + x = torch.cat([x, x_skip], dim=1) + x = self.channelwise(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) + return x + x_res + + +class AttnBlock(nn.Module): + def __init__(self, c, c_cond, nhead, self_attn=True, dropout=0.0, dtype=None, device=None, operations=None): + super().__init__() + self.self_attn = self_attn + self.norm = LayerNorm2d_op(operations)(c, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) + self.attention = Attention2D(c, nhead, dropout, dtype=dtype, device=device, operations=operations) + self.kv_mapper = nn.Sequential( + nn.SiLU(), + operations.Linear(c_cond, c, dtype=dtype, device=device) + ) + + def forward(self, x, kv): + kv = self.kv_mapper(kv) + x = x + self.attention(self.norm(x), kv, self_attn=self.self_attn) + return x + + +class FeedForwardBlock(nn.Module): + def __init__(self, c, dropout=0.0, dtype=None, device=None, operations=None): + super().__init__() + self.norm = LayerNorm2d_op(operations)(c, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) + self.channelwise = nn.Sequential( + operations.Linear(c, c * 4, dtype=dtype, device=device), + nn.GELU(), + GlobalResponseNorm(c * 4, dtype=dtype, device=device), + nn.Dropout(dropout), + operations.Linear(c * 4, c, dtype=dtype, device=device) + ) + + def forward(self, x): + x = x + self.channelwise(self.norm(x).permute(0, 2, 3, 1)).permute(0, 3, 1, 2) + return x + + +class TimestepBlock(nn.Module): + def __init__(self, c, c_timestep, conds=['sca'], dtype=None, device=None, operations=None): + super().__init__() + self.mapper = operations.Linear(c_timestep, c * 2, dtype=dtype, device=device) + self.conds = conds + for cname in conds: + setattr(self, f"mapper_{cname}", operations.Linear(c_timestep, c * 2, dtype=dtype, device=device)) + + def forward(self, x, t): + t = t.chunk(len(self.conds) + 1, dim=1) + a, b = self.mapper(t[0])[:, :, None, None].chunk(2, dim=1) + for i, c in enumerate(self.conds): + ac, bc = getattr(self, f"mapper_{c}")(t[i + 1])[:, :, None, None].chunk(2, dim=1) + a, b = a + ac, b + bc + return x * (1 + a) + b diff --git a/src/comfyui/comfy/ldm/cascade/controlnet.py b/src/comfyui/comfy/ldm/cascade/controlnet.py new file mode 100644 index 0000000000000000000000000000000000000000..7a52c3c263f96008e7e2b0ca56e3214784c57eb3 --- /dev/null +++ 
b/src/comfyui/comfy/ldm/cascade/controlnet.py @@ -0,0 +1,93 @@ +""" + This file is part of ComfyUI. + Copyright (C) 2024 Stability AI + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . +""" + +import torch +import torchvision +from torch import nn +from .common import LayerNorm2d_op + + +class CNetResBlock(nn.Module): + def __init__(self, c, dtype=None, device=None, operations=None): + super().__init__() + self.blocks = nn.Sequential( + LayerNorm2d_op(operations)(c, dtype=dtype, device=device), + nn.GELU(), + operations.Conv2d(c, c, kernel_size=3, padding=1), + LayerNorm2d_op(operations)(c, dtype=dtype, device=device), + nn.GELU(), + operations.Conv2d(c, c, kernel_size=3, padding=1), + ) + + def forward(self, x): + return x + self.blocks(x) + + +class ControlNet(nn.Module): + def __init__(self, c_in=3, c_proj=2048, proj_blocks=None, bottleneck_mode=None, dtype=None, device=None, operations=nn): + super().__init__() + if bottleneck_mode is None: + bottleneck_mode = 'effnet' + self.proj_blocks = proj_blocks + if bottleneck_mode == 'effnet': + embd_channels = 1280 + self.backbone = torchvision.models.efficientnet_v2_s().features.eval() + if c_in != 3: + in_weights = self.backbone[0][0].weight.data + self.backbone[0][0] = operations.Conv2d(c_in, 24, kernel_size=3, stride=2, bias=False, dtype=dtype, device=device) + if c_in > 3: + # nn.init.constant_(self.backbone[0][0].weight, 0) + self.backbone[0][0].weight.data[:, :3] = in_weights[:, :3].clone() + else: + self.backbone[0][0].weight.data = in_weights[:, :c_in].clone() + elif bottleneck_mode == 'simple': + embd_channels = c_in + self.backbone = nn.Sequential( + operations.Conv2d(embd_channels, embd_channels * 4, kernel_size=3, padding=1, dtype=dtype, device=device), + nn.LeakyReLU(0.2, inplace=True), + operations.Conv2d(embd_channels * 4, embd_channels, kernel_size=3, padding=1, dtype=dtype, device=device), + ) + elif bottleneck_mode == 'large': + self.backbone = nn.Sequential( + operations.Conv2d(c_in, 4096 * 4, kernel_size=1, dtype=dtype, device=device), + nn.LeakyReLU(0.2, inplace=True), + operations.Conv2d(4096 * 4, 1024, kernel_size=1, dtype=dtype, device=device), + *[CNetResBlock(1024, dtype=dtype, device=device, operations=operations) for _ in range(8)], + operations.Conv2d(1024, 1280, kernel_size=1, dtype=dtype, device=device), + ) + embd_channels = 1280 + else: + raise ValueError(f'Unknown bottleneck mode: {bottleneck_mode}') + self.projections = nn.ModuleList() + for _ in range(len(proj_blocks)): + self.projections.append(nn.Sequential( + operations.Conv2d(embd_channels, embd_channels, kernel_size=1, bias=False, dtype=dtype, device=device), + nn.LeakyReLU(0.2, inplace=True), + operations.Conv2d(embd_channels, c_proj, kernel_size=1, bias=False, dtype=dtype, device=device), + )) + # nn.init.constant_(self.projections[-1][-1].weight, 0) # zero output projection + self.xl = False + self.input_channels = c_in + self.unshuffle_amount = 8 + + def forward(self, x): + x = self.backbone(x) + 
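+ # Editor's note (illustrative values): with proj_blocks = [0, 2], the shared
+ # backbone feature x is projected once per entry and scattered into slots 0
+ # and 2 of a 3-slot list; untouched slots stay None, and the list is reversed
+ # on return so the Stage C blocks can simply pop() controls as they descend.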
proj_outputs = [None for _ in range(max(self.proj_blocks) + 1)] + for i, idx in enumerate(self.proj_blocks): + proj_outputs[idx] = self.projections[i](x) + return {"input": proj_outputs[::-1]} diff --git a/src/comfyui/comfy/ldm/cascade/stage_a.py b/src/comfyui/comfy/ldm/cascade/stage_a.py new file mode 100644 index 0000000000000000000000000000000000000000..ca8867eaf35cbc57eb5d925082b7e2bb7b36932d --- /dev/null +++ b/src/comfyui/comfy/ldm/cascade/stage_a.py @@ -0,0 +1,255 @@ +""" + This file is part of ComfyUI. + Copyright (C) 2024 Stability AI + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . +""" + +import torch +from torch import nn +from torch.autograd import Function + +class vector_quantize(Function): + @staticmethod + def forward(ctx, x, codebook): + with torch.no_grad(): + codebook_sqr = torch.sum(codebook ** 2, dim=1) + x_sqr = torch.sum(x ** 2, dim=1, keepdim=True) + + dist = torch.addmm(codebook_sqr + x_sqr, x, codebook.t(), alpha=-2.0, beta=1.0) + _, indices = dist.min(dim=1) + + ctx.save_for_backward(indices, codebook) + ctx.mark_non_differentiable(indices) + + nn = torch.index_select(codebook, 0, indices) + return nn, indices + + @staticmethod + def backward(ctx, grad_output, grad_indices): + grad_inputs, grad_codebook = None, None + + if ctx.needs_input_grad[0]: + grad_inputs = grad_output.clone() + if ctx.needs_input_grad[1]: + # Gradient wrt. the codebook + indices, codebook = ctx.saved_tensors + + grad_codebook = torch.zeros_like(codebook) + grad_codebook.index_add_(0, indices, grad_output) + + return (grad_inputs, grad_codebook) + + +class VectorQuantize(nn.Module): + def __init__(self, embedding_size, k, ema_decay=0.99, ema_loss=False): + """ + Takes an input of variable size (as long as the last dimension matches the embedding size). 
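+ For example (editor's illustration): inputs of shape (B, T, E) or (B, E) both
+ work here when E equals embedding_size.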
+ Returns one tensor containing the nearest neighbour embeddings to each of the inputs, + with the same size as the input, vq and commitment components for the loss as a tuple + in the second output and the indices of the quantized vectors in the third: + quantized, (vq_loss, commit_loss), indices + """ + super(VectorQuantize, self).__init__() + + self.codebook = nn.Embedding(k, embedding_size) + self.codebook.weight.data.uniform_(-1./k, 1./k) + self.vq = vector_quantize.apply + + self.ema_decay = ema_decay + self.ema_loss = ema_loss + if ema_loss: + self.register_buffer('ema_element_count', torch.ones(k)) + self.register_buffer('ema_weight_sum', torch.zeros_like(self.codebook.weight)) + + def _laplace_smoothing(self, x, epsilon): + n = torch.sum(x) + return ((x + epsilon) / (n + x.size(0) * epsilon) * n) + + def _updateEMA(self, z_e_x, indices): + mask = nn.functional.one_hot(indices, self.ema_element_count.size(0)).float() + elem_count = mask.sum(dim=0) + weight_sum = torch.mm(mask.t(), z_e_x) + + self.ema_element_count = (self.ema_decay * self.ema_element_count) + ((1-self.ema_decay) * elem_count) + self.ema_element_count = self._laplace_smoothing(self.ema_element_count, 1e-5) + self.ema_weight_sum = (self.ema_decay * self.ema_weight_sum) + ((1-self.ema_decay) * weight_sum) + + self.codebook.weight.data = self.ema_weight_sum / self.ema_element_count.unsqueeze(-1) + + def idx2vq(self, idx, dim=-1): + q_idx = self.codebook(idx) + if dim != -1: + q_idx = q_idx.movedim(-1, dim) + return q_idx + + def forward(self, x, get_losses=True, dim=-1): + if dim != -1: + x = x.movedim(dim, -1) + z_e_x = x.contiguous().view(-1, x.size(-1)) if len(x.shape) > 2 else x + z_q_x, indices = self.vq(z_e_x, self.codebook.weight.detach()) + vq_loss, commit_loss = None, None + if self.ema_loss and self.training: + self._updateEMA(z_e_x.detach(), indices.detach()) + # pick the gradient-tracking embeddings after updating the codebook in order to have a more accurate commitment loss + z_q_x_grd = torch.index_select(self.codebook.weight, dim=0, index=indices) + if get_losses: + vq_loss = (z_q_x_grd - z_e_x.detach()).pow(2).mean() + commit_loss = (z_e_x - z_q_x_grd.detach()).pow(2).mean() + + z_q_x = z_q_x.view(x.shape) + if dim != -1: + z_q_x = z_q_x.movedim(-1, dim) + return z_q_x, (vq_loss, commit_loss), indices.view(x.shape[:-1]) + + +class ResBlock(nn.Module): + def __init__(self, c, c_hidden): + super().__init__() + # depthwise/attention + self.norm1 = nn.LayerNorm(c, elementwise_affine=False, eps=1e-6) + self.depthwise = nn.Sequential( + nn.ReplicationPad2d(1), + nn.Conv2d(c, c, kernel_size=3, groups=c) + ) + + # channelwise + self.norm2 = nn.LayerNorm(c, elementwise_affine=False, eps=1e-6) + self.channelwise = nn.Sequential( + nn.Linear(c, c_hidden), + nn.GELU(), + nn.Linear(c_hidden, c), + ) + + self.gammas = nn.Parameter(torch.zeros(6), requires_grad=True) + + # Init weights + def _basic_init(module): + if isinstance(module, nn.Linear) or isinstance(module, nn.Conv2d): + torch.nn.init.xavier_uniform_(module.weight) + if module.bias is not None: + nn.init.constant_(module.bias, 0) + + self.apply(_basic_init) + + def _norm(self, x, norm): + return norm(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) + + def forward(self, x): + mods = self.gammas + + x_temp = self._norm(x, self.norm1) * (1 + mods[0]) + mods[1] + try: + x = x + self.depthwise(x_temp) * mods[2] + except: # this operation is not implemented for bf16 + x_temp = self.depthwise[0](x_temp.float()).to(x.dtype) + x = x + self.depthwise[1](x_temp) * mods[2] + + x_temp = 
self._norm(x, self.norm2) * (1 + mods[3]) + mods[4] + x = x + self.channelwise(x_temp.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) * mods[5] + + return x + + +class StageA(nn.Module): + def __init__(self, levels=2, bottleneck_blocks=12, c_hidden=384, c_latent=4, codebook_size=8192): + super().__init__() + self.c_latent = c_latent + c_levels = [c_hidden // (2 ** i) for i in reversed(range(levels))] + + # Encoder blocks + self.in_block = nn.Sequential( + nn.PixelUnshuffle(2), + nn.Conv2d(3 * 4, c_levels[0], kernel_size=1) + ) + down_blocks = [] + for i in range(levels): + if i > 0: + down_blocks.append(nn.Conv2d(c_levels[i - 1], c_levels[i], kernel_size=4, stride=2, padding=1)) + block = ResBlock(c_levels[i], c_levels[i] * 4) + down_blocks.append(block) + down_blocks.append(nn.Sequential( + nn.Conv2d(c_levels[-1], c_latent, kernel_size=1, bias=False), + nn.BatchNorm2d(c_latent), # then normalize them to have mean 0 and std 1 + )) + self.down_blocks = nn.Sequential(*down_blocks) + self.down_blocks[0] + + self.codebook_size = codebook_size + self.vquantizer = VectorQuantize(c_latent, k=codebook_size) + + # Decoder blocks + up_blocks = [nn.Sequential( + nn.Conv2d(c_latent, c_levels[-1], kernel_size=1) + )] + for i in range(levels): + for j in range(bottleneck_blocks if i == 0 else 1): + block = ResBlock(c_levels[levels - 1 - i], c_levels[levels - 1 - i] * 4) + up_blocks.append(block) + if i < levels - 1: + up_blocks.append( + nn.ConvTranspose2d(c_levels[levels - 1 - i], c_levels[levels - 2 - i], kernel_size=4, stride=2, + padding=1)) + self.up_blocks = nn.Sequential(*up_blocks) + self.out_block = nn.Sequential( + nn.Conv2d(c_levels[0], 3 * 4, kernel_size=1), + nn.PixelShuffle(2), + ) + + def encode(self, x, quantize=False): + x = self.in_block(x) + x = self.down_blocks(x) + if quantize: + qe, (vq_loss, commit_loss), indices = self.vquantizer.forward(x, dim=1) + return qe, x, indices, vq_loss + commit_loss * 0.25 + else: + return x + + def decode(self, x): + x = self.up_blocks(x) + x = self.out_block(x) + return x + + def forward(self, x, quantize=False): + qe, x, _, vq_loss = self.encode(x, quantize) + x = self.decode(qe) + return x, vq_loss + + +class Discriminator(nn.Module): + def __init__(self, c_in=3, c_cond=0, c_hidden=512, depth=6): + super().__init__() + d = max(depth - 3, 3) + layers = [ + nn.utils.spectral_norm(nn.Conv2d(c_in, c_hidden // (2 ** d), kernel_size=3, stride=2, padding=1)), + nn.LeakyReLU(0.2), + ] + for i in range(depth - 1): + c_in = c_hidden // (2 ** max((d - i), 0)) + c_out = c_hidden // (2 ** max((d - 1 - i), 0)) + layers.append(nn.utils.spectral_norm(nn.Conv2d(c_in, c_out, kernel_size=3, stride=2, padding=1))) + layers.append(nn.InstanceNorm2d(c_out)) + layers.append(nn.LeakyReLU(0.2)) + self.encoder = nn.Sequential(*layers) + self.shuffle = nn.Conv2d((c_hidden + c_cond) if c_cond > 0 else c_hidden, 1, kernel_size=1) + self.logits = nn.Sigmoid() + + def forward(self, x, cond=None): + x = self.encoder(x) + if cond is not None: + cond = cond.view(cond.size(0), cond.size(1), 1, 1, ).expand(-1, -1, x.size(-2), x.size(-1)) + x = torch.cat([x, cond], dim=1) + x = self.shuffle(x) + x = self.logits(x) + return x diff --git a/src/comfyui/comfy/ldm/cascade/stage_b.py b/src/comfyui/comfy/ldm/cascade/stage_b.py new file mode 100644 index 0000000000000000000000000000000000000000..7c3d8feabd826accc702b6e6e598b61b4a739194 --- /dev/null +++ b/src/comfyui/comfy/ldm/cascade/stage_b.py @@ -0,0 +1,256 @@ +""" + This file is part of ComfyUI. 
+ Copyright (C) 2024 Stability AI + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . +""" + +import math +import torch +from torch import nn +from .common import AttnBlock, LayerNorm2d_op, ResBlock, FeedForwardBlock, TimestepBlock + +class StageB(nn.Module): + def __init__(self, c_in=4, c_out=4, c_r=64, patch_size=2, c_cond=1280, c_hidden=[320, 640, 1280, 1280], + nhead=[-1, -1, 20, 20], blocks=[[2, 6, 28, 6], [6, 28, 6, 2]], + block_repeat=[[1, 1, 1, 1], [3, 3, 2, 2]], level_config=['CT', 'CT', 'CTA', 'CTA'], c_clip=1280, + c_clip_seq=4, c_effnet=16, c_pixels=3, kernel_size=3, dropout=[0, 0, 0.0, 0.0], self_attn=True, + t_conds=['sca'], stable_cascade_stage=None, dtype=None, device=None, operations=None): + super().__init__() + self.dtype = dtype + self.c_r = c_r + self.t_conds = t_conds + self.c_clip_seq = c_clip_seq + if not isinstance(dropout, list): + dropout = [dropout] * len(c_hidden) + if not isinstance(self_attn, list): + self_attn = [self_attn] * len(c_hidden) + + # CONDITIONING + self.effnet_mapper = nn.Sequential( + operations.Conv2d(c_effnet, c_hidden[0] * 4, kernel_size=1, dtype=dtype, device=device), + nn.GELU(), + operations.Conv2d(c_hidden[0] * 4, c_hidden[0], kernel_size=1, dtype=dtype, device=device), + LayerNorm2d_op(operations)(c_hidden[0], elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) + ) + self.pixels_mapper = nn.Sequential( + operations.Conv2d(c_pixels, c_hidden[0] * 4, kernel_size=1, dtype=dtype, device=device), + nn.GELU(), + operations.Conv2d(c_hidden[0] * 4, c_hidden[0], kernel_size=1, dtype=dtype, device=device), + LayerNorm2d_op(operations)(c_hidden[0], elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) + ) + self.clip_mapper = operations.Linear(c_clip, c_cond * c_clip_seq, dtype=dtype, device=device) + self.clip_norm = operations.LayerNorm(c_cond, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) + + self.embedding = nn.Sequential( + nn.PixelUnshuffle(patch_size), + operations.Conv2d(c_in * (patch_size ** 2), c_hidden[0], kernel_size=1, dtype=dtype, device=device), + LayerNorm2d_op(operations)(c_hidden[0], elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) + ) + + def get_block(block_type, c_hidden, nhead, c_skip=0, dropout=0, self_attn=True): + if block_type == 'C': + return ResBlock(c_hidden, c_skip, kernel_size=kernel_size, dropout=dropout, dtype=dtype, device=device, operations=operations) + elif block_type == 'A': + return AttnBlock(c_hidden, c_cond, nhead, self_attn=self_attn, dropout=dropout, dtype=dtype, device=device, operations=operations) + elif block_type == 'F': + return FeedForwardBlock(c_hidden, dropout=dropout, dtype=dtype, device=device, operations=operations) + elif block_type == 'T': + return TimestepBlock(c_hidden, c_r, conds=t_conds, dtype=dtype, device=device, operations=operations) + else: + raise Exception(f'Block type {block_type} not supported') + + # BLOCKS + # -- down blocks + self.down_blocks = nn.ModuleList() + 
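+ # Editor's note (illustrative): each down level repeats its level_config
+ # string, so level 2 ('CTA', blocks[0][2] == 28 repeats) collects 28 x
+ # [ResBlock, TimestepBlock, AttnBlock] = 84 modules, while the 'CT' levels
+ # (nhead == -1) carry no attention.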
self.down_downscalers = nn.ModuleList() + self.down_repeat_mappers = nn.ModuleList() + for i in range(len(c_hidden)): + if i > 0: + self.down_downscalers.append(nn.Sequential( + LayerNorm2d_op(operations)(c_hidden[i - 1], elementwise_affine=False, eps=1e-6, dtype=dtype, device=device), + operations.Conv2d(c_hidden[i - 1], c_hidden[i], kernel_size=2, stride=2, dtype=dtype, device=device), + )) + else: + self.down_downscalers.append(nn.Identity()) + down_block = nn.ModuleList() + for _ in range(blocks[0][i]): + for block_type in level_config[i]: + block = get_block(block_type, c_hidden[i], nhead[i], dropout=dropout[i], self_attn=self_attn[i]) + down_block.append(block) + self.down_blocks.append(down_block) + if block_repeat is not None: + block_repeat_mappers = nn.ModuleList() + for _ in range(block_repeat[0][i] - 1): + block_repeat_mappers.append(operations.Conv2d(c_hidden[i], c_hidden[i], kernel_size=1, dtype=dtype, device=device)) + self.down_repeat_mappers.append(block_repeat_mappers) + + # -- up blocks + self.up_blocks = nn.ModuleList() + self.up_upscalers = nn.ModuleList() + self.up_repeat_mappers = nn.ModuleList() + for i in reversed(range(len(c_hidden))): + if i > 0: + self.up_upscalers.append(nn.Sequential( + LayerNorm2d_op(operations)(c_hidden[i], elementwise_affine=False, eps=1e-6, dtype=dtype, device=device), + operations.ConvTranspose2d(c_hidden[i], c_hidden[i - 1], kernel_size=2, stride=2, dtype=dtype, device=device), + )) + else: + self.up_upscalers.append(nn.Identity()) + up_block = nn.ModuleList() + for j in range(blocks[1][::-1][i]): + for k, block_type in enumerate(level_config[i]): + c_skip = c_hidden[i] if i < len(c_hidden) - 1 and j == k == 0 else 0 + block = get_block(block_type, c_hidden[i], nhead[i], c_skip=c_skip, dropout=dropout[i], + self_attn=self_attn[i]) + up_block.append(block) + self.up_blocks.append(up_block) + if block_repeat is not None: + block_repeat_mappers = nn.ModuleList() + for _ in range(block_repeat[1][::-1][i] - 1): + block_repeat_mappers.append(operations.Conv2d(c_hidden[i], c_hidden[i], kernel_size=1, dtype=dtype, device=device)) + self.up_repeat_mappers.append(block_repeat_mappers) + + # OUTPUT + self.clf = nn.Sequential( + LayerNorm2d_op(operations)(c_hidden[0], elementwise_affine=False, eps=1e-6, dtype=dtype, device=device), + operations.Conv2d(c_hidden[0], c_out * (patch_size ** 2), kernel_size=1, dtype=dtype, device=device), + nn.PixelShuffle(patch_size), + ) + + # --- WEIGHT INIT --- + # self.apply(self._init_weights) # General init + # nn.init.normal_(self.clip_mapper.weight, std=0.02) # conditionings + # nn.init.normal_(self.effnet_mapper[0].weight, std=0.02) # conditionings + # nn.init.normal_(self.effnet_mapper[2].weight, std=0.02) # conditionings + # nn.init.normal_(self.pixels_mapper[0].weight, std=0.02) # conditionings + # nn.init.normal_(self.pixels_mapper[2].weight, std=0.02) # conditionings + # torch.nn.init.xavier_uniform_(self.embedding[1].weight, 0.02) # inputs + # nn.init.constant_(self.clf[1].weight, 0) # outputs + # + # # blocks + # for level_block in self.down_blocks + self.up_blocks: + # for block in level_block: + # if isinstance(block, ResBlock) or isinstance(block, FeedForwardBlock): + # block.channelwise[-1].weight.data *= np.sqrt(1 / sum(blocks[0])) + # elif isinstance(block, TimestepBlock): + # for layer in block.modules(): + # if isinstance(layer, nn.Linear): + # nn.init.constant_(layer.weight, 0) + # + # def _init_weights(self, m): + # if isinstance(m, (nn.Conv2d, nn.Linear)): + # 
torch.nn.init.xavier_uniform_(m.weight) + # if m.bias is not None: + # nn.init.constant_(m.bias, 0) + + def gen_r_embedding(self, r, max_positions=10000): + r = r * max_positions + half_dim = self.c_r // 2 + emb = math.log(max_positions) / (half_dim - 1) + emb = torch.arange(half_dim, device=r.device).float().mul(-emb).exp() + emb = r[:, None] * emb[None, :] + emb = torch.cat([emb.sin(), emb.cos()], dim=1) + if self.c_r % 2 == 1: # zero pad + emb = nn.functional.pad(emb, (0, 1), mode='constant') + return emb + + def gen_c_embeddings(self, clip): + if len(clip.shape) == 2: + clip = clip.unsqueeze(1) + clip = self.clip_mapper(clip).view(clip.size(0), clip.size(1) * self.c_clip_seq, -1) + clip = self.clip_norm(clip) + return clip + + def _down_encode(self, x, r_embed, clip): + level_outputs = [] + block_group = zip(self.down_blocks, self.down_downscalers, self.down_repeat_mappers) + for down_block, downscaler, repmap in block_group: + x = downscaler(x) + for i in range(len(repmap) + 1): + for block in down_block: + if isinstance(block, ResBlock) or ( + hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, + ResBlock)): + x = block(x) + elif isinstance(block, AttnBlock) or ( + hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, + AttnBlock)): + x = block(x, clip) + elif isinstance(block, TimestepBlock) or ( + hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, + TimestepBlock)): + x = block(x, r_embed) + else: + x = block(x) + if i < len(repmap): + x = repmap[i](x) + level_outputs.insert(0, x) + return level_outputs + + def _up_decode(self, level_outputs, r_embed, clip): + x = level_outputs[0] + block_group = zip(self.up_blocks, self.up_upscalers, self.up_repeat_mappers) + for i, (up_block, upscaler, repmap) in enumerate(block_group): + for j in range(len(repmap) + 1): + for k, block in enumerate(up_block): + if isinstance(block, ResBlock) or ( + hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, + ResBlock)): + skip = level_outputs[i] if k == 0 and i > 0 else None + if skip is not None and (x.size(-1) != skip.size(-1) or x.size(-2) != skip.size(-2)): + x = torch.nn.functional.interpolate(x, skip.shape[-2:], mode='bilinear', + align_corners=True) + x = block(x, skip) + elif isinstance(block, AttnBlock) or ( + hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, + AttnBlock)): + x = block(x, clip) + elif isinstance(block, TimestepBlock) or ( + hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, + TimestepBlock)): + x = block(x, r_embed) + else: + x = block(x) + if j < len(repmap): + x = repmap[j](x) + x = upscaler(x) + return x + + def forward(self, x, r, effnet, clip, pixels=None, **kwargs): + if pixels is None: + pixels = x.new_zeros(x.size(0), 3, 8, 8) + + # Process the conditioning embeddings + r_embed = self.gen_r_embedding(r).to(dtype=x.dtype) + for c in self.t_conds: + t_cond = kwargs.get(c, torch.zeros_like(r)) + r_embed = torch.cat([r_embed, self.gen_r_embedding(t_cond).to(dtype=x.dtype)], dim=1) + clip = self.gen_c_embeddings(clip) + + # Model Blocks + x = self.embedding(x) + x = x + self.effnet_mapper( + nn.functional.interpolate(effnet, size=x.shape[-2:], mode='bilinear', align_corners=True)) + x = x + nn.functional.interpolate(self.pixels_mapper(pixels), size=x.shape[-2:], mode='bilinear', + align_corners=True) + level_outputs = self._down_encode(x, r_embed, clip) + x = self._up_decode(level_outputs, 
r_embed, clip) + return self.clf(x) + + def update_weights_ema(self, src_model, beta=0.999): + for self_params, src_params in zip(self.parameters(), src_model.parameters()): + self_params.data = self_params.data * beta + src_params.data.clone().to(self_params.device) * (1 - beta) + for self_buffers, src_buffers in zip(self.buffers(), src_model.buffers()): + self_buffers.data = self_buffers.data * beta + src_buffers.data.clone().to(self_buffers.device) * (1 - beta) diff --git a/src/comfyui/comfy/ldm/cascade/stage_c.py b/src/comfyui/comfy/ldm/cascade/stage_c.py new file mode 100644 index 0000000000000000000000000000000000000000..c85da1f01c1d862de5906e73fc746fc92eb51304 --- /dev/null +++ b/src/comfyui/comfy/ldm/cascade/stage_c.py @@ -0,0 +1,273 @@ +""" + This file is part of ComfyUI. + Copyright (C) 2024 Stability AI + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . +""" + +import torch +from torch import nn +import math +from .common import AttnBlock, LayerNorm2d_op, ResBlock, FeedForwardBlock, TimestepBlock +# from .controlnet import ControlNetDeliverer + +class UpDownBlock2d(nn.Module): + def __init__(self, c_in, c_out, mode, enabled=True, dtype=None, device=None, operations=None): + super().__init__() + assert mode in ['up', 'down'] + interpolation = nn.Upsample(scale_factor=2 if mode == 'up' else 0.5, mode='bilinear', + align_corners=True) if enabled else nn.Identity() + mapping = operations.Conv2d(c_in, c_out, kernel_size=1, dtype=dtype, device=device) + self.blocks = nn.ModuleList([interpolation, mapping] if mode == 'up' else [mapping, interpolation]) + + def forward(self, x): + for block in self.blocks: + x = block(x) + return x + + +class StageC(nn.Module): + def __init__(self, c_in=16, c_out=16, c_r=64, patch_size=1, c_cond=2048, c_hidden=[2048, 2048], nhead=[32, 32], + blocks=[[8, 24], [24, 8]], block_repeat=[[1, 1], [1, 1]], level_config=['CTA', 'CTA'], + c_clip_text=1280, c_clip_text_pooled=1280, c_clip_img=768, c_clip_seq=4, kernel_size=3, + dropout=[0.0, 0.0], self_attn=True, t_conds=['sca', 'crp'], switch_level=[False], stable_cascade_stage=None, + dtype=None, device=None, operations=None): + super().__init__() + self.dtype = dtype + self.c_r = c_r + self.t_conds = t_conds + self.c_clip_seq = c_clip_seq + if not isinstance(dropout, list): + dropout = [dropout] * len(c_hidden) + if not isinstance(self_attn, list): + self_attn = [self_attn] * len(c_hidden) + + # CONDITIONING + self.clip_txt_mapper = operations.Linear(c_clip_text, c_cond, dtype=dtype, device=device) + self.clip_txt_pooled_mapper = operations.Linear(c_clip_text_pooled, c_cond * c_clip_seq, dtype=dtype, device=device) + self.clip_img_mapper = operations.Linear(c_clip_img, c_cond * c_clip_seq, dtype=dtype, device=device) + self.clip_norm = operations.LayerNorm(c_cond, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) + + self.embedding = nn.Sequential( + nn.PixelUnshuffle(patch_size), + operations.Conv2d(c_in * (patch_size ** 2), c_hidden[0], kernel_size=1, 
dtype=dtype, device=device), + LayerNorm2d_op(operations)(c_hidden[0], elementwise_affine=False, eps=1e-6) + ) + + def get_block(block_type, c_hidden, nhead, c_skip=0, dropout=0, self_attn=True): + if block_type == 'C': + return ResBlock(c_hidden, c_skip, kernel_size=kernel_size, dropout=dropout, dtype=dtype, device=device, operations=operations) + elif block_type == 'A': + return AttnBlock(c_hidden, c_cond, nhead, self_attn=self_attn, dropout=dropout, dtype=dtype, device=device, operations=operations) + elif block_type == 'F': + return FeedForwardBlock(c_hidden, dropout=dropout, dtype=dtype, device=device, operations=operations) + elif block_type == 'T': + return TimestepBlock(c_hidden, c_r, conds=t_conds, dtype=dtype, device=device, operations=operations) + else: + raise Exception(f'Block type {block_type} not supported') + + # BLOCKS + # -- down blocks + self.down_blocks = nn.ModuleList() + self.down_downscalers = nn.ModuleList() + self.down_repeat_mappers = nn.ModuleList() + for i in range(len(c_hidden)): + if i > 0: + self.down_downscalers.append(nn.Sequential( + LayerNorm2d_op(operations)(c_hidden[i - 1], elementwise_affine=False, eps=1e-6), + UpDownBlock2d(c_hidden[i - 1], c_hidden[i], mode='down', enabled=switch_level[i - 1], dtype=dtype, device=device, operations=operations) + )) + else: + self.down_downscalers.append(nn.Identity()) + down_block = nn.ModuleList() + for _ in range(blocks[0][i]): + for block_type in level_config[i]: + block = get_block(block_type, c_hidden[i], nhead[i], dropout=dropout[i], self_attn=self_attn[i]) + down_block.append(block) + self.down_blocks.append(down_block) + if block_repeat is not None: + block_repeat_mappers = nn.ModuleList() + for _ in range(block_repeat[0][i] - 1): + block_repeat_mappers.append(operations.Conv2d(c_hidden[i], c_hidden[i], kernel_size=1, dtype=dtype, device=device)) + self.down_repeat_mappers.append(block_repeat_mappers) + + # -- up blocks + self.up_blocks = nn.ModuleList() + self.up_upscalers = nn.ModuleList() + self.up_repeat_mappers = nn.ModuleList() + for i in reversed(range(len(c_hidden))): + if i > 0: + self.up_upscalers.append(nn.Sequential( + LayerNorm2d_op(operations)(c_hidden[i], elementwise_affine=False, eps=1e-6), + UpDownBlock2d(c_hidden[i], c_hidden[i - 1], mode='up', enabled=switch_level[i - 1], dtype=dtype, device=device, operations=operations) + )) + else: + self.up_upscalers.append(nn.Identity()) + up_block = nn.ModuleList() + for j in range(blocks[1][::-1][i]): + for k, block_type in enumerate(level_config[i]): + c_skip = c_hidden[i] if i < len(c_hidden) - 1 and j == k == 0 else 0 + block = get_block(block_type, c_hidden[i], nhead[i], c_skip=c_skip, dropout=dropout[i], + self_attn=self_attn[i]) + up_block.append(block) + self.up_blocks.append(up_block) + if block_repeat is not None: + block_repeat_mappers = nn.ModuleList() + for _ in range(block_repeat[1][::-1][i] - 1): + block_repeat_mappers.append(operations.Conv2d(c_hidden[i], c_hidden[i], kernel_size=1, dtype=dtype, device=device)) + self.up_repeat_mappers.append(block_repeat_mappers) + + # OUTPUT + self.clf = nn.Sequential( + LayerNorm2d_op(operations)(c_hidden[0], elementwise_affine=False, eps=1e-6, dtype=dtype, device=device), + operations.Conv2d(c_hidden[0], c_out * (patch_size ** 2), kernel_size=1, dtype=dtype, device=device), + nn.PixelShuffle(patch_size), + ) + + # --- WEIGHT INIT --- + # self.apply(self._init_weights) # General init + # nn.init.normal_(self.clip_txt_mapper.weight, std=0.02) # conditionings + # 
nn.init.normal_(self.clip_txt_pooled_mapper.weight, std=0.02) # conditionings + # nn.init.normal_(self.clip_img_mapper.weight, std=0.02) # conditionings + # torch.nn.init.xavier_uniform_(self.embedding[1].weight, 0.02) # inputs + # nn.init.constant_(self.clf[1].weight, 0) # outputs + # + # # blocks + # for level_block in self.down_blocks + self.up_blocks: + # for block in level_block: + # if isinstance(block, ResBlock) or isinstance(block, FeedForwardBlock): + # block.channelwise[-1].weight.data *= np.sqrt(1 / sum(blocks[0])) + # elif isinstance(block, TimestepBlock): + # for layer in block.modules(): + # if isinstance(layer, nn.Linear): + # nn.init.constant_(layer.weight, 0) + # + # def _init_weights(self, m): + # if isinstance(m, (nn.Conv2d, nn.Linear)): + # torch.nn.init.xavier_uniform_(m.weight) + # if m.bias is not None: + # nn.init.constant_(m.bias, 0) + + def gen_r_embedding(self, r, max_positions=10000): + r = r * max_positions + half_dim = self.c_r // 2 + emb = math.log(max_positions) / (half_dim - 1) + emb = torch.arange(half_dim, device=r.device).float().mul(-emb).exp() + emb = r[:, None] * emb[None, :] + emb = torch.cat([emb.sin(), emb.cos()], dim=1) + if self.c_r % 2 == 1: # zero pad + emb = nn.functional.pad(emb, (0, 1), mode='constant') + return emb + + def gen_c_embeddings(self, clip_txt, clip_txt_pooled, clip_img): + clip_txt = self.clip_txt_mapper(clip_txt) + if len(clip_txt_pooled.shape) == 2: + clip_txt_pooled = clip_txt_pooled.unsqueeze(1) + if len(clip_img.shape) == 2: + clip_img = clip_img.unsqueeze(1) + clip_txt_pool = self.clip_txt_pooled_mapper(clip_txt_pooled).view(clip_txt_pooled.size(0), clip_txt_pooled.size(1) * self.c_clip_seq, -1) + clip_img = self.clip_img_mapper(clip_img).view(clip_img.size(0), clip_img.size(1) * self.c_clip_seq, -1) + clip = torch.cat([clip_txt, clip_txt_pool, clip_img], dim=1) + clip = self.clip_norm(clip) + return clip + + def _down_encode(self, x, r_embed, clip, cnet=None): + level_outputs = [] + block_group = zip(self.down_blocks, self.down_downscalers, self.down_repeat_mappers) + for down_block, downscaler, repmap in block_group: + x = downscaler(x) + for i in range(len(repmap) + 1): + for block in down_block: + if isinstance(block, ResBlock) or ( + hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, + ResBlock)): + if cnet is not None: + next_cnet = cnet.pop() + if next_cnet is not None: + x = x + nn.functional.interpolate(next_cnet, size=x.shape[-2:], mode='bilinear', + align_corners=True).to(x.dtype) + x = block(x) + elif isinstance(block, AttnBlock) or ( + hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, + AttnBlock)): + x = block(x, clip) + elif isinstance(block, TimestepBlock) or ( + hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, + TimestepBlock)): + x = block(x, r_embed) + else: + x = block(x) + if i < len(repmap): + x = repmap[i](x) + level_outputs.insert(0, x) + return level_outputs + + def _up_decode(self, level_outputs, r_embed, clip, cnet=None): + x = level_outputs[0] + block_group = zip(self.up_blocks, self.up_upscalers, self.up_repeat_mappers) + for i, (up_block, upscaler, repmap) in enumerate(block_group): + for j in range(len(repmap) + 1): + for k, block in enumerate(up_block): + if isinstance(block, ResBlock) or ( + hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, + ResBlock)): + skip = level_outputs[i] if k == 0 and i > 0 else None + if skip is not None and (x.size(-1) != skip.size(-1) 
or x.size(-2) != skip.size(-2)): + x = torch.nn.functional.interpolate(x, skip.shape[-2:], mode='bilinear', + align_corners=True) + if cnet is not None: + next_cnet = cnet.pop() + if next_cnet is not None: + x = x + nn.functional.interpolate(next_cnet, size=x.shape[-2:], mode='bilinear', + align_corners=True).to(x.dtype) + x = block(x, skip) + elif isinstance(block, AttnBlock) or ( + hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, + AttnBlock)): + x = block(x, clip) + elif isinstance(block, TimestepBlock) or ( + hasattr(block, '_fsdp_wrapped_module') and isinstance(block._fsdp_wrapped_module, + TimestepBlock)): + x = block(x, r_embed) + else: + x = block(x) + if j < len(repmap): + x = repmap[j](x) + x = upscaler(x) + return x + + def forward(self, x, r, clip_text, clip_text_pooled, clip_img, control=None, **kwargs): + # Process the conditioning embeddings + r_embed = self.gen_r_embedding(r).to(dtype=x.dtype) + for c in self.t_conds: + t_cond = kwargs.get(c, torch.zeros_like(r)) + r_embed = torch.cat([r_embed, self.gen_r_embedding(t_cond).to(dtype=x.dtype)], dim=1) + clip = self.gen_c_embeddings(clip_text, clip_text_pooled, clip_img) + + if control is not None: + cnet = control.get("input") + else: + cnet = None + + # Model Blocks + x = self.embedding(x) + level_outputs = self._down_encode(x, r_embed, clip, cnet) + x = self._up_decode(level_outputs, r_embed, clip, cnet) + return self.clf(x) + + def update_weights_ema(self, src_model, beta=0.999): + for self_params, src_params in zip(self.parameters(), src_model.parameters()): + self_params.data = self_params.data * beta + src_params.data.clone().to(self_params.device) * (1 - beta) + for self_buffers, src_buffers in zip(self.buffers(), src_model.buffers()): + self_buffers.data = self_buffers.data * beta + src_buffers.data.clone().to(self_buffers.device) * (1 - beta) diff --git a/src/comfyui/comfy/ldm/cascade/stage_c_coder.py b/src/comfyui/comfy/ldm/cascade/stage_c_coder.py new file mode 100644 index 0000000000000000000000000000000000000000..0cb7c49fc90c434553954772cbf522e1f4a88955 --- /dev/null +++ b/src/comfyui/comfy/ldm/cascade/stage_c_coder.py @@ -0,0 +1,95 @@ +""" + This file is part of ComfyUI. + Copyright (C) 2024 Stability AI + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . 
+""" +import torch +import torchvision +from torch import nn + + +# EfficientNet +class EfficientNetEncoder(nn.Module): + def __init__(self, c_latent=16): + super().__init__() + self.backbone = torchvision.models.efficientnet_v2_s().features.eval() + self.mapper = nn.Sequential( + nn.Conv2d(1280, c_latent, kernel_size=1, bias=False), + nn.BatchNorm2d(c_latent, affine=False), # then normalize them to have mean 0 and std 1 + ) + self.mean = nn.Parameter(torch.tensor([0.485, 0.456, 0.406])) + self.std = nn.Parameter(torch.tensor([0.229, 0.224, 0.225])) + + def forward(self, x): + x = x * 0.5 + 0.5 + x = (x - self.mean.view([3,1,1])) / self.std.view([3,1,1]) + o = self.mapper(self.backbone(x)) + return o + + +# Fast Decoder for Stage C latents. E.g. 16 x 24 x 24 -> 3 x 192 x 192 +class Previewer(nn.Module): + def __init__(self, c_in=16, c_hidden=512, c_out=3): + super().__init__() + self.blocks = nn.Sequential( + nn.Conv2d(c_in, c_hidden, kernel_size=1), # 16 channels to 512 channels + nn.GELU(), + nn.BatchNorm2d(c_hidden), + + nn.Conv2d(c_hidden, c_hidden, kernel_size=3, padding=1), + nn.GELU(), + nn.BatchNorm2d(c_hidden), + + nn.ConvTranspose2d(c_hidden, c_hidden // 2, kernel_size=2, stride=2), # 16 -> 32 + nn.GELU(), + nn.BatchNorm2d(c_hidden // 2), + + nn.Conv2d(c_hidden // 2, c_hidden // 2, kernel_size=3, padding=1), + nn.GELU(), + nn.BatchNorm2d(c_hidden // 2), + + nn.ConvTranspose2d(c_hidden // 2, c_hidden // 4, kernel_size=2, stride=2), # 32 -> 64 + nn.GELU(), + nn.BatchNorm2d(c_hidden // 4), + + nn.Conv2d(c_hidden // 4, c_hidden // 4, kernel_size=3, padding=1), + nn.GELU(), + nn.BatchNorm2d(c_hidden // 4), + + nn.ConvTranspose2d(c_hidden // 4, c_hidden // 4, kernel_size=2, stride=2), # 64 -> 128 + nn.GELU(), + nn.BatchNorm2d(c_hidden // 4), + + nn.Conv2d(c_hidden // 4, c_hidden // 4, kernel_size=3, padding=1), + nn.GELU(), + nn.BatchNorm2d(c_hidden // 4), + + nn.Conv2d(c_hidden // 4, c_out, kernel_size=1), + ) + + def forward(self, x): + return (self.blocks(x) - 0.5) * 2.0 + +class StageC_coder(nn.Module): + def __init__(self): + super().__init__() + self.previewer = Previewer() + self.encoder = EfficientNetEncoder() + + def encode(self, x): + return self.encoder(x) + + def decode(self, x): + return self.previewer(x) diff --git a/src/comfyui/comfy/ldm/common_dit.py b/src/comfyui/comfy/ldm/common_dit.py new file mode 100644 index 0000000000000000000000000000000000000000..cb6b741478619a0b49fb6aed7aa7f492c8fc1c1b --- /dev/null +++ b/src/comfyui/comfy/ldm/common_dit.py @@ -0,0 +1,27 @@ +import torch +import comfy.ops + +def pad_to_patch_size(img, patch_size=(2, 2), padding_mode="circular"): + if padding_mode == "circular" and torch.jit.is_tracing() or torch.jit.is_scripting(): + padding_mode = "reflect" + pad_h = (patch_size[0] - img.shape[-2] % patch_size[0]) % patch_size[0] + pad_w = (patch_size[1] - img.shape[-1] % patch_size[1]) % patch_size[1] + return torch.nn.functional.pad(img, (0, pad_w, 0, pad_h), mode=padding_mode) + +try: + rms_norm_torch = torch.nn.functional.rms_norm +except: + rms_norm_torch = None + +def rms_norm(x, weight=None, eps=1e-6): + if rms_norm_torch is not None and not (torch.jit.is_tracing() or torch.jit.is_scripting()): + if weight is None: + return rms_norm_torch(x, (x.shape[-1],), eps=eps) + else: + return rms_norm_torch(x, weight.shape, weight=comfy.ops.cast_to(weight, dtype=x.dtype, device=x.device), eps=eps) + else: + r = x * torch.rsqrt(torch.mean(x**2, dim=-1, keepdim=True) + eps) + if weight is None: + return r + else: + return r * 
comfy.ops.cast_to(weight, dtype=x.dtype, device=x.device) diff --git a/src/comfyui/comfy/ldm/flux/__pycache__/controlnet.cpython-310.pyc b/src/comfyui/comfy/ldm/flux/__pycache__/controlnet.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b9f38ed6ac659e5193120e80a457972343c3d941 Binary files /dev/null and b/src/comfyui/comfy/ldm/flux/__pycache__/controlnet.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/flux/__pycache__/layers.cpython-310.pyc b/src/comfyui/comfy/ldm/flux/__pycache__/layers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1cb2b72855996535cd97cb54852857848c258886 Binary files /dev/null and b/src/comfyui/comfy/ldm/flux/__pycache__/layers.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/flux/__pycache__/math.cpython-310.pyc b/src/comfyui/comfy/ldm/flux/__pycache__/math.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7422c1bdfd44cdcec9e2299b578d7d629fd237d2 Binary files /dev/null and b/src/comfyui/comfy/ldm/flux/__pycache__/math.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/flux/__pycache__/model.cpython-310.pyc b/src/comfyui/comfy/ldm/flux/__pycache__/model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..baa60887f91ba68908bdec4f006515b5ee6c2cea Binary files /dev/null and b/src/comfyui/comfy/ldm/flux/__pycache__/model.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/flux/controlnet.py b/src/comfyui/comfy/ldm/flux/controlnet.py new file mode 100644 index 0000000000000000000000000000000000000000..c033dea52f2e677c57797c17677cb9072fbb04e5 --- /dev/null +++ b/src/comfyui/comfy/ldm/flux/controlnet.py @@ -0,0 +1,205 @@ +#Original code can be found on: https://github.com/XLabs-AI/x-flux/blob/main/src/flux/controlnet.py +#modified to support different types of flux controlnets + +import torch +import math +from torch import Tensor, nn +from einops import rearrange, repeat + +from .layers import (DoubleStreamBlock, EmbedND, LastLayer, + MLPEmbedder, SingleStreamBlock, + timestep_embedding) + +from .model import Flux +import comfy.ldm.common_dit + +class MistolineCondDownsamplBlock(nn.Module): + def __init__(self, dtype=None, device=None, operations=None): + super().__init__() + self.encoder = nn.Sequential( + operations.Conv2d(3, 16, 3, padding=1, dtype=dtype, device=device), + nn.SiLU(), + operations.Conv2d(16, 16, 1, dtype=dtype, device=device), + nn.SiLU(), + operations.Conv2d(16, 16, 3, padding=1, dtype=dtype, device=device), + nn.SiLU(), + operations.Conv2d(16, 16, 3, padding=1, stride=2, dtype=dtype, device=device), + nn.SiLU(), + operations.Conv2d(16, 16, 3, padding=1, dtype=dtype, device=device), + nn.SiLU(), + operations.Conv2d(16, 16, 3, padding=1, stride=2, dtype=dtype, device=device), + nn.SiLU(), + operations.Conv2d(16, 16, 3, padding=1, dtype=dtype, device=device), + nn.SiLU(), + operations.Conv2d(16, 16, 3, padding=1, stride=2, dtype=dtype, device=device), + nn.SiLU(), + operations.Conv2d(16, 16, 1, dtype=dtype, device=device), + nn.SiLU(), + operations.Conv2d(16, 16, 3, padding=1, dtype=dtype, device=device) + ) + + def forward(self, x): + return self.encoder(x) + +class MistolineControlnetBlock(nn.Module): + def __init__(self, hidden_size, dtype=None, device=None, operations=None): + super().__init__() + self.linear = operations.Linear(hidden_size, hidden_size, dtype=dtype, device=device) + self.act = nn.SiLU() + + def forward(self, x): + return self.act(self.linear(x)) + + +class 
ControlNetFlux(Flux): + def __init__(self, latent_input=False, num_union_modes=0, mistoline=False, control_latent_channels=None, image_model=None, dtype=None, device=None, operations=None, **kwargs): + super().__init__(final_layer=False, dtype=dtype, device=device, operations=operations, **kwargs) + + self.main_model_double = 19 + self.main_model_single = 38 + + self.mistoline = mistoline + # add ControlNet blocks + if self.mistoline: + control_block = lambda : MistolineControlnetBlock(self.hidden_size, dtype=dtype, device=device, operations=operations) + else: + control_block = lambda : operations.Linear(self.hidden_size, self.hidden_size, dtype=dtype, device=device) + + self.controlnet_blocks = nn.ModuleList([]) + for _ in range(self.params.depth): + self.controlnet_blocks.append(control_block()) + + self.controlnet_single_blocks = nn.ModuleList([]) + for _ in range(self.params.depth_single_blocks): + self.controlnet_single_blocks.append(control_block()) + + self.num_union_modes = num_union_modes + self.controlnet_mode_embedder = None + if self.num_union_modes > 0: + self.controlnet_mode_embedder = operations.Embedding(self.num_union_modes, self.hidden_size, dtype=dtype, device=device) + + self.gradient_checkpointing = False + self.latent_input = latent_input + if control_latent_channels is None: + control_latent_channels = self.in_channels + else: + control_latent_channels *= 2 * 2 #patch size + + self.pos_embed_input = operations.Linear(control_latent_channels, self.hidden_size, bias=True, dtype=dtype, device=device) + if not self.latent_input: + if self.mistoline: + self.input_cond_block = MistolineCondDownsamplBlock(dtype=dtype, device=device, operations=operations) + else: + self.input_hint_block = nn.Sequential( + operations.Conv2d(3, 16, 3, padding=1, dtype=dtype, device=device), + nn.SiLU(), + operations.Conv2d(16, 16, 3, padding=1, dtype=dtype, device=device), + nn.SiLU(), + operations.Conv2d(16, 16, 3, padding=1, stride=2, dtype=dtype, device=device), + nn.SiLU(), + operations.Conv2d(16, 16, 3, padding=1, dtype=dtype, device=device), + nn.SiLU(), + operations.Conv2d(16, 16, 3, padding=1, stride=2, dtype=dtype, device=device), + nn.SiLU(), + operations.Conv2d(16, 16, 3, padding=1, dtype=dtype, device=device), + nn.SiLU(), + operations.Conv2d(16, 16, 3, padding=1, stride=2, dtype=dtype, device=device), + nn.SiLU(), + operations.Conv2d(16, 16, 3, padding=1, dtype=dtype, device=device) + ) + + def forward_orig( + self, + img: Tensor, + img_ids: Tensor, + controlnet_cond: Tensor, + txt: Tensor, + txt_ids: Tensor, + timesteps: Tensor, + y: Tensor, + guidance: Tensor = None, + control_type: Tensor = None, + ) -> Tensor: + if img.ndim != 3 or txt.ndim != 3: + raise ValueError("Input img and txt tensors must have 3 dimensions.") + + # running on sequences img + img = self.img_in(img) + + controlnet_cond = self.pos_embed_input(controlnet_cond) + img = img + controlnet_cond + vec = self.time_in(timestep_embedding(timesteps, 256)) + if self.params.guidance_embed: + vec = vec + self.guidance_in(timestep_embedding(guidance, 256)) + vec = vec + self.vector_in(y) + txt = self.txt_in(txt) + + if self.controlnet_mode_embedder is not None and len(control_type) > 0: + control_cond = self.controlnet_mode_embedder(torch.tensor(control_type, device=img.device), out_dtype=img.dtype).unsqueeze(0).repeat((txt.shape[0], 1, 1)) + txt = torch.cat([control_cond, txt], dim=1) + txt_ids = torch.cat([txt_ids[:,:1], txt_ids], dim=1) + + ids = torch.cat((txt_ids, img_ids), dim=1) + pe = self.pe_embedder(ids) + + 
controlnet_double = () + + for i in range(len(self.double_blocks)): + img, txt = self.double_blocks[i](img=img, txt=txt, vec=vec, pe=pe) + controlnet_double = controlnet_double + (self.controlnet_blocks[i](img),) + + img = torch.cat((txt, img), 1) + + controlnet_single = () + + for i in range(len(self.single_blocks)): + img = self.single_blocks[i](img, vec=vec, pe=pe) + controlnet_single = controlnet_single + (self.controlnet_single_blocks[i](img[:, txt.shape[1] :, ...]),) + + repeat = math.ceil(self.main_model_double / len(controlnet_double)) + if self.latent_input: + out_input = () + for x in controlnet_double: + out_input += (x,) * repeat + else: + out_input = (controlnet_double * repeat) + + out = {"input": out_input[:self.main_model_double]} + if len(controlnet_single) > 0: + repeat = math.ceil(self.main_model_single / len(controlnet_single)) + out_output = () + if self.latent_input: + for x in controlnet_single: + out_output += (x,) * repeat + else: + out_output = (controlnet_single * repeat) + out["output"] = out_output[:self.main_model_single] + return out + + def forward(self, x, timesteps, context, y, guidance=None, hint=None, **kwargs): + patch_size = 2 + if self.latent_input: + hint = comfy.ldm.common_dit.pad_to_patch_size(hint, (patch_size, patch_size)) + elif self.mistoline: + hint = hint * 2.0 - 1.0 + hint = self.input_cond_block(hint) + else: + hint = hint * 2.0 - 1.0 + hint = self.input_hint_block(hint) + + hint = rearrange(hint, "b c (h ph) (w pw) -> b (h w) (c ph pw)", ph=patch_size, pw=patch_size) + + bs, c, h, w = x.shape + x = comfy.ldm.common_dit.pad_to_patch_size(x, (patch_size, patch_size)) + + img = rearrange(x, "b c (h ph) (w pw) -> b (h w) (c ph pw)", ph=patch_size, pw=patch_size) + + h_len = ((h + (patch_size // 2)) // patch_size) + w_len = ((w + (patch_size // 2)) // patch_size) + img_ids = torch.zeros((h_len, w_len, 3), device=x.device, dtype=x.dtype) + img_ids[..., 1] = img_ids[..., 1] + torch.linspace(0, h_len - 1, steps=h_len, device=x.device, dtype=x.dtype)[:, None] + img_ids[..., 2] = img_ids[..., 2] + torch.linspace(0, w_len - 1, steps=w_len, device=x.device, dtype=x.dtype)[None, :] + img_ids = repeat(img_ids, "h w c -> b (h w) c", b=bs) + + txt_ids = torch.zeros((bs, context.shape[1], 3), device=x.device, dtype=x.dtype) + return self.forward_orig(img, img_ids, hint, context, txt_ids, timesteps, y, guidance, control_type=kwargs.get("control_type", [])) diff --git a/src/comfyui/comfy/ldm/flux/layers.py b/src/comfyui/comfy/ldm/flux/layers.py new file mode 100644 index 0000000000000000000000000000000000000000..dabab3e33023308785b3594a96dc9796e777dcdb --- /dev/null +++ b/src/comfyui/comfy/ldm/flux/layers.py @@ -0,0 +1,249 @@ +import math +from dataclasses import dataclass + +import torch +from torch import Tensor, nn + +from .math import attention, rope +import comfy.ops +import comfy.ldm.common_dit + + +class EmbedND(nn.Module): + def __init__(self, dim: int, theta: int, axes_dim: list): + super().__init__() + self.dim = dim + self.theta = theta + self.axes_dim = axes_dim + + def forward(self, ids: Tensor) -> Tensor: + n_axes = ids.shape[-1] + emb = torch.cat( + [rope(ids[..., i], self.axes_dim[i], self.theta) for i in range(n_axes)], + dim=-3, + ) + + return emb.unsqueeze(1) + + +def timestep_embedding(t: Tensor, dim, max_period=10000, time_factor: float = 1000.0): + """ + Create sinusoidal timestep embeddings. + :param t: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. 
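+    :param time_factor: scale applied to t before the embedding is computed (defaults to 1000.0).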
+ :param max_period: controls the minimum frequency of the embeddings. + :return: an (N, D) Tensor of positional embeddings. + """ + t = time_factor * t + half = dim // 2 + freqs = torch.exp(-math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32, device=t.device) / half) + + args = t[:, None].float() * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1) + if torch.is_floating_point(t): + embedding = embedding.to(t) + return embedding + +class MLPEmbedder(nn.Module): + def __init__(self, in_dim: int, hidden_dim: int, dtype=None, device=None, operations=None): + super().__init__() + self.in_layer = operations.Linear(in_dim, hidden_dim, bias=True, dtype=dtype, device=device) + self.silu = nn.SiLU() + self.out_layer = operations.Linear(hidden_dim, hidden_dim, bias=True, dtype=dtype, device=device) + + def forward(self, x: Tensor) -> Tensor: + return self.out_layer(self.silu(self.in_layer(x))) + + +class RMSNorm(torch.nn.Module): + def __init__(self, dim: int, dtype=None, device=None, operations=None): + super().__init__() + self.scale = nn.Parameter(torch.empty((dim), dtype=dtype, device=device)) + + def forward(self, x: Tensor): + return comfy.ldm.common_dit.rms_norm(x, self.scale, 1e-6) + + +class QKNorm(torch.nn.Module): + def __init__(self, dim: int, dtype=None, device=None, operations=None): + super().__init__() + self.query_norm = RMSNorm(dim, dtype=dtype, device=device, operations=operations) + self.key_norm = RMSNorm(dim, dtype=dtype, device=device, operations=operations) + + def forward(self, q: Tensor, k: Tensor, v: Tensor) -> tuple: + q = self.query_norm(q) + k = self.key_norm(k) + return q.to(v), k.to(v) + + +class SelfAttention(nn.Module): + def __init__(self, dim: int, num_heads: int = 8, qkv_bias: bool = False, dtype=None, device=None, operations=None): + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + + self.qkv = operations.Linear(dim, dim * 3, bias=qkv_bias, dtype=dtype, device=device) + self.norm = QKNorm(head_dim, dtype=dtype, device=device, operations=operations) + self.proj = operations.Linear(dim, dim, dtype=dtype, device=device) + + +@dataclass +class ModulationOut: + shift: Tensor + scale: Tensor + gate: Tensor + + +class Modulation(nn.Module): + def __init__(self, dim: int, double: bool, dtype=None, device=None, operations=None): + super().__init__() + self.is_double = double + self.multiplier = 6 if double else 3 + self.lin = operations.Linear(dim, self.multiplier * dim, bias=True, dtype=dtype, device=device) + + def forward(self, vec: Tensor) -> tuple: + out = self.lin(nn.functional.silu(vec))[:, None, :].chunk(self.multiplier, dim=-1) + + return ( + ModulationOut(*out[:3]), + ModulationOut(*out[3:]) if self.is_double else None, + ) + + +class DoubleStreamBlock(nn.Module): + def __init__(self, hidden_size: int, num_heads: int, mlp_ratio: float, qkv_bias: bool = False, dtype=None, device=None, operations=None): + super().__init__() + + mlp_hidden_dim = int(hidden_size * mlp_ratio) + self.num_heads = num_heads + self.hidden_size = hidden_size + self.img_mod = Modulation(hidden_size, double=True, dtype=dtype, device=device, operations=operations) + self.img_norm1 = operations.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) + self.img_attn = SelfAttention(dim=hidden_size, num_heads=num_heads, qkv_bias=qkv_bias, dtype=dtype, device=device, 
operations=operations)
+
+        self.img_norm2 = operations.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device)
+        self.img_mlp = nn.Sequential(
+            operations.Linear(hidden_size, mlp_hidden_dim, bias=True, dtype=dtype, device=device),
+            nn.GELU(approximate="tanh"),
+            operations.Linear(mlp_hidden_dim, hidden_size, bias=True, dtype=dtype, device=device),
+        )
+
+        self.txt_mod = Modulation(hidden_size, double=True, dtype=dtype, device=device, operations=operations)
+        self.txt_norm1 = operations.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device)
+        self.txt_attn = SelfAttention(dim=hidden_size, num_heads=num_heads, qkv_bias=qkv_bias, dtype=dtype, device=device, operations=operations)
+
+        self.txt_norm2 = operations.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device)
+        self.txt_mlp = nn.Sequential(
+            operations.Linear(hidden_size, mlp_hidden_dim, bias=True, dtype=dtype, device=device),
+            nn.GELU(approximate="tanh"),
+            operations.Linear(mlp_hidden_dim, hidden_size, bias=True, dtype=dtype, device=device),
+        )
+
+    def forward(self, img: Tensor, txt: Tensor, vec: Tensor, pe: Tensor):
+        img_mod1, img_mod2 = self.img_mod(vec)
+        txt_mod1, txt_mod2 = self.txt_mod(vec)
+
+        # prepare image for attention
+        img_modulated = self.img_norm1(img)
+        img_modulated = (1 + img_mod1.scale) * img_modulated + img_mod1.shift
+        img_qkv = self.img_attn.qkv(img_modulated)
+        img_q, img_k, img_v = img_qkv.view(img_qkv.shape[0], img_qkv.shape[1], 3, self.num_heads, -1).permute(2, 0, 3, 1, 4)
+        img_q, img_k = self.img_attn.norm(img_q, img_k, img_v)
+
+        # prepare txt for attention
+        txt_modulated = self.txt_norm1(txt)
+        txt_modulated = (1 + txt_mod1.scale) * txt_modulated + txt_mod1.shift
+        txt_qkv = self.txt_attn.qkv(txt_modulated)
+        txt_q, txt_k, txt_v = txt_qkv.view(txt_qkv.shape[0], txt_qkv.shape[1], 3, self.num_heads, -1).permute(2, 0, 3, 1, 4)
+        txt_q, txt_k = self.txt_attn.norm(txt_q, txt_k, txt_v)
+
+        # run actual attention
+        attn = attention(torch.cat((txt_q, img_q), dim=2),
+                         torch.cat((txt_k, img_k), dim=2),
+                         torch.cat((txt_v, img_v), dim=2), pe=pe)
+
+        txt_attn, img_attn = attn[:, : txt.shape[1]], attn[:, txt.shape[1] :]
+
+        # calculate the img blocks
+        img = img + img_mod1.gate * self.img_attn.proj(img_attn)
+        img = img + img_mod2.gate * self.img_mlp((1 + img_mod2.scale) * self.img_norm2(img) + img_mod2.shift)
+
+        # calculate the txt blocks
+        txt += txt_mod1.gate * self.txt_attn.proj(txt_attn)
+        txt += txt_mod2.gate * self.txt_mlp((1 + txt_mod2.scale) * self.txt_norm2(txt) + txt_mod2.shift)
+
+        if txt.dtype == torch.float16:
+            txt = torch.nan_to_num(txt, nan=0.0, posinf=65504, neginf=-65504)
+
+        return img, txt
+
+
+class SingleStreamBlock(nn.Module):
+    """
+    A DiT block with parallel linear layers as described in
+    https://arxiv.org/abs/2302.05442 and adapted modulation interface.
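+    The qkv projection and the MLP input come from one fused linear layer
+    (linear1); attention output and MLP activation are merged by a second (linear2).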
+ """ + + def __init__( + self, + hidden_size: int, + num_heads: int, + mlp_ratio: float = 4.0, + qk_scale: float = None, + dtype=None, + device=None, + operations=None + ): + super().__init__() + self.hidden_dim = hidden_size + self.num_heads = num_heads + head_dim = hidden_size // num_heads + self.scale = qk_scale or head_dim**-0.5 + + self.mlp_hidden_dim = int(hidden_size * mlp_ratio) + # qkv and mlp_in + self.linear1 = operations.Linear(hidden_size, hidden_size * 3 + self.mlp_hidden_dim, dtype=dtype, device=device) + # proj and mlp_out + self.linear2 = operations.Linear(hidden_size + self.mlp_hidden_dim, hidden_size, dtype=dtype, device=device) + + self.norm = QKNorm(head_dim, dtype=dtype, device=device, operations=operations) + + self.hidden_size = hidden_size + self.pre_norm = operations.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) + + self.mlp_act = nn.GELU(approximate="tanh") + self.modulation = Modulation(hidden_size, double=False, dtype=dtype, device=device, operations=operations) + + def forward(self, x: Tensor, vec: Tensor, pe: Tensor) -> Tensor: + mod, _ = self.modulation(vec) + x_mod = (1 + mod.scale) * self.pre_norm(x) + mod.shift + qkv, mlp = torch.split(self.linear1(x_mod), [3 * self.hidden_size, self.mlp_hidden_dim], dim=-1) + + q, k, v = qkv.view(qkv.shape[0], qkv.shape[1], 3, self.num_heads, -1).permute(2, 0, 3, 1, 4) + q, k = self.norm(q, k, v) + + # compute attention + attn = attention(q, k, v, pe=pe) + # compute activation in mlp stream, cat again and run second linear layer + output = self.linear2(torch.cat((attn, self.mlp_act(mlp)), 2)) + x += mod.gate * output + if x.dtype == torch.float16: + x = torch.nan_to_num(x, nan=0.0, posinf=65504, neginf=-65504) + return x + + +class LastLayer(nn.Module): + def __init__(self, hidden_size: int, patch_size: int, out_channels: int, dtype=None, device=None, operations=None): + super().__init__() + self.norm_final = operations.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) + self.linear = operations.Linear(hidden_size, patch_size * patch_size * out_channels, bias=True, dtype=dtype, device=device) + self.adaLN_modulation = nn.Sequential(nn.SiLU(), operations.Linear(hidden_size, 2 * hidden_size, bias=True, dtype=dtype, device=device)) + + def forward(self, x: Tensor, vec: Tensor) -> Tensor: + shift, scale = self.adaLN_modulation(vec).chunk(2, dim=1) + x = (1 + scale[:, None, :]) * self.norm_final(x) + shift[:, None, :] + x = self.linear(x) + return x diff --git a/src/comfyui/comfy/ldm/flux/math.py b/src/comfyui/comfy/ldm/flux/math.py new file mode 100644 index 0000000000000000000000000000000000000000..136ce2aa83cf6f80713a37d6f7ad5ba62ce5b186 --- /dev/null +++ b/src/comfyui/comfy/ldm/flux/math.py @@ -0,0 +1,35 @@ +import torch +from einops import rearrange +from torch import Tensor +from comfy.ldm.modules.attention import optimized_attention +import comfy.model_management + +def attention(q: Tensor, k: Tensor, v: Tensor, pe: Tensor) -> Tensor: + q, k = apply_rope(q, k, pe) + + heads = q.shape[1] + x = optimized_attention(q, k, v, heads, skip_reshape=True) + return x + + +def rope(pos: Tensor, dim: int, theta: int) -> Tensor: + assert dim % 2 == 0 + if comfy.model_management.is_device_mps(pos.device) or comfy.model_management.is_intel_xpu(): + device = torch.device("cpu") + else: + device = pos.device + + scale = torch.linspace(0, (dim - 2) / dim, steps=dim//2, dtype=torch.float64, device=device) + omega = 1.0 / (theta**scale) + out = 
torch.einsum("...n,d->...nd", pos.to(dtype=torch.float32, device=device), omega) + out = torch.stack([torch.cos(out), -torch.sin(out), torch.sin(out), torch.cos(out)], dim=-1) + out = rearrange(out, "b n d (i j) -> b n d i j", i=2, j=2) + return out.to(dtype=torch.float32, device=pos.device) + + +def apply_rope(xq: Tensor, xk: Tensor, freqs_cis: Tensor): + xq_ = xq.float().reshape(*xq.shape[:-1], -1, 1, 2) + xk_ = xk.float().reshape(*xk.shape[:-1], -1, 1, 2) + xq_out = freqs_cis[..., 0] * xq_[..., 0] + freqs_cis[..., 1] * xq_[..., 1] + xk_out = freqs_cis[..., 0] * xk_[..., 0] + freqs_cis[..., 1] * xk_[..., 1] + return xq_out.reshape(*xq.shape).type_as(xq), xk_out.reshape(*xk.shape).type_as(xk) diff --git a/src/comfyui/comfy/ldm/flux/model.py b/src/comfyui/comfy/ldm/flux/model.py new file mode 100644 index 0000000000000000000000000000000000000000..b7d8a692da7a2b672583101d6846f7806979825a --- /dev/null +++ b/src/comfyui/comfy/ldm/flux/model.py @@ -0,0 +1,160 @@ +#Original code can be found on: https://github.com/black-forest-labs/flux + +from dataclasses import dataclass + +import torch +from torch import Tensor, nn + +from .layers import ( + DoubleStreamBlock, + EmbedND, + LastLayer, + MLPEmbedder, + SingleStreamBlock, + timestep_embedding, +) + +from einops import rearrange, repeat +import comfy.ldm.common_dit + +@dataclass +class FluxParams: + in_channels: int + vec_in_dim: int + context_in_dim: int + hidden_size: int + mlp_ratio: float + num_heads: int + depth: int + depth_single_blocks: int + axes_dim: list + theta: int + qkv_bias: bool + guidance_embed: bool + + +class Flux(nn.Module): + """ + Transformer model for flow matching on sequences. + """ + + def __init__(self, image_model=None, final_layer=True, dtype=None, device=None, operations=None, **kwargs): + super().__init__() + self.dtype = dtype + params = FluxParams(**kwargs) + self.params = params + self.in_channels = params.in_channels * 2 * 2 + self.out_channels = self.in_channels + if params.hidden_size % params.num_heads != 0: + raise ValueError( + f"Hidden size {params.hidden_size} must be divisible by num_heads {params.num_heads}" + ) + pe_dim = params.hidden_size // params.num_heads + if sum(params.axes_dim) != pe_dim: + raise ValueError(f"Got {params.axes_dim} but expected positional dim {pe_dim}") + self.hidden_size = params.hidden_size + self.num_heads = params.num_heads + self.pe_embedder = EmbedND(dim=pe_dim, theta=params.theta, axes_dim=params.axes_dim) + self.img_in = operations.Linear(self.in_channels, self.hidden_size, bias=True, dtype=dtype, device=device) + self.time_in = MLPEmbedder(in_dim=256, hidden_dim=self.hidden_size, dtype=dtype, device=device, operations=operations) + self.vector_in = MLPEmbedder(params.vec_in_dim, self.hidden_size, dtype=dtype, device=device, operations=operations) + self.guidance_in = ( + MLPEmbedder(in_dim=256, hidden_dim=self.hidden_size, dtype=dtype, device=device, operations=operations) if params.guidance_embed else nn.Identity() + ) + self.txt_in = operations.Linear(params.context_in_dim, self.hidden_size, dtype=dtype, device=device) + + self.double_blocks = nn.ModuleList( + [ + DoubleStreamBlock( + self.hidden_size, + self.num_heads, + mlp_ratio=params.mlp_ratio, + qkv_bias=params.qkv_bias, + dtype=dtype, device=device, operations=operations + ) + for _ in range(params.depth) + ] + ) + + self.single_blocks = nn.ModuleList( + [ + SingleStreamBlock(self.hidden_size, self.num_heads, mlp_ratio=params.mlp_ratio, dtype=dtype, device=device, operations=operations) + for _ in 
range(params.depth_single_blocks) + ] + ) + + if final_layer: + self.final_layer = LastLayer(self.hidden_size, 1, self.out_channels, dtype=dtype, device=device, operations=operations) + + def forward_orig( + self, + img: Tensor, + img_ids: Tensor, + txt: Tensor, + txt_ids: Tensor, + timesteps: Tensor, + y: Tensor, + guidance: Tensor = None, + control=None, + ) -> Tensor: + if img.ndim != 3 or txt.ndim != 3: + raise ValueError("Input img and txt tensors must have 3 dimensions.") + + # running on sequences img + img = self.img_in(img) + vec = self.time_in(timestep_embedding(timesteps, 256).to(img.dtype)) + if self.params.guidance_embed: + if guidance is None: + raise ValueError("Didn't get guidance strength for guidance distilled model.") + vec = vec + self.guidance_in(timestep_embedding(guidance, 256).to(img.dtype)) + + vec = vec + self.vector_in(y[:,:self.params.vec_in_dim]) + txt = self.txt_in(txt) + + ids = torch.cat((txt_ids, img_ids), dim=1) + pe = self.pe_embedder(ids) + + for i, block in enumerate(self.double_blocks): + img, txt = block(img=img, txt=txt, vec=vec, pe=pe) + + if control is not None: # Controlnet + control_i = control.get("input") + if i < len(control_i): + add = control_i[i] + if add is not None: + img += add + + img = torch.cat((txt, img), 1) + + for i, block in enumerate(self.single_blocks): + img = block(img, vec=vec, pe=pe) + + if control is not None: # Controlnet + control_o = control.get("output") + if i < len(control_o): + add = control_o[i] + if add is not None: + img[:, txt.shape[1] :, ...] += add + + img = img[:, txt.shape[1] :, ...] + + img = self.final_layer(img, vec) # (N, T, patch_size ** 2 * out_channels) + return img + + def forward(self, x, timestep, context, y, guidance, control=None, **kwargs): + bs, c, h, w = x.shape + patch_size = 2 + x = comfy.ldm.common_dit.pad_to_patch_size(x, (patch_size, patch_size)) + + img = rearrange(x, "b c (h ph) (w pw) -> b (h w) (c ph pw)", ph=patch_size, pw=patch_size) + + h_len = ((h + (patch_size // 2)) // patch_size) + w_len = ((w + (patch_size // 2)) // patch_size) + img_ids = torch.zeros((h_len, w_len, 3), device=x.device, dtype=x.dtype) + img_ids[:, :, 1] = torch.linspace(0, h_len - 1, steps=h_len, device=x.device, dtype=x.dtype).unsqueeze(1) + img_ids[:, :, 2] = torch.linspace(0, w_len - 1, steps=w_len, device=x.device, dtype=x.dtype).unsqueeze(0) + img_ids = repeat(img_ids, "h w c -> b (h w) c", b=bs) + + txt_ids = torch.zeros((bs, context.shape[1], 3), device=x.device, dtype=x.dtype) + out = self.forward_orig(img, img_ids, context, txt_ids, timestep, y, guidance, control) + return rearrange(out, "b (h w) (c ph pw) -> b c (h ph) (w pw)", h=h_len, w=w_len, ph=2, pw=2)[:,:,:h,:w] diff --git a/src/comfyui/comfy/ldm/genmo/joint_model/__pycache__/asymm_models_joint.cpython-310.pyc b/src/comfyui/comfy/ldm/genmo/joint_model/__pycache__/asymm_models_joint.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fad8b6d2dc0d1b3ef5cff4cddb86d4cc4696aa81 Binary files /dev/null and b/src/comfyui/comfy/ldm/genmo/joint_model/__pycache__/asymm_models_joint.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/genmo/joint_model/__pycache__/layers.cpython-310.pyc b/src/comfyui/comfy/ldm/genmo/joint_model/__pycache__/layers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..916ad27762d841451350fb203d7beb21d6f3c84c Binary files /dev/null and b/src/comfyui/comfy/ldm/genmo/joint_model/__pycache__/layers.cpython-310.pyc differ diff --git 
a/src/comfyui/comfy/ldm/genmo/joint_model/__pycache__/rope_mixed.cpython-310.pyc b/src/comfyui/comfy/ldm/genmo/joint_model/__pycache__/rope_mixed.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bf249657d28cbc7ca9e16976f57258c484994d73 Binary files /dev/null and b/src/comfyui/comfy/ldm/genmo/joint_model/__pycache__/rope_mixed.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/genmo/joint_model/__pycache__/temporal_rope.cpython-310.pyc b/src/comfyui/comfy/ldm/genmo/joint_model/__pycache__/temporal_rope.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cd011b89fa3a673ecf2e97a85406ff50fa9cf043 Binary files /dev/null and b/src/comfyui/comfy/ldm/genmo/joint_model/__pycache__/temporal_rope.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/genmo/joint_model/__pycache__/utils.cpython-310.pyc b/src/comfyui/comfy/ldm/genmo/joint_model/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9c337974d9100f83acf0f0acb386cb5184cdc806 Binary files /dev/null and b/src/comfyui/comfy/ldm/genmo/joint_model/__pycache__/utils.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/genmo/joint_model/asymm_models_joint.py b/src/comfyui/comfy/ldm/genmo/joint_model/asymm_models_joint.py new file mode 100644 index 0000000000000000000000000000000000000000..c36a000688b8bfe3392c3a4af5f169609aaa2f4e --- /dev/null +++ b/src/comfyui/comfy/ldm/genmo/joint_model/asymm_models_joint.py @@ -0,0 +1,541 @@ +#original code from https://github.com/genmoai/models under apache 2.0 license +#adapted to ComfyUI + +from typing import Dict, List, Optional, Tuple + +import torch +import torch.nn as nn +import torch.nn.functional as F +from einops import rearrange +# from flash_attn import flash_attn_varlen_qkvpacked_func +from comfy.ldm.modules.attention import optimized_attention + +from .layers import ( + FeedForward, + PatchEmbed, + RMSNorm, + TimestepEmbedder, +) + +from .rope_mixed import ( + compute_mixed_rotation, + create_position_matrix, +) +from .temporal_rope import apply_rotary_emb_qk_real +from .utils import ( + AttentionPool, + modulate, +) + +import comfy.ldm.common_dit +import comfy.ops + + +def modulated_rmsnorm(x, scale, eps=1e-6): + # Normalize and modulate + x_normed = comfy.ldm.common_dit.rms_norm(x, eps=eps) + x_modulated = x_normed * (1 + scale.unsqueeze(1)) + + return x_modulated + + +def residual_tanh_gated_rmsnorm(x, x_res, gate, eps=1e-6): + # Apply tanh to gate + tanh_gate = torch.tanh(gate).unsqueeze(1) + + # Normalize and apply gated scaling + x_normed = comfy.ldm.common_dit.rms_norm(x_res, eps=eps) * tanh_gate + + # Apply residual connection + output = x + x_normed + + return output + +class AsymmetricAttention(nn.Module): + def __init__( + self, + dim_x: int, + dim_y: int, + num_heads: int = 8, + qkv_bias: bool = True, + qk_norm: bool = False, + attn_drop: float = 0.0, + update_y: bool = True, + out_bias: bool = True, + attend_to_padding: bool = False, + softmax_scale: Optional[float] = None, + device: Optional[torch.device] = None, + dtype=None, + operations=None, + ): + super().__init__() + self.dim_x = dim_x + self.dim_y = dim_y + self.num_heads = num_heads + self.head_dim = dim_x // num_heads + self.attn_drop = attn_drop + self.update_y = update_y + self.attend_to_padding = attend_to_padding + self.softmax_scale = softmax_scale + if dim_x % num_heads != 0: + raise ValueError( + f"dim_x={dim_x} should be divisible by num_heads={num_heads}" + ) + + # Input layers. 
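+        # Queries, keys and values from the visual (x) and text (y) streams are
+        # concatenated in forward(), so a single attention call mixes both.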
+ self.qkv_bias = qkv_bias + self.qkv_x = operations.Linear(dim_x, 3 * dim_x, bias=qkv_bias, device=device, dtype=dtype) + # Project text features to match visual features (dim_y -> dim_x) + self.qkv_y = operations.Linear(dim_y, 3 * dim_x, bias=qkv_bias, device=device, dtype=dtype) + + # Query and key normalization for stability. + assert qk_norm + self.q_norm_x = RMSNorm(self.head_dim, device=device, dtype=dtype) + self.k_norm_x = RMSNorm(self.head_dim, device=device, dtype=dtype) + self.q_norm_y = RMSNorm(self.head_dim, device=device, dtype=dtype) + self.k_norm_y = RMSNorm(self.head_dim, device=device, dtype=dtype) + + # Output layers. y features go back down from dim_x -> dim_y. + self.proj_x = operations.Linear(dim_x, dim_x, bias=out_bias, device=device, dtype=dtype) + self.proj_y = ( + operations.Linear(dim_x, dim_y, bias=out_bias, device=device, dtype=dtype) + if update_y + else nn.Identity() + ) + + def forward( + self, + x: torch.Tensor, # (B, N, dim_x) + y: torch.Tensor, # (B, L, dim_y) + scale_x: torch.Tensor, # (B, dim_x), modulation for pre-RMSNorm. + scale_y: torch.Tensor, # (B, dim_y), modulation for pre-RMSNorm. + crop_y, + **rope_rotation, + ) -> Tuple[torch.Tensor, torch.Tensor]: + rope_cos = rope_rotation.get("rope_cos") + rope_sin = rope_rotation.get("rope_sin") + # Pre-norm for visual features + x = modulated_rmsnorm(x, scale_x) # (B, M, dim_x) where M = N / cp_group_size + + # Process visual features + # qkv_x = self.qkv_x(x) # (B, M, 3 * dim_x) + # assert qkv_x.dtype == torch.bfloat16 + # qkv_x = all_to_all_collect_tokens( + # qkv_x, self.num_heads + # ) # (3, B, N, local_h, head_dim) + + # Process text features + y = modulated_rmsnorm(y, scale_y) # (B, L, dim_y) + q_y, k_y, v_y = self.qkv_y(y).view(y.shape[0], y.shape[1], 3, self.num_heads, -1).unbind(2) # (B, N, local_h, head_dim) + + q_y = self.q_norm_y(q_y) + k_y = self.k_norm_y(k_y) + + # Split qkv_x into q, k, v + q_x, k_x, v_x = self.qkv_x(x).view(x.shape[0], x.shape[1], 3, self.num_heads, -1).unbind(2) # (B, N, local_h, head_dim) + q_x = self.q_norm_x(q_x) + q_x = apply_rotary_emb_qk_real(q_x, rope_cos, rope_sin) + k_x = self.k_norm_x(k_x) + k_x = apply_rotary_emb_qk_real(k_x, rope_cos, rope_sin) + + q = torch.cat([q_x, q_y[:, :crop_y]], dim=1).transpose(1, 2) + k = torch.cat([k_x, k_y[:, :crop_y]], dim=1).transpose(1, 2) + v = torch.cat([v_x, v_y[:, :crop_y]], dim=1).transpose(1, 2) + + xy = optimized_attention(q, + k, + v, self.num_heads, skip_reshape=True) + + x, y = torch.tensor_split(xy, (q_x.shape[1],), dim=1) + x = self.proj_x(x) + o = torch.zeros(y.shape[0], q_y.shape[1], y.shape[-1], device=y.device, dtype=y.dtype) + o[:, :y.shape[1]] = y + + y = self.proj_y(o) + # print("ox", x) + # print("oy", y) + return x, y + + +class AsymmetricJointBlock(nn.Module): + def __init__( + self, + hidden_size_x: int, + hidden_size_y: int, + num_heads: int, + *, + mlp_ratio_x: float = 8.0, # Ratio of hidden size to d_model for MLP for visual tokens. + mlp_ratio_y: float = 4.0, # Ratio of hidden size to d_model for MLP for text tokens. + update_y: bool = True, # Whether to update text tokens in this block. 
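+        # (AsymmDiTJoint sets update_y=False only for the final block.)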
+ device: Optional[torch.device] = None, + dtype=None, + operations=None, + **block_kwargs, + ): + super().__init__() + self.update_y = update_y + self.hidden_size_x = hidden_size_x + self.hidden_size_y = hidden_size_y + self.mod_x = operations.Linear(hidden_size_x, 4 * hidden_size_x, device=device, dtype=dtype) + if self.update_y: + self.mod_y = operations.Linear(hidden_size_x, 4 * hidden_size_y, device=device, dtype=dtype) + else: + self.mod_y = operations.Linear(hidden_size_x, hidden_size_y, device=device, dtype=dtype) + + # Self-attention: + self.attn = AsymmetricAttention( + hidden_size_x, + hidden_size_y, + num_heads=num_heads, + update_y=update_y, + device=device, + dtype=dtype, + operations=operations, + **block_kwargs, + ) + + # MLP. + mlp_hidden_dim_x = int(hidden_size_x * mlp_ratio_x) + assert mlp_hidden_dim_x == int(1536 * 8) + self.mlp_x = FeedForward( + in_features=hidden_size_x, + hidden_size=mlp_hidden_dim_x, + multiple_of=256, + ffn_dim_multiplier=None, + device=device, + dtype=dtype, + operations=operations, + ) + + # MLP for text not needed in last block. + if self.update_y: + mlp_hidden_dim_y = int(hidden_size_y * mlp_ratio_y) + self.mlp_y = FeedForward( + in_features=hidden_size_y, + hidden_size=mlp_hidden_dim_y, + multiple_of=256, + ffn_dim_multiplier=None, + device=device, + dtype=dtype, + operations=operations, + ) + + def forward( + self, + x: torch.Tensor, + c: torch.Tensor, + y: torch.Tensor, + **attn_kwargs, + ): + """Forward pass of a block. + + Args: + x: (B, N, dim) tensor of visual tokens + c: (B, dim) tensor of conditioned features + y: (B, L, dim) tensor of text tokens + num_frames: Number of frames in the video. N = num_frames * num_spatial_tokens + + Returns: + x: (B, N, dim) tensor of visual tokens after block + y: (B, L, dim) tensor of text tokens after block + """ + N = x.size(1) + + c = F.silu(c) + mod_x = self.mod_x(c) + scale_msa_x, gate_msa_x, scale_mlp_x, gate_mlp_x = mod_x.chunk(4, dim=1) + + mod_y = self.mod_y(c) + if self.update_y: + scale_msa_y, gate_msa_y, scale_mlp_y, gate_mlp_y = mod_y.chunk(4, dim=1) + else: + scale_msa_y = mod_y + + # Self-attention block. + x_attn, y_attn = self.attn( + x, + y, + scale_x=scale_msa_x, + scale_y=scale_msa_y, + **attn_kwargs, + ) + + assert x_attn.size(1) == N + x = residual_tanh_gated_rmsnorm(x, x_attn, gate_msa_x) + if self.update_y: + y = residual_tanh_gated_rmsnorm(y, y_attn, gate_msa_y) + + # MLP block. + x = self.ff_block_x(x, scale_mlp_x, gate_mlp_x) + if self.update_y: + y = self.ff_block_y(y, scale_mlp_y, gate_mlp_y) + + return x, y + + def ff_block_x(self, x, scale_x, gate_x): + x_mod = modulated_rmsnorm(x, scale_x) + x_res = self.mlp_x(x_mod) + x = residual_tanh_gated_rmsnorm(x, x_res, gate_x) # Sandwich norm + return x + + def ff_block_y(self, y, scale_y, gate_y): + y_mod = modulated_rmsnorm(y, scale_y) + y_res = self.mlp_y(y_mod) + y = residual_tanh_gated_rmsnorm(y, y_res, gate_y) # Sandwich norm + return y + + +class FinalLayer(nn.Module): + """ + The final layer of DiT. 
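+    Applies an adaLN-style shift/scale computed from the conditioning vector,
+    then projects each token to patch_size**2 * out_channels values.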
+ """ + + def __init__( + self, + hidden_size, + patch_size, + out_channels, + device: Optional[torch.device] = None, + dtype=None, + operations=None, + ): + super().__init__() + self.norm_final = operations.LayerNorm( + hidden_size, elementwise_affine=False, eps=1e-6, device=device, dtype=dtype + ) + self.mod = operations.Linear(hidden_size, 2 * hidden_size, device=device, dtype=dtype) + self.linear = operations.Linear( + hidden_size, patch_size * patch_size * out_channels, device=device, dtype=dtype + ) + + def forward(self, x, c): + c = F.silu(c) + shift, scale = self.mod(c).chunk(2, dim=1) + x = modulate(self.norm_final(x), shift, scale) + x = self.linear(x) + return x + + +class AsymmDiTJoint(nn.Module): + """ + Diffusion model with a Transformer backbone. + + Ingests text embeddings instead of a label. + """ + + def __init__( + self, + *, + patch_size=2, + in_channels=4, + hidden_size_x=1152, + hidden_size_y=1152, + depth=48, + num_heads=16, + mlp_ratio_x=8.0, + mlp_ratio_y=4.0, + use_t5: bool = False, + t5_feat_dim: int = 4096, + t5_token_length: int = 256, + learn_sigma=True, + patch_embed_bias: bool = True, + timestep_mlp_bias: bool = True, + attend_to_padding: bool = False, + timestep_scale: Optional[float] = None, + use_extended_posenc: bool = False, + posenc_preserve_area: bool = False, + rope_theta: float = 10000.0, + image_model=None, + device: Optional[torch.device] = None, + dtype=None, + operations=None, + **block_kwargs, + ): + super().__init__() + + self.dtype = dtype + self.learn_sigma = learn_sigma + self.in_channels = in_channels + self.out_channels = in_channels * 2 if learn_sigma else in_channels + self.patch_size = patch_size + self.num_heads = num_heads + self.hidden_size_x = hidden_size_x + self.hidden_size_y = hidden_size_y + self.head_dim = ( + hidden_size_x // num_heads + ) # Head dimension and count is determined by visual. + self.attend_to_padding = attend_to_padding + self.use_extended_posenc = use_extended_posenc + self.posenc_preserve_area = posenc_preserve_area + self.use_t5 = use_t5 + self.t5_token_length = t5_token_length + self.t5_feat_dim = t5_feat_dim + self.rope_theta = ( + rope_theta # Scaling factor for frequency computation for temporal RoPE. + ) + + self.x_embedder = PatchEmbed( + patch_size=patch_size, + in_chans=in_channels, + embed_dim=hidden_size_x, + bias=patch_embed_bias, + dtype=dtype, + device=device, + operations=operations + ) + # Conditionings + # Timestep + self.t_embedder = TimestepEmbedder( + hidden_size_x, bias=timestep_mlp_bias, timestep_scale=timestep_scale, dtype=dtype, device=device, operations=operations + ) + + if self.use_t5: + # Caption Pooling (T5) + self.t5_y_embedder = AttentionPool( + t5_feat_dim, num_heads=8, output_dim=hidden_size_x, dtype=dtype, device=device, operations=operations + ) + + # Dense Embedding Projection (T5) + self.t5_yproj = operations.Linear( + t5_feat_dim, hidden_size_y, bias=True, dtype=dtype, device=device + ) + + # Initialize pos_frequencies as an empty parameter. + self.pos_frequencies = nn.Parameter( + torch.empty(3, self.num_heads, self.head_dim // 2, dtype=dtype, device=device) + ) + + assert not self.attend_to_padding + + # for depth 48: + # b = 0: AsymmetricJointBlock, update_y=True + # b = 1: AsymmetricJointBlock, update_y=True + # ... + # b = 46: AsymmetricJointBlock, update_y=True + # b = 47: AsymmetricJointBlock, update_y=False. No need to update text features. 
+ blocks = [] + for b in range(depth): + # Joint multi-modal block + update_y = b < depth - 1 + block = AsymmetricJointBlock( + hidden_size_x, + hidden_size_y, + num_heads, + mlp_ratio_x=mlp_ratio_x, + mlp_ratio_y=mlp_ratio_y, + update_y=update_y, + attend_to_padding=attend_to_padding, + device=device, + dtype=dtype, + operations=operations, + **block_kwargs, + ) + + blocks.append(block) + self.blocks = nn.ModuleList(blocks) + + self.final_layer = FinalLayer( + hidden_size_x, patch_size, self.out_channels, dtype=dtype, device=device, operations=operations + ) + + def embed_x(self, x: torch.Tensor) -> torch.Tensor: + """ + Args: + x: (B, C=12, T, H, W) tensor of visual tokens + + Returns: + x: (B, C=3072, N) tensor of visual tokens with positional embedding. + """ + return self.x_embedder(x) # Convert BcTHW to BCN + + def prepare( + self, + x: torch.Tensor, + sigma: torch.Tensor, + t5_feat: torch.Tensor, + t5_mask: torch.Tensor, + ): + """Prepare input and conditioning embeddings.""" + # Visual patch embeddings with positional encoding. + T, H, W = x.shape[-3:] + pH, pW = H // self.patch_size, W // self.patch_size + x = self.embed_x(x) # (B, N, D), where N = T * H * W / patch_size ** 2 + assert x.ndim == 3 + B = x.size(0) + + + pH, pW = H // self.patch_size, W // self.patch_size + N = T * pH * pW + assert x.size(1) == N + pos = create_position_matrix( + T, pH=pH, pW=pW, device=x.device, dtype=torch.float32 + ) # (N, 3) + rope_cos, rope_sin = compute_mixed_rotation( + freqs=comfy.ops.cast_to(self.pos_frequencies, dtype=x.dtype, device=x.device), pos=pos + ) # Each are (N, num_heads, dim // 2) + + c_t = self.t_embedder(1 - sigma, out_dtype=x.dtype) # (B, D) + + t5_y_pool = self.t5_y_embedder(t5_feat, t5_mask) # (B, D) + + c = c_t + t5_y_pool + + y_feat = self.t5_yproj(t5_feat) # (B, L, t5_feat_dim) --> (B, L, D) + + return x, c, y_feat, rope_cos, rope_sin + + def forward( + self, + x: torch.Tensor, + timestep: torch.Tensor, + context: List[torch.Tensor], + attention_mask: List[torch.Tensor], + num_tokens=256, + packed_indices: Dict[str, torch.Tensor] = None, + rope_cos: torch.Tensor = None, + rope_sin: torch.Tensor = None, + control=None, **kwargs + ): + y_feat = context + y_mask = attention_mask + sigma = timestep + """Forward pass of DiT. + + Args: + x: (B, C, T, H, W) tensor of spatial inputs (images or latent representations of images) + sigma: (B,) tensor of noise standard deviations + y_feat: List((B, L, y_feat_dim) tensor of caption token features. For SDXL text encoders: L=77, y_feat_dim=2048) + y_mask: List((B, L) boolean tensor indicating which tokens are not padding) + packed_indices: Dict with keys for Flash Attention. Result of compute_packed_indices. + """ + B, _, T, H, W = x.shape + + x, c, y_feat, rope_cos, rope_sin = self.prepare( + x, sigma, y_feat, y_mask + ) + del y_mask + + for i, block in enumerate(self.blocks): + x, y_feat = block( + x, + c, + y_feat, + rope_cos=rope_cos, + rope_sin=rope_sin, + crop_y=num_tokens, + ) # (B, M, D), (B, L, D) + del y_feat # Final layers don't use dense text features. 
+ + x = self.final_layer(x, c) # (B, M, patch_size ** 2 * out_channels) + x = rearrange( + x, + "B (T hp wp) (p1 p2 c) -> B c T (hp p1) (wp p2)", + T=T, + hp=H // self.patch_size, + wp=W // self.patch_size, + p1=self.patch_size, + p2=self.patch_size, + c=self.out_channels, + ) + + return -x diff --git a/src/comfyui/comfy/ldm/genmo/joint_model/layers.py b/src/comfyui/comfy/ldm/genmo/joint_model/layers.py new file mode 100644 index 0000000000000000000000000000000000000000..51d979559ed574ca97bd4c86d52576ea9bd33826 --- /dev/null +++ b/src/comfyui/comfy/ldm/genmo/joint_model/layers.py @@ -0,0 +1,164 @@ +#original code from https://github.com/genmoai/models under apache 2.0 license +#adapted to ComfyUI + +import collections.abc +import math +from itertools import repeat +from typing import Callable, Optional + +import torch +import torch.nn as nn +import torch.nn.functional as F +from einops import rearrange +import comfy.ldm.common_dit + + +# From PyTorch internals +def _ntuple(n): + def parse(x): + if isinstance(x, collections.abc.Iterable) and not isinstance(x, str): + return tuple(x) + return tuple(repeat(x, n)) + + return parse + + +to_2tuple = _ntuple(2) + + +class TimestepEmbedder(nn.Module): + def __init__( + self, + hidden_size: int, + frequency_embedding_size: int = 256, + *, + bias: bool = True, + timestep_scale: Optional[float] = None, + dtype=None, + device=None, + operations=None, + ): + super().__init__() + self.mlp = nn.Sequential( + operations.Linear(frequency_embedding_size, hidden_size, bias=bias, dtype=dtype, device=device), + nn.SiLU(), + operations.Linear(hidden_size, hidden_size, bias=bias, dtype=dtype, device=device), + ) + self.frequency_embedding_size = frequency_embedding_size + self.timestep_scale = timestep_scale + + @staticmethod + def timestep_embedding(t, dim, max_period=10000): + half = dim // 2 + freqs = torch.arange(start=0, end=half, dtype=torch.float32, device=t.device) + freqs.mul_(-math.log(max_period) / half).exp_() + args = t[:, None].float() * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat( + [embedding, torch.zeros_like(embedding[:, :1])], dim=-1 + ) + return embedding + + def forward(self, t, out_dtype): + if self.timestep_scale is not None: + t = t * self.timestep_scale + t_freq = self.timestep_embedding(t, self.frequency_embedding_size).to(dtype=out_dtype) + t_emb = self.mlp(t_freq) + return t_emb + + +class FeedForward(nn.Module): + def __init__( + self, + in_features: int, + hidden_size: int, + multiple_of: int, + ffn_dim_multiplier: Optional[float], + device: Optional[torch.device] = None, + dtype=None, + operations=None, + ): + super().__init__() + # keep parameter count and computation constant compared to standard FFN + hidden_size = int(2 * hidden_size / 3) + # custom dim factor multiplier + if ffn_dim_multiplier is not None: + hidden_size = int(ffn_dim_multiplier * hidden_size) + hidden_size = multiple_of * ((hidden_size + multiple_of - 1) // multiple_of) + + self.hidden_dim = hidden_size + self.w1 = operations.Linear(in_features, 2 * hidden_size, bias=False, device=device, dtype=dtype) + self.w2 = operations.Linear(hidden_size, in_features, bias=False, device=device, dtype=dtype) + + def forward(self, x): + x, gate = self.w1(x).chunk(2, dim=-1) + x = self.w2(F.silu(x) * gate) + return x + + +class PatchEmbed(nn.Module): + def __init__( + self, + patch_size: int = 16, + in_chans: int = 3, + embed_dim: int = 768, + norm_layer: Optional[Callable] = None, + flatten: bool = 
True, + bias: bool = True, + dynamic_img_pad: bool = False, + dtype=None, + device=None, + operations=None, + ): + super().__init__() + self.patch_size = to_2tuple(patch_size) + self.flatten = flatten + self.dynamic_img_pad = dynamic_img_pad + + self.proj = operations.Conv2d( + in_chans, + embed_dim, + kernel_size=patch_size, + stride=patch_size, + bias=bias, + device=device, + dtype=dtype, + ) + assert norm_layer is None + self.norm = ( + norm_layer(embed_dim, device=device) if norm_layer else nn.Identity() + ) + + def forward(self, x): + B, _C, T, H, W = x.shape + if not self.dynamic_img_pad: + assert H % self.patch_size[0] == 0, f"Input height ({H}) should be divisible by patch size ({self.patch_size[0]})." + assert W % self.patch_size[1] == 0, f"Input width ({W}) should be divisible by patch size ({self.patch_size[1]})." + else: + pad_h = (self.patch_size[0] - H % self.patch_size[0]) % self.patch_size[0] + pad_w = (self.patch_size[1] - W % self.patch_size[1]) % self.patch_size[1] + x = F.pad(x, (0, pad_w, 0, pad_h)) + + x = rearrange(x, "B C T H W -> (B T) C H W", B=B, T=T) + x = comfy.ldm.common_dit.pad_to_patch_size(x, self.patch_size, padding_mode='circular') + x = self.proj(x) + + # Flatten temporal and spatial dimensions. + if not self.flatten: + raise NotImplementedError("Must flatten output.") + x = rearrange(x, "(B T) C H W -> B (T H W) C", B=B, T=T) + + x = self.norm(x) + return x + + +class RMSNorm(torch.nn.Module): + def __init__(self, hidden_size, eps=1e-5, device=None, dtype=None): + super().__init__() + self.eps = eps + self.weight = torch.nn.Parameter(torch.empty(hidden_size, device=device, dtype=dtype)) + self.register_parameter("bias", None) + + def forward(self, x): + return comfy.ldm.common_dit.rms_norm(x, self.weight, self.eps) diff --git a/src/comfyui/comfy/ldm/genmo/joint_model/rope_mixed.py b/src/comfyui/comfy/ldm/genmo/joint_model/rope_mixed.py new file mode 100644 index 0000000000000000000000000000000000000000..dee3fa21f5318a610321fc9372553d618462f773 --- /dev/null +++ b/src/comfyui/comfy/ldm/genmo/joint_model/rope_mixed.py @@ -0,0 +1,88 @@ +#original code from https://github.com/genmoai/models under apache 2.0 license + +# import functools +import math + +import torch + + +def centers(start: float, stop, num, dtype=None, device=None): + """linspace through bin centers. + + Args: + start (float): Start of the range. + stop (float): End of the range. + num (int): Number of points. + dtype (torch.dtype): Data type of the points. + device (torch.device): Device of the points. + + Returns: + centers (Tensor): Centers of the bins. Shape: (num,). + """ + edges = torch.linspace(start, stop, num + 1, dtype=dtype, device=device) + return (edges[:-1] + edges[1:]) / 2 + + +# @functools.lru_cache(maxsize=1) +def create_position_matrix( + T: int, + pH: int, + pW: int, + device: torch.device, + dtype: torch.dtype, + *, + target_area: float = 36864, +): + """ + Args: + T: int - Temporal dimension + pH: int - Height dimension after patchify + pW: int - Width dimension after patchify + + Returns: + pos: [T * pH * pW, 3] - position matrix + """ + # Create 1D tensors for each dimension + t = torch.arange(T, dtype=dtype) + + # Positionally interpolate to area 36864. + # (3072x3072 frame with 16x16 patches = 192x192 latents). + # This automatically scales rope positions when the resolution changes. + # We use a large target area so the model is more sensitive + # to changes in the learned pos_frequencies matrix. 
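+    # For example, pH = pW = 96 gives scale = sqrt(36864 / 9216) = 2.0, so the
+    # centered h/w coordinates below span twice the patch-grid extent.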
+ scale = math.sqrt(target_area / (pW * pH)) + w = centers(-pW * scale / 2, pW * scale / 2, pW) + h = centers(-pH * scale / 2, pH * scale / 2, pH) + + # Use meshgrid to create 3D grids + grid_t, grid_h, grid_w = torch.meshgrid(t, h, w, indexing="ij") + + # Stack and reshape the grids. + pos = torch.stack([grid_t, grid_h, grid_w], dim=-1) # [T, pH, pW, 3] + pos = pos.view(-1, 3) # [T * pH * pW, 3] + pos = pos.to(dtype=dtype, device=device) + + return pos + + +def compute_mixed_rotation( + freqs: torch.Tensor, + pos: torch.Tensor, +): + """ + Project each 3-dim position into per-head, per-head-dim 1D frequencies. + + Args: + freqs: [3, num_heads, num_freqs] - learned rotation frequency (for t, row, col) for each head position + pos: [N, 3] - position of each token + num_heads: int + + Returns: + freqs_cos: [N, num_heads, num_freqs] - cosine components + freqs_sin: [N, num_heads, num_freqs] - sine components + """ + assert freqs.ndim == 3 + freqs_sum = torch.einsum("Nd,dhf->Nhf", pos.to(freqs), freqs) + freqs_cos = torch.cos(freqs_sum) + freqs_sin = torch.sin(freqs_sum) + return freqs_cos, freqs_sin diff --git a/src/comfyui/comfy/ldm/genmo/joint_model/temporal_rope.py b/src/comfyui/comfy/ldm/genmo/joint_model/temporal_rope.py new file mode 100644 index 0000000000000000000000000000000000000000..88f5d6d26151db0c8ad0a89fcf748c45d4b89bc0 --- /dev/null +++ b/src/comfyui/comfy/ldm/genmo/joint_model/temporal_rope.py @@ -0,0 +1,34 @@ +#original code from https://github.com/genmoai/models under apache 2.0 license + +# Based on Llama3 Implementation. +import torch + + +def apply_rotary_emb_qk_real( + xqk: torch.Tensor, + freqs_cos: torch.Tensor, + freqs_sin: torch.Tensor, +) -> torch.Tensor: + """ + Apply rotary embeddings to input tensors using the given frequency tensor without complex numbers. + + Args: + xqk (torch.Tensor): Query and/or Key tensors to apply rotary embeddings. Shape: (B, S, *, num_heads, D) + Can be either just query or just key, or both stacked along some batch or * dim. + freqs_cos (torch.Tensor): Precomputed cosine frequency tensor. + freqs_sin (torch.Tensor): Precomputed sine frequency tensor. + + Returns: + torch.Tensor: The input tensor with rotary embeddings applied. + """ + # Split the last dimension into even and odd parts + xqk_even = xqk[..., 0::2] + xqk_odd = xqk[..., 1::2] + + # Apply rotation + cos_part = (xqk_even * freqs_cos - xqk_odd * freqs_sin).type_as(xqk) + sin_part = (xqk_even * freqs_sin + xqk_odd * freqs_cos).type_as(xqk) + + # Interleave the results back into the original shape + out = torch.stack([cos_part, sin_part], dim=-1).flatten(-2) + return out diff --git a/src/comfyui/comfy/ldm/genmo/joint_model/utils.py b/src/comfyui/comfy/ldm/genmo/joint_model/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..411902423b41a09808e208137639919aa75b0311 --- /dev/null +++ b/src/comfyui/comfy/ldm/genmo/joint_model/utils.py @@ -0,0 +1,102 @@ +#original code from https://github.com/genmoai/models under apache 2.0 license +#adapted to ComfyUI + +from typing import Optional, Tuple + +import torch +import torch.nn as nn +import torch.nn.functional as F + + +def modulate(x, shift, scale): + return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1) + + +def pool_tokens(x: torch.Tensor, mask: torch.Tensor, *, keepdim=False) -> torch.Tensor: + """ + Pool tokens in x using mask. + + NOTE: We assume x does not require gradients. + + Args: + x: (B, L, D) tensor of tokens. + mask: (B, L) boolean tensor indicating which tokens are not padding. 
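+        keepdim: If True, keep the pooled dimension so the result is (B, 1, D).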
+ + Returns: + pooled: (B, D) tensor of pooled tokens. + """ + assert x.size(1) == mask.size(1) # Expected mask to have same length as tokens. + assert x.size(0) == mask.size(0) # Expected mask to have same batch size as tokens. + mask = mask[:, :, None].to(dtype=x.dtype) + mask = mask / mask.sum(dim=1, keepdim=True).clamp(min=1) + pooled = (x * mask).sum(dim=1, keepdim=keepdim) + return pooled + + +class AttentionPool(nn.Module): + def __init__( + self, + embed_dim: int, + num_heads: int, + output_dim: int = None, + device: Optional[torch.device] = None, + dtype=None, + operations=None, + ): + """ + Args: + spatial_dim (int): Number of tokens in sequence length. + embed_dim (int): Dimensionality of input tokens. + num_heads (int): Number of attention heads. + output_dim (int): Dimensionality of output tokens. Defaults to embed_dim. + """ + super().__init__() + self.num_heads = num_heads + self.to_kv = operations.Linear(embed_dim, 2 * embed_dim, device=device, dtype=dtype) + self.to_q = operations.Linear(embed_dim, embed_dim, device=device, dtype=dtype) + self.to_out = operations.Linear(embed_dim, output_dim or embed_dim, device=device, dtype=dtype) + + def forward(self, x, mask): + """ + Args: + x (torch.Tensor): (B, L, D) tensor of input tokens. + mask (torch.Tensor): (B, L) boolean tensor indicating which tokens are not padding. + + NOTE: We assume x does not require gradients. + + Returns: + x (torch.Tensor): (B, D) tensor of pooled tokens. + """ + D = x.size(2) + + # Construct attention mask, shape: (B, 1, num_queries=1, num_keys=1+L). + attn_mask = mask[:, None, None, :].bool() # (B, 1, 1, L). + attn_mask = F.pad(attn_mask, (1, 0), value=True) # (B, 1, 1, 1+L). + + # Average non-padding token features. These will be used as the query. + x_pool = pool_tokens(x, mask, keepdim=True) # (B, 1, D) + + # Concat pooled features to input sequence. + x = torch.cat([x_pool, x], dim=1) # (B, L+1, D) + + # Compute queries, keys, values. Only the mean token is used to create a query. + kv = self.to_kv(x) # (B, L+1, 2 * D) + q = self.to_q(x[:, 0]) # (B, D) + + # Extract heads. + head_dim = D // self.num_heads + kv = kv.unflatten(2, (2, self.num_heads, head_dim)) # (B, 1+L, 2, H, head_dim) + kv = kv.transpose(1, 3) # (B, H, 2, 1+L, head_dim) + k, v = kv.unbind(2) # (B, H, 1+L, head_dim) + q = q.unflatten(1, (self.num_heads, head_dim)) # (B, H, head_dim) + q = q.unsqueeze(2) # (B, H, 1, head_dim) + + # Compute attention. + x = F.scaled_dot_product_attention( + q, k, v, attn_mask=attn_mask, dropout_p=0.0 + ) # (B, H, 1, head_dim) + + # Concatenate heads and run output. 
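+        # squeeze(2) drops the single query position; flatten merges heads back to D.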
+ x = x.squeeze(2).flatten(1, 2) # (B, D = H * head_dim) + x = self.to_out(x) + return x diff --git a/src/comfyui/comfy/ldm/genmo/vae/__pycache__/model.cpython-310.pyc b/src/comfyui/comfy/ldm/genmo/vae/__pycache__/model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b13805cd02a82086ea44d34f264274f804575d61 Binary files /dev/null and b/src/comfyui/comfy/ldm/genmo/vae/__pycache__/model.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/genmo/vae/model.py b/src/comfyui/comfy/ldm/genmo/vae/model.py new file mode 100644 index 0000000000000000000000000000000000000000..e44c08a408cf86ac0fd915fa4a5a0dad140b9b2c --- /dev/null +++ b/src/comfyui/comfy/ldm/genmo/vae/model.py @@ -0,0 +1,480 @@ +#original code from https://github.com/genmoai/models under apache 2.0 license +#adapted to ComfyUI + +from typing import Callable, List, Optional, Tuple, Union + +import torch +import torch.nn as nn +import torch.nn.functional as F +from einops import rearrange + +import comfy.ops +ops = comfy.ops.disable_weight_init + +# import mochi_preview.dit.joint_model.context_parallel as cp +# from mochi_preview.vae.cp_conv import cp_pass_frames, gather_all_frames + + +def cast_tuple(t, length=1): + return t if isinstance(t, tuple) else ((t,) * length) + + +class GroupNormSpatial(ops.GroupNorm): + """ + GroupNorm applied per-frame. + """ + + def forward(self, x: torch.Tensor, *, chunk_size: int = 8): + B, C, T, H, W = x.shape + x = rearrange(x, "B C T H W -> (B T) C H W") + # Run group norm in chunks. + output = torch.empty_like(x) + for b in range(0, B * T, chunk_size): + output[b : b + chunk_size] = super().forward(x[b : b + chunk_size]) + return rearrange(output, "(B T) C H W -> B C T H W", B=B, T=T) + +class PConv3d(ops.Conv3d): + def __init__( + self, + in_channels, + out_channels, + kernel_size: Union[int, Tuple[int, int, int]], + stride: Union[int, Tuple[int, int, int]], + causal: bool = True, + context_parallel: bool = True, + **kwargs, + ): + self.causal = causal + self.context_parallel = context_parallel + kernel_size = cast_tuple(kernel_size, 3) + stride = cast_tuple(stride, 3) + height_pad = (kernel_size[1] - 1) // 2 + width_pad = (kernel_size[2] - 1) // 2 + + super().__init__( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + dilation=(1, 1, 1), + padding=(0, height_pad, width_pad), + **kwargs, + ) + + def forward(self, x: torch.Tensor): + # Compute padding amounts. + context_size = self.kernel_size[0] - 1 + if self.causal: + pad_front = context_size + pad_back = 0 + else: + pad_front = context_size // 2 + pad_back = context_size - pad_front + + # Apply padding. + assert self.padding_mode == "replicate" # DEBUG + mode = "constant" if self.padding_mode == "zeros" else self.padding_mode + x = F.pad(x, (0, 0, 0, 0, pad_front, pad_back), mode=mode) + return super().forward(x) + + +class Conv1x1(ops.Linear): + """*1x1 Conv implemented with a linear layer.""" + + def __init__(self, in_features: int, out_features: int, *args, **kwargs): + super().__init__(in_features, out_features, *args, **kwargs) + + def forward(self, x: torch.Tensor): + """Forward pass. + + Args: + x: Input tensor. Shape: [B, C, *] or [B, *, C]. + + Returns: + x: Output tensor. Shape: [B, C', *] or [B, *, C']. 
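+
+        Note: equivalent to a 1x1 (or 1x1x1) convolution. Channels are moved
+        to the last dim, mixed pointwise by the Linear, then moved back, so
+        this works for any number of trailing spatial/temporal dims.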
+ """ + x = x.movedim(1, -1) + x = super().forward(x) + x = x.movedim(-1, 1) + return x + + +class DepthToSpaceTime(nn.Module): + def __init__( + self, + temporal_expansion: int, + spatial_expansion: int, + ): + super().__init__() + self.temporal_expansion = temporal_expansion + self.spatial_expansion = spatial_expansion + + # When printed, this module should show the temporal and spatial expansion factors. + def extra_repr(self): + return f"texp={self.temporal_expansion}, sexp={self.spatial_expansion}" + + def forward(self, x: torch.Tensor): + """Forward pass. + + Args: + x: Input tensor. Shape: [B, C, T, H, W]. + + Returns: + x: Rearranged tensor. Shape: [B, C/(st*s*s), T*st, H*s, W*s]. + """ + x = rearrange( + x, + "B (C st sh sw) T H W -> B C (T st) (H sh) (W sw)", + st=self.temporal_expansion, + sh=self.spatial_expansion, + sw=self.spatial_expansion, + ) + + # cp_rank, _ = cp.get_cp_rank_size() + if self.temporal_expansion > 1: # and cp_rank == 0: + # Drop the first self.temporal_expansion - 1 frames. + # This is because we always want the 3x3x3 conv filter to only apply + # to the first frame, and the first frame doesn't need to be repeated. + assert all(x.shape) + x = x[:, :, self.temporal_expansion - 1 :] + assert all(x.shape) + + return x + + +def norm_fn( + in_channels: int, + affine: bool = True, +): + return GroupNormSpatial(affine=affine, num_groups=32, num_channels=in_channels) + + +class ResBlock(nn.Module): + """Residual block that preserves the spatial dimensions.""" + + def __init__( + self, + channels: int, + *, + affine: bool = True, + attn_block: Optional[nn.Module] = None, + padding_mode: str = "replicate", + causal: bool = True, + ): + super().__init__() + self.channels = channels + + assert causal + self.stack = nn.Sequential( + norm_fn(channels, affine=affine), + nn.SiLU(inplace=True), + PConv3d( + in_channels=channels, + out_channels=channels, + kernel_size=(3, 3, 3), + stride=(1, 1, 1), + padding_mode=padding_mode, + bias=True, + # causal=causal, + ), + norm_fn(channels, affine=affine), + nn.SiLU(inplace=True), + PConv3d( + in_channels=channels, + out_channels=channels, + kernel_size=(3, 3, 3), + stride=(1, 1, 1), + padding_mode=padding_mode, + bias=True, + # causal=causal, + ), + ) + + self.attn_block = attn_block if attn_block else nn.Identity() + + def forward(self, x: torch.Tensor): + """Forward pass. + + Args: + x: Input tensor. Shape: [B, C, T, H, W]. + """ + residual = x + x = self.stack(x) + x = x + residual + del residual + + return self.attn_block(x) + + +class CausalUpsampleBlock(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + num_res_blocks: int, + *, + temporal_expansion: int = 2, + spatial_expansion: int = 2, + **block_kwargs, + ): + super().__init__() + + blocks = [] + for _ in range(num_res_blocks): + blocks.append(block_fn(in_channels, **block_kwargs)) + self.blocks = nn.Sequential(*blocks) + + self.temporal_expansion = temporal_expansion + self.spatial_expansion = spatial_expansion + + # Change channels in the final convolution layer. + self.proj = Conv1x1( + in_channels, + out_channels * temporal_expansion * (spatial_expansion**2), + ) + + self.d2st = DepthToSpaceTime( + temporal_expansion=temporal_expansion, spatial_expansion=spatial_expansion + ) + + def forward(self, x): + x = self.blocks(x) + x = self.proj(x) + x = self.d2st(x) + return x + + +def block_fn(channels, *, has_attention: bool = False, **block_kwargs): + assert has_attention is False #NOTE: if this is ever true add back the attention code. 
+ + attn_block = None #AttentionBlock(channels) if has_attention else None + + return ResBlock( + channels, affine=True, attn_block=attn_block, **block_kwargs + ) + + +class DownsampleBlock(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + num_res_blocks, + *, + temporal_reduction=2, + spatial_reduction=2, + **block_kwargs, + ): + """ + Downsample block for the VAE encoder. + + Args: + in_channels: Number of input channels. + out_channels: Number of output channels. + num_res_blocks: Number of residual blocks. + temporal_reduction: Temporal reduction factor. + spatial_reduction: Spatial reduction factor. + """ + super().__init__() + layers = [] + + # Change the channel count in the strided convolution. + # This lets the ResBlock have uniform channel count, + # as in ConvNeXt. + assert in_channels != out_channels + layers.append( + PConv3d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=(temporal_reduction, spatial_reduction, spatial_reduction), + stride=(temporal_reduction, spatial_reduction, spatial_reduction), + padding_mode="replicate", + bias=True, + ) + ) + + for _ in range(num_res_blocks): + layers.append(block_fn(out_channels, **block_kwargs)) + + self.layers = nn.Sequential(*layers) + + def forward(self, x): + return self.layers(x) + + +def add_fourier_features(inputs: torch.Tensor, start=6, stop=8, step=1): + num_freqs = (stop - start) // step + assert inputs.ndim == 5 + C = inputs.size(1) + + # Create Base 2 Fourier features. + freqs = torch.arange(start, stop, step, dtype=inputs.dtype, device=inputs.device) + assert num_freqs == len(freqs) + w = torch.pow(2.0, freqs) * (2 * torch.pi) # [num_freqs] + C = inputs.shape[1] + w = w.repeat(C)[None, :, None, None, None] # [1, C * num_freqs, 1, 1, 1] + + # Interleaved repeat of input channels to match w. + h = inputs.repeat_interleave(num_freqs, dim=1) # [B, C * num_freqs, T, H, W] + # Scale channels by frequency. + h = w * h + + return torch.cat( + [ + inputs, + torch.sin(h), + torch.cos(h), + ], + dim=1, + ) + + +class FourierFeatures(nn.Module): + def __init__(self, start: int = 6, stop: int = 8, step: int = 1): + super().__init__() + self.start = start + self.stop = stop + self.step = step + + def forward(self, inputs): + """Add Fourier features to inputs. + + Args: + inputs: Input tensor. Shape: [B, C, T, H, W] + + Returns: + h: Output tensor. Shape: [B, (1 + 2 * num_freqs) * C, T, H, W] + """ + return add_fourier_features(inputs, self.start, self.stop, self.step) + + +class Decoder(nn.Module): + def __init__( + self, + *, + out_channels: int = 3, + latent_dim: int, + base_channels: int, + channel_multipliers: List[int], + num_res_blocks: List[int], + temporal_expansions: Optional[List[int]] = None, + spatial_expansions: Optional[List[int]] = None, + has_attention: List[bool], + output_norm: bool = True, + nonlinearity: str = "silu", + output_nonlinearity: str = "silu", + causal: bool = True, + **block_kwargs, + ): + super().__init__() + self.input_channels = latent_dim + self.base_channels = base_channels + self.channel_multipliers = channel_multipliers + self.num_res_blocks = num_res_blocks + self.output_nonlinearity = output_nonlinearity + assert nonlinearity == "silu" + assert causal + + ch = [mult * base_channels for mult in channel_multipliers] + self.num_up_blocks = len(ch) - 1 + assert len(num_res_blocks) == self.num_up_blocks + 2 + + blocks = [] + + first_block = [ + nn.Conv3d(latent_dim, ch[-1], kernel_size=(1, 1, 1)) + ] # Input layer. 
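+        # (This 1x1x1 Conv3d is the only place latent_dim enters the decoder:
+        # it projects the latent channels up to ch[-1] before the ResBlocks.)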
+ # First set of blocks preserve channel count. + for _ in range(num_res_blocks[-1]): + first_block.append( + block_fn( + ch[-1], + has_attention=has_attention[-1], + causal=causal, + **block_kwargs, + ) + ) + blocks.append(nn.Sequential(*first_block)) + + assert len(temporal_expansions) == len(spatial_expansions) == self.num_up_blocks + assert len(num_res_blocks) == len(has_attention) == self.num_up_blocks + 2 + + upsample_block_fn = CausalUpsampleBlock + + for i in range(self.num_up_blocks): + block = upsample_block_fn( + ch[-i - 1], + ch[-i - 2], + num_res_blocks=num_res_blocks[-i - 2], + has_attention=has_attention[-i - 2], + temporal_expansion=temporal_expansions[-i - 1], + spatial_expansion=spatial_expansions[-i - 1], + causal=causal, + **block_kwargs, + ) + blocks.append(block) + + assert not output_norm + + # Last block. Preserve channel count. + last_block = [] + for _ in range(num_res_blocks[0]): + last_block.append( + block_fn( + ch[0], has_attention=has_attention[0], causal=causal, **block_kwargs + ) + ) + blocks.append(nn.Sequential(*last_block)) + + self.blocks = nn.ModuleList(blocks) + self.output_proj = Conv1x1(ch[0], out_channels) + + def forward(self, x): + """Forward pass. + + Args: + x: Latent tensor. Shape: [B, input_channels, t, h, w]. Scaled [-1, 1]. + + Returns: + x: Reconstructed video tensor. Shape: [B, C, T, H, W]. Scaled to [-1, 1]. + T + 1 = (t - 1) * 4. + H = h * 16, W = w * 16. + """ + for block in self.blocks: + x = block(x) + + if self.output_nonlinearity == "silu": + x = F.silu(x, inplace=not self.training) + else: + assert ( + not self.output_nonlinearity + ) # StyleGAN3 omits the to-RGB nonlinearity. + + return self.output_proj(x).contiguous() + + +class VideoVAE(nn.Module): + def __init__(self): + super().__init__() + self.encoder = None #TODO once the model releases + self.decoder = Decoder( + out_channels=3, + base_channels=128, + channel_multipliers=[1, 2, 4, 6], + temporal_expansions=[1, 2, 3], + spatial_expansions=[2, 2, 2], + num_res_blocks=[3, 3, 4, 6, 3], + latent_dim=12, + has_attention=[False, False, False, False, False], + padding_mode="replicate", + output_norm=False, + nonlinearity="silu", + output_nonlinearity="silu", + causal=True, + ) + + def encode(self, x): + return self.encoder(x) + + def decode(self, x): + return self.decoder(x) diff --git a/src/comfyui/comfy/ldm/hydit/__pycache__/attn_layers.cpython-310.pyc b/src/comfyui/comfy/ldm/hydit/__pycache__/attn_layers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a641650c902746356dd94068073137fe8cf82598 Binary files /dev/null and b/src/comfyui/comfy/ldm/hydit/__pycache__/attn_layers.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/hydit/__pycache__/controlnet.cpython-310.pyc b/src/comfyui/comfy/ldm/hydit/__pycache__/controlnet.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7a385573fcc634c05824ef1c76018afce81a681b Binary files /dev/null and b/src/comfyui/comfy/ldm/hydit/__pycache__/controlnet.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/hydit/__pycache__/models.cpython-310.pyc b/src/comfyui/comfy/ldm/hydit/__pycache__/models.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e33979a1c0164d9fdacb55ec8d9c379f29dba1c7 Binary files /dev/null and b/src/comfyui/comfy/ldm/hydit/__pycache__/models.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/hydit/__pycache__/poolers.cpython-310.pyc b/src/comfyui/comfy/ldm/hydit/__pycache__/poolers.cpython-310.pyc new file 
mode 100644 index 0000000000000000000000000000000000000000..a7cec90b9a22dc35d4d7f29ffa9de6113faaaad2 Binary files /dev/null and b/src/comfyui/comfy/ldm/hydit/__pycache__/poolers.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/hydit/__pycache__/posemb_layers.cpython-310.pyc b/src/comfyui/comfy/ldm/hydit/__pycache__/posemb_layers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c5a470ad3854fd9d0ae160197bc27132676c2065 Binary files /dev/null and b/src/comfyui/comfy/ldm/hydit/__pycache__/posemb_layers.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/hydit/attn_layers.py b/src/comfyui/comfy/ldm/hydit/attn_layers.py new file mode 100644 index 0000000000000000000000000000000000000000..e2801f714956d89bfd8938fd2f5010387b49de77 --- /dev/null +++ b/src/comfyui/comfy/ldm/hydit/attn_layers.py @@ -0,0 +1,218 @@ +import torch +import torch.nn as nn +from typing import Tuple, Union, Optional +from comfy.ldm.modules.attention import optimized_attention + + +def reshape_for_broadcast(freqs_cis: Union[torch.Tensor, Tuple[torch.Tensor]], x: torch.Tensor, head_first=False): + """ + Reshape frequency tensor for broadcasting it with another tensor. + + This function reshapes the frequency tensor to have the same shape as the target tensor 'x' + for the purpose of broadcasting the frequency tensor during element-wise operations. + + Args: + freqs_cis (Union[torch.Tensor, Tuple[torch.Tensor]]): Frequency tensor to be reshaped. + x (torch.Tensor): Target tensor for broadcasting compatibility. + head_first (bool): head dimension first (except batch dim) or not. + + Returns: + torch.Tensor: Reshaped frequency tensor. + + Raises: + AssertionError: If the frequency tensor doesn't match the expected shape. + AssertionError: If the target tensor 'x' doesn't have the expected number of dimensions. + """ + ndim = x.ndim + assert 0 <= 1 < ndim + + if isinstance(freqs_cis, tuple): + # freqs_cis: (cos, sin) in real space + if head_first: + assert freqs_cis[0].shape == (x.shape[-2], x.shape[-1]), f'freqs_cis shape {freqs_cis[0].shape} does not match x shape {x.shape}' + shape = [d if i == ndim - 2 or i == ndim - 1 else 1 for i, d in enumerate(x.shape)] + else: + assert freqs_cis[0].shape == (x.shape[1], x.shape[-1]), f'freqs_cis shape {freqs_cis[0].shape} does not match x shape {x.shape}' + shape = [d if i == 1 or i == ndim - 1 else 1 for i, d in enumerate(x.shape)] + return freqs_cis[0].view(*shape), freqs_cis[1].view(*shape) + else: + # freqs_cis: values in complex space + if head_first: + assert freqs_cis.shape == (x.shape[-2], x.shape[-1]), f'freqs_cis shape {freqs_cis.shape} does not match x shape {x.shape}' + shape = [d if i == ndim - 2 or i == ndim - 1 else 1 for i, d in enumerate(x.shape)] + else: + assert freqs_cis.shape == (x.shape[1], x.shape[-1]), f'freqs_cis shape {freqs_cis.shape} does not match x shape {x.shape}' + shape = [d if i == 1 or i == ndim - 1 else 1 for i, d in enumerate(x.shape)] + return freqs_cis.view(*shape) + + +def rotate_half(x): + x_real, x_imag = x.reshape(*x.shape[:-1], -1, 2).unbind(-1) # [B, S, H, D//2] + return torch.stack([-x_imag, x_real], dim=-1).flatten(3) + + +def apply_rotary_emb( + xq: torch.Tensor, + xk: Optional[torch.Tensor], + freqs_cis: Union[torch.Tensor, Tuple[torch.Tensor]], + head_first: bool = False, +) -> Tuple[torch.Tensor, torch.Tensor]: + """ + Apply rotary embeddings to input tensors using the given frequency tensor. 
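+
+    (Two equivalent code paths below: a real-valued (cos, sin) pair applied as
+    x * cos + rotate_half(x) * sin, or a complex freqs_cis multiplied against
+    x viewed as interleaved complex pairs.)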
+ + This function applies rotary embeddings to the given query 'xq' and key 'xk' tensors using the provided + frequency tensor 'freqs_cis'. The input tensors are reshaped as complex numbers, and the frequency tensor + is reshaped for broadcasting compatibility. The resulting tensors contain rotary embeddings and are + returned as real tensors. + + Args: + xq (torch.Tensor): Query tensor to apply rotary embeddings. [B, S, H, D] + xk (torch.Tensor): Key tensor to apply rotary embeddings. [B, S, H, D] + freqs_cis (Union[torch.Tensor, Tuple[torch.Tensor]]): Precomputed frequency tensor for complex exponentials. + head_first (bool): head dimension first (except batch dim) or not. + + Returns: + Tuple[torch.Tensor, torch.Tensor]: Tuple of modified query tensor and key tensor with rotary embeddings. + + """ + xk_out = None + if isinstance(freqs_cis, tuple): + cos, sin = reshape_for_broadcast(freqs_cis, xq, head_first) # [S, D] + xq_out = (xq * cos + rotate_half(xq) * sin) + if xk is not None: + xk_out = (xk * cos + rotate_half(xk) * sin) + else: + xq_ = torch.view_as_complex(xq.float().reshape(*xq.shape[:-1], -1, 2)) # [B, S, H, D//2] + freqs_cis = reshape_for_broadcast(freqs_cis, xq_, head_first).to(xq.device) # [S, D//2] --> [1, S, 1, D//2] + xq_out = torch.view_as_real(xq_ * freqs_cis).flatten(3).type_as(xq) + if xk is not None: + xk_ = torch.view_as_complex(xk.float().reshape(*xk.shape[:-1], -1, 2)) # [B, S, H, D//2] + xk_out = torch.view_as_real(xk_ * freqs_cis).flatten(3).type_as(xk) + + return xq_out, xk_out + + + +class CrossAttention(nn.Module): + """ + Use QK Normalization. + """ + def __init__(self, + qdim, + kdim, + num_heads, + qkv_bias=True, + qk_norm=False, + attn_drop=0.0, + proj_drop=0.0, + attn_precision=None, + device=None, + dtype=None, + operations=None, + ): + factory_kwargs = {'device': device, 'dtype': dtype} + super().__init__() + self.attn_precision = attn_precision + self.qdim = qdim + self.kdim = kdim + self.num_heads = num_heads + assert self.qdim % num_heads == 0, "self.qdim must be divisible by num_heads" + self.head_dim = self.qdim // num_heads + assert self.head_dim % 8 == 0 and self.head_dim <= 128, "Only support head_dim <= 128 and divisible by 8" + self.scale = self.head_dim ** -0.5 + + self.q_proj = operations.Linear(qdim, qdim, bias=qkv_bias, **factory_kwargs) + self.kv_proj = operations.Linear(kdim, 2 * qdim, bias=qkv_bias, **factory_kwargs) + + # TODO: eps should be 1 / 65530 if using fp16 + self.q_norm = operations.LayerNorm(self.head_dim, elementwise_affine=True, eps=1e-6, dtype=dtype, device=device) if qk_norm else nn.Identity() + self.k_norm = operations.LayerNorm(self.head_dim, elementwise_affine=True, eps=1e-6, dtype=dtype, device=device) if qk_norm else nn.Identity() + self.attn_drop = nn.Dropout(attn_drop) + self.out_proj = operations.Linear(qdim, qdim, bias=qkv_bias, **factory_kwargs) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x, y, freqs_cis_img=None): + """ + Parameters + ---------- + x: torch.Tensor + (batch, seqlen1, hidden_dim) (where hidden_dim = num heads * head dim) + y: torch.Tensor + (batch, seqlen2, hidden_dim2) + freqs_cis_img: torch.Tensor + (batch, hidden_dim // 2), RoPE for image + """ + b, s1, c = x.shape # [b, s1, D] + _, s2, c = y.shape # [b, s2, 1024] + + q = self.q_proj(x).view(b, s1, self.num_heads, self.head_dim) # [b, s1, h, d] + kv = self.kv_proj(y).view(b, s2, 2, self.num_heads, self.head_dim) # [b, s2, 2, h, d] + k, v = kv.unbind(dim=2) # [b, s, h, d] + q = self.q_norm(q) + k = self.k_norm(k) + + # 
Apply RoPE if needed + if freqs_cis_img is not None: + qq, _ = apply_rotary_emb(q, None, freqs_cis_img) + assert qq.shape == q.shape, f'qq: {qq.shape}, q: {q.shape}' + q = qq + + q = q.transpose(-2, -3).contiguous() # q -> B, L1, H, C - B, H, L1, C + k = k.transpose(-2, -3).contiguous() # k -> B, L2, H, C - B, H, C, L2 + v = v.transpose(-2, -3).contiguous() + + context = optimized_attention(q, k, v, self.num_heads, skip_reshape=True, attn_precision=self.attn_precision) + + out = self.out_proj(context) # context.reshape - B, L1, -1 + out = self.proj_drop(out) + + out_tuple = (out,) + + return out_tuple + + +class Attention(nn.Module): + """ + We rename some layer names to align with flash attention + """ + def __init__(self, dim, num_heads, qkv_bias=True, qk_norm=False, attn_drop=0., proj_drop=0., attn_precision=None, dtype=None, device=None, operations=None): + super().__init__() + self.attn_precision = attn_precision + self.dim = dim + self.num_heads = num_heads + assert self.dim % num_heads == 0, 'dim should be divisible by num_heads' + self.head_dim = self.dim // num_heads + # This assertion is aligned with flash attention + assert self.head_dim % 8 == 0 and self.head_dim <= 128, "Only support head_dim <= 128 and divisible by 8" + self.scale = self.head_dim ** -0.5 + + # qkv --> Wqkv + self.Wqkv = operations.Linear(dim, dim * 3, bias=qkv_bias, dtype=dtype, device=device) + # TODO: eps should be 1 / 65530 if using fp16 + self.q_norm = operations.LayerNorm(self.head_dim, elementwise_affine=True, eps=1e-6, dtype=dtype, device=device) if qk_norm else nn.Identity() + self.k_norm = operations.LayerNorm(self.head_dim, elementwise_affine=True, eps=1e-6, dtype=dtype, device=device) if qk_norm else nn.Identity() + self.attn_drop = nn.Dropout(attn_drop) + self.out_proj = operations.Linear(dim, dim, dtype=dtype, device=device) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x, freqs_cis_img=None): + B, N, C = x.shape + qkv = self.Wqkv(x).reshape(B, N, 3, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4) # [3, b, h, s, d] + q, k, v = qkv.unbind(0) # [b, h, s, d] + q = self.q_norm(q) # [b, h, s, d] + k = self.k_norm(k) # [b, h, s, d] + + # Apply RoPE if needed + if freqs_cis_img is not None: + qq, kk = apply_rotary_emb(q, k, freqs_cis_img, head_first=True) + assert qq.shape == q.shape and kk.shape == k.shape, \ + f'qq: {qq.shape}, q: {q.shape}, kk: {kk.shape}, k: {k.shape}' + q, k = qq, kk + + x = optimized_attention(q, k, v, self.num_heads, skip_reshape=True, attn_precision=self.attn_precision) + x = self.out_proj(x) + x = self.proj_drop(x) + + out_tuple = (x,) + + return out_tuple diff --git a/src/comfyui/comfy/ldm/hydit/controlnet.py b/src/comfyui/comfy/ldm/hydit/controlnet.py new file mode 100644 index 0000000000000000000000000000000000000000..cd71fca31aaa65b7e6b085474852bb2c42bd6579 --- /dev/null +++ b/src/comfyui/comfy/ldm/hydit/controlnet.py @@ -0,0 +1,321 @@ +from typing import Any, Optional + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from torch.utils import checkpoint + +from comfy.ldm.modules.diffusionmodules.mmdit import ( + Mlp, + TimestepEmbedder, + PatchEmbed, + RMSNorm, +) +from comfy.ldm.modules.diffusionmodules.util import timestep_embedding +from .poolers import AttentionPool + +import comfy.latent_formats +from .models import HunYuanDiTBlock, calc_rope + +from .posemb_layers import get_2d_rotary_pos_embed, get_fill_resize_and_crop + + +class HunYuanControlNet(nn.Module): + """ + HunYuanDiT: Diffusion model with a Transformer 
backbone. + + Inherit ModelMixin and ConfigMixin to be compatible with the sampler StableDiffusionPipeline of diffusers. + + Inherit PeftAdapterMixin to be compatible with the PEFT training pipeline. + + Parameters + ---------- + args: argparse.Namespace + The arguments parsed by argparse. + input_size: tuple + The size of the input image. + patch_size: int + The size of the patch. + in_channels: int + The number of input channels. + hidden_size: int + The hidden size of the transformer backbone. + depth: int + The number of transformer blocks. + num_heads: int + The number of attention heads. + mlp_ratio: float + The ratio of the hidden size of the MLP in the transformer block. + log_fn: callable + The logging function. + """ + + def __init__( + self, + input_size: tuple = 128, + patch_size: int = 2, + in_channels: int = 4, + hidden_size: int = 1408, + depth: int = 40, + num_heads: int = 16, + mlp_ratio: float = 4.3637, + text_states_dim=1024, + text_states_dim_t5=2048, + text_len=77, + text_len_t5=256, + qk_norm=True, # See http://arxiv.org/abs/2302.05442 for details. + size_cond=False, + use_style_cond=False, + learn_sigma=True, + norm="layer", + log_fn: callable = print, + attn_precision=None, + dtype=None, + device=None, + operations=None, + **kwargs, + ): + super().__init__() + self.log_fn = log_fn + self.depth = depth + self.learn_sigma = learn_sigma + self.in_channels = in_channels + self.out_channels = in_channels * 2 if learn_sigma else in_channels + self.patch_size = patch_size + self.num_heads = num_heads + self.hidden_size = hidden_size + self.text_states_dim = text_states_dim + self.text_states_dim_t5 = text_states_dim_t5 + self.text_len = text_len + self.text_len_t5 = text_len_t5 + self.size_cond = size_cond + self.use_style_cond = use_style_cond + self.norm = norm + self.dtype = dtype + self.latent_format = comfy.latent_formats.SDXL + + self.mlp_t5 = nn.Sequential( + nn.Linear( + self.text_states_dim_t5, + self.text_states_dim_t5 * 4, + bias=True, + dtype=dtype, + device=device, + ), + nn.SiLU(), + nn.Linear( + self.text_states_dim_t5 * 4, + self.text_states_dim, + bias=True, + dtype=dtype, + device=device, + ), + ) + # learnable replace + self.text_embedding_padding = nn.Parameter( + torch.randn( + self.text_len + self.text_len_t5, + self.text_states_dim, + dtype=dtype, + device=device, + ) + ) + + # Attention pooling + pooler_out_dim = 1024 + self.pooler = AttentionPool( + self.text_len_t5, + self.text_states_dim_t5, + num_heads=8, + output_dim=pooler_out_dim, + dtype=dtype, + device=device, + operations=operations, + ) + + # Dimension of the extra input vectors + self.extra_in_dim = pooler_out_dim + + if self.size_cond: + # Image size and crop size conditions + self.extra_in_dim += 6 * 256 + + if self.use_style_cond: + # Here we use a default learned embedder layer for future extension. 
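+            # (nn.Embedding(1, hidden_size) is a single learnable style
+            # vector; with one row, any valid style index selects the same
+            # embedding, so this is effectively a constant learned offset.)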
+ self.style_embedder = nn.Embedding( + 1, hidden_size, dtype=dtype, device=device + ) + self.extra_in_dim += hidden_size + + # Text embedding for `add` + self.x_embedder = PatchEmbed( + input_size, + patch_size, + in_channels, + hidden_size, + dtype=dtype, + device=device, + operations=operations, + ) + self.t_embedder = TimestepEmbedder( + hidden_size, dtype=dtype, device=device, operations=operations + ) + self.extra_embedder = nn.Sequential( + operations.Linear( + self.extra_in_dim, hidden_size * 4, dtype=dtype, device=device + ), + nn.SiLU(), + operations.Linear( + hidden_size * 4, hidden_size, bias=True, dtype=dtype, device=device + ), + ) + + # Image embedding + num_patches = self.x_embedder.num_patches + + # HUnYuanDiT Blocks + self.blocks = nn.ModuleList( + [ + HunYuanDiTBlock( + hidden_size=hidden_size, + c_emb_size=hidden_size, + num_heads=num_heads, + mlp_ratio=mlp_ratio, + text_states_dim=self.text_states_dim, + qk_norm=qk_norm, + norm_type=self.norm, + skip=False, + attn_precision=attn_precision, + dtype=dtype, + device=device, + operations=operations, + ) + for _ in range(19) + ] + ) + + # Input zero linear for the first block + self.before_proj = operations.Linear(self.hidden_size, self.hidden_size, dtype=dtype, device=device) + + + # Output zero linear for the every block + self.after_proj_list = nn.ModuleList( + [ + + operations.Linear( + self.hidden_size, self.hidden_size, dtype=dtype, device=device + ) + for _ in range(len(self.blocks)) + ] + ) + + def forward( + self, + x, + hint, + timesteps, + context,#encoder_hidden_states=None, + text_embedding_mask=None, + encoder_hidden_states_t5=None, + text_embedding_mask_t5=None, + image_meta_size=None, + style=None, + return_dict=False, + **kwarg, + ): + """ + Forward pass of the encoder. + + Parameters + ---------- + x: torch.Tensor + (B, D, H, W) + t: torch.Tensor + (B) + encoder_hidden_states: torch.Tensor + CLIP text embedding, (B, L_clip, D) + text_embedding_mask: torch.Tensor + CLIP text embedding mask, (B, L_clip) + encoder_hidden_states_t5: torch.Tensor + T5 text embedding, (B, L_t5, D) + text_embedding_mask_t5: torch.Tensor + T5 text embedding mask, (B, L_t5) + image_meta_size: torch.Tensor + (B, 6) + style: torch.Tensor + (B) + cos_cis_img: torch.Tensor + sin_cis_img: torch.Tensor + return_dict: bool + Whether to return a dictionary. + """ + condition = hint + if condition.shape[0] == 1: + condition = torch.repeat_interleave(condition, x.shape[0], dim=0) + + text_states = context # 2,77,1024 + text_states_t5 = encoder_hidden_states_t5 # 2,256,2048 + text_states_mask = text_embedding_mask.bool() # 2,77 + text_states_t5_mask = text_embedding_mask_t5.bool() # 2,256 + b_t5, l_t5, c_t5 = text_states_t5.shape + text_states_t5 = self.mlp_t5(text_states_t5.view(-1, c_t5)).view(b_t5, l_t5, -1) + + padding = comfy.ops.cast_to_input(self.text_embedding_padding, text_states) + + text_states[:, -self.text_len :] = torch.where( + text_states_mask[:, -self.text_len :].unsqueeze(2), + text_states[:, -self.text_len :], + padding[: self.text_len], + ) + text_states_t5[:, -self.text_len_t5 :] = torch.where( + text_states_t5_mask[:, -self.text_len_t5 :].unsqueeze(2), + text_states_t5[:, -self.text_len_t5 :], + padding[self.text_len :], + ) + + text_states = torch.cat([text_states, text_states_t5], dim=1) # 2,205,1024 + + # _, _, oh, ow = x.shape + # th, tw = oh // self.patch_size, ow // self.patch_size + + # Get image RoPE embedding according to `reso`lution. 
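+        # (calc_rope derives the rotary tables from the current latent H/W via
+        # get_fill_resize_and_crop, so off-base resolutions get a resized and
+        # center-cropped RoPE grid instead of a fixed positional table.)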
+ freqs_cis_img = calc_rope( + x, self.patch_size, self.hidden_size // self.num_heads + ) # (cos_cis_img, sin_cis_img) + + # ========================= Build time and image embedding ========================= + t = self.t_embedder(timesteps, dtype=self.dtype) + x = self.x_embedder(x) + + # ========================= Concatenate all extra vectors ========================= + # Build text tokens with pooling + extra_vec = self.pooler(encoder_hidden_states_t5) + + # Build image meta size tokens if applicable + # if image_meta_size is not None: + # image_meta_size = timestep_embedding(image_meta_size.view(-1), 256) # [B * 6, 256] + # if image_meta_size.dtype != self.dtype: + # image_meta_size = image_meta_size.half() + # image_meta_size = image_meta_size.view(-1, 6 * 256) + # extra_vec = torch.cat([extra_vec, image_meta_size], dim=1) # [B, D + 6 * 256] + + # Build style tokens + if style is not None: + style_embedding = self.style_embedder(style) + extra_vec = torch.cat([extra_vec, style_embedding], dim=1) + + # Concatenate all extra vectors + c = t + self.extra_embedder(extra_vec) # [B, D] + + # ========================= Deal with Condition ========================= + condition = self.x_embedder(condition) + + # ========================= Forward pass through HunYuanDiT blocks ========================= + controls = [] + x = x + self.before_proj(condition) # add condition + for layer, block in enumerate(self.blocks): + x = block(x, c, text_states, freqs_cis_img) + controls.append(self.after_proj_list[layer](x)) # zero linear for output + + return {"output": controls} diff --git a/src/comfyui/comfy/ldm/hydit/models.py b/src/comfyui/comfy/ldm/hydit/models.py new file mode 100644 index 0000000000000000000000000000000000000000..44e806cba94f496ccde59b42c26142bb538bfd0a --- /dev/null +++ b/src/comfyui/comfy/ldm/hydit/models.py @@ -0,0 +1,410 @@ +from typing import Any + +import torch +import torch.nn as nn +import torch.nn.functional as F + +import comfy.ops +from comfy.ldm.modules.diffusionmodules.mmdit import Mlp, TimestepEmbedder, PatchEmbed, RMSNorm +from comfy.ldm.modules.diffusionmodules.util import timestep_embedding +from torch.utils import checkpoint + +from .attn_layers import Attention, CrossAttention +from .poolers import AttentionPool +from .posemb_layers import get_2d_rotary_pos_embed, get_fill_resize_and_crop + +def calc_rope(x, patch_size, head_size): + th = (x.shape[2] + (patch_size // 2)) // patch_size + tw = (x.shape[3] + (patch_size // 2)) // patch_size + base_size = 512 // 8 // patch_size + start, stop = get_fill_resize_and_crop((th, tw), base_size) + sub_args = [start, stop, (th, tw)] + # head_size = HUNYUAN_DIT_CONFIG['DiT-g/2']['hidden_size'] // HUNYUAN_DIT_CONFIG['DiT-g/2']['num_heads'] + rope = get_2d_rotary_pos_embed(head_size, *sub_args) + rope = (rope[0].to(x), rope[1].to(x)) + return rope + + +def modulate(x, shift, scale): + return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1) + + +class HunYuanDiTBlock(nn.Module): + """ + A HunYuanDiT block with `add` conditioning. 
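+
+    Per-block structure: shift-only adaLN modulation on self-attention,
+    cross-attention over the CLIP/T5 text states, then an MLP; blocks built
+    with skip=True additionally fuse a U-Net style long skip connection
+    through skip_norm and skip_linear.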
+ """ + def __init__(self, + hidden_size, + c_emb_size, + num_heads, + mlp_ratio=4.0, + text_states_dim=1024, + qk_norm=False, + norm_type="layer", + skip=False, + attn_precision=None, + dtype=None, + device=None, + operations=None, + ): + super().__init__() + use_ele_affine = True + + if norm_type == "layer": + norm_layer = operations.LayerNorm + elif norm_type == "rms": + norm_layer = RMSNorm + else: + raise ValueError(f"Unknown norm_type: {norm_type}") + + # ========================= Self-Attention ========================= + self.norm1 = norm_layer(hidden_size, elementwise_affine=use_ele_affine, eps=1e-6, dtype=dtype, device=device) + self.attn1 = Attention(hidden_size, num_heads=num_heads, qkv_bias=True, qk_norm=qk_norm, attn_precision=attn_precision, dtype=dtype, device=device, operations=operations) + + # ========================= FFN ========================= + self.norm2 = norm_layer(hidden_size, elementwise_affine=use_ele_affine, eps=1e-6, dtype=dtype, device=device) + mlp_hidden_dim = int(hidden_size * mlp_ratio) + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.mlp = Mlp(in_features=hidden_size, hidden_features=mlp_hidden_dim, act_layer=approx_gelu, drop=0, dtype=dtype, device=device, operations=operations) + + # ========================= Add ========================= + # Simply use add like SDXL. + self.default_modulation = nn.Sequential( + nn.SiLU(), + operations.Linear(c_emb_size, hidden_size, bias=True, dtype=dtype, device=device) + ) + + # ========================= Cross-Attention ========================= + self.attn2 = CrossAttention(hidden_size, text_states_dim, num_heads=num_heads, qkv_bias=True, + qk_norm=qk_norm, attn_precision=attn_precision, dtype=dtype, device=device, operations=operations) + self.norm3 = norm_layer(hidden_size, elementwise_affine=True, eps=1e-6, dtype=dtype, device=device) + + # ========================= Skip Connection ========================= + if skip: + self.skip_norm = norm_layer(2 * hidden_size, elementwise_affine=True, eps=1e-6, dtype=dtype, device=device) + self.skip_linear = operations.Linear(2 * hidden_size, hidden_size, dtype=dtype, device=device) + else: + self.skip_linear = None + + self.gradient_checkpointing = False + + def _forward(self, x, c=None, text_states=None, freq_cis_img=None, skip=None): + # Long Skip Connection + if self.skip_linear is not None: + cat = torch.cat([x, skip], dim=-1) + if cat.dtype != x.dtype: + cat = cat.to(x.dtype) + cat = self.skip_norm(cat) + x = self.skip_linear(cat) + + # Self-Attention + shift_msa = self.default_modulation(c).unsqueeze(dim=1) + attn_inputs = ( + self.norm1(x) + shift_msa, freq_cis_img, + ) + x = x + self.attn1(*attn_inputs)[0] + + # Cross-Attention + cross_inputs = ( + self.norm3(x), text_states, freq_cis_img + ) + x = x + self.attn2(*cross_inputs)[0] + + # FFN Layer + mlp_inputs = self.norm2(x) + x = x + self.mlp(mlp_inputs) + + return x + + def forward(self, x, c=None, text_states=None, freq_cis_img=None, skip=None): + if self.gradient_checkpointing and self.training: + return checkpoint.checkpoint(self._forward, x, c, text_states, freq_cis_img, skip) + return self._forward(x, c, text_states, freq_cis_img, skip) + + +class FinalLayer(nn.Module): + """ + The final layer of HunYuanDiT. 
+ """ + def __init__(self, final_hidden_size, c_emb_size, patch_size, out_channels, dtype=None, device=None, operations=None): + super().__init__() + self.norm_final = operations.LayerNorm(final_hidden_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) + self.linear = operations.Linear(final_hidden_size, patch_size * patch_size * out_channels, bias=True, dtype=dtype, device=device) + self.adaLN_modulation = nn.Sequential( + nn.SiLU(), + operations.Linear(c_emb_size, 2 * final_hidden_size, bias=True, dtype=dtype, device=device) + ) + + def forward(self, x, c): + shift, scale = self.adaLN_modulation(c).chunk(2, dim=1) + x = modulate(self.norm_final(x), shift, scale) + x = self.linear(x) + return x + + +class HunYuanDiT(nn.Module): + """ + HunYuanDiT: Diffusion model with a Transformer backbone. + + Inherit ModelMixin and ConfigMixin to be compatible with the sampler StableDiffusionPipeline of diffusers. + + Inherit PeftAdapterMixin to be compatible with the PEFT training pipeline. + + Parameters + ---------- + args: argparse.Namespace + The arguments parsed by argparse. + input_size: tuple + The size of the input image. + patch_size: int + The size of the patch. + in_channels: int + The number of input channels. + hidden_size: int + The hidden size of the transformer backbone. + depth: int + The number of transformer blocks. + num_heads: int + The number of attention heads. + mlp_ratio: float + The ratio of the hidden size of the MLP in the transformer block. + log_fn: callable + The logging function. + """ + #@register_to_config + def __init__(self, + input_size: tuple = 32, + patch_size: int = 2, + in_channels: int = 4, + hidden_size: int = 1152, + depth: int = 28, + num_heads: int = 16, + mlp_ratio: float = 4.0, + text_states_dim = 1024, + text_states_dim_t5 = 2048, + text_len = 77, + text_len_t5 = 256, + qk_norm = True,# See http://arxiv.org/abs/2302.05442 for details. 
+ size_cond = False, + use_style_cond = False, + learn_sigma = True, + norm = "layer", + log_fn: callable = print, + attn_precision=None, + dtype=None, + device=None, + operations=None, + **kwargs, + ): + super().__init__() + self.log_fn = log_fn + self.depth = depth + self.learn_sigma = learn_sigma + self.in_channels = in_channels + self.out_channels = in_channels * 2 if learn_sigma else in_channels + self.patch_size = patch_size + self.num_heads = num_heads + self.hidden_size = hidden_size + self.text_states_dim = text_states_dim + self.text_states_dim_t5 = text_states_dim_t5 + self.text_len = text_len + self.text_len_t5 = text_len_t5 + self.size_cond = size_cond + self.use_style_cond = use_style_cond + self.norm = norm + self.dtype = dtype + #import pdb + #pdb.set_trace() + + self.mlp_t5 = nn.Sequential( + operations.Linear(self.text_states_dim_t5, self.text_states_dim_t5 * 4, bias=True, dtype=dtype, device=device), + nn.SiLU(), + operations.Linear(self.text_states_dim_t5 * 4, self.text_states_dim, bias=True, dtype=dtype, device=device), + ) + # learnable replace + self.text_embedding_padding = nn.Parameter( + torch.empty(self.text_len + self.text_len_t5, self.text_states_dim, dtype=dtype, device=device)) + + # Attention pooling + pooler_out_dim = 1024 + self.pooler = AttentionPool(self.text_len_t5, self.text_states_dim_t5, num_heads=8, output_dim=pooler_out_dim, dtype=dtype, device=device, operations=operations) + + # Dimension of the extra input vectors + self.extra_in_dim = pooler_out_dim + + if self.size_cond: + # Image size and crop size conditions + self.extra_in_dim += 6 * 256 + + if self.use_style_cond: + # Here we use a default learned embedder layer for future extension. + self.style_embedder = operations.Embedding(1, hidden_size, dtype=dtype, device=device) + self.extra_in_dim += hidden_size + + # Text embedding for `add` + self.x_embedder = PatchEmbed(input_size, patch_size, in_channels, hidden_size, dtype=dtype, device=device, operations=operations) + self.t_embedder = TimestepEmbedder(hidden_size, dtype=dtype, device=device, operations=operations) + self.extra_embedder = nn.Sequential( + operations.Linear(self.extra_in_dim, hidden_size * 4, dtype=dtype, device=device), + nn.SiLU(), + operations.Linear(hidden_size * 4, hidden_size, bias=True, dtype=dtype, device=device), + ) + + # Image embedding + num_patches = self.x_embedder.num_patches + + # HUnYuanDiT Blocks + self.blocks = nn.ModuleList([ + HunYuanDiTBlock(hidden_size=hidden_size, + c_emb_size=hidden_size, + num_heads=num_heads, + mlp_ratio=mlp_ratio, + text_states_dim=self.text_states_dim, + qk_norm=qk_norm, + norm_type=self.norm, + skip=layer > depth // 2, + attn_precision=attn_precision, + dtype=dtype, + device=device, + operations=operations, + ) + for layer in range(depth) + ]) + + self.final_layer = FinalLayer(hidden_size, hidden_size, patch_size, self.out_channels, dtype=dtype, device=device, operations=operations) + self.unpatchify_channels = self.out_channels + + + + def forward(self, + x, + t, + context,#encoder_hidden_states=None, + text_embedding_mask=None, + encoder_hidden_states_t5=None, + text_embedding_mask_t5=None, + image_meta_size=None, + style=None, + return_dict=False, + control=None, + transformer_options=None, + ): + """ + Forward pass of the encoder. 
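+
+        (Despite the upstream wording, this is the forward pass of the full
+        diffusion transformer, not just an encoder.)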
+ + Parameters + ---------- + x: torch.Tensor + (B, D, H, W) + t: torch.Tensor + (B) + encoder_hidden_states: torch.Tensor + CLIP text embedding, (B, L_clip, D) + text_embedding_mask: torch.Tensor + CLIP text embedding mask, (B, L_clip) + encoder_hidden_states_t5: torch.Tensor + T5 text embedding, (B, L_t5, D) + text_embedding_mask_t5: torch.Tensor + T5 text embedding mask, (B, L_t5) + image_meta_size: torch.Tensor + (B, 6) + style: torch.Tensor + (B) + cos_cis_img: torch.Tensor + sin_cis_img: torch.Tensor + return_dict: bool + Whether to return a dictionary. + """ + #import pdb + #pdb.set_trace() + encoder_hidden_states = context + text_states = encoder_hidden_states # 2,77,1024 + text_states_t5 = encoder_hidden_states_t5 # 2,256,2048 + text_states_mask = text_embedding_mask.bool() # 2,77 + text_states_t5_mask = text_embedding_mask_t5.bool() # 2,256 + b_t5, l_t5, c_t5 = text_states_t5.shape + text_states_t5 = self.mlp_t5(text_states_t5.view(-1, c_t5)).view(b_t5, l_t5, -1) + + padding = comfy.ops.cast_to_input(self.text_embedding_padding, text_states) + + text_states[:,-self.text_len:] = torch.where(text_states_mask[:,-self.text_len:].unsqueeze(2), text_states[:,-self.text_len:], padding[:self.text_len]) + text_states_t5[:,-self.text_len_t5:] = torch.where(text_states_t5_mask[:,-self.text_len_t5:].unsqueeze(2), text_states_t5[:,-self.text_len_t5:], padding[self.text_len:]) + + text_states = torch.cat([text_states, text_states_t5], dim=1) # 2,205,1024 + # clip_t5_mask = torch.cat([text_states_mask, text_states_t5_mask], dim=-1) + + _, _, oh, ow = x.shape + th, tw = (oh + (self.patch_size // 2)) // self.patch_size, (ow + (self.patch_size // 2)) // self.patch_size + + + # Get image RoPE embedding according to `reso`lution. + freqs_cis_img = calc_rope(x, self.patch_size, self.hidden_size // self.num_heads) #(cos_cis_img, sin_cis_img) + + # ========================= Build time and image embedding ========================= + t = self.t_embedder(t, dtype=x.dtype) + x = self.x_embedder(x) + + # ========================= Concatenate all extra vectors ========================= + # Build text tokens with pooling + extra_vec = self.pooler(encoder_hidden_states_t5) + + # Build image meta size tokens if applicable + if self.size_cond: + image_meta_size = timestep_embedding(image_meta_size.view(-1), 256).to(x.dtype) # [B * 6, 256] + image_meta_size = image_meta_size.view(-1, 6 * 256) + extra_vec = torch.cat([extra_vec, image_meta_size], dim=1) # [B, D + 6 * 256] + + # Build style tokens + if self.use_style_cond: + if style is None: + style = torch.zeros((extra_vec.shape[0],), device=x.device, dtype=torch.int) + style_embedding = self.style_embedder(style, out_dtype=x.dtype) + extra_vec = torch.cat([extra_vec, style_embedding], dim=1) + + # Concatenate all extra vectors + c = t + self.extra_embedder(extra_vec) # [B, D] + + controls = None + if control: + controls = control.get("output", None) + # ========================= Forward pass through HunYuanDiT blocks ========================= + skips = [] + for layer, block in enumerate(self.blocks): + if layer > self.depth // 2: + if controls is not None: + skip = skips.pop() + controls.pop().to(dtype=x.dtype) + else: + skip = skips.pop() + x = block(x, c, text_states, freqs_cis_img, skip) # (N, L, D) + else: + x = block(x, c, text_states, freqs_cis_img) # (N, L, D) + + if layer < (self.depth // 2 - 1): + skips.append(x) + if controls is not None and len(controls) != 0: + raise ValueError("The number of controls is not equal to the number of skip connections.") 
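+        # (Bookkeeping recap: blocks in the first half push their outputs onto
+        # `skips`; blocks past the midpoint each pop one, optionally adding the
+        # matching ControlNet residual, so `controls` must drain to empty.)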
+ + # ========================= Final layer ========================= + x = self.final_layer(x, c) # (N, L, patch_size ** 2 * out_channels) + x = self.unpatchify(x, th, tw) # (N, out_channels, H, W) + + if return_dict: + return {'x': x} + if self.learn_sigma: + return x[:,:self.out_channels // 2,:oh,:ow] + return x[:,:,:oh,:ow] + + def unpatchify(self, x, h, w): + """ + x: (N, T, patch_size**2 * C) + imgs: (N, H, W, C) + """ + c = self.unpatchify_channels + p = self.x_embedder.patch_size[0] + # h = w = int(x.shape[1] ** 0.5) + assert h * w == x.shape[1] + + x = x.reshape(shape=(x.shape[0], h, w, p, p, c)) + x = torch.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], c, h * p, w * p)) + return imgs diff --git a/src/comfyui/comfy/ldm/hydit/poolers.py b/src/comfyui/comfy/ldm/hydit/poolers.py new file mode 100644 index 0000000000000000000000000000000000000000..f5e5b406fcd4e50a2222c4719e07ea32d4a089f4 --- /dev/null +++ b/src/comfyui/comfy/ldm/hydit/poolers.py @@ -0,0 +1,37 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from comfy.ldm.modules.attention import optimized_attention +import comfy.ops + +class AttentionPool(nn.Module): + def __init__(self, spacial_dim: int, embed_dim: int, num_heads: int, output_dim: int = None, dtype=None, device=None, operations=None): + super().__init__() + self.positional_embedding = nn.Parameter(torch.empty(spacial_dim + 1, embed_dim, dtype=dtype, device=device)) + self.k_proj = operations.Linear(embed_dim, embed_dim, dtype=dtype, device=device) + self.q_proj = operations.Linear(embed_dim, embed_dim, dtype=dtype, device=device) + self.v_proj = operations.Linear(embed_dim, embed_dim, dtype=dtype, device=device) + self.c_proj = operations.Linear(embed_dim, output_dim or embed_dim, dtype=dtype, device=device) + self.num_heads = num_heads + self.embed_dim = embed_dim + + def forward(self, x): + x = x[:,:self.positional_embedding.shape[0] - 1] + x = x.permute(1, 0, 2) # NLC -> LNC + x = torch.cat([x.mean(dim=0, keepdim=True), x], dim=0) # (L+1)NC + x = x + comfy.ops.cast_to_input(self.positional_embedding[:, None, :], x) # (L+1)NC + + q = self.q_proj(x[:1]) + k = self.k_proj(x) + v = self.v_proj(x) + + batch_size = q.shape[1] + head_dim = self.embed_dim // self.num_heads + q = q.view(1, batch_size * self.num_heads, head_dim).transpose(0, 1).view(batch_size, self.num_heads, -1, head_dim) + k = k.view(k.shape[0], batch_size * self.num_heads, head_dim).transpose(0, 1).view(batch_size, self.num_heads, -1, head_dim) + v = v.view(v.shape[0], batch_size * self.num_heads, head_dim).transpose(0, 1).view(batch_size, self.num_heads, -1, head_dim) + + attn_output = optimized_attention(q, k, v, self.num_heads, skip_reshape=True).transpose(0, 1) + + attn_output = self.c_proj(attn_output) + return attn_output.squeeze(0) diff --git a/src/comfyui/comfy/ldm/hydit/posemb_layers.py b/src/comfyui/comfy/ldm/hydit/posemb_layers.py new file mode 100644 index 0000000000000000000000000000000000000000..dcb41a713cd94ea8472ff26e8865066887b1e486 --- /dev/null +++ b/src/comfyui/comfy/ldm/hydit/posemb_layers.py @@ -0,0 +1,224 @@ +import torch +import numpy as np +from typing import Union + + +def _to_tuple(x): + if isinstance(x, int): + return x, x + else: + return x + + +def get_fill_resize_and_crop(src, tgt): + th, tw = _to_tuple(tgt) + h, w = _to_tuple(src) + + tr = th / tw # base resolution + r = h / w # target resolution + + # resize + if r > tr: + resize_height = th + resize_width = int(round(th / h * w)) + else: + resize_width = tw + resize_height = 
int(round(tw / w * h)) # resize the target resolution down based on the base resolution + + crop_top = int(round((th - resize_height) / 2.0)) + crop_left = int(round((tw - resize_width) / 2.0)) + + return (crop_top, crop_left), (crop_top + resize_height, crop_left + resize_width) + + +def get_meshgrid(start, *args): + if len(args) == 0: + # start is grid_size + num = _to_tuple(start) + start = (0, 0) + stop = num + elif len(args) == 1: + # start is start, args[0] is stop, step is 1 + start = _to_tuple(start) + stop = _to_tuple(args[0]) + num = (stop[0] - start[0], stop[1] - start[1]) + elif len(args) == 2: + # start is start, args[0] is stop, args[1] is num + start = _to_tuple(start) + stop = _to_tuple(args[0]) + num = _to_tuple(args[1]) + else: + raise ValueError(f"len(args) should be 0, 1 or 2, but got {len(args)}") + + grid_h = np.linspace(start[0], stop[0], num[0], endpoint=False, dtype=np.float32) + grid_w = np.linspace(start[1], stop[1], num[1], endpoint=False, dtype=np.float32) + grid = np.meshgrid(grid_w, grid_h) # here w goes first + grid = np.stack(grid, axis=0) # [2, W, H] + return grid + +################################################################################# +# Sine/Cosine Positional Embedding Functions # +################################################################################# +# https://github.com/facebookresearch/mae/blob/main/util/pos_embed.py + +def get_2d_sincos_pos_embed(embed_dim, start, *args, cls_token=False, extra_tokens=0): + """ + grid_size: int of the grid height and width + return: + pos_embed: [grid_size*grid_size, embed_dim] or [1+grid_size*grid_size, embed_dim] (w/ or w/o cls_token) + """ + grid = get_meshgrid(start, *args) # [2, H, w] + # grid_h = np.arange(grid_size, dtype=np.float32) + # grid_w = np.arange(grid_size, dtype=np.float32) + # grid = np.meshgrid(grid_w, grid_h) # here w goes first + # grid = np.stack(grid, axis=0) # [2, W, H] + + grid = grid.reshape([2, 1, *grid.shape[1:]]) + pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid) + if cls_token and extra_tokens > 0: + pos_embed = np.concatenate([np.zeros([extra_tokens, embed_dim]), pos_embed], axis=0) + return pos_embed + + +def get_2d_sincos_pos_embed_from_grid(embed_dim, grid): + assert embed_dim % 2 == 0 + + # use half of dimensions to encode grid_h + emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[0]) # (H*W, D/2) + emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[1]) # (H*W, D/2) + + emb = np.concatenate([emb_h, emb_w], axis=1) # (H*W, D) + return emb + + +def get_1d_sincos_pos_embed_from_grid(embed_dim, pos): + """ + embed_dim: output dimension for each position + pos: a list of positions to be encoded: size (W,H) + out: (M, D) + """ + assert embed_dim % 2 == 0 + omega = np.arange(embed_dim // 2, dtype=np.float64) + omega /= embed_dim / 2. + omega = 1. 
/ 10000**omega # (D/2,) + + pos = pos.reshape(-1) # (M,) + out = np.einsum('m,d->md', pos, omega) # (M, D/2), outer product + + emb_sin = np.sin(out) # (M, D/2) + emb_cos = np.cos(out) # (M, D/2) + + emb = np.concatenate([emb_sin, emb_cos], axis=1) # (M, D) + return emb + + +################################################################################# +# Rotary Positional Embedding Functions # +################################################################################# +# https://github.com/facebookresearch/llama/blob/main/llama/model.py#L443 + +def get_2d_rotary_pos_embed(embed_dim, start, *args, use_real=True): + """ + This is a 2d version of precompute_freqs_cis, which is a RoPE for image tokens with 2d structure. + + Parameters + ---------- + embed_dim: int + embedding dimension size + start: int or tuple of int + If len(args) == 0, start is num; If len(args) == 1, start is start, args[0] is stop, step is 1; + If len(args) == 2, start is start, args[0] is stop, args[1] is num. + use_real: bool + If True, return real part and imaginary part separately. Otherwise, return complex numbers. + + Returns + ------- + pos_embed: torch.Tensor + [HW, D/2] + """ + grid = get_meshgrid(start, *args) # [2, H, w] + grid = grid.reshape([2, 1, *grid.shape[1:]]) # Returns a sampling matrix with the same resolution as the target resolution + pos_embed = get_2d_rotary_pos_embed_from_grid(embed_dim, grid, use_real=use_real) + return pos_embed + + +def get_2d_rotary_pos_embed_from_grid(embed_dim, grid, use_real=False): + assert embed_dim % 4 == 0 + + # use half of dimensions to encode grid_h + emb_h = get_1d_rotary_pos_embed(embed_dim // 2, grid[0].reshape(-1), use_real=use_real) # (H*W, D/4) + emb_w = get_1d_rotary_pos_embed(embed_dim // 2, grid[1].reshape(-1), use_real=use_real) # (H*W, D/4) + + if use_real: + cos = torch.cat([emb_h[0], emb_w[0]], dim=1) # (H*W, D/2) + sin = torch.cat([emb_h[1], emb_w[1]], dim=1) # (H*W, D/2) + return cos, sin + else: + emb = torch.cat([emb_h, emb_w], dim=1) # (H*W, D/2) + return emb + + +def get_1d_rotary_pos_embed(dim: int, pos: Union[np.ndarray, int], theta: float = 10000.0, use_real=False): + """ + Precompute the frequency tensor for complex exponentials (cis) with given dimensions. + + This function calculates a frequency tensor with complex exponentials using the given dimension 'dim' + and the end index 'end'. The 'theta' parameter scales the frequencies. + The returned tensor contains complex values in complex64 data type. + + Args: + dim (int): Dimension of the frequency tensor. + pos (np.ndarray, int): Position indices for the frequency tensor. [S] or scalar + theta (float, optional): Scaling factor for frequency computation. Defaults to 10000.0. + use_real (bool, optional): If True, return real part and imaginary part separately. + Otherwise, return complex numbers. + + Returns: + torch.Tensor: Precomputed frequency tensor with complex exponentials. 
[S, D/2] + + """ + if isinstance(pos, int): + pos = np.arange(pos) + freqs = 1.0 / (theta ** (torch.arange(0, dim, 2)[: (dim // 2)].float() / dim)) # [D/2] + t = torch.from_numpy(pos).to(freqs.device) # type: ignore # [S] + freqs = torch.outer(t, freqs).float() # type: ignore # [S, D/2] + if use_real: + freqs_cos = freqs.cos().repeat_interleave(2, dim=1) # [S, D] + freqs_sin = freqs.sin().repeat_interleave(2, dim=1) # [S, D] + return freqs_cos, freqs_sin + else: + freqs_cis = torch.polar(torch.ones_like(freqs), freqs) # complex64 # [S, D/2] + return freqs_cis + + + +def calc_sizes(rope_img, patch_size, th, tw): + if rope_img == 'extend': + # Expansion mode + sub_args = [(th, tw)] + elif rope_img.startswith('base'): + # Based on the specified dimensions, other dimensions are obtained through interpolation. + base_size = int(rope_img[4:]) // 8 // patch_size + start, stop = get_fill_resize_and_crop((th, tw), base_size) + sub_args = [start, stop, (th, tw)] + else: + raise ValueError(f"Unknown rope_img: {rope_img}") + return sub_args + + +def init_image_posemb(rope_img, + resolutions, + patch_size, + hidden_size, + num_heads, + log_fn, + rope_real=True, + ): + freqs_cis_img = {} + for reso in resolutions: + th, tw = reso.height // 8 // patch_size, reso.width // 8 // patch_size + sub_args = calc_sizes(rope_img, patch_size, th, tw) + freqs_cis_img[str(reso)] = get_2d_rotary_pos_embed(hidden_size // num_heads, *sub_args, use_real=rope_real) + log_fn(f" Using image RoPE ({rope_img}) ({'real' if rope_real else 'complex'}): {sub_args} | ({reso}) " + f"{freqs_cis_img[str(reso)][0].shape if rope_real else freqs_cis_img[str(reso)].shape}") + return freqs_cis_img diff --git a/src/comfyui/comfy/ldm/models/__pycache__/autoencoder.cpython-310.pyc b/src/comfyui/comfy/ldm/models/__pycache__/autoencoder.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0c6fd49079646c2928434df2661b2d6a49672b8a Binary files /dev/null and b/src/comfyui/comfy/ldm/models/__pycache__/autoencoder.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/models/__pycache__/autoencoder.cpython-38.pyc b/src/comfyui/comfy/ldm/models/__pycache__/autoencoder.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..94252f162d9fdde669e1fd0d9a40c0953b28dab9 Binary files /dev/null and b/src/comfyui/comfy/ldm/models/__pycache__/autoencoder.cpython-38.pyc differ diff --git a/src/comfyui/comfy/ldm/models/autoencoder.py b/src/comfyui/comfy/ldm/models/autoencoder.py new file mode 100644 index 0000000000000000000000000000000000000000..f5f4de2883078dadee058aef437901069588321b --- /dev/null +++ b/src/comfyui/comfy/ldm/models/autoencoder.py @@ -0,0 +1,226 @@ +import torch +from contextlib import contextmanager +from typing import Any, Dict, List, Optional, Tuple, Union + +from comfy.ldm.modules.distributions.distributions import DiagonalGaussianDistribution + +from comfy.ldm.util import instantiate_from_config +from comfy.ldm.modules.ema import LitEma +import comfy.ops + +class DiagonalGaussianRegularizer(torch.nn.Module): + def __init__(self, sample: bool = True): + super().__init__() + self.sample = sample + + def get_trainable_parameters(self) -> Any: + yield from () + + def forward(self, z: torch.Tensor) -> Tuple[torch.Tensor, dict]: + log = dict() + posterior = DiagonalGaussianDistribution(z) + if self.sample: + z = posterior.sample() + else: + z = posterior.mode() + kl_loss = posterior.kl() + kl_loss = torch.sum(kl_loss) / kl_loss.shape[0] + log["kl_loss"] = kl_loss + return z, log + + 
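+
+# A minimal sketch (not part of upstream) of what the regularizer above does,
+# assuming DiagonalGaussianDistribution splits z channel-wise into mean and
+# logvar halves:
+def _diagonal_regularizer_example():  # pragma: no cover
+    reg = DiagonalGaussianRegularizer(sample=True)
+    z_in = torch.randn(1, 8, 4, 4)      # 2*C channels: mean + logvar
+    z_out, log = reg(z_in)              # z_out: (1, 4, 4, 4) sampled latent
+    return z_out, log["kl_loss"]        # kl_loss: scalar KL(q || N(0, I))
+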
+class AbstractAutoencoder(torch.nn.Module): + """ + This is the base class for all autoencoders, including image autoencoders, image autoencoders with discriminators, + unCLIP models, etc. Hence, it is fairly general, and specific features + (e.g. discriminator training, encoding, decoding) must be implemented in subclasses. + """ + + def __init__( + self, + ema_decay: Union[None, float] = None, + monitor: Union[None, str] = None, + input_key: str = "jpg", + **kwargs, + ): + super().__init__() + + self.input_key = input_key + self.use_ema = ema_decay is not None + if monitor is not None: + self.monitor = monitor + + if self.use_ema: + self.model_ema = LitEma(self, decay=ema_decay) + logpy.info(f"Keeping EMAs of {len(list(self.model_ema.buffers()))}.") + + def get_input(self, batch) -> Any: + raise NotImplementedError() + + def on_train_batch_end(self, *args, **kwargs): + # for EMA computation + if self.use_ema: + self.model_ema(self) + + @contextmanager + def ema_scope(self, context=None): + if self.use_ema: + self.model_ema.store(self.parameters()) + self.model_ema.copy_to(self) + if context is not None: + logpy.info(f"{context}: Switched to EMA weights") + try: + yield None + finally: + if self.use_ema: + self.model_ema.restore(self.parameters()) + if context is not None: + logpy.info(f"{context}: Restored training weights") + + def encode(self, *args, **kwargs) -> torch.Tensor: + raise NotImplementedError("encode()-method of abstract base class called") + + def decode(self, *args, **kwargs) -> torch.Tensor: + raise NotImplementedError("decode()-method of abstract base class called") + + def instantiate_optimizer_from_config(self, params, lr, cfg): + logpy.info(f"loading >>> {cfg['target']} <<< optimizer from config") + return get_obj_from_str(cfg["target"])( + params, lr=lr, **cfg.get("params", dict()) + ) + + def configure_optimizers(self) -> Any: + raise NotImplementedError() + + +class AutoencodingEngine(AbstractAutoencoder): + """ + Base class for all image autoencoders that we train, like VQGAN or AutoencoderKL + (we also restore them explicitly as special cases for legacy reasons). + Regularizations such as KL or VQ are moved to the regularizer class. 
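+
+    Concretely, __init__ below instantiates three instantiate_from_config
+    targets (encoder_config, decoder_config, regularizer_config); encode()
+    runs encoder -> regularizer and decode() simply runs the decoder.
+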
+ """ + + def __init__( + self, + *args, + encoder_config: Dict, + decoder_config: Dict, + regularizer_config: Dict, + **kwargs, + ): + super().__init__(*args, **kwargs) + + self.encoder: torch.nn.Module = instantiate_from_config(encoder_config) + self.decoder: torch.nn.Module = instantiate_from_config(decoder_config) + self.regularization: AbstractRegularizer = instantiate_from_config( + regularizer_config + ) + + def get_last_layer(self): + return self.decoder.get_last_layer() + + def encode( + self, + x: torch.Tensor, + return_reg_log: bool = False, + unregularized: bool = False, + ) -> Union[torch.Tensor, Tuple[torch.Tensor, dict]]: + z = self.encoder(x) + if unregularized: + return z, dict() + z, reg_log = self.regularization(z) + if return_reg_log: + return z, reg_log + return z + + def decode(self, z: torch.Tensor, **kwargs) -> torch.Tensor: + x = self.decoder(z, **kwargs) + return x + + def forward( + self, x: torch.Tensor, **additional_decode_kwargs + ) -> Tuple[torch.Tensor, torch.Tensor, dict]: + z, reg_log = self.encode(x, return_reg_log=True) + dec = self.decode(z, **additional_decode_kwargs) + return z, dec, reg_log + + +class AutoencodingEngineLegacy(AutoencodingEngine): + def __init__(self, embed_dim: int, **kwargs): + self.max_batch_size = kwargs.pop("max_batch_size", None) + ddconfig = kwargs.pop("ddconfig") + super().__init__( + encoder_config={ + "target": "comfy.ldm.modules.diffusionmodules.model.Encoder", + "params": ddconfig, + }, + decoder_config={ + "target": "comfy.ldm.modules.diffusionmodules.model.Decoder", + "params": ddconfig, + }, + **kwargs, + ) + self.quant_conv = comfy.ops.disable_weight_init.Conv2d( + (1 + ddconfig["double_z"]) * ddconfig["z_channels"], + (1 + ddconfig["double_z"]) * embed_dim, + 1, + ) + self.post_quant_conv = comfy.ops.disable_weight_init.Conv2d(embed_dim, ddconfig["z_channels"], 1) + self.embed_dim = embed_dim + + def get_autoencoder_params(self) -> list: + params = super().get_autoencoder_params() + return params + + def encode( + self, x: torch.Tensor, return_reg_log: bool = False + ) -> Union[torch.Tensor, Tuple[torch.Tensor, dict]]: + if self.max_batch_size is None: + z = self.encoder(x) + z = self.quant_conv(z) + else: + N = x.shape[0] + bs = self.max_batch_size + n_batches = int(math.ceil(N / bs)) + z = list() + for i_batch in range(n_batches): + z_batch = self.encoder(x[i_batch * bs : (i_batch + 1) * bs]) + z_batch = self.quant_conv(z_batch) + z.append(z_batch) + z = torch.cat(z, 0) + + z, reg_log = self.regularization(z) + if return_reg_log: + return z, reg_log + return z + + def decode(self, z: torch.Tensor, **decoder_kwargs) -> torch.Tensor: + if self.max_batch_size is None: + dec = self.post_quant_conv(z) + dec = self.decoder(dec, **decoder_kwargs) + else: + N = z.shape[0] + bs = self.max_batch_size + n_batches = int(math.ceil(N / bs)) + dec = list() + for i_batch in range(n_batches): + dec_batch = self.post_quant_conv(z[i_batch * bs : (i_batch + 1) * bs]) + dec_batch = self.decoder(dec_batch, **decoder_kwargs) + dec.append(dec_batch) + dec = torch.cat(dec, 0) + + return dec + + +class AutoencoderKL(AutoencodingEngineLegacy): + def __init__(self, **kwargs): + if "lossconfig" in kwargs: + kwargs["loss_config"] = kwargs.pop("lossconfig") + super().__init__( + regularizer_config={ + "target": ( + "comfy.ldm.models.autoencoder.DiagonalGaussianRegularizer" + ) + }, + **kwargs, + ) diff --git a/src/comfyui/comfy/ldm/modules/__pycache__/attention.cpython-310.pyc b/src/comfyui/comfy/ldm/modules/__pycache__/attention.cpython-310.pyc 
new file mode 100644 index 0000000000000000000000000000000000000000..60d5e5cdda29aa028230f598de0acfc5aa2a2d9a Binary files /dev/null and b/src/comfyui/comfy/ldm/modules/__pycache__/attention.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/modules/__pycache__/ema.cpython-310.pyc b/src/comfyui/comfy/ldm/modules/__pycache__/ema.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e56b88ae9045888faf7037fb4e918820d29723c8 Binary files /dev/null and b/src/comfyui/comfy/ldm/modules/__pycache__/ema.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/modules/__pycache__/ema.cpython-38.pyc b/src/comfyui/comfy/ldm/modules/__pycache__/ema.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dd9e7dc54411ae9a97a463181bfa6de2bd84a0a3 Binary files /dev/null and b/src/comfyui/comfy/ldm/modules/__pycache__/ema.cpython-38.pyc differ diff --git a/src/comfyui/comfy/ldm/modules/__pycache__/sub_quadratic_attention.cpython-310.pyc b/src/comfyui/comfy/ldm/modules/__pycache__/sub_quadratic_attention.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ad851e37c365e60e1c8f26f36726c0b1f6fed8d6 Binary files /dev/null and b/src/comfyui/comfy/ldm/modules/__pycache__/sub_quadratic_attention.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/modules/attention.py b/src/comfyui/comfy/ldm/modules/attention.py new file mode 100644 index 0000000000000000000000000000000000000000..85ea406e0938b90d4cebdb244a53dde80d3158a3 --- /dev/null +++ b/src/comfyui/comfy/ldm/modules/attention.py @@ -0,0 +1,865 @@ +import math +import torch +import torch.nn.functional as F +from torch import nn, einsum +from einops import rearrange, repeat +from typing import Optional +import logging + +from .diffusionmodules.util import AlphaBlender, timestep_embedding +from .sub_quadratic_attention import efficient_dot_product_attention + +from comfy import model_management + +if model_management.xformers_enabled(): + import xformers + import xformers.ops + +from comfy.cli_args import args +import comfy.ops +ops = comfy.ops.disable_weight_init + +FORCE_UPCAST_ATTENTION_DTYPE = model_management.force_upcast_attention_dtype() + +def get_attn_precision(attn_precision): + if args.dont_upcast_attention: + return None + if FORCE_UPCAST_ATTENTION_DTYPE is not None: + return FORCE_UPCAST_ATTENTION_DTYPE + return attn_precision + +def exists(val): + return val is not None + + +def uniq(arr): + return{el: True for el in arr}.keys() + + +def default(val, d): + if exists(val): + return val + return d + + +def max_neg_value(t): + return -torch.finfo(t.dtype).max + + +def init_(tensor): + dim = tensor.shape[-1] + std = 1 / math.sqrt(dim) + tensor.uniform_(-std, std) + return tensor + + +# feedforward +class GEGLU(nn.Module): + def __init__(self, dim_in, dim_out, dtype=None, device=None, operations=ops): + super().__init__() + self.proj = operations.Linear(dim_in, dim_out * 2, dtype=dtype, device=device) + + def forward(self, x): + x, gate = self.proj(x).chunk(2, dim=-1) + return x * F.gelu(gate) + + +class FeedForward(nn.Module): + def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0., dtype=None, device=None, operations=ops): + super().__init__() + inner_dim = int(dim * mult) + dim_out = default(dim_out, dim) + project_in = nn.Sequential( + operations.Linear(dim, inner_dim, dtype=dtype, device=device), + nn.GELU() + ) if not glu else GEGLU(dim, inner_dim, dtype=dtype, device=device, operations=operations) + + self.net = nn.Sequential( + 
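+            # project_in above is Linear+GELU, or GEGLU when glu=True (GEGLU doubles
+            # the projection width and gates one half with the GELU of the other);
+            # the Sequential is projection -> dropout -> output Linear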
project_in, + nn.Dropout(dropout), + operations.Linear(inner_dim, dim_out, dtype=dtype, device=device) + ) + + def forward(self, x): + return self.net(x) + +def Normalize(in_channels, dtype=None, device=None): + return torch.nn.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True, dtype=dtype, device=device) + +def attention_basic(q, k, v, heads, mask=None, attn_precision=None, skip_reshape=False): + attn_precision = get_attn_precision(attn_precision) + + if skip_reshape: + b, _, _, dim_head = q.shape + else: + b, _, dim_head = q.shape + dim_head //= heads + + scale = dim_head ** -0.5 + + h = heads + if skip_reshape: + q, k, v = map( + lambda t: t.reshape(b * heads, -1, dim_head), + (q, k, v), + ) + else: + q, k, v = map( + lambda t: t.unsqueeze(3) + .reshape(b, -1, heads, dim_head) + .permute(0, 2, 1, 3) + .reshape(b * heads, -1, dim_head) + .contiguous(), + (q, k, v), + ) + + # force cast to fp32 to avoid overflowing + if attn_precision == torch.float32: + sim = einsum('b i d, b j d -> b i j', q.float(), k.float()) * scale + else: + sim = einsum('b i d, b j d -> b i j', q, k) * scale + + del q, k + + if exists(mask): + if mask.dtype == torch.bool: + mask = rearrange(mask, 'b ... -> b (...)') #TODO: check if this bool part matches pytorch attention + max_neg_value = -torch.finfo(sim.dtype).max + mask = repeat(mask, 'b j -> (b h) () j', h=h) + sim.masked_fill_(~mask, max_neg_value) + else: + if len(mask.shape) == 2: + bs = 1 + else: + bs = mask.shape[0] + mask = mask.reshape(bs, -1, mask.shape[-2], mask.shape[-1]).expand(b, heads, -1, -1).reshape(-1, mask.shape[-2], mask.shape[-1]) + sim.add_(mask) + + # attention, what we cannot get enough of + sim = sim.softmax(dim=-1) + + out = einsum('b i j, b j d -> b i d', sim.to(v.dtype), v) + out = ( + out.unsqueeze(0) + .reshape(b, heads, -1, dim_head) + .permute(0, 2, 1, 3) + .reshape(b, -1, heads * dim_head) + ) + return out + + +def attention_sub_quad(query, key, value, heads, mask=None, attn_precision=None, skip_reshape=False): + attn_precision = get_attn_precision(attn_precision) + + if skip_reshape: + b, _, _, dim_head = query.shape + else: + b, _, dim_head = query.shape + dim_head //= heads + + scale = dim_head ** -0.5 + + if skip_reshape: + query = query.reshape(b * heads, -1, dim_head) + value = value.reshape(b * heads, -1, dim_head) + key = key.reshape(b * heads, -1, dim_head).movedim(1, 2) + else: + query = query.unsqueeze(3).reshape(b, -1, heads, dim_head).permute(0, 2, 1, 3).reshape(b * heads, -1, dim_head) + value = value.unsqueeze(3).reshape(b, -1, heads, dim_head).permute(0, 2, 1, 3).reshape(b * heads, -1, dim_head) + key = key.unsqueeze(3).reshape(b, -1, heads, dim_head).permute(0, 2, 3, 1).reshape(b * heads, dim_head, -1) + + + dtype = query.dtype + upcast_attention = attn_precision == torch.float32 and query.dtype != torch.float32 + if upcast_attention: + bytes_per_token = torch.finfo(torch.float32).bits//8 + else: + bytes_per_token = torch.finfo(query.dtype).bits//8 + batch_x_heads, q_tokens, _ = query.shape + _, _, k_tokens = key.shape + qk_matmul_size_bytes = batch_x_heads * bytes_per_token * q_tokens * k_tokens + + mem_free_total, mem_free_torch = model_management.get_free_memory(query.device, True) + + kv_chunk_size_min = None + kv_chunk_size = None + query_chunk_size = None + + for x in [4096, 2048, 1024, 512, 256]: + count = mem_free_total / (batch_x_heads * bytes_per_token * x * 4.0) + if count >= k_tokens: + kv_chunk_size = k_tokens + query_chunk_size = x + break + + if query_chunk_size is None: + 
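+            # even the smallest probed query chunk (256) could not fit the whole key
+            # length in free memory, so fall back to a fixed 512-token query chunk and
+            # leave kv_chunk_size=None for the sub-quadratic kernel to choose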
query_chunk_size = 512 + + if mask is not None: + if len(mask.shape) == 2: + bs = 1 + else: + bs = mask.shape[0] + mask = mask.reshape(bs, -1, mask.shape[-2], mask.shape[-1]).expand(b, heads, -1, -1).reshape(-1, mask.shape[-2], mask.shape[-1]) + + hidden_states = efficient_dot_product_attention( + query, + key, + value, + query_chunk_size=query_chunk_size, + kv_chunk_size=kv_chunk_size, + kv_chunk_size_min=kv_chunk_size_min, + use_checkpoint=False, + upcast_attention=upcast_attention, + mask=mask, + ) + + hidden_states = hidden_states.to(dtype) + + hidden_states = hidden_states.unflatten(0, (-1, heads)).transpose(1,2).flatten(start_dim=2) + return hidden_states + +def attention_split(q, k, v, heads, mask=None, attn_precision=None, skip_reshape=False): + attn_precision = get_attn_precision(attn_precision) + + if skip_reshape: + b, _, _, dim_head = q.shape + else: + b, _, dim_head = q.shape + dim_head //= heads + + scale = dim_head ** -0.5 + + h = heads + if skip_reshape: + q, k, v = map( + lambda t: t.reshape(b * heads, -1, dim_head), + (q, k, v), + ) + else: + q, k, v = map( + lambda t: t.unsqueeze(3) + .reshape(b, -1, heads, dim_head) + .permute(0, 2, 1, 3) + .reshape(b * heads, -1, dim_head) + .contiguous(), + (q, k, v), + ) + + r1 = torch.zeros(q.shape[0], q.shape[1], v.shape[2], device=q.device, dtype=q.dtype) + + mem_free_total = model_management.get_free_memory(q.device) + + if attn_precision == torch.float32: + element_size = 4 + upcast = True + else: + element_size = q.element_size() + upcast = False + + gb = 1024 ** 3 + tensor_size = q.shape[0] * q.shape[1] * k.shape[1] * element_size + modifier = 3 + mem_required = tensor_size * modifier + steps = 1 + + + if mem_required > mem_free_total: + steps = 2**(math.ceil(math.log(mem_required / mem_free_total, 2))) + # print(f"Expected tensor size:{tensor_size/gb:0.1f}GB, cuda free:{mem_free_cuda/gb:0.1f}GB " + # f"torch free:{mem_free_torch/gb:0.1f} total:{mem_free_total/gb:0.1f} steps:{steps}") + + if steps > 64: + max_res = math.floor(math.sqrt(math.sqrt(mem_free_total / 2.5)) / 8) * 64 + raise RuntimeError(f'Not enough memory, use lower resolution (max approx. {max_res}x{max_res}). 
' + f'Need: {mem_required/64/gb:0.1f}GB free, Have:{mem_free_total/gb:0.1f}GB free') + + if mask is not None: + if len(mask.shape) == 2: + bs = 1 + else: + bs = mask.shape[0] + mask = mask.reshape(bs, -1, mask.shape[-2], mask.shape[-1]).expand(b, heads, -1, -1).reshape(-1, mask.shape[-2], mask.shape[-1]) + + # print("steps", steps, mem_required, mem_free_total, modifier, q.element_size(), tensor_size) + first_op_done = False + cleared_cache = False + while True: + try: + slice_size = q.shape[1] // steps if (q.shape[1] % steps) == 0 else q.shape[1] + for i in range(0, q.shape[1], slice_size): + end = i + slice_size + if upcast: + with torch.autocast(enabled=False, device_type = 'cuda'): + s1 = einsum('b i d, b j d -> b i j', q[:, i:end].float(), k.float()) * scale + else: + s1 = einsum('b i d, b j d -> b i j', q[:, i:end], k) * scale + + if mask is not None: + if len(mask.shape) == 2: + s1 += mask[i:end] + else: + s1 += mask[:, i:end] + + s2 = s1.softmax(dim=-1).to(v.dtype) + del s1 + first_op_done = True + + r1[:, i:end] = einsum('b i j, b j d -> b i d', s2, v) + del s2 + break + except model_management.OOM_EXCEPTION as e: + if first_op_done == False: + model_management.soft_empty_cache(True) + if cleared_cache == False: + cleared_cache = True + logging.warning("out of memory error, emptying cache and trying again") + continue + steps *= 2 + if steps > 64: + raise e + logging.warning("out of memory error, increasing steps and trying again {}".format(steps)) + else: + raise e + + del q, k, v + + r1 = ( + r1.unsqueeze(0) + .reshape(b, heads, -1, dim_head) + .permute(0, 2, 1, 3) + .reshape(b, -1, heads * dim_head) + ) + return r1 + +BROKEN_XFORMERS = False +try: + x_vers = xformers.__version__ + # XFormers bug confirmed on all versions from 0.0.21 to 0.0.26 (q with bs bigger than 65535 gives CUDA error) + BROKEN_XFORMERS = x_vers.startswith("0.0.2") and not x_vers.startswith("0.0.20") +except: + pass + +def attention_xformers(q, k, v, heads, mask=None, attn_precision=None, skip_reshape=False): + if skip_reshape: + b, _, _, dim_head = q.shape + else: + b, _, dim_head = q.shape + dim_head //= heads + + disabled_xformers = False + + if BROKEN_XFORMERS: + if b * heads > 65535: + disabled_xformers = True + + if not disabled_xformers: + if torch.jit.is_tracing() or torch.jit.is_scripting(): + disabled_xformers = True + + if disabled_xformers: + return attention_pytorch(q, k, v, heads, mask, skip_reshape=skip_reshape) + + if skip_reshape: + q, k, v = map( + lambda t: t.reshape(b * heads, -1, dim_head), + (q, k, v), + ) + else: + q, k, v = map( + lambda t: t.reshape(b, -1, heads, dim_head), + (q, k, v), + ) + + if mask is not None: + pad = 8 - q.shape[1] % 8 + mask_out = torch.empty([q.shape[0], q.shape[1], q.shape[1] + pad], dtype=q.dtype, device=q.device) + mask_out[:, :, :mask.shape[-1]] = mask + mask = mask_out[:, :, :mask.shape[-1]] + + out = xformers.ops.memory_efficient_attention(q, k, v, attn_bias=mask) + + if skip_reshape: + out = ( + out.unsqueeze(0) + .reshape(b, heads, -1, dim_head) + .permute(0, 2, 1, 3) + .reshape(b, -1, heads * dim_head) + ) + else: + out = ( + out.reshape(b, -1, heads * dim_head) + ) + + return out + +def attention_pytorch(q, k, v, heads, mask=None, attn_precision=None, skip_reshape=False): + if skip_reshape: + b, _, _, dim_head = q.shape + else: + b, _, dim_head = q.shape + dim_head //= heads + q, k, v = map( + lambda t: t.view(b, -1, heads, dim_head).transpose(1, 2), + (q, k, v), + ) + + out = torch.nn.functional.scaled_dot_product_attention(q, k, v, attn_mask=mask, 
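+        # note: attn_precision is accepted but unused in this path; torch's SDPA
+        # dispatches to its own flash / memory-efficient / math backends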
dropout_p=0.0, is_causal=False) + out = ( + out.transpose(1, 2).reshape(b, -1, heads * dim_head) + ) + return out + + +optimized_attention = attention_basic + +if model_management.xformers_enabled(): + logging.info("Using xformers cross attention") + optimized_attention = attention_xformers +elif model_management.pytorch_attention_enabled(): + logging.info("Using pytorch cross attention") + optimized_attention = attention_pytorch +else: + if args.use_split_cross_attention: + logging.info("Using split optimization for cross attention") + optimized_attention = attention_split + else: + logging.info("Using sub quadratic optimization for cross attention, if you have memory or speed issues try using: --use-split-cross-attention") + optimized_attention = attention_sub_quad + +optimized_attention_masked = optimized_attention + +def optimized_attention_for_device(device, mask=False, small_input=False): + if small_input: + if model_management.pytorch_attention_enabled(): + return attention_pytorch #TODO: need to confirm but this is probably slightly faster for small inputs in all cases + else: + return attention_basic + + if device == torch.device("cpu"): + return attention_sub_quad + + if mask: + return optimized_attention_masked + + return optimized_attention + + +class CrossAttention(nn.Module): + def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0., attn_precision=None, dtype=None, device=None, operations=ops): + super().__init__() + inner_dim = dim_head * heads + context_dim = default(context_dim, query_dim) + self.attn_precision = attn_precision + + self.heads = heads + self.dim_head = dim_head + + self.to_q = operations.Linear(query_dim, inner_dim, bias=False, dtype=dtype, device=device) + self.to_k = operations.Linear(context_dim, inner_dim, bias=False, dtype=dtype, device=device) + self.to_v = operations.Linear(context_dim, inner_dim, bias=False, dtype=dtype, device=device) + + self.to_out = nn.Sequential(operations.Linear(inner_dim, query_dim, dtype=dtype, device=device), nn.Dropout(dropout)) + + def forward(self, x, context=None, value=None, mask=None): + q = self.to_q(x) + context = default(context, x) + k = self.to_k(context) + if value is not None: + v = self.to_v(value) + del value + else: + v = self.to_v(context) + + if mask is None: + out = optimized_attention(q, k, v, self.heads, attn_precision=self.attn_precision) + else: + out = optimized_attention_masked(q, k, v, self.heads, mask, attn_precision=self.attn_precision) + return self.to_out(out) + + +class BasicTransformerBlock(nn.Module): + def __init__(self, dim, n_heads, d_head, dropout=0., context_dim=None, gated_ff=True, checkpoint=True, ff_in=False, inner_dim=None, + disable_self_attn=False, disable_temporal_crossattention=False, switch_temporal_ca_to_sa=False, attn_precision=None, dtype=None, device=None, operations=ops): + super().__init__() + + self.ff_in = ff_in or inner_dim is not None + if inner_dim is None: + inner_dim = dim + + self.is_res = inner_dim == dim + self.attn_precision = attn_precision + + if self.ff_in: + self.norm_in = operations.LayerNorm(dim, dtype=dtype, device=device) + self.ff_in = FeedForward(dim, dim_out=inner_dim, dropout=dropout, glu=gated_ff, dtype=dtype, device=device, operations=operations) + + self.disable_self_attn = disable_self_attn + self.attn1 = CrossAttention(query_dim=inner_dim, heads=n_heads, dim_head=d_head, dropout=dropout, + context_dim=context_dim if self.disable_self_attn else None, attn_precision=self.attn_precision, dtype=dtype, device=device, 
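+                                    # attn1 is plain self-attention unless
+                                    # disable_self_attn routes the conditioning
+                                    # context into it instead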
operations=operations) # is a self-attention if not self.disable_self_attn + self.ff = FeedForward(inner_dim, dim_out=dim, dropout=dropout, glu=gated_ff, dtype=dtype, device=device, operations=operations) + + if disable_temporal_crossattention: + if switch_temporal_ca_to_sa: + raise ValueError + else: + self.attn2 = None + else: + context_dim_attn2 = None + if not switch_temporal_ca_to_sa: + context_dim_attn2 = context_dim + + self.attn2 = CrossAttention(query_dim=inner_dim, context_dim=context_dim_attn2, + heads=n_heads, dim_head=d_head, dropout=dropout, attn_precision=self.attn_precision, dtype=dtype, device=device, operations=operations) # is self-attn if context is none + self.norm2 = operations.LayerNorm(inner_dim, dtype=dtype, device=device) + + self.norm1 = operations.LayerNorm(inner_dim, dtype=dtype, device=device) + self.norm3 = operations.LayerNorm(inner_dim, dtype=dtype, device=device) + self.n_heads = n_heads + self.d_head = d_head + self.switch_temporal_ca_to_sa = switch_temporal_ca_to_sa + + def forward(self, x, context=None, transformer_options={}): + extra_options = {} + block = transformer_options.get("block", None) + block_index = transformer_options.get("block_index", 0) + transformer_patches = {} + transformer_patches_replace = {} + + for k in transformer_options: + if k == "patches": + transformer_patches = transformer_options[k] + elif k == "patches_replace": + transformer_patches_replace = transformer_options[k] + else: + extra_options[k] = transformer_options[k] + + extra_options["n_heads"] = self.n_heads + extra_options["dim_head"] = self.d_head + extra_options["attn_precision"] = self.attn_precision + + if self.ff_in: + x_skip = x + x = self.ff_in(self.norm_in(x)) + if self.is_res: + x += x_skip + + n = self.norm1(x) + if self.disable_self_attn: + context_attn1 = context + else: + context_attn1 = None + value_attn1 = None + + if "attn1_patch" in transformer_patches: + patch = transformer_patches["attn1_patch"] + if context_attn1 is None: + context_attn1 = n + value_attn1 = context_attn1 + for p in patch: + n, context_attn1, value_attn1 = p(n, context_attn1, value_attn1, extra_options) + + if block is not None: + transformer_block = (block[0], block[1], block_index) + else: + transformer_block = None + attn1_replace_patch = transformer_patches_replace.get("attn1", {}) + block_attn1 = transformer_block + if block_attn1 not in attn1_replace_patch: + block_attn1 = block + + if block_attn1 in attn1_replace_patch: + if context_attn1 is None: + context_attn1 = n + value_attn1 = n + n = self.attn1.to_q(n) + context_attn1 = self.attn1.to_k(context_attn1) + value_attn1 = self.attn1.to_v(value_attn1) + n = attn1_replace_patch[block_attn1](n, context_attn1, value_attn1, extra_options) + n = self.attn1.to_out(n) + else: + n = self.attn1(n, context=context_attn1, value=value_attn1) + + if "attn1_output_patch" in transformer_patches: + patch = transformer_patches["attn1_output_patch"] + for p in patch: + n = p(n, extra_options) + + x += n + if "middle_patch" in transformer_patches: + patch = transformer_patches["middle_patch"] + for p in patch: + x = p(x, extra_options) + + if self.attn2 is not None: + n = self.norm2(x) + if self.switch_temporal_ca_to_sa: + context_attn2 = n + else: + context_attn2 = context + value_attn2 = None + if "attn2_patch" in transformer_patches: + patch = transformer_patches["attn2_patch"] + value_attn2 = context_attn2 + for p in patch: + n, context_attn2, value_attn2 = p(n, context_attn2, value_attn2, extra_options) + + attn2_replace_patch = 
transformer_patches_replace.get("attn2", {}) + block_attn2 = transformer_block + if block_attn2 not in attn2_replace_patch: + block_attn2 = block + + if block_attn2 in attn2_replace_patch: + if value_attn2 is None: + value_attn2 = context_attn2 + n = self.attn2.to_q(n) + context_attn2 = self.attn2.to_k(context_attn2) + value_attn2 = self.attn2.to_v(value_attn2) + n = attn2_replace_patch[block_attn2](n, context_attn2, value_attn2, extra_options) + n = self.attn2.to_out(n) + else: + n = self.attn2(n, context=context_attn2, value=value_attn2) + + if "attn2_output_patch" in transformer_patches: + patch = transformer_patches["attn2_output_patch"] + for p in patch: + n = p(n, extra_options) + + x += n + if self.is_res: + x_skip = x + x = self.ff(self.norm3(x)) + if self.is_res: + x += x_skip + + return x + + +class SpatialTransformer(nn.Module): + """ + Transformer block for image-like data. + First, project the input (aka embedding) + and reshape to b, t, d. + Then apply standard transformer action. + Finally, reshape to image + NEW: use_linear for more efficiency instead of the 1x1 convs + """ + def __init__(self, in_channels, n_heads, d_head, + depth=1, dropout=0., context_dim=None, + disable_self_attn=False, use_linear=False, + use_checkpoint=True, attn_precision=None, dtype=None, device=None, operations=ops): + super().__init__() + if exists(context_dim) and not isinstance(context_dim, list): + context_dim = [context_dim] * depth + self.in_channels = in_channels + inner_dim = n_heads * d_head + self.norm = operations.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True, dtype=dtype, device=device) + if not use_linear: + self.proj_in = operations.Conv2d(in_channels, + inner_dim, + kernel_size=1, + stride=1, + padding=0, dtype=dtype, device=device) + else: + self.proj_in = operations.Linear(in_channels, inner_dim, dtype=dtype, device=device) + + self.transformer_blocks = nn.ModuleList( + [BasicTransformerBlock(inner_dim, n_heads, d_head, dropout=dropout, context_dim=context_dim[d], + disable_self_attn=disable_self_attn, checkpoint=use_checkpoint, attn_precision=attn_precision, dtype=dtype, device=device, operations=operations) + for d in range(depth)] + ) + if not use_linear: + self.proj_out = operations.Conv2d(inner_dim,in_channels, + kernel_size=1, + stride=1, + padding=0, dtype=dtype, device=device) + else: + self.proj_out = operations.Linear(in_channels, inner_dim, dtype=dtype, device=device) + self.use_linear = use_linear + + def forward(self, x, context=None, transformer_options={}): + # note: if no context is given, cross-attention defaults to self-attention + if not isinstance(context, list): + context = [context] * len(self.transformer_blocks) + b, c, h, w = x.shape + x_in = x + x = self.norm(x) + if not self.use_linear: + x = self.proj_in(x) + x = x.movedim(1, 3).flatten(1, 2).contiguous() + if self.use_linear: + x = self.proj_in(x) + for i, block in enumerate(self.transformer_blocks): + transformer_options["block_index"] = i + x = block(x, context=context[i], transformer_options=transformer_options) + if self.use_linear: + x = self.proj_out(x) + x = x.reshape(x.shape[0], h, w, x.shape[-1]).movedim(3, 1).contiguous() + if not self.use_linear: + x = self.proj_out(x) + return x + x_in + + +class SpatialVideoTransformer(SpatialTransformer): + def __init__( + self, + in_channels, + n_heads, + d_head, + depth=1, + dropout=0.0, + use_linear=False, + context_dim=None, + use_spatial_context=False, + timesteps=None, + merge_strategy: str = "fixed", + merge_factor: float = 
0.5, + time_context_dim=None, + ff_in=False, + checkpoint=False, + time_depth=1, + disable_self_attn=False, + disable_temporal_crossattention=False, + max_time_embed_period: int = 10000, + attn_precision=None, + dtype=None, device=None, operations=ops + ): + super().__init__( + in_channels, + n_heads, + d_head, + depth=depth, + dropout=dropout, + use_checkpoint=checkpoint, + context_dim=context_dim, + use_linear=use_linear, + disable_self_attn=disable_self_attn, + attn_precision=attn_precision, + dtype=dtype, device=device, operations=operations + ) + self.time_depth = time_depth + self.depth = depth + self.max_time_embed_period = max_time_embed_period + + time_mix_d_head = d_head + n_time_mix_heads = n_heads + + time_mix_inner_dim = int(time_mix_d_head * n_time_mix_heads) + + inner_dim = n_heads * d_head + if use_spatial_context: + time_context_dim = context_dim + + self.time_stack = nn.ModuleList( + [ + BasicTransformerBlock( + inner_dim, + n_time_mix_heads, + time_mix_d_head, + dropout=dropout, + context_dim=time_context_dim, + # timesteps=timesteps, + checkpoint=checkpoint, + ff_in=ff_in, + inner_dim=time_mix_inner_dim, + disable_self_attn=disable_self_attn, + disable_temporal_crossattention=disable_temporal_crossattention, + attn_precision=attn_precision, + dtype=dtype, device=device, operations=operations + ) + for _ in range(self.depth) + ] + ) + + assert len(self.time_stack) == len(self.transformer_blocks) + + self.use_spatial_context = use_spatial_context + self.in_channels = in_channels + + time_embed_dim = self.in_channels * 4 + self.time_pos_embed = nn.Sequential( + operations.Linear(self.in_channels, time_embed_dim, dtype=dtype, device=device), + nn.SiLU(), + operations.Linear(time_embed_dim, self.in_channels, dtype=dtype, device=device), + ) + + self.time_mixer = AlphaBlender( + alpha=merge_factor, merge_strategy=merge_strategy + ) + + def forward( + self, + x: torch.Tensor, + context: Optional[torch.Tensor] = None, + time_context: Optional[torch.Tensor] = None, + timesteps: Optional[int] = None, + image_only_indicator: Optional[torch.Tensor] = None, + transformer_options={} + ) -> torch.Tensor: + _, _, h, w = x.shape + x_in = x + spatial_context = None + if exists(context): + spatial_context = context + + if self.use_spatial_context: + assert ( + context.ndim == 3 + ), f"n dims of spatial context should be 3 but are {context.ndim}" + + if time_context is None: + time_context = context + time_context_first_timestep = time_context[::timesteps] + time_context = repeat( + time_context_first_timestep, "b ... -> (b n) ...", n=h * w + ) + elif time_context is not None and not self.use_spatial_context: + time_context = repeat(time_context, "b ... 
-> (b n) ...", n=h * w) + if time_context.ndim == 2: + time_context = rearrange(time_context, "b c -> b 1 c") + + x = self.norm(x) + if not self.use_linear: + x = self.proj_in(x) + x = rearrange(x, "b c h w -> b (h w) c") + if self.use_linear: + x = self.proj_in(x) + + num_frames = torch.arange(timesteps, device=x.device) + num_frames = repeat(num_frames, "t -> b t", b=x.shape[0] // timesteps) + num_frames = rearrange(num_frames, "b t -> (b t)") + t_emb = timestep_embedding(num_frames, self.in_channels, repeat_only=False, max_period=self.max_time_embed_period).to(x.dtype) + emb = self.time_pos_embed(t_emb) + emb = emb[:, None, :] + + for it_, (block, mix_block) in enumerate( + zip(self.transformer_blocks, self.time_stack) + ): + transformer_options["block_index"] = it_ + x = block( + x, + context=spatial_context, + transformer_options=transformer_options, + ) + + x_mix = x + x_mix = x_mix + emb + + B, S, C = x_mix.shape + x_mix = rearrange(x_mix, "(b t) s c -> (b s) t c", t=timesteps) + x_mix = mix_block(x_mix, context=time_context) #TODO: transformer_options + x_mix = rearrange( + x_mix, "(b s) t c -> (b t) s c", s=S, b=B // timesteps, c=C, t=timesteps + ) + + x = self.time_mixer(x_spatial=x, x_temporal=x_mix, image_only_indicator=image_only_indicator) + + if self.use_linear: + x = self.proj_out(x) + x = rearrange(x, "b (h w) c -> b c h w", h=h, w=w) + if not self.use_linear: + x = self.proj_out(x) + out = x + x_in + return out + + diff --git a/src/comfyui/comfy/ldm/modules/diffusionmodules/__init__.py b/src/comfyui/comfy/ldm/modules/diffusionmodules/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/comfyui/comfy/ldm/modules/diffusionmodules/__pycache__/__init__.cpython-310.pyc b/src/comfyui/comfy/ldm/modules/diffusionmodules/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..091f077554bf85629bce3cd748584c22c454c573 Binary files /dev/null and b/src/comfyui/comfy/ldm/modules/diffusionmodules/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/modules/diffusionmodules/__pycache__/mmdit.cpython-310.pyc b/src/comfyui/comfy/ldm/modules/diffusionmodules/__pycache__/mmdit.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..140bfc2cd72eca6c6b31c0b58718147d5b5c1e11 Binary files /dev/null and b/src/comfyui/comfy/ldm/modules/diffusionmodules/__pycache__/mmdit.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/modules/diffusionmodules/__pycache__/model.cpython-310.pyc b/src/comfyui/comfy/ldm/modules/diffusionmodules/__pycache__/model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f1b3c2b60cfa3e590ce0ecf51b19620caf916860 Binary files /dev/null and b/src/comfyui/comfy/ldm/modules/diffusionmodules/__pycache__/model.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/modules/diffusionmodules/__pycache__/openaimodel.cpython-310.pyc b/src/comfyui/comfy/ldm/modules/diffusionmodules/__pycache__/openaimodel.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d46094658332a835113038396188b39f7fa4e602 Binary files /dev/null and b/src/comfyui/comfy/ldm/modules/diffusionmodules/__pycache__/openaimodel.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/modules/diffusionmodules/__pycache__/upscaling.cpython-310.pyc b/src/comfyui/comfy/ldm/modules/diffusionmodules/__pycache__/upscaling.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..a947710ddeab9c9371a3d3065a9a10675ecce972 Binary files /dev/null and b/src/comfyui/comfy/ldm/modules/diffusionmodules/__pycache__/upscaling.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/modules/diffusionmodules/__pycache__/util.cpython-310.pyc b/src/comfyui/comfy/ldm/modules/diffusionmodules/__pycache__/util.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5f230bc8d4e776b71a001a1c2d17c3f0bf469fa5 Binary files /dev/null and b/src/comfyui/comfy/ldm/modules/diffusionmodules/__pycache__/util.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/modules/diffusionmodules/mmdit.py b/src/comfyui/comfy/ldm/modules/diffusionmodules/mmdit.py new file mode 100644 index 0000000000000000000000000000000000000000..6f8f506ce02d40cb3372eca166148cc9e9746294 --- /dev/null +++ b/src/comfyui/comfy/ldm/modules/diffusionmodules/mmdit.py @@ -0,0 +1,1042 @@ +import logging +import math +from typing import Dict, Optional, List + +import numpy as np +import torch +import torch.nn as nn +from ..attention import optimized_attention +from einops import rearrange, repeat +from .util import timestep_embedding +import comfy.ops +import comfy.ldm.common_dit + +def default(x, y): + if x is not None: + return x + return y + +class Mlp(nn.Module): + """ MLP as used in Vision Transformer, MLP-Mixer and related networks + """ + def __init__( + self, + in_features, + hidden_features=None, + out_features=None, + act_layer=nn.GELU, + norm_layer=None, + bias=True, + drop=0., + use_conv=False, + dtype=None, + device=None, + operations=None, + ): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + drop_probs = drop + linear_layer = partial(operations.Conv2d, kernel_size=1) if use_conv else operations.Linear + + self.fc1 = linear_layer(in_features, hidden_features, bias=bias, dtype=dtype, device=device) + self.act = act_layer() + self.drop1 = nn.Dropout(drop_probs) + self.norm = norm_layer(hidden_features) if norm_layer is not None else nn.Identity() + self.fc2 = linear_layer(hidden_features, out_features, bias=bias, dtype=dtype, device=device) + self.drop2 = nn.Dropout(drop_probs) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop1(x) + x = self.norm(x) + x = self.fc2(x) + x = self.drop2(x) + return x + +class PatchEmbed(nn.Module): + """ 2D Image to Patch Embedding + """ + dynamic_img_pad: torch.jit.Final[bool] + + def __init__( + self, + img_size: Optional[int] = 224, + patch_size: int = 16, + in_chans: int = 3, + embed_dim: int = 768, + norm_layer = None, + flatten: bool = True, + bias: bool = True, + strict_img_size: bool = True, + dynamic_img_pad: bool = True, + padding_mode='circular', + dtype=None, + device=None, + operations=None, + ): + super().__init__() + self.patch_size = (patch_size, patch_size) + self.padding_mode = padding_mode + if img_size is not None: + self.img_size = (img_size, img_size) + self.grid_size = tuple([s // p for s, p in zip(self.img_size, self.patch_size)]) + self.num_patches = self.grid_size[0] * self.grid_size[1] + else: + self.img_size = None + self.grid_size = None + self.num_patches = None + + # flatten spatial dim and transpose to channels last, kept for bwd compat + self.flatten = flatten + self.strict_img_size = strict_img_size + self.dynamic_img_pad = dynamic_img_pad + + self.proj = operations.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size, bias=bias, dtype=dtype, device=device) + 
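+        # proj patchifies with a stride=patch_size conv: [B, C, H, W] ->
+        # [B, embed_dim, H/p, W/p], flattened to [B, L, embed_dim] in forward().
+        # editor note: Mlp above calls functools.partial, which this hunk never
+        # imports; `from functools import partial` is needed at module top.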
self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity() + + def forward(self, x): + # B, C, H, W = x.shape + # if self.img_size is not None: + # if self.strict_img_size: + # _assert(H == self.img_size[0], f"Input height ({H}) doesn't match model ({self.img_size[0]}).") + # _assert(W == self.img_size[1], f"Input width ({W}) doesn't match model ({self.img_size[1]}).") + # elif not self.dynamic_img_pad: + # _assert( + # H % self.patch_size[0] == 0, + # f"Input height ({H}) should be divisible by patch size ({self.patch_size[0]})." + # ) + # _assert( + # W % self.patch_size[1] == 0, + # f"Input width ({W}) should be divisible by patch size ({self.patch_size[1]})." + # ) + if self.dynamic_img_pad: + x = comfy.ldm.common_dit.pad_to_patch_size(x, self.patch_size, padding_mode=self.padding_mode) + x = self.proj(x) + if self.flatten: + x = x.flatten(2).transpose(1, 2) # NCHW -> NLC + x = self.norm(x) + return x + +def modulate(x, shift, scale): + if shift is None: + shift = torch.zeros_like(scale) + return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1) + + +################################################################################# +# Sine/Cosine Positional Embedding Functions # +################################################################################# + + +def get_2d_sincos_pos_embed( + embed_dim, + grid_size, + cls_token=False, + extra_tokens=0, + scaling_factor=None, + offset=None, +): + """ + grid_size: int of the grid height and width + return: + pos_embed: [grid_size*grid_size, embed_dim] or [1+grid_size*grid_size, embed_dim] (w/ or w/o cls_token) + """ + grid_h = np.arange(grid_size, dtype=np.float32) + grid_w = np.arange(grid_size, dtype=np.float32) + grid = np.meshgrid(grid_w, grid_h) # here w goes first + grid = np.stack(grid, axis=0) + if scaling_factor is not None: + grid = grid / scaling_factor + if offset is not None: + grid = grid - offset + + grid = grid.reshape([2, 1, grid_size, grid_size]) + pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid) + if cls_token and extra_tokens > 0: + pos_embed = np.concatenate( + [np.zeros([extra_tokens, embed_dim]), pos_embed], axis=0 + ) + return pos_embed + + +def get_2d_sincos_pos_embed_from_grid(embed_dim, grid): + assert embed_dim % 2 == 0 + + # use half of dimensions to encode grid_h + emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[0]) # (H*W, D/2) + emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[1]) # (H*W, D/2) + + emb = np.concatenate([emb_h, emb_w], axis=1) # (H*W, D) + return emb + + +def get_1d_sincos_pos_embed_from_grid(embed_dim, pos): + """ + embed_dim: output dimension for each position + pos: a list of positions to be encoded: size (M,) + out: (M, D) + """ + assert embed_dim % 2 == 0 + omega = np.arange(embed_dim // 2, dtype=np.float64) + omega /= embed_dim / 2.0 + omega = 1.0 / 10000**omega # (D/2,) + + pos = pos.reshape(-1) # (M,) + out = np.einsum("m,d->md", pos, omega) # (M, D/2), outer product + + emb_sin = np.sin(out) # (M, D/2) + emb_cos = np.cos(out) # (M, D/2) + + emb = np.concatenate([emb_sin, emb_cos], axis=1) # (M, D) + return emb + +def get_1d_sincos_pos_embed_from_grid_torch(embed_dim, pos, device=None, dtype=torch.float32): + omega = torch.arange(embed_dim // 2, device=device, dtype=dtype) + omega /= embed_dim / 2.0 + omega = 1.0 / 10000**omega # (D/2,) + pos = pos.reshape(-1) # (M,) + out = torch.einsum("m,d->md", pos, omega) # (M, D/2), outer product + emb_sin = torch.sin(out) # (M, D/2) + emb_cos = torch.cos(out) # (M, D/2) + emb = 
torch.cat([emb_sin, emb_cos], dim=1) # (M, D) + return emb + +def get_2d_sincos_pos_embed_torch(embed_dim, w, h, val_center=7.5, val_magnitude=7.5, device=None, dtype=torch.float32): + small = min(h, w) + val_h = (h / small) * val_magnitude + val_w = (w / small) * val_magnitude + grid_h, grid_w = torch.meshgrid(torch.linspace(-val_h + val_center, val_h + val_center, h, device=device, dtype=dtype), torch.linspace(-val_w + val_center, val_w + val_center, w, device=device, dtype=dtype), indexing='ij') + emb_h = get_1d_sincos_pos_embed_from_grid_torch(embed_dim // 2, grid_h, device=device, dtype=dtype) + emb_w = get_1d_sincos_pos_embed_from_grid_torch(embed_dim // 2, grid_w, device=device, dtype=dtype) + emb = torch.cat([emb_w, emb_h], dim=1) # (H*W, D) + return emb + + +################################################################################# +# Embedding Layers for Timesteps and Class Labels # +################################################################################# + + +class TimestepEmbedder(nn.Module): + """ + Embeds scalar timesteps into vector representations. + """ + + def __init__(self, hidden_size, frequency_embedding_size=256, dtype=None, device=None, operations=None): + super().__init__() + self.mlp = nn.Sequential( + operations.Linear(frequency_embedding_size, hidden_size, bias=True, dtype=dtype, device=device), + nn.SiLU(), + operations.Linear(hidden_size, hidden_size, bias=True, dtype=dtype, device=device), + ) + self.frequency_embedding_size = frequency_embedding_size + + def forward(self, t, dtype, **kwargs): + t_freq = timestep_embedding(t, self.frequency_embedding_size).to(dtype) + t_emb = self.mlp(t_freq) + return t_emb + + +class VectorEmbedder(nn.Module): + """ + Embeds a flat vector of dimension input_dim + """ + + def __init__(self, input_dim: int, hidden_size: int, dtype=None, device=None, operations=None): + super().__init__() + self.mlp = nn.Sequential( + operations.Linear(input_dim, hidden_size, bias=True, dtype=dtype, device=device), + nn.SiLU(), + operations.Linear(hidden_size, hidden_size, bias=True, dtype=dtype, device=device), + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + emb = self.mlp(x) + return emb + + +################################################################################# +# Core DiT Model # +################################################################################# + + +def split_qkv(qkv, head_dim): + qkv = qkv.reshape(qkv.shape[0], qkv.shape[1], 3, -1, head_dim).movedim(2, 0) + return qkv[0], qkv[1], qkv[2] + + +class SelfAttention(nn.Module): + ATTENTION_MODES = ("xformers", "torch", "torch-hb", "math", "debug") + + def __init__( + self, + dim: int, + num_heads: int = 8, + qkv_bias: bool = False, + qk_scale: Optional[float] = None, + proj_drop: float = 0.0, + attn_mode: str = "xformers", + pre_only: bool = False, + qk_norm: Optional[str] = None, + rmsnorm: bool = False, + dtype=None, + device=None, + operations=None, + ): + super().__init__() + self.num_heads = num_heads + self.head_dim = dim // num_heads + + self.qkv = operations.Linear(dim, dim * 3, bias=qkv_bias, dtype=dtype, device=device) + if not pre_only: + self.proj = operations.Linear(dim, dim, dtype=dtype, device=device) + self.proj_drop = nn.Dropout(proj_drop) + assert attn_mode in self.ATTENTION_MODES + self.attn_mode = attn_mode + self.pre_only = pre_only + + if qk_norm == "rms": + self.ln_q = RMSNorm(self.head_dim, elementwise_affine=True, eps=1.0e-6, dtype=dtype, device=device) + self.ln_k = RMSNorm(self.head_dim, elementwise_affine=True, 
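+            # qk_norm="rms": q and k are RMS-normalised per head (learnable
+            # scale); the "ln" branch below uses LayerNorm with the same eps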
eps=1.0e-6, dtype=dtype, device=device) + elif qk_norm == "ln": + self.ln_q = operations.LayerNorm(self.head_dim, elementwise_affine=True, eps=1.0e-6, dtype=dtype, device=device) + self.ln_k = operations.LayerNorm(self.head_dim, elementwise_affine=True, eps=1.0e-6, dtype=dtype, device=device) + elif qk_norm is None: + self.ln_q = nn.Identity() + self.ln_k = nn.Identity() + else: + raise ValueError(qk_norm) + + def pre_attention(self, x: torch.Tensor) -> torch.Tensor: + B, L, C = x.shape + qkv = self.qkv(x) + q, k, v = split_qkv(qkv, self.head_dim) + q = self.ln_q(q).reshape(q.shape[0], q.shape[1], -1) + k = self.ln_k(k).reshape(q.shape[0], q.shape[1], -1) + return (q, k, v) + + def post_attention(self, x: torch.Tensor) -> torch.Tensor: + assert not self.pre_only + x = self.proj(x) + x = self.proj_drop(x) + return x + + def forward(self, x: torch.Tensor) -> torch.Tensor: + q, k, v = self.pre_attention(x) + x = optimized_attention( + q, k, v, heads=self.num_heads + ) + x = self.post_attention(x) + return x + + +class RMSNorm(torch.nn.Module): + def __init__( + self, dim: int, elementwise_affine: bool = False, eps: float = 1e-6, device=None, dtype=None + ): + """ + Initialize the RMSNorm normalization layer. + Args: + dim (int): The dimension of the input tensor. + eps (float, optional): A small value added to the denominator for numerical stability. Default is 1e-6. + Attributes: + eps (float): A small value added to the denominator for numerical stability. + weight (nn.Parameter): Learnable scaling parameter. + """ + super().__init__() + self.eps = eps + self.learnable_scale = elementwise_affine + if self.learnable_scale: + self.weight = nn.Parameter(torch.empty(dim, device=device, dtype=dtype)) + else: + self.register_parameter("weight", None) + + def forward(self, x): + return comfy.ldm.common_dit.rms_norm(x, self.weight, self.eps) + + + +class SwiGLUFeedForward(nn.Module): + def __init__( + self, + dim: int, + hidden_dim: int, + multiple_of: int, + ffn_dim_multiplier: Optional[float] = None, + ): + """ + Initialize the FeedForward module. + + Args: + dim (int): Input dimension. + hidden_dim (int): Hidden dimension of the feedforward layer. + multiple_of (int): Value to ensure hidden dimension is a multiple of this value. + ffn_dim_multiplier (float, optional): Custom multiplier for hidden dimension. Defaults to None. + + Attributes: + w1 (ColumnParallelLinear): Linear transformation for the first layer. + w2 (RowParallelLinear): Linear transformation for the second layer. + w3 (ColumnParallelLinear): Linear transformation for the third layer. + + """ + super().__init__() + hidden_dim = int(2 * hidden_dim / 3) + # custom dim factor multiplier + if ffn_dim_multiplier is not None: + hidden_dim = int(ffn_dim_multiplier * hidden_dim) + hidden_dim = multiple_of * ((hidden_dim + multiple_of - 1) // multiple_of) + + self.w1 = nn.Linear(dim, hidden_dim, bias=False) + self.w2 = nn.Linear(hidden_dim, dim, bias=False) + self.w3 = nn.Linear(dim, hidden_dim, bias=False) + + def forward(self, x): + return self.w2(nn.functional.silu(self.w1(x)) * self.w3(x)) + + +class DismantledBlock(nn.Module): + """ + A DiT block with gated adaptive layer norm (adaLN) conditioning. 
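+
+    pre_attention()/post_attention() deliberately split the block around the
+    attention op so block_mixing() further down can batch the context and x
+    streams through one attention call; adaLN_modulation emits 1, 2, 4, 6 or 9
+    modulation tensors depending on pre_only / scale_mod_only /
+    x_block_self_attn.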
+ """ + + ATTENTION_MODES = ("xformers", "torch", "torch-hb", "math", "debug") + + def __init__( + self, + hidden_size: int, + num_heads: int, + mlp_ratio: float = 4.0, + attn_mode: str = "xformers", + qkv_bias: bool = False, + pre_only: bool = False, + rmsnorm: bool = False, + scale_mod_only: bool = False, + swiglu: bool = False, + qk_norm: Optional[str] = None, + x_block_self_attn: bool = False, + dtype=None, + device=None, + operations=None, + **block_kwargs, + ): + super().__init__() + assert attn_mode in self.ATTENTION_MODES + if not rmsnorm: + self.norm1 = operations.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) + else: + self.norm1 = RMSNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.attn = SelfAttention( + dim=hidden_size, + num_heads=num_heads, + qkv_bias=qkv_bias, + attn_mode=attn_mode, + pre_only=pre_only, + qk_norm=qk_norm, + rmsnorm=rmsnorm, + dtype=dtype, + device=device, + operations=operations + ) + if x_block_self_attn: + assert not pre_only + assert not scale_mod_only + self.x_block_self_attn = True + self.attn2 = SelfAttention( + dim=hidden_size, + num_heads=num_heads, + qkv_bias=qkv_bias, + attn_mode=attn_mode, + pre_only=False, + qk_norm=qk_norm, + rmsnorm=rmsnorm, + dtype=dtype, + device=device, + operations=operations + ) + else: + self.x_block_self_attn = False + if not pre_only: + if not rmsnorm: + self.norm2 = operations.LayerNorm( + hidden_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device + ) + else: + self.norm2 = RMSNorm(hidden_size, elementwise_affine=False, eps=1e-6) + mlp_hidden_dim = int(hidden_size * mlp_ratio) + if not pre_only: + if not swiglu: + self.mlp = Mlp( + in_features=hidden_size, + hidden_features=mlp_hidden_dim, + act_layer=lambda: nn.GELU(approximate="tanh"), + drop=0, + dtype=dtype, + device=device, + operations=operations + ) + else: + self.mlp = SwiGLUFeedForward( + dim=hidden_size, + hidden_dim=mlp_hidden_dim, + multiple_of=256, + ) + self.scale_mod_only = scale_mod_only + if x_block_self_attn: + assert not pre_only + assert not scale_mod_only + n_mods = 9 + elif not scale_mod_only: + n_mods = 6 if not pre_only else 2 + else: + n_mods = 4 if not pre_only else 1 + self.adaLN_modulation = nn.Sequential( + nn.SiLU(), operations.Linear(hidden_size, n_mods * hidden_size, bias=True, dtype=dtype, device=device) + ) + self.pre_only = pre_only + + def pre_attention(self, x: torch.Tensor, c: torch.Tensor) -> torch.Tensor: + if not self.pre_only: + if not self.scale_mod_only: + ( + shift_msa, + scale_msa, + gate_msa, + shift_mlp, + scale_mlp, + gate_mlp, + ) = self.adaLN_modulation(c).chunk(6, dim=1) + else: + shift_msa = None + shift_mlp = None + ( + scale_msa, + gate_msa, + scale_mlp, + gate_mlp, + ) = self.adaLN_modulation( + c + ).chunk(4, dim=1) + qkv = self.attn.pre_attention(modulate(self.norm1(x), shift_msa, scale_msa)) + return qkv, ( + x, + gate_msa, + shift_mlp, + scale_mlp, + gate_mlp, + ) + else: + if not self.scale_mod_only: + ( + shift_msa, + scale_msa, + ) = self.adaLN_modulation( + c + ).chunk(2, dim=1) + else: + shift_msa = None + scale_msa = self.adaLN_modulation(c) + qkv = self.attn.pre_attention(modulate(self.norm1(x), shift_msa, scale_msa)) + return qkv, None + + def post_attention(self, attn, x, gate_msa, shift_mlp, scale_mlp, gate_mlp): + assert not self.pre_only + x = x + gate_msa.unsqueeze(1) * self.attn.post_attention(attn) + x = x + gate_mlp.unsqueeze(1) * self.mlp( + modulate(self.norm2(x), shift_mlp, scale_mlp) + ) + return x + + def 
pre_attention_x(self, x: torch.Tensor, c: torch.Tensor) -> torch.Tensor: + assert self.x_block_self_attn + ( + shift_msa, + scale_msa, + gate_msa, + shift_mlp, + scale_mlp, + gate_mlp, + shift_msa2, + scale_msa2, + gate_msa2, + ) = self.adaLN_modulation(c).chunk(9, dim=1) + x_norm = self.norm1(x) + qkv = self.attn.pre_attention(modulate(x_norm, shift_msa, scale_msa)) + qkv2 = self.attn2.pre_attention(modulate(x_norm, shift_msa2, scale_msa2)) + return qkv, qkv2, ( + x, + gate_msa, + shift_mlp, + scale_mlp, + gate_mlp, + gate_msa2, + ) + + def post_attention_x(self, attn, attn2, x, gate_msa, shift_mlp, scale_mlp, gate_mlp, gate_msa2): + assert not self.pre_only + attn1 = self.attn.post_attention(attn) + attn2 = self.attn2.post_attention(attn2) + out1 = gate_msa.unsqueeze(1) * attn1 + out2 = gate_msa2.unsqueeze(1) * attn2 + x = x + out1 + x = x + out2 + x = x + gate_mlp.unsqueeze(1) * self.mlp( + modulate(self.norm2(x), shift_mlp, scale_mlp) + ) + return x + + def forward(self, x: torch.Tensor, c: torch.Tensor) -> torch.Tensor: + assert not self.pre_only + if self.x_block_self_attn: + qkv, qkv2, intermediates = self.pre_attention_x(x, c) + attn, _ = optimized_attention( + qkv[0], qkv[1], qkv[2], + num_heads=self.attn.num_heads, + ) + attn2, _ = optimized_attention( + qkv2[0], qkv2[1], qkv2[2], + num_heads=self.attn2.num_heads, + ) + return self.post_attention_x(attn, attn2, *intermediates) + else: + qkv, intermediates = self.pre_attention(x, c) + attn = optimized_attention( + qkv[0], qkv[1], qkv[2], + heads=self.attn.num_heads, + ) + return self.post_attention(attn, *intermediates) + + +def block_mixing(*args, use_checkpoint=True, **kwargs): + if use_checkpoint: + return torch.utils.checkpoint.checkpoint( + _block_mixing, *args, use_reentrant=False, **kwargs + ) + else: + return _block_mixing(*args, **kwargs) + + +def _block_mixing(context, x, context_block, x_block, c): + context_qkv, context_intermediates = context_block.pre_attention(context, c) + + if x_block.x_block_self_attn: + x_qkv, x_qkv2, x_intermediates = x_block.pre_attention_x(x, c) + else: + x_qkv, x_intermediates = x_block.pre_attention(x, c) + + o = [] + for t in range(3): + o.append(torch.cat((context_qkv[t], x_qkv[t]), dim=1)) + qkv = tuple(o) + + attn = optimized_attention( + qkv[0], qkv[1], qkv[2], + heads=x_block.attn.num_heads, + ) + context_attn, x_attn = ( + attn[:, : context_qkv[0].shape[1]], + attn[:, context_qkv[0].shape[1] :], + ) + + if not context_block.pre_only: + context = context_block.post_attention(context_attn, *context_intermediates) + + else: + context = None + if x_block.x_block_self_attn: + attn2 = optimized_attention( + x_qkv2[0], x_qkv2[1], x_qkv2[2], + heads=x_block.attn2.num_heads, + ) + x = x_block.post_attention_x(x_attn, attn2, *x_intermediates) + else: + x = x_block.post_attention(x_attn, *x_intermediates) + return context, x + + +class JointBlock(nn.Module): + """just a small wrapper to serve as a fsdp unit""" + + def __init__( + self, + *args, + **kwargs, + ): + super().__init__() + pre_only = kwargs.pop("pre_only") + qk_norm = kwargs.pop("qk_norm", None) + x_block_self_attn = kwargs.pop("x_block_self_attn", False) + self.context_block = DismantledBlock(*args, pre_only=pre_only, qk_norm=qk_norm, **kwargs) + self.x_block = DismantledBlock(*args, + pre_only=False, + qk_norm=qk_norm, + x_block_self_attn=x_block_self_attn, + **kwargs) + + def forward(self, *args, **kwargs): + return block_mixing( + *args, context_block=self.context_block, x_block=self.x_block, **kwargs + ) + + +class 
FinalLayer(nn.Module): + """ + The final layer of DiT. + """ + + def __init__( + self, + hidden_size: int, + patch_size: int, + out_channels: int, + total_out_channels: Optional[int] = None, + dtype=None, + device=None, + operations=None, + ): + super().__init__() + self.norm_final = operations.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) + self.linear = ( + operations.Linear(hidden_size, patch_size * patch_size * out_channels, bias=True, dtype=dtype, device=device) + if (total_out_channels is None) + else operations.Linear(hidden_size, total_out_channels, bias=True, dtype=dtype, device=device) + ) + self.adaLN_modulation = nn.Sequential( + nn.SiLU(), operations.Linear(hidden_size, 2 * hidden_size, bias=True, dtype=dtype, device=device) + ) + + def forward(self, x: torch.Tensor, c: torch.Tensor) -> torch.Tensor: + shift, scale = self.adaLN_modulation(c).chunk(2, dim=1) + x = modulate(self.norm_final(x), shift, scale) + x = self.linear(x) + return x + +class SelfAttentionContext(nn.Module): + def __init__(self, dim, heads=8, dim_head=64, dtype=None, device=None, operations=None): + super().__init__() + dim_head = dim // heads + inner_dim = dim + + self.heads = heads + self.dim_head = dim_head + + self.qkv = operations.Linear(dim, dim * 3, bias=True, dtype=dtype, device=device) + + self.proj = operations.Linear(inner_dim, dim, dtype=dtype, device=device) + + def forward(self, x): + qkv = self.qkv(x) + q, k, v = split_qkv(qkv, self.dim_head) + x = optimized_attention(q.reshape(q.shape[0], q.shape[1], -1), k, v, heads=self.heads) + return self.proj(x) + +class ContextProcessorBlock(nn.Module): + def __init__(self, context_size, dtype=None, device=None, operations=None): + super().__init__() + self.norm1 = operations.LayerNorm(context_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) + self.attn = SelfAttentionContext(context_size, dtype=dtype, device=device, operations=operations) + self.norm2 = operations.LayerNorm(context_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) + self.mlp = Mlp(in_features=context_size, hidden_features=(context_size * 4), act_layer=lambda: nn.GELU(approximate="tanh"), drop=0, dtype=dtype, device=device, operations=operations) + + def forward(self, x): + x += self.attn(self.norm1(x)) + x += self.mlp(self.norm2(x)) + return x + +class ContextProcessor(nn.Module): + def __init__(self, context_size, num_layers, dtype=None, device=None, operations=None): + super().__init__() + self.layers = torch.nn.ModuleList([ContextProcessorBlock(context_size, dtype=dtype, device=device, operations=operations) for i in range(num_layers)]) + self.norm = operations.LayerNorm(context_size, elementwise_affine=False, eps=1e-6, dtype=dtype, device=device) + + def forward(self, x): + for i, l in enumerate(self.layers): + x = l(x) + return self.norm(x) + +class MMDiT(nn.Module): + """ + Diffusion model with a Transformer backbone. 
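+
+    Width is tied to depth: hidden_size = 64 * depth with num_heads = depth,
+    i.e. a fixed head size of 64, as set in __init__ below.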
+ """ + + def __init__( + self, + input_size: int = 32, + patch_size: int = 2, + in_channels: int = 4, + depth: int = 28, + # hidden_size: Optional[int] = None, + # num_heads: Optional[int] = None, + mlp_ratio: float = 4.0, + learn_sigma: bool = False, + adm_in_channels: Optional[int] = None, + context_embedder_config: Optional[Dict] = None, + compile_core: bool = False, + use_checkpoint: bool = False, + register_length: int = 0, + attn_mode: str = "torch", + rmsnorm: bool = False, + scale_mod_only: bool = False, + swiglu: bool = False, + out_channels: Optional[int] = None, + pos_embed_scaling_factor: Optional[float] = None, + pos_embed_offset: Optional[float] = None, + pos_embed_max_size: Optional[int] = None, + num_patches = None, + qk_norm: Optional[str] = None, + qkv_bias: bool = True, + context_processor_layers = None, + x_block_self_attn: bool = False, + x_block_self_attn_layers: Optional[List[int]] = [], + context_size = 4096, + num_blocks = None, + final_layer = True, + skip_blocks = False, + dtype = None, #TODO + device = None, + operations = None, + ): + super().__init__() + self.dtype = dtype + self.learn_sigma = learn_sigma + self.in_channels = in_channels + default_out_channels = in_channels * 2 if learn_sigma else in_channels + self.out_channels = default(out_channels, default_out_channels) + self.patch_size = patch_size + self.pos_embed_scaling_factor = pos_embed_scaling_factor + self.pos_embed_offset = pos_embed_offset + self.pos_embed_max_size = pos_embed_max_size + self.x_block_self_attn_layers = x_block_self_attn_layers + + # hidden_size = default(hidden_size, 64 * depth) + # num_heads = default(num_heads, hidden_size // 64) + + # apply magic --> this defines a head_size of 64 + self.hidden_size = 64 * depth + num_heads = depth + if num_blocks is None: + num_blocks = depth + + self.depth = depth + self.num_heads = num_heads + + self.x_embedder = PatchEmbed( + input_size, + patch_size, + in_channels, + self.hidden_size, + bias=True, + strict_img_size=self.pos_embed_max_size is None, + dtype=dtype, + device=device, + operations=operations + ) + self.t_embedder = TimestepEmbedder(self.hidden_size, dtype=dtype, device=device, operations=operations) + + self.y_embedder = None + if adm_in_channels is not None: + assert isinstance(adm_in_channels, int) + self.y_embedder = VectorEmbedder(adm_in_channels, self.hidden_size, dtype=dtype, device=device, operations=operations) + + if context_processor_layers is not None: + self.context_processor = ContextProcessor(context_size, context_processor_layers, dtype=dtype, device=device, operations=operations) + else: + self.context_processor = None + + self.context_embedder = nn.Identity() + if context_embedder_config is not None: + if context_embedder_config["target"] == "torch.nn.Linear": + self.context_embedder = operations.Linear(**context_embedder_config["params"], dtype=dtype, device=device) + + self.register_length = register_length + if self.register_length > 0: + self.register = nn.Parameter(torch.randn(1, register_length, self.hidden_size, dtype=dtype, device=device)) + + # num_patches = self.x_embedder.num_patches + # Will use fixed sin-cos embedding: + # just use a buffer already + if num_patches is not None: + self.register_buffer( + "pos_embed", + torch.empty(1, num_patches, self.hidden_size, dtype=dtype, device=device), + ) + else: + self.pos_embed = None + + self.use_checkpoint = use_checkpoint + if not skip_blocks: + self.joint_blocks = nn.ModuleList( + [ + JointBlock( + self.hidden_size, + num_heads, + 
mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + attn_mode=attn_mode, + pre_only=(i == num_blocks - 1) and final_layer, + rmsnorm=rmsnorm, + scale_mod_only=scale_mod_only, + swiglu=swiglu, + qk_norm=qk_norm, + x_block_self_attn=(i in self.x_block_self_attn_layers) or x_block_self_attn, + dtype=dtype, + device=device, + operations=operations, + ) + for i in range(num_blocks) + ] + ) + + if final_layer: + self.final_layer = FinalLayer(self.hidden_size, patch_size, self.out_channels, dtype=dtype, device=device, operations=operations) + + if compile_core: + assert False + self.forward_core_with_concat = torch.compile(self.forward_core_with_concat) + + def cropped_pos_embed(self, hw, device=None): + p = self.x_embedder.patch_size[0] + h, w = hw + # patched size + h = (h + 1) // p + w = (w + 1) // p + if self.pos_embed is None: + return get_2d_sincos_pos_embed_torch(self.hidden_size, w, h, device=device) + assert self.pos_embed_max_size is not None + assert h <= self.pos_embed_max_size, (h, self.pos_embed_max_size) + assert w <= self.pos_embed_max_size, (w, self.pos_embed_max_size) + top = (self.pos_embed_max_size - h) // 2 + left = (self.pos_embed_max_size - w) // 2 + spatial_pos_embed = rearrange( + self.pos_embed, + "1 (h w) c -> 1 h w c", + h=self.pos_embed_max_size, + w=self.pos_embed_max_size, + ) + spatial_pos_embed = spatial_pos_embed[:, top : top + h, left : left + w, :] + spatial_pos_embed = rearrange(spatial_pos_embed, "1 h w c -> 1 (h w) c") + # print(spatial_pos_embed, top, left, h, w) + # # t = get_2d_sincos_pos_embed_torch(self.hidden_size, w, h, 7.875, 7.875, device=device) #matches exactly for 1024 res + # t = get_2d_sincos_pos_embed_torch(self.hidden_size, w, h, 7.5, 7.5, device=device) #scales better + # # print(t) + # return t + return spatial_pos_embed + + def unpatchify(self, x, hw=None): + """ + x: (N, T, patch_size**2 * C) + imgs: (N, H, W, C) + """ + c = self.out_channels + p = self.x_embedder.patch_size[0] + if hw is None: + h = w = int(x.shape[1] ** 0.5) + else: + h, w = hw + h = (h + 1) // p + w = (w + 1) // p + assert h * w == x.shape[1] + + x = x.reshape(shape=(x.shape[0], h, w, p, p, c)) + x = torch.einsum("nhwpqc->nchpwq", x) + imgs = x.reshape(shape=(x.shape[0], c, h * p, w * p)) + return imgs + + def forward_core_with_concat( + self, + x: torch.Tensor, + c_mod: torch.Tensor, + context: Optional[torch.Tensor] = None, + control = None, + transformer_options = {}, + ) -> torch.Tensor: + patches_replace = transformer_options.get("patches_replace", {}) + if self.register_length > 0: + context = torch.cat( + ( + repeat(self.register, "1 ... 
-> b ...", b=x.shape[0]), + default(context, torch.Tensor([]).type_as(x)), + ), + 1, + ) + + # context is B, L', D + # x is B, L, D + blocks_replace = patches_replace.get("dit", {}) + blocks = len(self.joint_blocks) + for i in range(blocks): + if ("double_block", i) in blocks_replace: + def block_wrap(args): + out = {} + out["txt"], out["img"] = self.joint_blocks[i](args["txt"], args["img"], c=args["vec"]) + return out + + out = blocks_replace[("double_block", i)]({"img": x, "txt": context, "vec": c_mod}, {"original_block": block_wrap}) + context = out["txt"] + x = out["img"] + else: + context, x = self.joint_blocks[i]( + context, + x, + c=c_mod, + use_checkpoint=self.use_checkpoint, + ) + if control is not None: + control_o = control.get("output") + if i < len(control_o): + add = control_o[i] + if add is not None: + x += add + + x = self.final_layer(x, c_mod) # (N, T, patch_size ** 2 * out_channels) + return x + + def forward( + self, + x: torch.Tensor, + t: torch.Tensor, + y: Optional[torch.Tensor] = None, + context: Optional[torch.Tensor] = None, + control = None, + transformer_options = {}, + ) -> torch.Tensor: + """ + Forward pass of DiT. + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N,) tensor of class labels + """ + + if self.context_processor is not None: + context = self.context_processor(context) + + hw = x.shape[-2:] + x = self.x_embedder(x) + comfy.ops.cast_to_input(self.cropped_pos_embed(hw, device=x.device), x) + c = self.t_embedder(t, dtype=x.dtype) # (N, D) + if y is not None and self.y_embedder is not None: + y = self.y_embedder(y) # (N, D) + c = c + y # (N, D) + + if context is not None: + context = self.context_embedder(context) + + x = self.forward_core_with_concat(x, c, context, control, transformer_options) + + x = self.unpatchify(x, hw=hw) # (N, out_channels, H, W) + return x[:,:,:hw[-2],:hw[-1]] + + +class OpenAISignatureMMDITWrapper(MMDiT): + def forward( + self, + x: torch.Tensor, + timesteps: torch.Tensor, + context: Optional[torch.Tensor] = None, + y: Optional[torch.Tensor] = None, + control = None, + transformer_options = {}, + **kwargs, + ) -> torch.Tensor: + return super().forward(x, timesteps, context=context, y=y, control=control, transformer_options=transformer_options) + diff --git a/src/comfyui/comfy/ldm/modules/diffusionmodules/model.py b/src/comfyui/comfy/ldm/modules/diffusionmodules/model.py new file mode 100644 index 0000000000000000000000000000000000000000..04eb83b2181253e3a88f7945f75e017060e02ebf --- /dev/null +++ b/src/comfyui/comfy/ldm/modules/diffusionmodules/model.py @@ -0,0 +1,650 @@ +# pytorch_diffusion + derived encoder decoder +import math +import torch +import torch.nn as nn +import numpy as np +from typing import Optional, Any +import logging + +from comfy import model_management +import comfy.ops +ops = comfy.ops.disable_weight_init + +if model_management.xformers_enabled_vae(): + import xformers + import xformers.ops + +def get_timestep_embedding(timesteps, embedding_dim): + """ + This matches the implementation in Denoising Diffusion Probabilistic Models: + From Fairseq. + Build sinusoidal embeddings. + This matches the implementation in tensor2tensor, but differs slightly + from the description in Section 3.5 of "Attention Is All You Need". 
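+    Concretely, with half = embedding_dim // 2 and freqs[j] = 10000 ** (-j / (half - 1)),
+    row i of the output is [sin(t_i * freqs), cos(t_i * freqs)], zero-padded by one
+    channel when embedding_dim is odd; e.g. for t_i = 1 and embedding_dim = 4 this is
+    roughly [0.8415, 0.0001, 0.5403, 1.0000].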
+ """ + assert len(timesteps.shape) == 1 + + half_dim = embedding_dim // 2 + emb = math.log(10000) / (half_dim - 1) + emb = torch.exp(torch.arange(half_dim, dtype=torch.float32) * -emb) + emb = emb.to(device=timesteps.device) + emb = timesteps.float()[:, None] * emb[None, :] + emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1) + if embedding_dim % 2 == 1: # zero pad + emb = torch.nn.functional.pad(emb, (0,1,0,0)) + return emb + + +def nonlinearity(x): + # swish + return x*torch.sigmoid(x) + + +def Normalize(in_channels, num_groups=32): + return ops.GroupNorm(num_groups=num_groups, num_channels=in_channels, eps=1e-6, affine=True) + + +class Upsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + self.conv = ops.Conv2d(in_channels, + in_channels, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x): + try: + x = torch.nn.functional.interpolate(x, scale_factor=2.0, mode="nearest") + except: #operation not implemented for bf16 + b, c, h, w = x.shape + out = torch.empty((b, c, h*2, w*2), dtype=x.dtype, layout=x.layout, device=x.device) + split = 8 + l = out.shape[1] // split + for i in range(0, out.shape[1], l): + out[:,i:i+l] = torch.nn.functional.interpolate(x[:,i:i+l].to(torch.float32), scale_factor=2.0, mode="nearest").to(x.dtype) + del x + x = out + + if self.with_conv: + x = self.conv(x) + return x + + +class Downsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + # no asymmetric padding in torch conv, must do it ourselves + self.conv = ops.Conv2d(in_channels, + in_channels, + kernel_size=3, + stride=2, + padding=0) + + def forward(self, x): + if self.with_conv: + pad = (0,1,0,1) + x = torch.nn.functional.pad(x, pad, mode="constant", value=0) + x = self.conv(x) + else: + x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2) + return x + + +class ResnetBlock(nn.Module): + def __init__(self, *, in_channels, out_channels=None, conv_shortcut=False, + dropout, temb_channels=512): + super().__init__() + self.in_channels = in_channels + out_channels = in_channels if out_channels is None else out_channels + self.out_channels = out_channels + self.use_conv_shortcut = conv_shortcut + + self.swish = torch.nn.SiLU(inplace=True) + self.norm1 = Normalize(in_channels) + self.conv1 = ops.Conv2d(in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + if temb_channels > 0: + self.temb_proj = ops.Linear(temb_channels, + out_channels) + self.norm2 = Normalize(out_channels) + self.dropout = torch.nn.Dropout(dropout, inplace=True) + self.conv2 = ops.Conv2d(out_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + self.conv_shortcut = ops.Conv2d(in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + else: + self.nin_shortcut = ops.Conv2d(in_channels, + out_channels, + kernel_size=1, + stride=1, + padding=0) + + def forward(self, x, temb): + h = x + h = self.norm1(h) + h = self.swish(h) + h = self.conv1(h) + + if temb is not None: + h = h + self.temb_proj(self.swish(temb))[:,:,None,None] + + h = self.norm2(h) + h = self.swish(h) + h = self.dropout(h) + h = self.conv2(h) + + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + x = self.conv_shortcut(x) + else: + x = self.nin_shortcut(x) + + return x+h + +def slice_attention(q, k, v): + r1 = torch.zeros_like(k, device=q.device) 
+ scale = (int(q.shape[-1])**(-0.5)) + + mem_free_total = model_management.get_free_memory(q.device) + + gb = 1024 ** 3 + tensor_size = q.shape[0] * q.shape[1] * k.shape[2] * q.element_size() + modifier = 3 if q.element_size() == 2 else 2.5 + mem_required = tensor_size * modifier + steps = 1 + + if mem_required > mem_free_total: + steps = 2**(math.ceil(math.log(mem_required / mem_free_total, 2))) + + while True: + try: + slice_size = q.shape[1] // steps if (q.shape[1] % steps) == 0 else q.shape[1] + for i in range(0, q.shape[1], slice_size): + end = i + slice_size + s1 = torch.bmm(q[:, i:end], k) * scale + + s2 = torch.nn.functional.softmax(s1, dim=2).permute(0,2,1) + del s1 + + r1[:, :, i:end] = torch.bmm(v, s2) + del s2 + break + except model_management.OOM_EXCEPTION as e: + model_management.soft_empty_cache(True) + steps *= 2 + if steps > 128: + raise e + logging.warning("out of memory error, increasing steps and trying again {}".format(steps)) + + return r1 + +def normal_attention(q, k, v): + # compute attention + b,c,h,w = q.shape + + q = q.reshape(b,c,h*w) + q = q.permute(0,2,1) # b,hw,c + k = k.reshape(b,c,h*w) # b,c,hw + v = v.reshape(b,c,h*w) + + r1 = slice_attention(q, k, v) + h_ = r1.reshape(b,c,h,w) + del r1 + return h_ + +def xformers_attention(q, k, v): + # compute attention + B, C, H, W = q.shape + q, k, v = map( + lambda t: t.view(B, C, -1).transpose(1, 2).contiguous(), + (q, k, v), + ) + + try: + out = xformers.ops.memory_efficient_attention(q, k, v, attn_bias=None) + out = out.transpose(1, 2).reshape(B, C, H, W) + except NotImplementedError as e: + out = slice_attention(q.view(B, -1, C), k.view(B, -1, C).transpose(1, 2), v.view(B, -1, C).transpose(1, 2)).reshape(B, C, H, W) + return out + +def pytorch_attention(q, k, v): + # compute attention + B, C, H, W = q.shape + q, k, v = map( + lambda t: t.view(B, 1, C, -1).transpose(2, 3).contiguous(), + (q, k, v), + ) + + try: + out = torch.nn.functional.scaled_dot_product_attention(q, k, v, attn_mask=None, dropout_p=0.0, is_causal=False) + out = out.transpose(2, 3).reshape(B, C, H, W) + except model_management.OOM_EXCEPTION as e: + logging.warning("scaled_dot_product_attention OOMed: switched to slice attention") + out = slice_attention(q.view(B, -1, C), k.view(B, -1, C).transpose(1, 2), v.view(B, -1, C).transpose(1, 2)).reshape(B, C, H, W) + return out + + +class AttnBlock(nn.Module): + def __init__(self, in_channels): + super().__init__() + self.in_channels = in_channels + + self.norm = Normalize(in_channels) + self.q = ops.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.k = ops.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.v = ops.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.proj_out = ops.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + + if model_management.xformers_enabled_vae(): + logging.info("Using xformers attention in VAE") + self.optimized_attention = xformers_attention + elif model_management.pytorch_attention_enabled(): + logging.info("Using pytorch attention in VAE") + self.optimized_attention = pytorch_attention + else: + logging.info("Using split attention in VAE") + self.optimized_attention = normal_attention + + def forward(self, x): + h_ = x + h_ = self.norm(h_) + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + h_ = self.optimized_attention(q, k, v) + + h_ = self.proj_out(h_) + + return x+h_ + + +def make_attn(in_channels, attn_type="vanilla", attn_kwargs=None): + 
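+    # attn_type and attn_kwargs are accepted for config compatibility but ignored
+    # here; AttnBlock itself selects xformers, pytorch, or split attention at init.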
return AttnBlock(in_channels) + + +class Model(nn.Module): + def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, + resolution, use_timestep=True, use_linear_attn=False, attn_type="vanilla"): + super().__init__() + if use_linear_attn: attn_type = "linear" + self.ch = ch + self.temb_ch = self.ch*4 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + + self.use_timestep = use_timestep + if self.use_timestep: + # timestep embedding + self.temb = nn.Module() + self.temb.dense = nn.ModuleList([ + ops.Linear(self.ch, + self.temb_ch), + ops.Linear(self.temb_ch, + self.temb_ch), + ]) + + # downsampling + self.conv_in = ops.Conv2d(in_channels, + self.ch, + kernel_size=3, + stride=1, + padding=1) + + curr_res = resolution + in_ch_mult = (1,)+tuple(ch_mult) + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch*in_ch_mult[i_level] + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions-1: + down.downsample = Downsample(block_in, resamp_with_conv) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch*ch_mult[i_level] + skip_in = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks+1): + if i_block == self.num_res_blocks: + skip_in = ch*in_ch_mult[i_level] + block.append(ResnetBlock(in_channels=block_in+skip_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = ops.Conv2d(block_in, + out_ch, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x, t=None, context=None): + #assert x.shape[2] == x.shape[3] == self.resolution + if context is not None: + # assume aligned context, cat along channel axis + x = torch.cat((x, context), dim=1) + if self.use_timestep: + # timestep embedding + assert t is not None + temb = get_timestep_embedding(t, self.ch) + temb = self.temb.dense[0](temb) + temb = nonlinearity(temb) + temb = self.temb.dense[1](temb) + else: + temb = None + + # downsampling + hs = [self.conv_in(x)] + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](hs[-1], temb) + 
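+                # attention configured for this resolution (if any) runs before h
+                # is pushed onto hs, the skip stack popped in reverse while upsampling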
if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + hs.append(h) + if i_level != self.num_resolutions-1: + hs.append(self.down[i_level].downsample(hs[-1])) + + # middle + h = hs[-1] + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks+1): + h = self.up[i_level].block[i_block]( + torch.cat([h, hs.pop()], dim=1), temb) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + def get_last_layer(self): + return self.conv_out.weight + + +class Encoder(nn.Module): + def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, + resolution, z_channels, double_z=True, use_linear_attn=False, attn_type="vanilla", + **ignore_kwargs): + super().__init__() + if use_linear_attn: attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + + # downsampling + self.conv_in = ops.Conv2d(in_channels, + self.ch, + kernel_size=3, + stride=1, + padding=1) + + curr_res = resolution + in_ch_mult = (1,)+tuple(ch_mult) + self.in_ch_mult = in_ch_mult + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch*in_ch_mult[i_level] + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions-1: + down.downsample = Downsample(block_in, resamp_with_conv) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + + # end + self.norm_out = Normalize(block_in) + self.conv_out = ops.Conv2d(block_in, + 2*z_channels if double_z else z_channels, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x): + # timestep embedding + temb = None + # downsampling + h = self.conv_in(x) + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](h, temb) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + if i_level != self.num_resolutions-1: + h = self.down[i_level].downsample(h) + + # middle + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + +class Decoder(nn.Module): + def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, + resolution, z_channels, give_pre_end=False, tanh_out=False, 
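+                 # conv_out_op / resnet_op / attn_op below are injection points so
+                 # callers (e.g. temporal/video decoders) can swap in custom block types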
use_linear_attn=False, + conv_out_op=ops.Conv2d, + resnet_op=ResnetBlock, + attn_op=AttnBlock, + **ignorekwargs): + super().__init__() + if use_linear_attn: attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + self.give_pre_end = give_pre_end + self.tanh_out = tanh_out + + # compute in_ch_mult, block_in and curr_res at lowest res + in_ch_mult = (1,)+tuple(ch_mult) + block_in = ch*ch_mult[self.num_resolutions-1] + curr_res = resolution // 2**(self.num_resolutions-1) + self.z_shape = (1,z_channels,curr_res,curr_res) + logging.debug("Working with z of shape {} = {} dimensions.".format( + self.z_shape, np.prod(self.z_shape))) + + # z to block_in + self.conv_in = ops.Conv2d(z_channels, + block_in, + kernel_size=3, + stride=1, + padding=1) + + # middle + self.mid = nn.Module() + self.mid.block_1 = resnet_op(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + self.mid.attn_1 = attn_op(block_in) + self.mid.block_2 = resnet_op(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks+1): + block.append(resnet_op(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(attn_op(block_in)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = conv_out_op(block_in, + out_ch, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, z, **kwargs): + #assert z.shape[1:] == self.z_shape[1:] + self.last_z_shape = z.shape + + # timestep embedding + temb = None + + # z to block_in + h = self.conv_in(z) + + # middle + h = self.mid.block_1(h, temb, **kwargs) + h = self.mid.attn_1(h, **kwargs) + h = self.mid.block_2(h, temb, **kwargs) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks+1): + h = self.up[i_level].block[i_block](h, temb, **kwargs) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h, **kwargs) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + if self.give_pre_end: + return h + + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h, **kwargs) + if self.tanh_out: + h = torch.tanh(h) + return h diff --git a/src/comfyui/comfy/ldm/modules/diffusionmodules/openaimodel.py b/src/comfyui/comfy/ldm/modules/diffusionmodules/openaimodel.py new file mode 100644 index 0000000000000000000000000000000000000000..2902073d5ea777aabdd93dc95dc823949ca2f08f --- /dev/null +++ b/src/comfyui/comfy/ldm/modules/diffusionmodules/openaimodel.py @@ -0,0 +1,897 @@ +from abc import abstractmethod + +import torch as th +import torch.nn as nn +import torch.nn.functional as F +from einops import rearrange +import logging + +from .util import ( + checkpoint, + avg_pool_nd, + zero_module, + timestep_embedding, + AlphaBlender, +) +from ..attention import SpatialTransformer, SpatialVideoTransformer, default +from comfy.ldm.util import 
exists +import comfy.ops +ops = comfy.ops.disable_weight_init + +class TimestepBlock(nn.Module): + """ + Any module where forward() takes timestep embeddings as a second argument. + """ + + @abstractmethod + def forward(self, x, emb): + """ + Apply the module to `x` given `emb` timestep embeddings. + """ + +#This is needed because accelerate makes a copy of transformer_options which breaks "transformer_index" +def forward_timestep_embed(ts, x, emb, context=None, transformer_options={}, output_shape=None, time_context=None, num_video_frames=None, image_only_indicator=None): + for layer in ts: + if isinstance(layer, VideoResBlock): + x = layer(x, emb, num_video_frames, image_only_indicator) + elif isinstance(layer, TimestepBlock): + x = layer(x, emb) + elif isinstance(layer, SpatialVideoTransformer): + x = layer(x, context, time_context, num_video_frames, image_only_indicator, transformer_options) + if "transformer_index" in transformer_options: + transformer_options["transformer_index"] += 1 + elif isinstance(layer, SpatialTransformer): + x = layer(x, context, transformer_options) + if "transformer_index" in transformer_options: + transformer_options["transformer_index"] += 1 + elif isinstance(layer, Upsample): + x = layer(x, output_shape=output_shape) + else: + x = layer(x) + return x + +class TimestepEmbedSequential(nn.Sequential, TimestepBlock): + """ + A sequential module that passes timestep embeddings to the children that + support it as an extra input. + """ + + def forward(self, *args, **kwargs): + return forward_timestep_embed(self, *args, **kwargs) + +class Upsample(nn.Module): + """ + An upsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + upsampling occurs in the inner-two dimensions. + """ + + def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1, dtype=None, device=None, operations=ops): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + if use_conv: + self.conv = operations.conv_nd(dims, self.channels, self.out_channels, 3, padding=padding, dtype=dtype, device=device) + + def forward(self, x, output_shape=None): + assert x.shape[1] == self.channels + if self.dims == 3: + shape = [x.shape[2], x.shape[3] * 2, x.shape[4] * 2] + if output_shape is not None: + shape[1] = output_shape[3] + shape[2] = output_shape[4] + else: + shape = [x.shape[2] * 2, x.shape[3] * 2] + if output_shape is not None: + shape[0] = output_shape[2] + shape[1] = output_shape[3] + + x = F.interpolate(x, size=shape, mode="nearest") + if self.use_conv: + x = self.conv(x) + return x + +class Downsample(nn.Module): + """ + A downsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + downsampling occurs in the inner-two dimensions. 
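+    :param out_channels: if set, only the conv path can change the channel count;
+        the avg-pool path asserts out_channels == channels.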
+ """ + + def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1, dtype=None, device=None, operations=ops): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + stride = 2 if dims != 3 else (1, 2, 2) + if use_conv: + self.op = operations.conv_nd( + dims, self.channels, self.out_channels, 3, stride=stride, padding=padding, dtype=dtype, device=device + ) + else: + assert self.channels == self.out_channels + self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride) + + def forward(self, x): + assert x.shape[1] == self.channels + return self.op(x) + + +class ResBlock(TimestepBlock): + """ + A residual block that can optionally change the number of channels. + :param channels: the number of input channels. + :param emb_channels: the number of timestep embedding channels. + :param dropout: the rate of dropout. + :param out_channels: if specified, the number of out channels. + :param use_conv: if True and out_channels is specified, use a spatial + convolution instead of a smaller 1x1 convolution to change the + channels in the skip connection. + :param dims: determines if the signal is 1D, 2D, or 3D. + :param use_checkpoint: if True, use gradient checkpointing on this module. + :param up: if True, use this block for upsampling. + :param down: if True, use this block for downsampling. + """ + + def __init__( + self, + channels, + emb_channels, + dropout, + out_channels=None, + use_conv=False, + use_scale_shift_norm=False, + dims=2, + use_checkpoint=False, + up=False, + down=False, + kernel_size=3, + exchange_temb_dims=False, + skip_t_emb=False, + dtype=None, + device=None, + operations=ops + ): + super().__init__() + self.channels = channels + self.emb_channels = emb_channels + self.dropout = dropout + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.use_checkpoint = use_checkpoint + self.use_scale_shift_norm = use_scale_shift_norm + self.exchange_temb_dims = exchange_temb_dims + + if isinstance(kernel_size, list): + padding = [k // 2 for k in kernel_size] + else: + padding = kernel_size // 2 + + self.in_layers = nn.Sequential( + operations.GroupNorm(32, channels, dtype=dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, channels, self.out_channels, kernel_size, padding=padding, dtype=dtype, device=device), + ) + + self.updown = up or down + + if up: + self.h_upd = Upsample(channels, False, dims, dtype=dtype, device=device) + self.x_upd = Upsample(channels, False, dims, dtype=dtype, device=device) + elif down: + self.h_upd = Downsample(channels, False, dims, dtype=dtype, device=device) + self.x_upd = Downsample(channels, False, dims, dtype=dtype, device=device) + else: + self.h_upd = self.x_upd = nn.Identity() + + self.skip_t_emb = skip_t_emb + if self.skip_t_emb: + self.emb_layers = None + self.exchange_temb_dims = False + else: + self.emb_layers = nn.Sequential( + nn.SiLU(), + operations.Linear( + emb_channels, + 2 * self.out_channels if use_scale_shift_norm else self.out_channels, dtype=dtype, device=device + ), + ) + self.out_layers = nn.Sequential( + operations.GroupNorm(32, self.out_channels, dtype=dtype, device=device), + nn.SiLU(), + nn.Dropout(p=dropout), + operations.conv_nd(dims, self.out_channels, self.out_channels, kernel_size, padding=padding, dtype=dtype, device=device) + , + ) + + if self.out_channels == channels: + self.skip_connection = nn.Identity() + elif use_conv: + self.skip_connection = operations.conv_nd( + dims, 
channels, self.out_channels, kernel_size, padding=padding, dtype=dtype, device=device + ) + else: + self.skip_connection = operations.conv_nd(dims, channels, self.out_channels, 1, dtype=dtype, device=device) + + def forward(self, x, emb): + """ + Apply the block to a Tensor, conditioned on a timestep embedding. + :param x: an [N x C x ...] Tensor of features. + :param emb: an [N x emb_channels] Tensor of timestep embeddings. + :return: an [N x C x ...] Tensor of outputs. + """ + return checkpoint( + self._forward, (x, emb), self.parameters(), self.use_checkpoint + ) + + + def _forward(self, x, emb): + if self.updown: + in_rest, in_conv = self.in_layers[:-1], self.in_layers[-1] + h = in_rest(x) + h = self.h_upd(h) + x = self.x_upd(x) + h = in_conv(h) + else: + h = self.in_layers(x) + + emb_out = None + if not self.skip_t_emb: + emb_out = self.emb_layers(emb).type(h.dtype) + while len(emb_out.shape) < len(h.shape): + emb_out = emb_out[..., None] + if self.use_scale_shift_norm: + out_norm, out_rest = self.out_layers[0], self.out_layers[1:] + h = out_norm(h) + if emb_out is not None: + scale, shift = th.chunk(emb_out, 2, dim=1) + h *= (1 + scale) + h += shift + h = out_rest(h) + else: + if emb_out is not None: + if self.exchange_temb_dims: + emb_out = emb_out.movedim(1, 2) + h = h + emb_out + h = self.out_layers(h) + return self.skip_connection(x) + h + + +class VideoResBlock(ResBlock): + def __init__( + self, + channels: int, + emb_channels: int, + dropout: float, + video_kernel_size=3, + merge_strategy: str = "fixed", + merge_factor: float = 0.5, + out_channels=None, + use_conv: bool = False, + use_scale_shift_norm: bool = False, + dims: int = 2, + use_checkpoint: bool = False, + up: bool = False, + down: bool = False, + dtype=None, + device=None, + operations=ops + ): + super().__init__( + channels, + emb_channels, + dropout, + out_channels=out_channels, + use_conv=use_conv, + use_scale_shift_norm=use_scale_shift_norm, + dims=dims, + use_checkpoint=use_checkpoint, + up=up, + down=down, + dtype=dtype, + device=device, + operations=operations + ) + + self.time_stack = ResBlock( + default(out_channels, channels), + emb_channels, + dropout=dropout, + dims=3, + out_channels=default(out_channels, channels), + use_scale_shift_norm=False, + use_conv=False, + up=False, + down=False, + kernel_size=video_kernel_size, + use_checkpoint=use_checkpoint, + exchange_temb_dims=True, + dtype=dtype, + device=device, + operations=operations + ) + self.time_mixer = AlphaBlender( + alpha=merge_factor, + merge_strategy=merge_strategy, + rearrange_pattern="b t -> b 1 t 1 1", + ) + + def forward( + self, + x: th.Tensor, + emb: th.Tensor, + num_video_frames: int, + image_only_indicator = None, + ) -> th.Tensor: + x = super().forward(x, emb) + + x_mix = rearrange(x, "(b t) c h w -> b c t h w", t=num_video_frames) + x = rearrange(x, "(b t) c h w -> b c t h w", t=num_video_frames) + + x = self.time_stack( + x, rearrange(emb, "(b t) ... 
-> b t ...", t=num_video_frames) + ) + x = self.time_mixer( + x_spatial=x_mix, x_temporal=x, image_only_indicator=image_only_indicator + ) + x = rearrange(x, "b c t h w -> (b t) c h w") + return x + + +class Timestep(nn.Module): + def __init__(self, dim): + super().__init__() + self.dim = dim + + def forward(self, t): + return timestep_embedding(t, self.dim) + +def apply_control(h, control, name): + if control is not None and name in control and len(control[name]) > 0: + ctrl = control[name].pop() + if ctrl is not None: + try: + h += ctrl + except: + logging.warning("warning control could not be applied {} {}".format(h.shape, ctrl.shape)) + return h + +class UNetModel(nn.Module): + """ + The full UNet model with attention and timestep embedding. + :param in_channels: channels in the input Tensor. + :param model_channels: base channel count for the model. + :param out_channels: channels in the output Tensor. + :param num_res_blocks: number of residual blocks per downsample. + :param dropout: the dropout probability. + :param channel_mult: channel multiplier for each level of the UNet. + :param conv_resample: if True, use learned convolutions for upsampling and + downsampling. + :param dims: determines if the signal is 1D, 2D, or 3D. + :param num_classes: if specified (as an int), then this model will be + class-conditional with `num_classes` classes. + :param use_checkpoint: use gradient checkpointing to reduce memory usage. + :param num_heads: the number of attention heads in each attention layer. + :param num_heads_channels: if specified, ignore num_heads and instead use + a fixed channel width per attention head. + :param num_heads_upsample: works with num_heads to set a different number + of heads for upsampling. Deprecated. + :param use_scale_shift_norm: use a FiLM-like conditioning mechanism. + :param resblock_updown: use residual blocks for up/downsampling. + :param use_new_attention_order: use a different attention pattern for potentially + increased efficiency. + """ + + def __init__( + self, + image_size, + in_channels, + model_channels, + out_channels, + num_res_blocks, + dropout=0, + channel_mult=(1, 2, 4, 8), + conv_resample=True, + dims=2, + num_classes=None, + use_checkpoint=False, + dtype=th.float32, + num_heads=-1, + num_head_channels=-1, + num_heads_upsample=-1, + use_scale_shift_norm=False, + resblock_updown=False, + use_new_attention_order=False, + use_spatial_transformer=False, # custom transformer support + transformer_depth=1, # custom transformer support + context_dim=None, # custom transformer support + n_embed=None, # custom support for prediction of discrete ids into codebook of first stage vq model + legacy=True, + disable_self_attentions=None, + num_attention_blocks=None, + disable_middle_self_attn=False, + use_linear_in_transformer=False, + adm_in_channels=None, + transformer_depth_middle=None, + transformer_depth_output=None, + use_temporal_resblock=False, + use_temporal_attention=False, + time_context_dim=None, + extra_ff_mix_layer=False, + use_spatial_context=False, + merge_strategy=None, + merge_factor=0.0, + video_kernel_size=None, + disable_temporal_crossattention=False, + max_ddpm_temb_period=10000, + attn_precision=None, + device=None, + operations=ops, + ): + super().__init__() + + if context_dim is not None: + assert use_spatial_transformer, 'Fool!! You forgot to use the spatial transformer for your cross-attention conditioning...' 
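+        # For orientation only (typical SD1.x-style values, not defaults):
+        # model_channels=320, channel_mult=(1, 2, 4, 4), num_res_blocks=2,
+        # context_dim=768, num_heads=8, use_spatial_transformer=True.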
+ # from omegaconf.listconfig import ListConfig + # if type(context_dim) == ListConfig: + # context_dim = list(context_dim) + + if num_heads_upsample == -1: + num_heads_upsample = num_heads + + if num_heads == -1: + assert num_head_channels != -1, 'Either num_heads or num_head_channels has to be set' + + if num_head_channels == -1: + assert num_heads != -1, 'Either num_heads or num_head_channels has to be set' + + self.in_channels = in_channels + self.model_channels = model_channels + self.out_channels = out_channels + + if isinstance(num_res_blocks, int): + self.num_res_blocks = len(channel_mult) * [num_res_blocks] + else: + if len(num_res_blocks) != len(channel_mult): + raise ValueError("provide num_res_blocks either as an int (globally constant) or " + "as a list/tuple (per-level) with the same length as channel_mult") + self.num_res_blocks = num_res_blocks + + if disable_self_attentions is not None: + # should be a list of booleans, indicating whether to disable self-attention in TransformerBlocks or not + assert len(disable_self_attentions) == len(channel_mult) + if num_attention_blocks is not None: + assert len(num_attention_blocks) == len(self.num_res_blocks) + + transformer_depth = transformer_depth[:] + transformer_depth_output = transformer_depth_output[:] + + self.dropout = dropout + self.channel_mult = channel_mult + self.conv_resample = conv_resample + self.num_classes = num_classes + self.use_checkpoint = use_checkpoint + self.dtype = dtype + self.num_heads = num_heads + self.num_head_channels = num_head_channels + self.num_heads_upsample = num_heads_upsample + self.use_temporal_resblocks = use_temporal_resblock + self.predict_codebook_ids = n_embed is not None + + self.default_num_video_frames = None + + time_embed_dim = model_channels * 4 + self.time_embed = nn.Sequential( + operations.Linear(model_channels, time_embed_dim, dtype=self.dtype, device=device), + nn.SiLU(), + operations.Linear(time_embed_dim, time_embed_dim, dtype=self.dtype, device=device), + ) + + if self.num_classes is not None: + if isinstance(self.num_classes, int): + self.label_emb = nn.Embedding(num_classes, time_embed_dim, dtype=self.dtype, device=device) + elif self.num_classes == "continuous": + logging.debug("setting up linear c_adm embedding layer") + self.label_emb = nn.Linear(1, time_embed_dim) + elif self.num_classes == "sequential": + assert adm_in_channels is not None + self.label_emb = nn.Sequential( + nn.Sequential( + operations.Linear(adm_in_channels, time_embed_dim, dtype=self.dtype, device=device), + nn.SiLU(), + operations.Linear(time_embed_dim, time_embed_dim, dtype=self.dtype, device=device), + ) + ) + else: + raise ValueError() + + self.input_blocks = nn.ModuleList( + [ + TimestepEmbedSequential( + operations.conv_nd(dims, in_channels, model_channels, 3, padding=1, dtype=self.dtype, device=device) + ) + ] + ) + self._feature_size = model_channels + input_block_chans = [model_channels] + ch = model_channels + ds = 1 + + def get_attention_layer( + ch, + num_heads, + dim_head, + depth=1, + context_dim=None, + use_checkpoint=False, + disable_self_attn=False, + ): + if use_temporal_attention: + return SpatialVideoTransformer( + ch, + num_heads, + dim_head, + depth=depth, + context_dim=context_dim, + time_context_dim=time_context_dim, + dropout=dropout, + ff_in=extra_ff_mix_layer, + use_spatial_context=use_spatial_context, + merge_strategy=merge_strategy, + merge_factor=merge_factor, + checkpoint=use_checkpoint, + use_linear=use_linear_in_transformer, + disable_self_attn=disable_self_attn, + 
disable_temporal_crossattention=disable_temporal_crossattention, + max_time_embed_period=max_ddpm_temb_period, + attn_precision=attn_precision, + dtype=self.dtype, device=device, operations=operations + ) + else: + return SpatialTransformer( + ch, num_heads, dim_head, depth=depth, context_dim=context_dim, + disable_self_attn=disable_self_attn, use_linear=use_linear_in_transformer, + use_checkpoint=use_checkpoint, attn_precision=attn_precision, dtype=self.dtype, device=device, operations=operations + ) + + def get_resblock( + merge_factor, + merge_strategy, + video_kernel_size, + ch, + time_embed_dim, + dropout, + out_channels, + dims, + use_checkpoint, + use_scale_shift_norm, + down=False, + up=False, + dtype=None, + device=None, + operations=ops + ): + if self.use_temporal_resblocks: + return VideoResBlock( + merge_factor=merge_factor, + merge_strategy=merge_strategy, + video_kernel_size=video_kernel_size, + channels=ch, + emb_channels=time_embed_dim, + dropout=dropout, + out_channels=out_channels, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + down=down, + up=up, + dtype=dtype, + device=device, + operations=operations + ) + else: + return ResBlock( + channels=ch, + emb_channels=time_embed_dim, + dropout=dropout, + out_channels=out_channels, + use_checkpoint=use_checkpoint, + dims=dims, + use_scale_shift_norm=use_scale_shift_norm, + down=down, + up=up, + dtype=dtype, + device=device, + operations=operations + ) + + for level, mult in enumerate(channel_mult): + for nr in range(self.num_res_blocks[level]): + layers = [ + get_resblock( + merge_factor=merge_factor, + merge_strategy=merge_strategy, + video_kernel_size=video_kernel_size, + ch=ch, + time_embed_dim=time_embed_dim, + dropout=dropout, + out_channels=mult * model_channels, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + dtype=self.dtype, + device=device, + operations=operations, + ) + ] + ch = mult * model_channels + num_transformers = transformer_depth.pop(0) + if num_transformers > 0: + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + #num_heads = 1 + dim_head = ch // num_heads if use_spatial_transformer else num_head_channels + if exists(disable_self_attentions): + disabled_sa = disable_self_attentions[level] + else: + disabled_sa = False + + if not exists(num_attention_blocks) or nr < num_attention_blocks[level]: + layers.append(get_attention_layer( + ch, num_heads, dim_head, depth=num_transformers, context_dim=context_dim, + disable_self_attn=disabled_sa, use_checkpoint=use_checkpoint) + ) + self.input_blocks.append(TimestepEmbedSequential(*layers)) + self._feature_size += ch + input_block_chans.append(ch) + if level != len(channel_mult) - 1: + out_ch = ch + self.input_blocks.append( + TimestepEmbedSequential( + get_resblock( + merge_factor=merge_factor, + merge_strategy=merge_strategy, + video_kernel_size=video_kernel_size, + ch=ch, + time_embed_dim=time_embed_dim, + dropout=dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + down=True, + dtype=self.dtype, + device=device, + operations=operations + ) + if resblock_updown + else Downsample( + ch, conv_resample, dims=dims, out_channels=out_ch, dtype=self.dtype, device=device, operations=operations + ) + ) + ) + ch = out_ch + input_block_chans.append(ch) + ds *= 2 + self._feature_size += ch + + if num_head_channels == 
-1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + #num_heads = 1 + dim_head = ch // num_heads if use_spatial_transformer else num_head_channels + mid_block = [ + get_resblock( + merge_factor=merge_factor, + merge_strategy=merge_strategy, + video_kernel_size=video_kernel_size, + ch=ch, + time_embed_dim=time_embed_dim, + dropout=dropout, + out_channels=None, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + dtype=self.dtype, + device=device, + operations=operations + )] + + self.middle_block = None + if transformer_depth_middle >= -1: + if transformer_depth_middle >= 0: + mid_block += [get_attention_layer( # always uses a self-attn + ch, num_heads, dim_head, depth=transformer_depth_middle, context_dim=context_dim, + disable_self_attn=disable_middle_self_attn, use_checkpoint=use_checkpoint + ), + get_resblock( + merge_factor=merge_factor, + merge_strategy=merge_strategy, + video_kernel_size=video_kernel_size, + ch=ch, + time_embed_dim=time_embed_dim, + dropout=dropout, + out_channels=None, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + dtype=self.dtype, + device=device, + operations=operations + )] + self.middle_block = TimestepEmbedSequential(*mid_block) + self._feature_size += ch + + self.output_blocks = nn.ModuleList([]) + for level, mult in list(enumerate(channel_mult))[::-1]: + for i in range(self.num_res_blocks[level] + 1): + ich = input_block_chans.pop() + layers = [ + get_resblock( + merge_factor=merge_factor, + merge_strategy=merge_strategy, + video_kernel_size=video_kernel_size, + ch=ch + ich, + time_embed_dim=time_embed_dim, + dropout=dropout, + out_channels=model_channels * mult, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + dtype=self.dtype, + device=device, + operations=operations + ) + ] + ch = model_channels * mult + num_transformers = transformer_depth_output.pop() + if num_transformers > 0: + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + #num_heads = 1 + dim_head = ch // num_heads if use_spatial_transformer else num_head_channels + if exists(disable_self_attentions): + disabled_sa = disable_self_attentions[level] + else: + disabled_sa = False + + if not exists(num_attention_blocks) or i < num_attention_blocks[level]: + layers.append( + get_attention_layer( + ch, num_heads, dim_head, depth=num_transformers, context_dim=context_dim, + disable_self_attn=disabled_sa, use_checkpoint=use_checkpoint + ) + ) + if level and i == self.num_res_blocks[level]: + out_ch = ch + layers.append( + get_resblock( + merge_factor=merge_factor, + merge_strategy=merge_strategy, + video_kernel_size=video_kernel_size, + ch=ch, + time_embed_dim=time_embed_dim, + dropout=dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + up=True, + dtype=self.dtype, + device=device, + operations=operations + ) + if resblock_updown + else Upsample(ch, conv_resample, dims=dims, out_channels=out_ch, dtype=self.dtype, device=device, operations=operations) + ) + ds //= 2 + self.output_blocks.append(TimestepEmbedSequential(*layers)) + self._feature_size += ch + + self.out = nn.Sequential( + operations.GroupNorm(32, ch, dtype=self.dtype, device=device), + nn.SiLU(), + operations.conv_nd(dims, model_channels, out_channels, 3, padding=1, 
dtype=self.dtype, device=device), + ) + if self.predict_codebook_ids: + self.id_predictor = nn.Sequential( + operations.GroupNorm(32, ch, dtype=self.dtype, device=device), + operations.conv_nd(dims, model_channels, n_embed, 1, dtype=self.dtype, device=device), + #nn.LogSoftmax(dim=1) # change to cross_entropy and produce non-normalized logits + ) + + def forward(self, x, timesteps=None, context=None, y=None, control=None, transformer_options={}, **kwargs): + """ + Apply the model to an input batch. + :param x: an [N x C x ...] Tensor of inputs. + :param timesteps: a 1-D batch of timesteps. + :param context: conditioning plugged in via crossattn + :param y: an [N] Tensor of labels, if class-conditional. + :return: an [N x C x ...] Tensor of outputs. + """ + transformer_options["original_shape"] = list(x.shape) + transformer_options["transformer_index"] = 0 + transformer_patches = transformer_options.get("patches", {}) + + num_video_frames = kwargs.get("num_video_frames", self.default_num_video_frames) + image_only_indicator = kwargs.get("image_only_indicator", None) + time_context = kwargs.get("time_context", None) + + assert (y is not None) == ( + self.num_classes is not None + ), "must specify y if and only if the model is class-conditional" + hs = [] + t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False).to(x.dtype) + emb = self.time_embed(t_emb) + + if "emb_patch" in transformer_patches: + patch = transformer_patches["emb_patch"] + for p in patch: + emb = p(emb, self.model_channels, transformer_options) + + if self.num_classes is not None: + assert y.shape[0] == x.shape[0] + emb = emb + self.label_emb(y) + + h = x + for id, module in enumerate(self.input_blocks): + transformer_options["block"] = ("input", id) + h = forward_timestep_embed(module, h, emb, context, transformer_options, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator) + h = apply_control(h, control, 'input') + if "input_block_patch" in transformer_patches: + patch = transformer_patches["input_block_patch"] + for p in patch: + h = p(h, transformer_options) + + hs.append(h) + if "input_block_patch_after_skip" in transformer_patches: + patch = transformer_patches["input_block_patch_after_skip"] + for p in patch: + h = p(h, transformer_options) + + transformer_options["block"] = ("middle", 0) + if self.middle_block is not None: + h = forward_timestep_embed(self.middle_block, h, emb, context, transformer_options, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator) + h = apply_control(h, control, 'middle') + + + for id, module in enumerate(self.output_blocks): + transformer_options["block"] = ("output", id) + hsp = hs.pop() + hsp = apply_control(hsp, control, 'output') + + if "output_block_patch" in transformer_patches: + patch = transformer_patches["output_block_patch"] + for p in patch: + h, hsp = p(h, hsp, transformer_options) + + h = th.cat([h, hsp], dim=1) + del hsp + if len(hs) > 0: + output_shape = hs[-1].shape + else: + output_shape = None + h = forward_timestep_embed(module, h, emb, context, transformer_options, output_shape, time_context=time_context, num_video_frames=num_video_frames, image_only_indicator=image_only_indicator) + h = h.type(x.dtype) + if self.predict_codebook_ids: + return self.id_predictor(h) + else: + return self.out(h) diff --git a/src/comfyui/comfy/ldm/modules/diffusionmodules/upscaling.py b/src/comfyui/comfy/ldm/modules/diffusionmodules/upscaling.py new file mode 
100644 index 0000000000000000000000000000000000000000..f5ac7c2f9138d6d34cda735d2201225d46831154 --- /dev/null +++ b/src/comfyui/comfy/ldm/modules/diffusionmodules/upscaling.py @@ -0,0 +1,85 @@ +import torch +import torch.nn as nn +import numpy as np +from functools import partial + +from .util import extract_into_tensor, make_beta_schedule +from comfy.ldm.util import default + + +class AbstractLowScaleModel(nn.Module): + # for concatenating a downsampled image to the latent representation + def __init__(self, noise_schedule_config=None): + super(AbstractLowScaleModel, self).__init__() + if noise_schedule_config is not None: + self.register_schedule(**noise_schedule_config) + + def register_schedule(self, beta_schedule="linear", timesteps=1000, + linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3): + betas = make_beta_schedule(beta_schedule, timesteps, linear_start=linear_start, linear_end=linear_end, + cosine_s=cosine_s) + alphas = 1. - betas + alphas_cumprod = np.cumprod(alphas, axis=0) + alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1]) + + timesteps, = betas.shape + self.num_timesteps = int(timesteps) + self.linear_start = linear_start + self.linear_end = linear_end + assert alphas_cumprod.shape[0] == self.num_timesteps, 'alphas have to be defined for each timestep' + + to_torch = partial(torch.tensor, dtype=torch.float32) + + self.register_buffer('betas', to_torch(betas)) + self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod)) + self.register_buffer('alphas_cumprod_prev', to_torch(alphas_cumprod_prev)) + + # calculations for diffusion q(x_t | x_{t-1}) and others + self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod))) + self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod))) + self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod))) + self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod))) + self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. 
/ alphas_cumprod - 1))) + + def q_sample(self, x_start, t, noise=None, seed=None): + if noise is None: + if seed is None: + noise = torch.randn_like(x_start) + else: + noise = torch.randn(x_start.size(), dtype=x_start.dtype, layout=x_start.layout, generator=torch.manual_seed(seed)).to(x_start.device) + return (extract_into_tensor(self.sqrt_alphas_cumprod.to(x_start.device), t, x_start.shape) * x_start + + extract_into_tensor(self.sqrt_one_minus_alphas_cumprod.to(x_start.device), t, x_start.shape) * noise) + + def forward(self, x): + return x, None + + def decode(self, x): + return x + + +class SimpleImageConcat(AbstractLowScaleModel): + # no noise level conditioning + def __init__(self): + super(SimpleImageConcat, self).__init__(noise_schedule_config=None) + self.max_noise_level = 0 + + def forward(self, x): + # fix to constant noise level + return x, torch.zeros(x.shape[0], device=x.device).long() + + +class ImageConcatWithNoiseAugmentation(AbstractLowScaleModel): + def __init__(self, noise_schedule_config, max_noise_level=1000, to_cuda=False): + super().__init__(noise_schedule_config=noise_schedule_config) + self.max_noise_level = max_noise_level + + def forward(self, x, noise_level=None, seed=None): + if noise_level is None: + noise_level = torch.randint(0, self.max_noise_level, (x.shape[0],), device=x.device).long() + else: + assert isinstance(noise_level, torch.Tensor) + z = self.q_sample(x, noise_level, seed=seed) + return z, noise_level + + + diff --git a/src/comfyui/comfy/ldm/modules/diffusionmodules/util.py b/src/comfyui/comfy/ldm/modules/diffusionmodules/util.py new file mode 100644 index 0000000000000000000000000000000000000000..ce14ad5e18cf1c8f821878f395cc1bab50fad476 --- /dev/null +++ b/src/comfyui/comfy/ldm/modules/diffusionmodules/util.py @@ -0,0 +1,306 @@ +# adopted from +# https://github.com/openai/improved-diffusion/blob/main/improved_diffusion/gaussian_diffusion.py +# and +# https://github.com/lucidrains/denoising-diffusion-pytorch/blob/7706bdfc6f527f58d33f84b7b522e61e6e3164b3/denoising_diffusion_pytorch/denoising_diffusion_pytorch.py +# and +# https://github.com/openai/guided-diffusion/blob/0ba878e517b276c45d1195eb29f6f5f72659a05b/guided_diffusion/nn.py +# +# thanks! + + +import os +import math +import torch +import torch.nn as nn +import numpy as np +from einops import repeat, rearrange + +from comfy.ldm.util import instantiate_from_config + +class AlphaBlender(nn.Module): + strategies = ["learned", "fixed", "learned_with_images"] + + def __init__( + self, + alpha: float, + merge_strategy: str = "learned_with_images", + rearrange_pattern: str = "b t -> (b t) 1 1", + ): + super().__init__() + self.merge_strategy = merge_strategy + self.rearrange_pattern = rearrange_pattern + + assert ( + merge_strategy in self.strategies + ), f"merge_strategy needs to be in {self.strategies}" + + if self.merge_strategy == "fixed": + self.register_buffer("mix_factor", torch.Tensor([alpha])) + elif ( + self.merge_strategy == "learned" + or self.merge_strategy == "learned_with_images" + ): + self.register_parameter( + "mix_factor", torch.nn.Parameter(torch.Tensor([alpha])) + ) + else: + raise ValueError(f"unknown merge strategy {self.merge_strategy}") + + def get_alpha(self, image_only_indicator: torch.Tensor, device) -> torch.Tensor: + # skip_time_mix = rearrange(repeat(skip_time_mix, 'b -> (b t) () () ()', t=t), '(b t) 1 ... 
-> b 1 t ...', t=t) + if self.merge_strategy == "fixed": + # make shape compatible + # alpha = repeat(self.mix_factor, '1 -> b () t () ()', t=t, b=bs) + alpha = self.mix_factor.to(device) + elif self.merge_strategy == "learned": + alpha = torch.sigmoid(self.mix_factor.to(device)) + # make shape compatible + # alpha = repeat(alpha, '1 -> s () ()', s = t * bs) + elif self.merge_strategy == "learned_with_images": + if image_only_indicator is None: + alpha = rearrange(torch.sigmoid(self.mix_factor.to(device)), "... -> ... 1") + else: + alpha = torch.where( + image_only_indicator.bool(), + torch.ones(1, 1, device=image_only_indicator.device), + rearrange(torch.sigmoid(self.mix_factor.to(image_only_indicator.device)), "... -> ... 1"), + ) + alpha = rearrange(alpha, self.rearrange_pattern) + # make shape compatible + # alpha = repeat(alpha, '1 -> s () ()', s = t * bs) + else: + raise NotImplementedError() + return alpha + + def forward( + self, + x_spatial, + x_temporal, + image_only_indicator=None, + ) -> torch.Tensor: + alpha = self.get_alpha(image_only_indicator, x_spatial.device) + x = ( + alpha.to(x_spatial.dtype) * x_spatial + + (1.0 - alpha).to(x_spatial.dtype) * x_temporal + ) + return x + + +def make_beta_schedule(schedule, n_timestep, linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3): + if schedule == "linear": + betas = ( + torch.linspace(linear_start ** 0.5, linear_end ** 0.5, n_timestep, dtype=torch.float64) ** 2 + ) + + elif schedule == "cosine": + timesteps = ( + torch.arange(n_timestep + 1, dtype=torch.float64) / n_timestep + cosine_s + ) + alphas = timesteps / (1 + cosine_s) * np.pi / 2 + alphas = torch.cos(alphas).pow(2) + alphas = alphas / alphas[0] + betas = 1 - alphas[1:] / alphas[:-1] + betas = torch.clamp(betas, min=0, max=0.999) + + elif schedule == "squaredcos_cap_v2": # used for karlo prior + # return early + return betas_for_alpha_bar( + n_timestep, + lambda t: math.cos((t + 0.008) / 1.008 * math.pi / 2) ** 2, + ) + + elif schedule == "sqrt_linear": + betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64) + elif schedule == "sqrt": + betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64) ** 0.5 + else: + raise ValueError(f"schedule '{schedule}' unknown.") + return betas + + +def make_ddim_timesteps(ddim_discr_method, num_ddim_timesteps, num_ddpm_timesteps, verbose=True): + if ddim_discr_method == 'uniform': + c = num_ddpm_timesteps // num_ddim_timesteps + ddim_timesteps = np.asarray(list(range(0, num_ddpm_timesteps, c))) + elif ddim_discr_method == 'quad': + ddim_timesteps = ((np.linspace(0, np.sqrt(num_ddpm_timesteps * .8), num_ddim_timesteps)) ** 2).astype(int) + else: + raise NotImplementedError(f'There is no ddim discretization method called "{ddim_discr_method}"') + + # assert ddim_timesteps.shape[0] == num_ddim_timesteps + # add one to get the final alpha values right (the ones from first scale to data during sampling) + steps_out = ddim_timesteps + 1 + if verbose: + print(f'Selected timesteps for ddim sampler: {steps_out}') + return steps_out + + +def make_ddim_sampling_parameters(alphacums, ddim_timesteps, eta, verbose=True): + # select alphas for computing the variance schedule + alphas = alphacums[ddim_timesteps] + alphas_prev = np.asarray([alphacums[0]] + alphacums[ddim_timesteps[:-1]].tolist()) + + # according the the formula provided in https://arxiv.org/abs/2010.02502 + sigmas = eta * np.sqrt((1 - alphas_prev) / (1 - alphas) * (1 - alphas / alphas_prev)) + if verbose: + print(f'Selected alphas for ddim 
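
A quick worked example of `make_ddim_timesteps` above in its 'uniform' mode, assuming the usual 1000 DDPM steps subsampled to 50 DDIM steps:

import numpy as np

num_ddpm, num_ddim = 1000, 50
c = num_ddpm // num_ddim
ddim_timesteps = np.asarray(list(range(0, num_ddpm, c)))  # 0, 20, 40, ..., 980
steps_out = ddim_timesteps + 1                            # the +1 shift above
print(steps_out[:5], steps_out[-1])                       # [ 1 21 41 61 81 ] 981
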
sampler: a_t: {alphas}; a_(t-1): {alphas_prev}') + print(f'For the chosen value of eta, which is {eta}, ' + f'this results in the following sigma_t schedule for ddim sampler {sigmas}') + return sigmas, alphas, alphas_prev + + +def betas_for_alpha_bar(num_diffusion_timesteps, alpha_bar, max_beta=0.999): + """ + Create a beta schedule that discretizes the given alpha_t_bar function, + which defines the cumulative product of (1-beta) over time from t = [0,1]. + :param num_diffusion_timesteps: the number of betas to produce. + :param alpha_bar: a lambda that takes an argument t from 0 to 1 and + produces the cumulative product of (1-beta) up to that + part of the diffusion process. + :param max_beta: the maximum beta to use; use values lower than 1 to + prevent singularities. + """ + betas = [] + for i in range(num_diffusion_timesteps): + t1 = i / num_diffusion_timesteps + t2 = (i + 1) / num_diffusion_timesteps + betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta)) + return np.array(betas) + + +def extract_into_tensor(a, t, x_shape): + b, *_ = t.shape + out = a.gather(-1, t) + return out.reshape(b, *((1,) * (len(x_shape) - 1))) + + +def checkpoint(func, inputs, params, flag): + """ + Evaluate a function without caching intermediate activations, allowing for + reduced memory at the expense of extra compute in the backward pass. + :param func: the function to evaluate. + :param inputs: the argument sequence to pass to `func`. + :param params: a sequence of parameters `func` depends on but does not + explicitly take as arguments. + :param flag: if False, disable gradient checkpointing. + """ + if flag: + args = tuple(inputs) + tuple(params) + return CheckpointFunction.apply(func, len(inputs), *args) + else: + return func(*inputs) + + +class CheckpointFunction(torch.autograd.Function): + @staticmethod + def forward(ctx, run_function, length, *args): + ctx.run_function = run_function + ctx.input_tensors = list(args[:length]) + ctx.input_params = list(args[length:]) + ctx.gpu_autocast_kwargs = {"enabled": torch.is_autocast_enabled(), + "dtype": torch.get_autocast_gpu_dtype(), + "cache_enabled": torch.is_autocast_cache_enabled()} + with torch.no_grad(): + output_tensors = ctx.run_function(*ctx.input_tensors) + return output_tensors + + @staticmethod + def backward(ctx, *output_grads): + ctx.input_tensors = [x.detach().requires_grad_(True) for x in ctx.input_tensors] + with torch.enable_grad(), \ + torch.cuda.amp.autocast(**ctx.gpu_autocast_kwargs): + # Fixes a bug where the first op in run_function modifies the + # Tensor storage in place, which is not allowed for detach()'d + # Tensors. + shallow_copies = [x.view_as(x) for x in ctx.input_tensors] + output_tensors = ctx.run_function(*shallow_copies) + input_grads = torch.autograd.grad( + output_tensors, + ctx.input_tensors + ctx.input_params, + output_grads, + allow_unused=True, + ) + del ctx.input_tensors + del ctx.input_params + del output_tensors + return (None, None) + input_grads + + +def timestep_embedding(timesteps, dim, max_period=10000, repeat_only=False): + """ + Create sinusoidal timestep embeddings. + :param timesteps: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an [N x dim] Tensor of positional embeddings. 
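
A small sketch of what `extract_into_tensor` above does: pick one schedule value per batch element, then reshape so it broadcasts against image-shaped tensors (shapes here are arbitrary):

import torch

a = torch.linspace(0, 1, 1000)     # any per-timestep schedule
t = torch.tensor([0, 499, 999])    # a batch of timesteps
x_shape = (3, 4, 32, 32)

out = a.gather(-1, t).reshape(3, 1, 1, 1)  # == extract_into_tensor(a, t, x_shape)
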
+ """ + if not repeat_only: + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32, device=timesteps.device) / half + ) + args = timesteps[:, None].float() * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1) + else: + embedding = repeat(timesteps, 'b -> b d', d=dim) + return embedding + + +def zero_module(module): + """ + Zero out the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().zero_() + return module + + +def scale_module(module, scale): + """ + Scale the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().mul_(scale) + return module + + +def mean_flat(tensor): + """ + Take the mean over all non-batch dimensions. + """ + return tensor.mean(dim=list(range(1, len(tensor.shape)))) + + +def avg_pool_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D average pooling module. + """ + if dims == 1: + return nn.AvgPool1d(*args, **kwargs) + elif dims == 2: + return nn.AvgPool2d(*args, **kwargs) + elif dims == 3: + return nn.AvgPool3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +class HybridConditioner(nn.Module): + + def __init__(self, c_concat_config, c_crossattn_config): + super().__init__() + self.concat_conditioner = instantiate_from_config(c_concat_config) + self.crossattn_conditioner = instantiate_from_config(c_crossattn_config) + + def forward(self, c_concat, c_crossattn): + c_concat = self.concat_conditioner(c_concat) + c_crossattn = self.crossattn_conditioner(c_crossattn) + return {'c_concat': [c_concat], 'c_crossattn': [c_crossattn]} + + +def noise_like(shape, device, repeat=False): + repeat_noise = lambda: torch.randn((1, *shape[1:]), device=device).repeat(shape[0], *((1,) * (len(shape) - 1))) + noise = lambda: torch.randn(shape, device=device) + return repeat_noise() if repeat else noise() diff --git a/src/comfyui/comfy/ldm/modules/distributions/__init__.py b/src/comfyui/comfy/ldm/modules/distributions/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/comfyui/comfy/ldm/modules/distributions/__pycache__/__init__.cpython-310.pyc b/src/comfyui/comfy/ldm/modules/distributions/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6e8f632bce7bd3e91806b41553f8ec42411f6a53 Binary files /dev/null and b/src/comfyui/comfy/ldm/modules/distributions/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/modules/distributions/__pycache__/__init__.cpython-38.pyc b/src/comfyui/comfy/ldm/modules/distributions/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aae16744ebceea09516aa41da4a063fce8436256 Binary files /dev/null and b/src/comfyui/comfy/ldm/modules/distributions/__pycache__/__init__.cpython-38.pyc differ diff --git a/src/comfyui/comfy/ldm/modules/distributions/__pycache__/distributions.cpython-310.pyc b/src/comfyui/comfy/ldm/modules/distributions/__pycache__/distributions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..81be549fd9efcc6415fad121ef6895b4b6099195 Binary files /dev/null and b/src/comfyui/comfy/ldm/modules/distributions/__pycache__/distributions.cpython-310.pyc differ diff --git 
a/src/comfyui/comfy/ldm/modules/distributions/__pycache__/distributions.cpython-38.pyc b/src/comfyui/comfy/ldm/modules/distributions/__pycache__/distributions.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1d75bbf397055026352e8628905612f15ca45e1a Binary files /dev/null and b/src/comfyui/comfy/ldm/modules/distributions/__pycache__/distributions.cpython-38.pyc differ diff --git a/src/comfyui/comfy/ldm/modules/distributions/distributions.py b/src/comfyui/comfy/ldm/modules/distributions/distributions.py new file mode 100644 index 0000000000000000000000000000000000000000..f2b8ef901130efc171aa69742ca0244d94d3f2e9 --- /dev/null +++ b/src/comfyui/comfy/ldm/modules/distributions/distributions.py @@ -0,0 +1,92 @@ +import torch +import numpy as np + + +class AbstractDistribution: + def sample(self): + raise NotImplementedError() + + def mode(self): + raise NotImplementedError() + + +class DiracDistribution(AbstractDistribution): + def __init__(self, value): + self.value = value + + def sample(self): + return self.value + + def mode(self): + return self.value + + +class DiagonalGaussianDistribution(object): + def __init__(self, parameters, deterministic=False): + self.parameters = parameters + self.mean, self.logvar = torch.chunk(parameters, 2, dim=1) + self.logvar = torch.clamp(self.logvar, -30.0, 20.0) + self.deterministic = deterministic + self.std = torch.exp(0.5 * self.logvar) + self.var = torch.exp(self.logvar) + if self.deterministic: + self.var = self.std = torch.zeros_like(self.mean).to(device=self.parameters.device) + + def sample(self): + x = self.mean + self.std * torch.randn(self.mean.shape).to(device=self.parameters.device) + return x + + def kl(self, other=None): + if self.deterministic: + return torch.Tensor([0.]) + else: + if other is None: + return 0.5 * torch.sum(torch.pow(self.mean, 2) + + self.var - 1.0 - self.logvar, + dim=[1, 2, 3]) + else: + return 0.5 * torch.sum( + torch.pow(self.mean - other.mean, 2) / other.var + + self.var / other.var - 1.0 - self.logvar + other.logvar, + dim=[1, 2, 3]) + + def nll(self, sample, dims=[1,2,3]): + if self.deterministic: + return torch.Tensor([0.]) + logtwopi = np.log(2.0 * np.pi) + return 0.5 * torch.sum( + logtwopi + self.logvar + torch.pow(sample - self.mean, 2) / self.var, + dim=dims) + + def mode(self): + return self.mean + + +def normal_kl(mean1, logvar1, mean2, logvar2): + """ + source: https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/losses.py#L12 + Compute the KL divergence between two gaussians. + Shapes are automatically broadcasted, so batches can be compared to + scalars, among other use cases. + """ + tensor = None + for obj in (mean1, logvar1, mean2, logvar2): + if isinstance(obj, torch.Tensor): + tensor = obj + break + assert tensor is not None, "at least one argument must be a Tensor" + + # Force variances to be Tensors. Broadcasting helps convert scalars to + # Tensors, but it does not work for torch.exp(). 
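
A sketch of `DiagonalGaussianDistribution` above in its typical VAE use, where the channel axis carries concatenated mean and logvar halves; shapes are made up:

import torch

params = torch.randn(2, 8, 16, 16)           # 8 channels -> 4 mean + 4 logvar
mean, logvar = torch.chunk(params, 2, dim=1)
logvar = logvar.clamp(-30.0, 20.0)
std = (0.5 * logvar).exp()

z = mean + std * torch.randn_like(mean)      # reparameterized sample()
# kl() against a standard normal, summed over all non-batch dims:
kl = 0.5 * torch.sum(mean.pow(2) + logvar.exp() - 1.0 - logvar, dim=[1, 2, 3])
print(z.shape, kl.shape)  # torch.Size([2, 4, 16, 16]) torch.Size([2])
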
+ logvar1, logvar2 = [ + x if isinstance(x, torch.Tensor) else torch.tensor(x).to(tensor) + for x in (logvar1, logvar2) + ] + + return 0.5 * ( + -1.0 + + logvar2 + - logvar1 + + torch.exp(logvar1 - logvar2) + + ((mean1 - mean2) ** 2) * torch.exp(-logvar2) + ) diff --git a/src/comfyui/comfy/ldm/modules/ema.py b/src/comfyui/comfy/ldm/modules/ema.py new file mode 100644 index 0000000000000000000000000000000000000000..bded25019b9bcbcd0260f0b8185f8c7859ca58c4 --- /dev/null +++ b/src/comfyui/comfy/ldm/modules/ema.py @@ -0,0 +1,80 @@ +import torch +from torch import nn + + +class LitEma(nn.Module): + def __init__(self, model, decay=0.9999, use_num_upates=True): + super().__init__() + if decay < 0.0 or decay > 1.0: + raise ValueError('Decay must be between 0 and 1') + + self.m_name2s_name = {} + self.register_buffer('decay', torch.tensor(decay, dtype=torch.float32)) + self.register_buffer('num_updates', torch.tensor(0, dtype=torch.int) if use_num_upates + else torch.tensor(-1, dtype=torch.int)) + + for name, p in model.named_parameters(): + if p.requires_grad: + # remove as '.'-character is not allowed in buffers + s_name = name.replace('.', '') + self.m_name2s_name.update({name: s_name}) + self.register_buffer(s_name, p.clone().detach().data) + + self.collected_params = [] + + def reset_num_updates(self): + del self.num_updates + self.register_buffer('num_updates', torch.tensor(0, dtype=torch.int)) + + def forward(self, model): + decay = self.decay + + if self.num_updates >= 0: + self.num_updates += 1 + decay = min(self.decay, (1 + self.num_updates) / (10 + self.num_updates)) + + one_minus_decay = 1.0 - decay + + with torch.no_grad(): + m_param = dict(model.named_parameters()) + shadow_params = dict(self.named_buffers()) + + for key in m_param: + if m_param[key].requires_grad: + sname = self.m_name2s_name[key] + shadow_params[sname] = shadow_params[sname].type_as(m_param[key]) + shadow_params[sname].sub_(one_minus_decay * (shadow_params[sname] - m_param[key])) + else: + assert not key in self.m_name2s_name + + def copy_to(self, model): + m_param = dict(model.named_parameters()) + shadow_params = dict(self.named_buffers()) + for key in m_param: + if m_param[key].requires_grad: + m_param[key].data.copy_(shadow_params[self.m_name2s_name[key]].data) + else: + assert not key in self.m_name2s_name + + def store(self, parameters): + """ + Save the current parameters for restoring later. + Args: + parameters: Iterable of `torch.nn.Parameter`; the parameters to be + temporarily stored. + """ + self.collected_params = [param.clone() for param in parameters] + + def restore(self, parameters): + """ + Restore the parameters stored with the `store` method. + Useful to validate the model with EMA parameters without affecting the + original optimization process. Store the parameters before the + `copy_to` method. After validation (or model saving), use this to + restore the former parameters. + Args: + parameters: Iterable of `torch.nn.Parameter`; the parameters to be + updated with the stored parameters. 
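
The update rule inside `LitEma.forward` above, reduced to a toy scalar loop (decay constant and step counts are illustrative):

import torch

param = torch.tensor([1.0])    # "live" training parameter (held fixed here)
shadow = torch.tensor([0.0])   # EMA buffer
for num_updates in range(1, 6):
    # warmup: effective decay starts small and approaches the configured 0.9999
    decay = min(0.9999, (1 + num_updates) / (10 + num_updates))
    shadow -= (1.0 - decay) * (shadow - param)
print(shadow)  # creeps toward param; the earliest steps move fastest
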
+ """ + for c_param, param in zip(self.collected_params, parameters): + param.data.copy_(c_param.data) diff --git a/src/comfyui/comfy/ldm/modules/encoders/__init__.py b/src/comfyui/comfy/ldm/modules/encoders/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/comfyui/comfy/ldm/modules/encoders/__pycache__/__init__.cpython-310.pyc b/src/comfyui/comfy/ldm/modules/encoders/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7f8c4d63910c5e6a03e6092f22530b1c1fb9f777 Binary files /dev/null and b/src/comfyui/comfy/ldm/modules/encoders/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/modules/encoders/__pycache__/noise_aug_modules.cpython-310.pyc b/src/comfyui/comfy/ldm/modules/encoders/__pycache__/noise_aug_modules.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cbaff302cadc05125d892a2d1e4cc944ff766fe0 Binary files /dev/null and b/src/comfyui/comfy/ldm/modules/encoders/__pycache__/noise_aug_modules.cpython-310.pyc differ diff --git a/src/comfyui/comfy/ldm/modules/encoders/noise_aug_modules.py b/src/comfyui/comfy/ldm/modules/encoders/noise_aug_modules.py new file mode 100644 index 0000000000000000000000000000000000000000..a5d8660301636fde75808cba50afa539cf1162e0 --- /dev/null +++ b/src/comfyui/comfy/ldm/modules/encoders/noise_aug_modules.py @@ -0,0 +1,35 @@ +from ..diffusionmodules.upscaling import ImageConcatWithNoiseAugmentation +from ..diffusionmodules.openaimodel import Timestep +import torch + +class CLIPEmbeddingNoiseAugmentation(ImageConcatWithNoiseAugmentation): + def __init__(self, *args, clip_stats_path=None, timestep_dim=256, **kwargs): + super().__init__(*args, **kwargs) + if clip_stats_path is None: + clip_mean, clip_std = torch.zeros(timestep_dim), torch.ones(timestep_dim) + else: + clip_mean, clip_std = torch.load(clip_stats_path, map_location="cpu") + self.register_buffer("data_mean", clip_mean[None, :], persistent=False) + self.register_buffer("data_std", clip_std[None, :], persistent=False) + self.time_embed = Timestep(timestep_dim) + + def scale(self, x): + # re-normalize to centered mean and unit variance + x = (x - self.data_mean.to(x.device)) * 1. 
/ self.data_std.to(x.device) + return x + + def unscale(self, x): + # back to original data stats + x = (x * self.data_std.to(x.device)) + self.data_mean.to(x.device) + return x + + def forward(self, x, noise_level=None, seed=None): + if noise_level is None: + noise_level = torch.randint(0, self.max_noise_level, (x.shape[0],), device=x.device).long() + else: + assert isinstance(noise_level, torch.Tensor) + x = self.scale(x) + z = self.q_sample(x, noise_level, seed=seed) + z = self.unscale(z) + noise_level = self.time_embed(noise_level) + return z, noise_level diff --git a/src/comfyui/comfy/ldm/modules/sub_quadratic_attention.py b/src/comfyui/comfy/ldm/modules/sub_quadratic_attention.py new file mode 100644 index 0000000000000000000000000000000000000000..1bc4138c318125047bf7a58237fd8cbf45f2ed72 --- /dev/null +++ b/src/comfyui/comfy/ldm/modules/sub_quadratic_attention.py @@ -0,0 +1,274 @@ +# original source: +# https://github.com/AminRezaei0x443/memory-efficient-attention/blob/1bc0d9e6ac5f82ea43a375135c4e1d3896ee1694/memory_efficient_attention/attention_torch.py +# license: +# MIT +# credit: +# Amin Rezaei (original author) +# Alex Birch (optimized algorithm for 3D tensors, at the expense of removing bias, masking and callbacks) +# implementation of: +# Self-attention Does Not Need O(n2) Memory": +# https://arxiv.org/abs/2112.05682v2 + +from functools import partial +import torch +from torch import Tensor +from torch.utils.checkpoint import checkpoint +import math +import logging + +try: + from typing import Optional, NamedTuple, List, Protocol +except ImportError: + from typing import Optional, NamedTuple, List + from typing_extensions import Protocol + +from torch import Tensor +from typing import List + +from comfy import model_management + +def dynamic_slice( + x: Tensor, + starts: List[int], + sizes: List[int], +) -> Tensor: + slicing = [slice(start, start + size) for start, size in zip(starts, sizes)] + return x[slicing] + +class AttnChunk(NamedTuple): + exp_values: Tensor + exp_weights_sum: Tensor + max_score: Tensor + +class SummarizeChunk(Protocol): + @staticmethod + def __call__( + query: Tensor, + key_t: Tensor, + value: Tensor, + ) -> AttnChunk: ... + +class ComputeQueryChunkAttn(Protocol): + @staticmethod + def __call__( + query: Tensor, + key_t: Tensor, + value: Tensor, + ) -> Tensor: ... 
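
Before the chunked helpers below, it may help to see the underlying trick in isolation: attention computed over key/value chunks with a per-chunk running max, then renormalized against the global max so the result matches unchunked softmax attention. This is only a toy restatement with ad hoc shapes, not the code path itself:

import torch

def chunked_attention(q, k_t, v, kv_chunk=64):
    scale = q.shape[-1] ** -0.5
    values, weights, maxes = [], [], []
    for i in range(0, k_t.shape[-1], kv_chunk):
        s = torch.bmm(q, k_t[:, :, i:i + kv_chunk]) * scale  # chunk scores
        m = s.max(dim=-1, keepdim=True).values               # per-chunk max_score
        e = (s - m).exp()                                    # overflow-safe exp
        values.append(torch.bmm(e, v[:, i:i + kv_chunk]))    # exp_values
        weights.append(e.sum(dim=-1))                        # exp_weights_sum
        maxes.append(m.squeeze(-1))
    values, weights, maxes = map(torch.stack, (values, weights, maxes))
    global_max = maxes.max(dim=0, keepdim=True).values
    corr = (maxes - global_max).exp()                        # rescale each chunk
    out = (values * corr.unsqueeze(-1)).sum(dim=0)
    return out / (weights * corr).sum(dim=0).unsqueeze(-1)

q, k_t, v = torch.randn(2, 128, 32), torch.randn(2, 32, 256), torch.randn(2, 256, 32)
ref = torch.softmax(torch.bmm(q, k_t) * 32 ** -0.5, dim=-1) @ v
assert torch.allclose(chunked_attention(q, k_t, v), ref, atol=1e-5)
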
+ +def _summarize_chunk( + query: Tensor, + key_t: Tensor, + value: Tensor, + scale: float, + upcast_attention: bool, + mask, +) -> AttnChunk: + if upcast_attention: + with torch.autocast(enabled=False, device_type = 'cuda'): + query = query.float() + key_t = key_t.float() + attn_weights = torch.baddbmm( + torch.empty(1, 1, 1, device=query.device, dtype=query.dtype), + query, + key_t, + alpha=scale, + beta=0, + ) + else: + attn_weights = torch.baddbmm( + torch.empty(1, 1, 1, device=query.device, dtype=query.dtype), + query, + key_t, + alpha=scale, + beta=0, + ) + max_score, _ = torch.max(attn_weights, -1, keepdim=True) + max_score = max_score.detach() + attn_weights -= max_score + if mask is not None: + attn_weights += mask + torch.exp(attn_weights, out=attn_weights) + exp_weights = attn_weights.to(value.dtype) + exp_values = torch.bmm(exp_weights, value) + max_score = max_score.squeeze(-1) + return AttnChunk(exp_values, exp_weights.sum(dim=-1), max_score) + +def _query_chunk_attention( + query: Tensor, + key_t: Tensor, + value: Tensor, + summarize_chunk: SummarizeChunk, + kv_chunk_size: int, + mask, +) -> Tensor: + batch_x_heads, k_channels_per_head, k_tokens = key_t.shape + _, _, v_channels_per_head = value.shape + + def chunk_scanner(chunk_idx: int, mask) -> AttnChunk: + key_chunk = dynamic_slice( + key_t, + (0, 0, chunk_idx), + (batch_x_heads, k_channels_per_head, kv_chunk_size) + ) + value_chunk = dynamic_slice( + value, + (0, chunk_idx, 0), + (batch_x_heads, kv_chunk_size, v_channels_per_head) + ) + if mask is not None: + mask = mask[:,:,chunk_idx:chunk_idx + kv_chunk_size] + + return summarize_chunk(query, key_chunk, value_chunk, mask=mask) + + chunks: List[AttnChunk] = [ + chunk_scanner(chunk, mask) for chunk in torch.arange(0, k_tokens, kv_chunk_size) + ] + acc_chunk = AttnChunk(*map(torch.stack, zip(*chunks))) + chunk_values, chunk_weights, chunk_max = acc_chunk + + global_max, _ = torch.max(chunk_max, 0, keepdim=True) + max_diffs = torch.exp(chunk_max - global_max) + chunk_values *= torch.unsqueeze(max_diffs, -1) + chunk_weights *= max_diffs + + all_values = chunk_values.sum(dim=0) + all_weights = torch.unsqueeze(chunk_weights, -1).sum(dim=0) + return all_values / all_weights + +# TODO: refactor CrossAttention#get_attention_scores to share code with this +def _get_attention_scores_no_kv_chunking( + query: Tensor, + key_t: Tensor, + value: Tensor, + scale: float, + upcast_attention: bool, + mask, +) -> Tensor: + if upcast_attention: + with torch.autocast(enabled=False, device_type = 'cuda'): + query = query.float() + key_t = key_t.float() + attn_scores = torch.baddbmm( + torch.empty(1, 1, 1, device=query.device, dtype=query.dtype), + query, + key_t, + alpha=scale, + beta=0, + ) + else: + attn_scores = torch.baddbmm( + torch.empty(1, 1, 1, device=query.device, dtype=query.dtype), + query, + key_t, + alpha=scale, + beta=0, + ) + + if mask is not None: + attn_scores += mask + try: + attn_probs = attn_scores.softmax(dim=-1) + del attn_scores + except model_management.OOM_EXCEPTION: + logging.warning("ran out of memory while running softmax in _get_attention_scores_no_kv_chunking, trying slower in place softmax instead") + attn_scores -= attn_scores.max(dim=-1, keepdim=True).values + torch.exp(attn_scores, out=attn_scores) + summed = torch.sum(attn_scores, dim=-1, keepdim=True) + attn_scores /= summed + attn_probs = attn_scores + + hidden_states_slice = torch.bmm(attn_probs.to(value.dtype), value) + return hidden_states_slice + +class ScannedChunk(NamedTuple): + chunk_idx: int + 
attn_chunk: AttnChunk + +def efficient_dot_product_attention( + query: Tensor, + key_t: Tensor, + value: Tensor, + query_chunk_size=1024, + kv_chunk_size: Optional[int] = None, + kv_chunk_size_min: Optional[int] = None, + use_checkpoint=True, + upcast_attention=False, + mask = None, +): + """Computes efficient dot-product attention given query, transposed key, and value. + This is efficient version of attention presented in + https://arxiv.org/abs/2112.05682v2 which comes with O(sqrt(n)) memory requirements. + Args: + query: queries for calculating attention with shape of + `[batch * num_heads, tokens, channels_per_head]`. + key_t: keys for calculating attention with shape of + `[batch * num_heads, channels_per_head, tokens]`. + value: values to be used in attention with shape of + `[batch * num_heads, tokens, channels_per_head]`. + query_chunk_size: int: query chunks size + kv_chunk_size: Optional[int]: key/value chunks size. if None: defaults to sqrt(key_tokens) + kv_chunk_size_min: Optional[int]: key/value minimum chunk size. only considered when kv_chunk_size is None. changes `sqrt(key_tokens)` into `max(sqrt(key_tokens), kv_chunk_size_min)`, to ensure our chunk sizes don't get too small (smaller chunks = more chunks = less concurrent work done). + use_checkpoint: bool: whether to use checkpointing (recommended True for training, False for inference) + Returns: + Output of shape `[batch * num_heads, query_tokens, channels_per_head]`. + """ + batch_x_heads, q_tokens, q_channels_per_head = query.shape + _, _, k_tokens = key_t.shape + scale = q_channels_per_head ** -0.5 + + kv_chunk_size = min(kv_chunk_size or int(math.sqrt(k_tokens)), k_tokens) + if kv_chunk_size_min is not None: + kv_chunk_size = max(kv_chunk_size, kv_chunk_size_min) + + if mask is not None and len(mask.shape) == 2: + mask = mask.unsqueeze(0) + + def get_query_chunk(chunk_idx: int) -> Tensor: + return dynamic_slice( + query, + (0, chunk_idx, 0), + (batch_x_heads, min(query_chunk_size, q_tokens), q_channels_per_head) + ) + + def get_mask_chunk(chunk_idx: int) -> Tensor: + if mask is None: + return None + chunk = min(query_chunk_size, q_tokens) + return mask[:,chunk_idx:chunk_idx + chunk] + + summarize_chunk: SummarizeChunk = partial(_summarize_chunk, scale=scale, upcast_attention=upcast_attention) + summarize_chunk: SummarizeChunk = partial(checkpoint, summarize_chunk) if use_checkpoint else summarize_chunk + compute_query_chunk_attn: ComputeQueryChunkAttn = partial( + _get_attention_scores_no_kv_chunking, + scale=scale, + upcast_attention=upcast_attention + ) if k_tokens <= kv_chunk_size else ( + # fast-path for when there's just 1 key-value chunk per query chunk (this is just sliced attention btw) + partial( + _query_chunk_attention, + kv_chunk_size=kv_chunk_size, + summarize_chunk=summarize_chunk, + ) + ) + + if q_tokens <= query_chunk_size: + # fast-path for when there's just 1 query chunk + return compute_query_chunk_attn( + query=query, + key_t=key_t, + value=value, + mask=mask, + ) + + # TODO: maybe we should use torch.empty_like(query) to allocate storage in-advance, + # and pass slices to be mutated, instead of torch.cat()ing the returned slices + res = torch.cat([ + compute_query_chunk_attn( + query=get_query_chunk(i * query_chunk_size), + key_t=key_t, + value=value, + mask=get_mask_chunk(i * query_chunk_size) + ) for i in range(math.ceil(q_tokens / query_chunk_size)) + ], dim=1) + return res diff --git a/src/comfyui/comfy/ldm/modules/temporal_ae.py b/src/comfyui/comfy/ldm/modules/temporal_ae.py new file mode 
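
A hypothetical call illustrating the shape contract `efficient_dot_product_attention` documents above: query and value are `[batch*heads, tokens, channels]`, while the key arrives pre-transposed. The call itself is left commented out since the function lives inside the patched module:

import torch

bh, q_tok, k_tok, ch = 8, 4096, 4096, 64
query = torch.randn(bh, q_tok, ch)
key_t = torch.randn(bh, ch, k_tok)   # channels before tokens: pre-transposed
value = torch.randn(bh, k_tok, ch)

# out = efficient_dot_product_attention(query, key_t, value,
#                                       query_chunk_size=1024,
#                                       kv_chunk_size=None,    # -> ~sqrt(4096) = 64
#                                       use_checkpoint=False)  # inference setting
# out.shape would be (bh, q_tok, ch)
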
100644 index 0000000000000000000000000000000000000000..2992aeafc35ae8ca9e4ecac236810fa5a1fb84ad --- /dev/null +++ b/src/comfyui/comfy/ldm/modules/temporal_ae.py @@ -0,0 +1,245 @@ +import functools +from typing import Callable, Iterable, Union + +import torch +from einops import rearrange, repeat + +import comfy.ops +ops = comfy.ops.disable_weight_init + +from .diffusionmodules.model import ( + AttnBlock, + Decoder, + ResnetBlock, +) +from .diffusionmodules.openaimodel import ResBlock, timestep_embedding +from .attention import BasicTransformerBlock + +def partialclass(cls, *args, **kwargs): + class NewCls(cls): + __init__ = functools.partialmethod(cls.__init__, *args, **kwargs) + + return NewCls + + +class VideoResBlock(ResnetBlock): + def __init__( + self, + out_channels, + *args, + dropout=0.0, + video_kernel_size=3, + alpha=0.0, + merge_strategy="learned", + **kwargs, + ): + super().__init__(out_channels=out_channels, dropout=dropout, *args, **kwargs) + if video_kernel_size is None: + video_kernel_size = [3, 1, 1] + self.time_stack = ResBlock( + channels=out_channels, + emb_channels=0, + dropout=dropout, + dims=3, + use_scale_shift_norm=False, + use_conv=False, + up=False, + down=False, + kernel_size=video_kernel_size, + use_checkpoint=False, + skip_t_emb=True, + ) + + self.merge_strategy = merge_strategy + if self.merge_strategy == "fixed": + self.register_buffer("mix_factor", torch.Tensor([alpha])) + elif self.merge_strategy == "learned": + self.register_parameter( + "mix_factor", torch.nn.Parameter(torch.Tensor([alpha])) + ) + else: + raise ValueError(f"unknown merge strategy {self.merge_strategy}") + + def get_alpha(self, bs): + if self.merge_strategy == "fixed": + return self.mix_factor + elif self.merge_strategy == "learned": + return torch.sigmoid(self.mix_factor) + else: + raise NotImplementedError() + + def forward(self, x, temb, skip_video=False, timesteps=None): + b, c, h, w = x.shape + if timesteps is None: + timesteps = b + + x = super().forward(x, temb) + + if not skip_video: + x_mix = rearrange(x, "(b t) c h w -> b c t h w", t=timesteps) + + x = rearrange(x, "(b t) c h w -> b c t h w", t=timesteps) + + x = self.time_stack(x, temb) + + alpha = self.get_alpha(bs=b // timesteps).to(x.device) + x = alpha * x + (1.0 - alpha) * x_mix + + x = rearrange(x, "b c t h w -> (b t) c h w") + return x + + +class AE3DConv(ops.Conv2d): + def __init__(self, in_channels, out_channels, video_kernel_size=3, *args, **kwargs): + super().__init__(in_channels, out_channels, *args, **kwargs) + if isinstance(video_kernel_size, Iterable): + padding = [int(k // 2) for k in video_kernel_size] + else: + padding = int(video_kernel_size // 2) + + self.time_mix_conv = ops.Conv3d( + in_channels=out_channels, + out_channels=out_channels, + kernel_size=video_kernel_size, + padding=padding, + ) + + def forward(self, input, timesteps=None, skip_video=False): + if timesteps is None: + timesteps = input.shape[0] + x = super().forward(input) + if skip_video: + return x + x = rearrange(x, "(b t) c h w -> b c t h w", t=timesteps) + x = self.time_mix_conv(x) + return rearrange(x, "b c t h w -> (b t) c h w") + + +class AttnVideoBlock(AttnBlock): + def __init__( + self, in_channels: int, alpha: float = 0, merge_strategy: str = "learned" + ): + super().__init__(in_channels) + # no context, single headed, as in base class + self.time_mix_block = BasicTransformerBlock( + dim=in_channels, + n_heads=1, + d_head=in_channels, + checkpoint=False, + ff_in=True, + ) + + time_embed_dim = self.in_channels * 4 + 
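
The frame/batch folding that `VideoResBlock` above relies on, shown in isolation: 2D layers treat frames as extra batch entries, while the 3D `time_stack` sees them as a time axis. Shapes and the blend below are illustrative:

import torch
from einops import rearrange

b, t, c, h, w = 2, 8, 4, 16, 16
x_spatial = torch.randn(b * t, c, h, w)                     # "(b t) c h w"
x5 = rearrange(x_spatial, "(b t) c h w -> b c t h w", t=t)  # expose time axis
assert torch.equal(rearrange(x5, "b c t h w -> (b t) c h w"), x_spatial)

x_temporal = torch.randn_like(x_spatial)                    # time_stack output
alpha = torch.sigmoid(torch.tensor(0.0))                    # "learned" at init
mixed = alpha * x_temporal + (1.0 - alpha) * x_spatial      # cf. forward() above
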
self.video_time_embed = torch.nn.Sequential( + ops.Linear(self.in_channels, time_embed_dim), + torch.nn.SiLU(), + ops.Linear(time_embed_dim, self.in_channels), + ) + + self.merge_strategy = merge_strategy + if self.merge_strategy == "fixed": + self.register_buffer("mix_factor", torch.Tensor([alpha])) + elif self.merge_strategy == "learned": + self.register_parameter( + "mix_factor", torch.nn.Parameter(torch.Tensor([alpha])) + ) + else: + raise ValueError(f"unknown merge strategy {self.merge_strategy}") + + def forward(self, x, timesteps=None, skip_time_block=False): + if skip_time_block: + return super().forward(x) + + if timesteps is None: + timesteps = x.shape[0] + + x_in = x + x = self.attention(x) + h, w = x.shape[2:] + x = rearrange(x, "b c h w -> b (h w) c") + + x_mix = x + num_frames = torch.arange(timesteps, device=x.device) + num_frames = repeat(num_frames, "t -> b t", b=x.shape[0] // timesteps) + num_frames = rearrange(num_frames, "b t -> (b t)") + t_emb = timestep_embedding(num_frames, self.in_channels, repeat_only=False) + emb = self.video_time_embed(t_emb) # b, n_channels + emb = emb[:, None, :] + x_mix = x_mix + emb + + alpha = self.get_alpha().to(x.device) + x_mix = self.time_mix_block(x_mix, timesteps=timesteps) + x = alpha * x + (1.0 - alpha) * x_mix # alpha merge + + x = rearrange(x, "b (h w) c -> b c h w", h=h, w=w) + x = self.proj_out(x) + + return x_in + x + + def get_alpha( + self, + ): + if self.merge_strategy == "fixed": + return self.mix_factor + elif self.merge_strategy == "learned": + return torch.sigmoid(self.mix_factor) + else: + raise NotImplementedError(f"unknown merge strategy {self.merge_strategy}") + + + +def make_time_attn( + in_channels, + attn_type="vanilla", + attn_kwargs=None, + alpha: float = 0, + merge_strategy: str = "learned", +): + return partialclass( + AttnVideoBlock, in_channels, alpha=alpha, merge_strategy=merge_strategy + ) + + +class Conv2DWrapper(torch.nn.Conv2d): + def forward(self, input: torch.Tensor, **kwargs) -> torch.Tensor: + return super().forward(input) + + +class VideoDecoder(Decoder): + available_time_modes = ["all", "conv-only", "attn-only"] + + def __init__( + self, + *args, + video_kernel_size: Union[int, list] = 3, + alpha: float = 0.0, + merge_strategy: str = "learned", + time_mode: str = "conv-only", + **kwargs, + ): + self.video_kernel_size = video_kernel_size + self.alpha = alpha + self.merge_strategy = merge_strategy + self.time_mode = time_mode + assert ( + self.time_mode in self.available_time_modes + ), f"time_mode parameter has to be in {self.available_time_modes}" + + if self.time_mode != "attn-only": + kwargs["conv_out_op"] = partialclass(AE3DConv, video_kernel_size=self.video_kernel_size) + if self.time_mode not in ["conv-only", "only-last-conv"]: + kwargs["attn_op"] = partialclass(make_time_attn, alpha=self.alpha, merge_strategy=self.merge_strategy) + if self.time_mode not in ["attn-only", "only-last-conv"]: + kwargs["resnet_op"] = partialclass(VideoResBlock, video_kernel_size=self.video_kernel_size, alpha=self.alpha, merge_strategy=self.merge_strategy) + + super().__init__(*args, **kwargs) + + def get_last_layer(self, skip_time_mix=False, **kwargs): + if self.time_mode == "attn-only": + raise NotImplementedError("TODO") + else: + return ( + self.conv_out.time_mix_conv.weight + if not skip_time_mix + else self.conv_out.weight + ) diff --git a/src/comfyui/comfy/ldm/util.py b/src/comfyui/comfy/ldm/util.py new file mode 100644 index 0000000000000000000000000000000000000000..8c09ca1c72f7ceb3f9d7f9546aae5561baf62b13 
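
The `partialclass` helper used throughout `temporal_ae.py` above is worth a standalone look: it bakes constructor arguments into a subclass via `functools.partialmethod`. `Block` below is a hypothetical stand-in:

import functools

def partialclass(cls, *args, **kwargs):
    class NewCls(cls):
        __init__ = functools.partialmethod(cls.__init__, *args, **kwargs)
    return NewCls

class Block:
    def __init__(self, channels, kernel_size=3):
        self.channels, self.kernel_size = channels, kernel_size

Conv5Block = partialclass(Block, kernel_size=5)  # callers supply the rest
assert Conv5Block(64).kernel_size == 5
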
--- /dev/null +++ b/src/comfyui/comfy/ldm/util.py @@ -0,0 +1,197 @@ +import importlib + +import torch +from torch import optim +import numpy as np + +from inspect import isfunction +from PIL import Image, ImageDraw, ImageFont + + +def log_txt_as_img(wh, xc, size=10): + # wh a tuple of (width, height) + # xc a list of captions to plot + b = len(xc) + txts = list() + for bi in range(b): + txt = Image.new("RGB", wh, color="white") + draw = ImageDraw.Draw(txt) + font = ImageFont.truetype('data/DejaVuSans.ttf', size=size) + nc = int(40 * (wh[0] / 256)) + lines = "\n".join(xc[bi][start:start + nc] for start in range(0, len(xc[bi]), nc)) + + try: + draw.text((0, 0), lines, fill="black", font=font) + except UnicodeEncodeError: + print("Cant encode string for logging. Skipping.") + + txt = np.array(txt).transpose(2, 0, 1) / 127.5 - 1.0 + txts.append(txt) + txts = np.stack(txts) + txts = torch.tensor(txts) + return txts + + +def ismap(x): + if not isinstance(x, torch.Tensor): + return False + return (len(x.shape) == 4) and (x.shape[1] > 3) + + +def isimage(x): + if not isinstance(x,torch.Tensor): + return False + return (len(x.shape) == 4) and (x.shape[1] == 3 or x.shape[1] == 1) + + +def exists(x): + return x is not None + + +def default(val, d): + if exists(val): + return val + return d() if isfunction(d) else d + + +def mean_flat(tensor): + """ + https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/nn.py#L86 + Take the mean over all non-batch dimensions. + """ + return tensor.mean(dim=list(range(1, len(tensor.shape)))) + + +def count_params(model, verbose=False): + total_params = sum(p.numel() for p in model.parameters()) + if verbose: + print(f"{model.__class__.__name__} has {total_params*1.e-6:.2f} M params.") + return total_params + + +def instantiate_from_config(config): + if not "target" in config: + if config == '__is_first_stage__': + return None + elif config == "__is_unconditional__": + return None + raise KeyError("Expected key `target` to instantiate.") + return get_obj_from_str(config["target"])(**config.get("params", dict())) + + +def get_obj_from_str(string, reload=False): + module, cls = string.rsplit(".", 1) + if reload: + module_imp = importlib.import_module(module) + importlib.reload(module_imp) + return getattr(importlib.import_module(module, package=None), cls) + + +class AdamWwithEMAandWings(optim.Optimizer): + # credit to https://gist.github.com/crowsonkb/65f7265353f403714fce3b2595e0b298 + def __init__(self, params, lr=1.e-3, betas=(0.9, 0.999), eps=1.e-8, # TODO: check hyperparameters before using + weight_decay=1.e-2, amsgrad=False, ema_decay=0.9999, # ema decay to match previous code + ema_power=1., param_names=()): + """AdamW that saves EMA versions of the parameters.""" + if not 0.0 <= lr: + raise ValueError("Invalid learning rate: {}".format(lr)) + if not 0.0 <= eps: + raise ValueError("Invalid epsilon value: {}".format(eps)) + if not 0.0 <= betas[0] < 1.0: + raise ValueError("Invalid beta parameter at index 0: {}".format(betas[0])) + if not 0.0 <= betas[1] < 1.0: + raise ValueError("Invalid beta parameter at index 1: {}".format(betas[1])) + if not 0.0 <= weight_decay: + raise ValueError("Invalid weight_decay value: {}".format(weight_decay)) + if not 0.0 <= ema_decay <= 1.0: + raise ValueError("Invalid ema_decay value: {}".format(ema_decay)) + defaults = dict(lr=lr, betas=betas, eps=eps, + weight_decay=weight_decay, amsgrad=amsgrad, ema_decay=ema_decay, + ema_power=ema_power, param_names=param_names) + 
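
A sketch of the config convention `instantiate_from_config` above expects: a dict whose dotted `target` names a class and whose optional `params` feed its constructor. The target below is just a convenient real class chosen for illustration:

import importlib

config = {
    "target": "torch.nn.Conv2d",  # any importable dotted path works
    "params": {"in_channels": 3, "out_channels": 8, "kernel_size": 3},
}
module, cls = config["target"].rsplit(".", 1)       # what get_obj_from_str does
conv = getattr(importlib.import_module(module), cls)(**config.get("params", {}))
# equivalent to: instantiate_from_config(config)
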
super().__init__(params, defaults) + + def __setstate__(self, state): + super().__setstate__(state) + for group in self.param_groups: + group.setdefault('amsgrad', False) + + @torch.no_grad() + def step(self, closure=None): + """Performs a single optimization step. + Args: + closure (callable, optional): A closure that reevaluates the model + and returns the loss. + """ + loss = None + if closure is not None: + with torch.enable_grad(): + loss = closure() + + for group in self.param_groups: + params_with_grad = [] + grads = [] + exp_avgs = [] + exp_avg_sqs = [] + ema_params_with_grad = [] + state_sums = [] + max_exp_avg_sqs = [] + state_steps = [] + amsgrad = group['amsgrad'] + beta1, beta2 = group['betas'] + ema_decay = group['ema_decay'] + ema_power = group['ema_power'] + + for p in group['params']: + if p.grad is None: + continue + params_with_grad.append(p) + if p.grad.is_sparse: + raise RuntimeError('AdamW does not support sparse gradients') + grads.append(p.grad) + + state = self.state[p] + + # State initialization + if len(state) == 0: + state['step'] = 0 + # Exponential moving average of gradient values + state['exp_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format) + # Exponential moving average of squared gradient values + state['exp_avg_sq'] = torch.zeros_like(p, memory_format=torch.preserve_format) + if amsgrad: + # Maintains max of all exp. moving avg. of sq. grad. values + state['max_exp_avg_sq'] = torch.zeros_like(p, memory_format=torch.preserve_format) + # Exponential moving average of parameter values + state['param_exp_avg'] = p.detach().float().clone() + + exp_avgs.append(state['exp_avg']) + exp_avg_sqs.append(state['exp_avg_sq']) + ema_params_with_grad.append(state['param_exp_avg']) + + if amsgrad: + max_exp_avg_sqs.append(state['max_exp_avg_sq']) + + # update the steps for each param group update + state['step'] += 1 + # record the step after step update + state_steps.append(state['step']) + + optim._functional.adamw(params_with_grad, + grads, + exp_avgs, + exp_avg_sqs, + max_exp_avg_sqs, + state_steps, + amsgrad=amsgrad, + beta1=beta1, + beta2=beta2, + lr=group['lr'], + weight_decay=group['weight_decay'], + eps=group['eps'], + maximize=False) + + cur_ema_decay = min(ema_decay, 1 - state['step'] ** -ema_power) + for param, ema_param in zip(params_with_grad, ema_params_with_grad): + ema_param.mul_(cur_ema_decay).add_(param.float(), alpha=1 - cur_ema_decay) + + return loss \ No newline at end of file diff --git a/src/comfyui/comfy/lora.py b/src/comfyui/comfy/lora.py new file mode 100644 index 0000000000000000000000000000000000000000..b745ca4d5f69bd4745aa0f7828066a34a4fd20d7 --- /dev/null +++ b/src/comfyui/comfy/lora.py @@ -0,0 +1,595 @@ +""" + This file is part of ComfyUI. + Copyright (C) 2024 Comfy + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . 
+""" + +from __future__ import annotations +import comfy.utils +import comfy.model_management +import comfy.model_base +import logging +import torch + +LORA_CLIP_MAP = { + "mlp.fc1": "mlp_fc1", + "mlp.fc2": "mlp_fc2", + "self_attn.k_proj": "self_attn_k_proj", + "self_attn.q_proj": "self_attn_q_proj", + "self_attn.v_proj": "self_attn_v_proj", + "self_attn.out_proj": "self_attn_out_proj", +} + + +def load_lora(lora, to_load): + patch_dict = {} + loaded_keys = set() + for x in to_load: + alpha_name = "{}.alpha".format(x) + alpha = None + if alpha_name in lora.keys(): + alpha = lora[alpha_name].item() + loaded_keys.add(alpha_name) + + dora_scale_name = "{}.dora_scale".format(x) + dora_scale = None + if dora_scale_name in lora.keys(): + dora_scale = lora[dora_scale_name] + loaded_keys.add(dora_scale_name) + + regular_lora = "{}.lora_up.weight".format(x) + diffusers_lora = "{}_lora.up.weight".format(x) + diffusers2_lora = "{}.lora_B.weight".format(x) + diffusers3_lora = "{}.lora.up.weight".format(x) + transformers_lora = "{}.lora_linear_layer.up.weight".format(x) + A_name = None + + if regular_lora in lora.keys(): + A_name = regular_lora + B_name = "{}.lora_down.weight".format(x) + mid_name = "{}.lora_mid.weight".format(x) + elif diffusers_lora in lora.keys(): + A_name = diffusers_lora + B_name = "{}_lora.down.weight".format(x) + mid_name = None + elif diffusers2_lora in lora.keys(): + A_name = diffusers2_lora + B_name = "{}.lora_A.weight".format(x) + mid_name = None + elif diffusers3_lora in lora.keys(): + A_name = diffusers3_lora + B_name = "{}.lora.down.weight".format(x) + mid_name = None + elif transformers_lora in lora.keys(): + A_name = transformers_lora + B_name ="{}.lora_linear_layer.down.weight".format(x) + mid_name = None + + if A_name is not None: + mid = None + if mid_name is not None and mid_name in lora.keys(): + mid = lora[mid_name] + loaded_keys.add(mid_name) + patch_dict[to_load[x]] = ("lora", (lora[A_name], lora[B_name], alpha, mid, dora_scale)) + loaded_keys.add(A_name) + loaded_keys.add(B_name) + + + ######## loha + hada_w1_a_name = "{}.hada_w1_a".format(x) + hada_w1_b_name = "{}.hada_w1_b".format(x) + hada_w2_a_name = "{}.hada_w2_a".format(x) + hada_w2_b_name = "{}.hada_w2_b".format(x) + hada_t1_name = "{}.hada_t1".format(x) + hada_t2_name = "{}.hada_t2".format(x) + if hada_w1_a_name in lora.keys(): + hada_t1 = None + hada_t2 = None + if hada_t1_name in lora.keys(): + hada_t1 = lora[hada_t1_name] + hada_t2 = lora[hada_t2_name] + loaded_keys.add(hada_t1_name) + loaded_keys.add(hada_t2_name) + + patch_dict[to_load[x]] = ("loha", (lora[hada_w1_a_name], lora[hada_w1_b_name], alpha, lora[hada_w2_a_name], lora[hada_w2_b_name], hada_t1, hada_t2, dora_scale)) + loaded_keys.add(hada_w1_a_name) + loaded_keys.add(hada_w1_b_name) + loaded_keys.add(hada_w2_a_name) + loaded_keys.add(hada_w2_b_name) + + + ######## lokr + lokr_w1_name = "{}.lokr_w1".format(x) + lokr_w2_name = "{}.lokr_w2".format(x) + lokr_w1_a_name = "{}.lokr_w1_a".format(x) + lokr_w1_b_name = "{}.lokr_w1_b".format(x) + lokr_t2_name = "{}.lokr_t2".format(x) + lokr_w2_a_name = "{}.lokr_w2_a".format(x) + lokr_w2_b_name = "{}.lokr_w2_b".format(x) + + lokr_w1 = None + if lokr_w1_name in lora.keys(): + lokr_w1 = lora[lokr_w1_name] + loaded_keys.add(lokr_w1_name) + + lokr_w2 = None + if lokr_w2_name in lora.keys(): + lokr_w2 = lora[lokr_w2_name] + loaded_keys.add(lokr_w2_name) + + lokr_w1_a = None + if lokr_w1_a_name in lora.keys(): + lokr_w1_a = lora[lokr_w1_a_name] + loaded_keys.add(lokr_w1_a_name) + + lokr_w1_b = None + if 
lokr_w1_b_name in lora.keys(): + lokr_w1_b = lora[lokr_w1_b_name] + loaded_keys.add(lokr_w1_b_name) + + lokr_w2_a = None + if lokr_w2_a_name in lora.keys(): + lokr_w2_a = lora[lokr_w2_a_name] + loaded_keys.add(lokr_w2_a_name) + + lokr_w2_b = None + if lokr_w2_b_name in lora.keys(): + lokr_w2_b = lora[lokr_w2_b_name] + loaded_keys.add(lokr_w2_b_name) + + lokr_t2 = None + if lokr_t2_name in lora.keys(): + lokr_t2 = lora[lokr_t2_name] + loaded_keys.add(lokr_t2_name) + + if (lokr_w1 is not None) or (lokr_w2 is not None) or (lokr_w1_a is not None) or (lokr_w2_a is not None): + patch_dict[to_load[x]] = ("lokr", (lokr_w1, lokr_w2, alpha, lokr_w1_a, lokr_w1_b, lokr_w2_a, lokr_w2_b, lokr_t2, dora_scale)) + + #glora + a1_name = "{}.a1.weight".format(x) + a2_name = "{}.a2.weight".format(x) + b1_name = "{}.b1.weight".format(x) + b2_name = "{}.b2.weight".format(x) + if a1_name in lora: + patch_dict[to_load[x]] = ("glora", (lora[a1_name], lora[a2_name], lora[b1_name], lora[b2_name], alpha, dora_scale)) + loaded_keys.add(a1_name) + loaded_keys.add(a2_name) + loaded_keys.add(b1_name) + loaded_keys.add(b2_name) + + w_norm_name = "{}.w_norm".format(x) + b_norm_name = "{}.b_norm".format(x) + w_norm = lora.get(w_norm_name, None) + b_norm = lora.get(b_norm_name, None) + + if w_norm is not None: + loaded_keys.add(w_norm_name) + patch_dict[to_load[x]] = ("diff", (w_norm,)) + if b_norm is not None: + loaded_keys.add(b_norm_name) + patch_dict["{}.bias".format(to_load[x][:-len(".weight")])] = ("diff", (b_norm,)) + + diff_name = "{}.diff".format(x) + diff_weight = lora.get(diff_name, None) + if diff_weight is not None: + patch_dict[to_load[x]] = ("diff", (diff_weight,)) + loaded_keys.add(diff_name) + + diff_bias_name = "{}.diff_b".format(x) + diff_bias = lora.get(diff_bias_name, None) + if diff_bias is not None: + patch_dict["{}.bias".format(to_load[x][:-len(".weight")])] = ("diff", (diff_bias,)) + loaded_keys.add(diff_bias_name) + + for x in lora.keys(): + if x not in loaded_keys: + logging.warning("lora key not loaded: {}".format(x)) + + return patch_dict + +def model_lora_keys_clip(model, key_map={}): + sdk = model.state_dict().keys() + for k in sdk: + if k.endswith(".weight"): + key_map["text_encoders.{}".format(k[:-len(".weight")])] = k #generic lora format without any weird key names + + text_model_lora_key = "lora_te_text_model_encoder_layers_{}_{}" + clip_l_present = False + clip_g_present = False + for b in range(32): #TODO: clean up + for c in LORA_CLIP_MAP: + k = "clip_h.transformer.text_model.encoder.layers.{}.{}.weight".format(b, c) + if k in sdk: + lora_key = text_model_lora_key.format(b, LORA_CLIP_MAP[c]) + key_map[lora_key] = k + lora_key = "lora_te1_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) + key_map[lora_key] = k + lora_key = "text_encoder.text_model.encoder.layers.{}.{}".format(b, c) #diffusers lora + key_map[lora_key] = k + + k = "clip_l.transformer.text_model.encoder.layers.{}.{}.weight".format(b, c) + if k in sdk: + lora_key = text_model_lora_key.format(b, LORA_CLIP_MAP[c]) + key_map[lora_key] = k + lora_key = "lora_te1_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) #SDXL base + key_map[lora_key] = k + clip_l_present = True + lora_key = "text_encoder.text_model.encoder.layers.{}.{}".format(b, c) #diffusers lora + key_map[lora_key] = k + + k = "clip_g.transformer.text_model.encoder.layers.{}.{}.weight".format(b, c) + if k in sdk: + clip_g_present = True + if clip_l_present: + lora_key = "lora_te2_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) #SDXL 
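
The LoKr keys gathered above feed a Kronecker-product reconstruction later in `calculate_weight`; as a rough illustration (sizes are arbitrary), two small factors expand into a full-size delta:

import torch

w1 = torch.randn(8, 8)
w2 = torch.randn(40, 40)
lora_diff = torch.kron(w1, w2)   # Kronecker product, shape (320, 320)
assert lora_diff.shape == (320, 320)
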
base + key_map[lora_key] = k + lora_key = "text_encoder_2.text_model.encoder.layers.{}.{}".format(b, c) #diffusers lora + key_map[lora_key] = k + else: + lora_key = "lora_te_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) #TODO: test if this is correct for SDXL-Refiner + key_map[lora_key] = k + lora_key = "text_encoder.text_model.encoder.layers.{}.{}".format(b, c) #diffusers lora + key_map[lora_key] = k + lora_key = "lora_prior_te_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) #cascade lora: TODO put lora key prefix in the model config + key_map[lora_key] = k + + for k in sdk: + if k.endswith(".weight"): + if k.startswith("t5xxl.transformer."):#OneTrainer SD3 and Flux lora + l_key = k[len("t5xxl.transformer."):-len(".weight")] + t5_index = 1 + if clip_g_present: + t5_index += 1 + if clip_l_present: + t5_index += 1 + if t5_index == 2: + key_map["lora_te{}_{}".format(t5_index, l_key.replace(".", "_"))] = k #OneTrainer Flux + t5_index += 1 + + key_map["lora_te{}_{}".format(t5_index, l_key.replace(".", "_"))] = k + elif k.startswith("hydit_clip.transformer.bert."): #HunyuanDiT Lora + l_key = k[len("hydit_clip.transformer.bert."):-len(".weight")] + lora_key = "lora_te1_{}".format(l_key.replace(".", "_")) + key_map[lora_key] = k + + + k = "clip_g.transformer.text_projection.weight" + if k in sdk: + key_map["lora_prior_te_text_projection"] = k #cascade lora? + # key_map["text_encoder.text_projection"] = k #TODO: check if other lora have the text_projection too + key_map["lora_te2_text_projection"] = k #OneTrainer SD3 lora + + k = "clip_l.transformer.text_projection.weight" + if k in sdk: + key_map["lora_te1_text_projection"] = k #OneTrainer SD3 lora, not necessary but omits warning + + return key_map + +def model_lora_keys_unet(model, key_map={}): + sd = model.state_dict() + sdk = sd.keys() + + for k in sdk: + if k.startswith("diffusion_model.") and k.endswith(".weight"): + key_lora = k[len("diffusion_model."):-len(".weight")].replace(".", "_") + key_map["lora_unet_{}".format(key_lora)] = k + key_map["lora_prior_unet_{}".format(key_lora)] = k #cascade lora: TODO put lora key prefix in the model config + key_map["{}".format(k[:-len(".weight")])] = k #generic lora format without any weird key names + + diffusers_keys = comfy.utils.unet_to_diffusers(model.model_config.unet_config) + for k in diffusers_keys: + if k.endswith(".weight"): + unet_key = "diffusion_model.{}".format(diffusers_keys[k]) + key_lora = k[:-len(".weight")].replace(".", "_") + key_map["lora_unet_{}".format(key_lora)] = unet_key + key_map["lycoris_{}".format(key_lora)] = unet_key #simpletuner lycoris format + + diffusers_lora_prefix = ["", "unet."] + for p in diffusers_lora_prefix: + diffusers_lora_key = "{}{}".format(p, k[:-len(".weight")].replace(".to_", ".processor.to_")) + if diffusers_lora_key.endswith(".to_out.0"): + diffusers_lora_key = diffusers_lora_key[:-2] + key_map[diffusers_lora_key] = unet_key + + if isinstance(model, comfy.model_base.SD3): #Diffusers lora SD3 + diffusers_keys = comfy.utils.mmdit_to_diffusers(model.model_config.unet_config, output_prefix="diffusion_model.") + for k in diffusers_keys: + if k.endswith(".weight"): + to = diffusers_keys[k] + key_lora = "transformer.{}".format(k[:-len(".weight")]) #regular diffusers sd3 lora format + key_map[key_lora] = to + + key_lora = "base_model.model.{}".format(k[:-len(".weight")]) #format for flash-sd3 lora and others? 
+ key_map[key_lora] = to + + key_lora = "lora_transformer_{}".format(k[:-len(".weight")].replace(".", "_")) #OneTrainer lora + key_map[key_lora] = to + + key_lora = "lycoris_{}".format(k[:-len(".weight")].replace(".", "_")) #simpletuner lycoris format + key_map[key_lora] = to + + + if isinstance(model, comfy.model_base.AuraFlow): #Diffusers lora AuraFlow + diffusers_keys = comfy.utils.auraflow_to_diffusers(model.model_config.unet_config, output_prefix="diffusion_model.") + for k in diffusers_keys: + if k.endswith(".weight"): + to = diffusers_keys[k] + key_lora = "transformer.{}".format(k[:-len(".weight")]) #simpletrainer and probably regular diffusers lora format + key_map[key_lora] = to + + if isinstance(model, comfy.model_base.HunyuanDiT): + for k in sdk: + if k.startswith("diffusion_model.") and k.endswith(".weight"): + key_lora = k[len("diffusion_model."):-len(".weight")] + key_map["base_model.model.{}".format(key_lora)] = k #official hunyuan lora format + + if isinstance(model, comfy.model_base.Flux): #Diffusers lora Flux + diffusers_keys = comfy.utils.flux_to_diffusers(model.model_config.unet_config, output_prefix="diffusion_model.") + for k in diffusers_keys: + if k.endswith(".weight"): + to = diffusers_keys[k] + key_map["transformer.{}".format(k[:-len(".weight")])] = to #simpletrainer and probably regular diffusers flux lora format + key_map["lycoris_{}".format(k[:-len(".weight")].replace(".", "_"))] = to #simpletrainer lycoris + key_map["lora_transformer_{}".format(k[:-len(".weight")].replace(".", "_"))] = to #onetrainer + + return key_map + + +def weight_decompose(dora_scale, weight, lora_diff, alpha, strength, intermediate_dtype, function): + dora_scale = comfy.model_management.cast_to_device(dora_scale, weight.device, intermediate_dtype) + lora_diff *= alpha + weight_calc = weight + function(lora_diff).type(weight.dtype) + weight_norm = ( + weight_calc.transpose(0, 1) + .reshape(weight_calc.shape[1], -1) + .norm(dim=1, keepdim=True) + .reshape(weight_calc.shape[1], *[1] * (weight_calc.dim() - 1)) + .transpose(0, 1) + ) + + weight_calc *= (dora_scale / weight_norm).type(weight.dtype) + if strength != 1.0: + weight_calc -= weight + weight += strength * (weight_calc) + else: + weight[:] = weight_calc + return weight + +def pad_tensor_to_shape(tensor: torch.Tensor, new_shape: list[int]) -> torch.Tensor: + """ + Pad a tensor to a new shape with zeros. + + Args: + tensor (torch.Tensor): The original tensor to be padded. + new_shape (List[int]): The desired shape of the padded tensor. + + Returns: + torch.Tensor: A new tensor padded with zeros to the specified shape. + + Note: + If the new shape is smaller than the original tensor in any dimension, + the original tensor will be truncated in that dimension. 
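
What the DoRA path in `weight_decompose` above amounts to for a plain 2-D weight, as a sketch (the `(1, in)` shape for `dora_scale` is an assumption made for illustration): after adding the low-rank delta, each column is rescaled so its norm equals the learned scale.

import torch

weight = torch.randn(320, 64)                     # (out, in)
lora_diff = 0.1 * torch.randn(320, 64)
dora_scale = torch.ones(1, 64)                    # assumed per-column scale

weight_calc = weight + lora_diff
col_norm = weight_calc.norm(dim=0, keepdim=True)  # (1, in): per-column norm
merged = weight_calc * (dora_scale / col_norm)
assert torch.allclose(merged.norm(dim=0), dora_scale.flatten())
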
+ """ + if any([new_shape[i] < tensor.shape[i] for i in range(len(new_shape))]): + raise ValueError("The new shape must be larger than the original tensor in all dimensions") + + if len(new_shape) != len(tensor.shape): + raise ValueError("The new shape must have the same number of dimensions as the original tensor") + + # Create a new tensor filled with zeros + padded_tensor = torch.zeros(new_shape, dtype=tensor.dtype, device=tensor.device) + + # Create slicing tuples for both tensors + orig_slices = tuple(slice(0, dim) for dim in tensor.shape) + new_slices = tuple(slice(0, dim) for dim in tensor.shape) + + # Copy the original tensor into the new tensor + padded_tensor[new_slices] = tensor[orig_slices] + + return padded_tensor + +def calculate_weight(patches, weight, key, intermediate_dtype=torch.float32): + for p in patches: + strength = p[0] + v = p[1] + strength_model = p[2] + offset = p[3] + function = p[4] + if function is None: + function = lambda a: a + + old_weight = None + if offset is not None: + old_weight = weight + weight = weight.narrow(offset[0], offset[1], offset[2]) + + if strength_model != 1.0: + weight *= strength_model + + if isinstance(v, list): + v = (calculate_weight(v[1:], v[0][1](comfy.model_management.cast_to_device(v[0][0], weight.device, intermediate_dtype, copy=True), inplace=True), key, intermediate_dtype=intermediate_dtype), ) + + if len(v) == 1: + patch_type = "diff" + elif len(v) == 2: + patch_type = v[0] + v = v[1] + + if patch_type == "diff": + diff: torch.Tensor = v[0] + # An extra flag to pad the weight if the diff's shape is larger than the weight + do_pad_weight = len(v) > 1 and v[1]['pad_weight'] + if do_pad_weight and diff.shape != weight.shape: + logging.info("Pad weight {} from {} to shape: {}".format(key, weight.shape, diff.shape)) + weight = pad_tensor_to_shape(weight, diff.shape) + + if strength != 0.0: + if diff.shape != weight.shape: + logging.warning("WARNING SHAPE MISMATCH {} WEIGHT NOT MERGED {} != {}".format(key, diff.shape, weight.shape)) + else: + weight += function(strength * comfy.model_management.cast_to_device(diff, weight.device, weight.dtype)) + elif patch_type == "lora": #lora/locon + mat1 = comfy.model_management.cast_to_device(v[0], weight.device, intermediate_dtype) + mat2 = comfy.model_management.cast_to_device(v[1], weight.device, intermediate_dtype) + dora_scale = v[4] + if v[2] is not None: + alpha = v[2] / mat2.shape[0] + else: + alpha = 1.0 + + if v[3] is not None: + #locon mid weights, hopefully the math is fine because I didn't properly test it + mat3 = comfy.model_management.cast_to_device(v[3], weight.device, intermediate_dtype) + final_shape = [mat2.shape[1], mat2.shape[0], mat3.shape[2], mat3.shape[3]] + mat2 = torch.mm(mat2.transpose(0, 1).flatten(start_dim=1), mat3.transpose(0, 1).flatten(start_dim=1)).reshape(final_shape).transpose(0, 1) + try: + lora_diff = torch.mm(mat1.flatten(start_dim=1), mat2.flatten(start_dim=1)).reshape(weight.shape) + if dora_scale is not None: + weight = weight_decompose(dora_scale, weight, lora_diff, alpha, strength, intermediate_dtype, function) + else: + weight += function(((strength * alpha) * lora_diff).type(weight.dtype)) + except Exception as e: + logging.error("ERROR {} {} {}".format(patch_type, key, e)) + elif patch_type == "lokr": + w1 = v[0] + w2 = v[1] + w1_a = v[3] + w1_b = v[4] + w2_a = v[5] + w2_b = v[6] + t2 = v[7] + dora_scale = v[8] + dim = None + + if w1 is None: + dim = w1_b.shape[0] + w1 = torch.mm(comfy.model_management.cast_to_device(w1_a, weight.device, 
+
+def calculate_weight(patches, weight, key, intermediate_dtype=torch.float32):
+    for p in patches:
+        strength = p[0]
+        v = p[1]
+        strength_model = p[2]
+        offset = p[3]
+        function = p[4]
+        if function is None:
+            function = lambda a: a
+
+        old_weight = None
+        if offset is not None:
+            old_weight = weight
+            weight = weight.narrow(offset[0], offset[1], offset[2])
+
+        if strength_model != 1.0:
+            weight *= strength_model
+
+        if isinstance(v, list):
+            v = (calculate_weight(v[1:], v[0][1](comfy.model_management.cast_to_device(v[0][0], weight.device, intermediate_dtype, copy=True), inplace=True), key, intermediate_dtype=intermediate_dtype), )
+
+        if len(v) == 1:
+            patch_type = "diff"
+        elif len(v) == 2:
+            patch_type = v[0]
+            v = v[1]
+
+        if patch_type == "diff":
+            diff: torch.Tensor = v[0]
+            # An extra flag to pad the weight if the diff's shape is larger than the weight
+            do_pad_weight = len(v) > 1 and v[1]['pad_weight']
+            if do_pad_weight and diff.shape != weight.shape:
+                logging.info("Pad weight {} from {} to shape: {}".format(key, weight.shape, diff.shape))
+                weight = pad_tensor_to_shape(weight, diff.shape)
+
+            if strength != 0.0:
+                if diff.shape != weight.shape:
+                    logging.warning("WARNING SHAPE MISMATCH {} WEIGHT NOT MERGED {} != {}".format(key, diff.shape, weight.shape))
+                else:
+                    weight += function(strength * comfy.model_management.cast_to_device(diff, weight.device, weight.dtype))
+        elif patch_type == "lora": #lora/locon
+            mat1 = comfy.model_management.cast_to_device(v[0], weight.device, intermediate_dtype)
+            mat2 = comfy.model_management.cast_to_device(v[1], weight.device, intermediate_dtype)
+            dora_scale = v[4]
+            if v[2] is not None:
+                alpha = v[2] / mat2.shape[0]
+            else:
+                alpha = 1.0
+
+            if v[3] is not None:
+                #locon mid weights, hopefully the math is fine because I didn't properly test it
+                mat3 = comfy.model_management.cast_to_device(v[3], weight.device, intermediate_dtype)
+                final_shape = [mat2.shape[1], mat2.shape[0], mat3.shape[2], mat3.shape[3]]
+                mat2 = torch.mm(mat2.transpose(0, 1).flatten(start_dim=1), mat3.transpose(0, 1).flatten(start_dim=1)).reshape(final_shape).transpose(0, 1)
+            try:
+                lora_diff = torch.mm(mat1.flatten(start_dim=1), mat2.flatten(start_dim=1)).reshape(weight.shape)
+                if dora_scale is not None:
+                    weight = weight_decompose(dora_scale, weight, lora_diff, alpha, strength, intermediate_dtype, function)
+                else:
+                    weight += function(((strength * alpha) * lora_diff).type(weight.dtype))
+            except Exception as e:
+                logging.error("ERROR {} {} {}".format(patch_type, key, e))
+        elif patch_type == "lokr":
+            w1 = v[0]
+            w2 = v[1]
+            w1_a = v[3]
+            w1_b = v[4]
+            w2_a = v[5]
+            w2_b = v[6]
+            t2 = v[7]
+            dora_scale = v[8]
+            dim = None
+
+            if w1 is None:
+                dim = w1_b.shape[0]
+                w1 = torch.mm(comfy.model_management.cast_to_device(w1_a, weight.device, intermediate_dtype),
+                              comfy.model_management.cast_to_device(w1_b, weight.device, intermediate_dtype))
+            else:
+                w1 = comfy.model_management.cast_to_device(w1, weight.device, intermediate_dtype)
+
+            if w2 is None:
+                dim = w2_b.shape[0]
+                if t2 is None:
+                    w2 = torch.mm(comfy.model_management.cast_to_device(w2_a, weight.device, intermediate_dtype),
+                                  comfy.model_management.cast_to_device(w2_b, weight.device, intermediate_dtype))
+                else:
+                    w2 = torch.einsum('i j k l, j r, i p -> p r k l',
+                                      comfy.model_management.cast_to_device(t2, weight.device, intermediate_dtype),
+                                      comfy.model_management.cast_to_device(w2_b, weight.device, intermediate_dtype),
+                                      comfy.model_management.cast_to_device(w2_a, weight.device, intermediate_dtype))
+            else:
+                w2 = comfy.model_management.cast_to_device(w2, weight.device, intermediate_dtype)
+
+            if len(w2.shape) == 4:
+                w1 = w1.unsqueeze(2).unsqueeze(2)
+            if v[2] is not None and dim is not None:
+                alpha = v[2] / dim
+            else:
+                alpha = 1.0
+
+            try:
+                lora_diff = torch.kron(w1, w2).reshape(weight.shape)
+                if dora_scale is not None:
+                    weight = weight_decompose(dora_scale, weight, lora_diff, alpha, strength, intermediate_dtype, function)
+                else:
+                    weight += function(((strength * alpha) * lora_diff).type(weight.dtype))
+            except Exception as e:
+                logging.error("ERROR {} {} {}".format(patch_type, key, e))
+        elif patch_type == "loha":
+            w1a = v[0]
+            w1b = v[1]
+            if v[2] is not None:
+                alpha = v[2] / w1b.shape[0]
+            else:
+                alpha = 1.0
+
+            w2a = v[3]
+            w2b = v[4]
+            dora_scale = v[7]
+            if v[5] is not None: #cp decomposition
+                t1 = v[5]
+                t2 = v[6]
+                m1 = torch.einsum('i j k l, j r, i p -> p r k l',
+                                  comfy.model_management.cast_to_device(t1, weight.device, intermediate_dtype),
+                                  comfy.model_management.cast_to_device(w1b, weight.device, intermediate_dtype),
+                                  comfy.model_management.cast_to_device(w1a, weight.device, intermediate_dtype))
+
+                m2 = torch.einsum('i j k l, j r, i p -> p r k l',
+                                  comfy.model_management.cast_to_device(t2, weight.device, intermediate_dtype),
+                                  comfy.model_management.cast_to_device(w2b, weight.device, intermediate_dtype),
+                                  comfy.model_management.cast_to_device(w2a, weight.device, intermediate_dtype))
+            else:
+                m1 = torch.mm(comfy.model_management.cast_to_device(w1a, weight.device, intermediate_dtype),
+                              comfy.model_management.cast_to_device(w1b, weight.device, intermediate_dtype))
+                m2 = torch.mm(comfy.model_management.cast_to_device(w2a, weight.device, intermediate_dtype),
+                              comfy.model_management.cast_to_device(w2b, weight.device, intermediate_dtype))
+
+            try:
+                lora_diff = (m1 * m2).reshape(weight.shape)
+                if dora_scale is not None:
+                    weight = weight_decompose(dora_scale, weight, lora_diff, alpha, strength, intermediate_dtype, function)
+                else:
+                    weight += function(((strength * alpha) * lora_diff).type(weight.dtype))
+            except Exception as e:
+                logging.error("ERROR {} {} {}".format(patch_type, key, e))
+        elif patch_type == "glora":
+            dora_scale = v[5]
+
+            old_glora = False
+            if v[3].shape[1] == v[2].shape[0] == v[0].shape[0] == v[1].shape[1]:
+                rank = v[0].shape[0]
+                old_glora = True
+
+            if v[3].shape[0] == v[2].shape[1] == v[0].shape[1] == v[1].shape[0]:
+                if old_glora and v[1].shape[0] == weight.shape[0] and weight.shape[0] == weight.shape[1]:
+                    pass
+                else:
+                    old_glora = False
+                    rank = v[1].shape[0]
+
+            a1 = comfy.model_management.cast_to_device(v[0].flatten(start_dim=1), weight.device, intermediate_dtype)
+            a2 = comfy.model_management.cast_to_device(v[1].flatten(start_dim=1), weight.device, intermediate_dtype)
+            b1 = comfy.model_management.cast_to_device(v[2].flatten(start_dim=1), weight.device, intermediate_dtype)
+            b2 = comfy.model_management.cast_to_device(v[3].flatten(start_dim=1), weight.device, intermediate_dtype)
+
+            if v[4] is not None:
+                alpha = v[4] / rank
+            else:
+                alpha = 1.0
+
+            try:
+                if old_glora:
+                    lora_diff = (torch.mm(b2, b1) + torch.mm(torch.mm(weight.flatten(start_dim=1).to(dtype=intermediate_dtype), a2), a1)).reshape(weight.shape) #old lycoris glora
+                else:
+                    if weight.dim() > 2:
+                        lora_diff = torch.einsum("o i ..., i j -> o j ...", torch.einsum("o i ..., i j -> o j ...", weight.to(dtype=intermediate_dtype), a1), a2).reshape(weight.shape)
+                    else:
+                        lora_diff = torch.mm(torch.mm(weight.to(dtype=intermediate_dtype), a1), a2).reshape(weight.shape)
+                    lora_diff += torch.mm(b1, b2).reshape(weight.shape)
+
+                if dora_scale is not None:
+                    weight = weight_decompose(dora_scale, weight, lora_diff, alpha, strength, intermediate_dtype, function)
+                else:
+                    weight += function(((strength * alpha) * lora_diff).type(weight.dtype))
+            except Exception as e:
+                logging.error("ERROR {} {} {}".format(patch_type, key, e))
+        else:
+            logging.warning("patch type not recognized {} {}".format(patch_type, key))
+
+        if old_weight is not None:
+            weight = old_weight
+
+    return weight
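
To make the patch-tuple layout concrete, a small sketch of the simplest case (editorial note; assumes a ComfyUI environment where this module and comfy.model_management are importable). Each patch unpacks as (strength, v, strength_model, offset, function), and a 1-tuple v is treated as a plain additive diff:

    import torch
    weight = torch.zeros(4, 4)
    diff = torch.ones(4, 4)
    patch = (0.5, (diff,), 1.0, None, None)   # strength, v, strength_model, offset, function
    merged = calculate_weight([patch], weight, "layer.weight")
    assert torch.allclose(merged, torch.full((4, 4), 0.5))
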
diff --git a/src/comfyui/comfy/model_base.py b/src/comfyui/comfy/model_base.py
new file mode 100644
index 0000000000000000000000000000000000000000..f283316821214ccbc55ef2c35632a0842c44e02f
--- /dev/null
+++ b/src/comfyui/comfy/model_base.py
@@ -0,0 +1,736 @@
+"""
+    This file is part of ComfyUI.
+    Copyright (C) 2024 Comfy
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import torch
+import logging
+from comfy.ldm.modules.diffusionmodules.openaimodel import UNetModel, Timestep
+from comfy.ldm.cascade.stage_c import StageC
+from comfy.ldm.cascade.stage_b import StageB
+from comfy.ldm.modules.encoders.noise_aug_modules import CLIPEmbeddingNoiseAugmentation
+from comfy.ldm.modules.diffusionmodules.upscaling import ImageConcatWithNoiseAugmentation
+from comfy.ldm.modules.diffusionmodules.mmdit import OpenAISignatureMMDITWrapper
+import comfy.ldm.genmo.joint_model.asymm_models_joint
+import comfy.ldm.aura.mmdit
+import comfy.ldm.hydit.models
+import comfy.ldm.audio.dit
+import comfy.ldm.audio.embedders
+import comfy.ldm.flux.model
+
+import comfy.model_management
+import comfy.conds
+import comfy.ops
+from enum import Enum
+from . 
import utils +import comfy.latent_formats +import math + +class ModelType(Enum): + EPS = 1 + V_PREDICTION = 2 + V_PREDICTION_EDM = 3 + STABLE_CASCADE = 4 + EDM = 5 + FLOW = 6 + V_PREDICTION_CONTINUOUS = 7 + FLUX = 8 + + +from comfy.model_sampling import EPS, V_PREDICTION, EDM, ModelSamplingDiscrete, ModelSamplingContinuousEDM, StableCascadeSampling, ModelSamplingContinuousV + + +def model_sampling(model_config, model_type): + s = ModelSamplingDiscrete + + if model_type == ModelType.EPS: + c = EPS + elif model_type == ModelType.V_PREDICTION: + c = V_PREDICTION + elif model_type == ModelType.V_PREDICTION_EDM: + c = V_PREDICTION + s = ModelSamplingContinuousEDM + elif model_type == ModelType.FLOW: + c = comfy.model_sampling.CONST + s = comfy.model_sampling.ModelSamplingDiscreteFlow + elif model_type == ModelType.STABLE_CASCADE: + c = EPS + s = StableCascadeSampling + elif model_type == ModelType.EDM: + c = EDM + s = ModelSamplingContinuousEDM + elif model_type == ModelType.V_PREDICTION_CONTINUOUS: + c = V_PREDICTION + s = ModelSamplingContinuousV + elif model_type == ModelType.FLUX: + c = comfy.model_sampling.CONST + s = comfy.model_sampling.ModelSamplingFlux + + class ModelSampling(s, c): + pass + + return ModelSampling(model_config) + + +class BaseModel(torch.nn.Module): + def __init__(self, model_config, model_type=ModelType.EPS, device=None, unet_model=UNetModel): + super().__init__() + + unet_config = model_config.unet_config + self.latent_format = model_config.latent_format + self.model_config = model_config + self.manual_cast_dtype = model_config.manual_cast_dtype + self.device = device + + if not unet_config.get("disable_unet_model_creation", False): + if model_config.custom_operations is None: + fp8 = model_config.optimizations.get("fp8", model_config.scaled_fp8 is not None) + operations = comfy.ops.pick_operations(unet_config.get("dtype", None), self.manual_cast_dtype, fp8_optimizations=fp8, scaled_fp8=model_config.scaled_fp8) + else: + operations = model_config.custom_operations + self.diffusion_model = unet_model(**unet_config, device=device, operations=operations) + if comfy.model_management.force_channels_last(): + self.diffusion_model.to(memory_format=torch.channels_last) + logging.debug("using channels last mode for diffusion model") + logging.info("model weight dtype {}, manual cast: {}".format(self.get_dtype(), self.manual_cast_dtype)) + self.model_type = model_type + self.model_sampling = model_sampling(model_config, model_type) + + self.adm_channels = unet_config.get("adm_in_channels", None) + if self.adm_channels is None: + self.adm_channels = 0 + + self.concat_keys = () + logging.info("model_type {}".format(model_type.name)) + logging.debug("adm {}".format(self.adm_channels)) + self.memory_usage_factor = model_config.memory_usage_factor + + def apply_model(self, x, t, c_concat=None, c_crossattn=None, control=None, transformer_options={}, **kwargs): + sigma = t + xc = self.model_sampling.calculate_input(sigma, x) + if c_concat is not None: + xc = torch.cat([xc] + [c_concat], dim=1) + + context = c_crossattn + dtype = self.get_dtype() + + if self.manual_cast_dtype is not None: + dtype = self.manual_cast_dtype + + xc = xc.to(dtype) + t = self.model_sampling.timestep(t).float() + context = context.to(dtype) + extra_conds = {} + for o in kwargs: + extra = kwargs[o] + if hasattr(extra, "dtype"): + if extra.dtype != torch.int and extra.dtype != torch.long: + extra = extra.to(dtype) + extra_conds[o] = extra + + model_output = self.diffusion_model(xc, t, context=context, 
control=control, transformer_options=transformer_options, **extra_conds).float() + return self.model_sampling.calculate_denoised(sigma, model_output, x) + + def get_dtype(self): + return self.diffusion_model.dtype + + def is_adm(self): + return self.adm_channels > 0 + + def encode_adm(self, **kwargs): + return None + + def extra_conds(self, **kwargs): + out = {} + if len(self.concat_keys) > 0: + cond_concat = [] + denoise_mask = kwargs.get("concat_mask", kwargs.get("denoise_mask", None)) + concat_latent_image = kwargs.get("concat_latent_image", None) + if concat_latent_image is None: + concat_latent_image = kwargs.get("latent_image", None) + else: + concat_latent_image = self.process_latent_in(concat_latent_image) + + noise = kwargs.get("noise", None) + device = kwargs["device"] + + if concat_latent_image.shape[1:] != noise.shape[1:]: + concat_latent_image = utils.common_upscale(concat_latent_image, noise.shape[-1], noise.shape[-2], "bilinear", "center") + + concat_latent_image = utils.resize_to_batch_size(concat_latent_image, noise.shape[0]) + + if denoise_mask is not None: + if len(denoise_mask.shape) == len(noise.shape): + denoise_mask = denoise_mask[:,:1] + + denoise_mask = denoise_mask.reshape((-1, 1, denoise_mask.shape[-2], denoise_mask.shape[-1])) + if denoise_mask.shape[-2:] != noise.shape[-2:]: + denoise_mask = utils.common_upscale(denoise_mask, noise.shape[-1], noise.shape[-2], "bilinear", "center") + denoise_mask = utils.resize_to_batch_size(denoise_mask.round(), noise.shape[0]) + + for ck in self.concat_keys: + if denoise_mask is not None: + if ck == "mask": + cond_concat.append(denoise_mask.to(device)) + elif ck == "masked_image": + cond_concat.append(concat_latent_image.to(device)) #NOTE: the latent_image should be masked by the mask in pixel space + else: + if ck == "mask": + cond_concat.append(torch.ones_like(noise)[:,:1]) + elif ck == "masked_image": + cond_concat.append(self.blank_inpaint_image_like(noise)) + data = torch.cat(cond_concat, dim=1) + out['c_concat'] = comfy.conds.CONDNoiseShape(data) + + adm = self.encode_adm(**kwargs) + if adm is not None: + out['y'] = comfy.conds.CONDRegular(adm) + + cross_attn = kwargs.get("cross_attn", None) + if cross_attn is not None: + out['c_crossattn'] = comfy.conds.CONDCrossAttn(cross_attn) + + cross_attn_cnet = kwargs.get("cross_attn_controlnet", None) + if cross_attn_cnet is not None: + out['crossattn_controlnet'] = comfy.conds.CONDCrossAttn(cross_attn_cnet) + + c_concat = kwargs.get("noise_concat", None) + if c_concat is not None: + out['c_concat'] = comfy.conds.CONDNoiseShape(c_concat) + + return out + + def load_model_weights(self, sd, unet_prefix=""): + to_load = {} + keys = list(sd.keys()) + for k in keys: + if k.startswith(unet_prefix): + to_load[k[len(unet_prefix):]] = sd.pop(k) + + to_load = self.model_config.process_unet_state_dict(to_load) + m, u = self.diffusion_model.load_state_dict(to_load, strict=False) + if len(m) > 0: + logging.warning("unet missing: {}".format(m)) + + if len(u) > 0: + logging.warning("unet unexpected: {}".format(u)) + del to_load + return self + + def process_latent_in(self, latent): + return self.latent_format.process_in(latent) + + def process_latent_out(self, latent): + return self.latent_format.process_out(latent) + + def state_dict_for_saving(self, clip_state_dict=None, vae_state_dict=None, clip_vision_state_dict=None): + extra_sds = [] + if clip_state_dict is not None: + extra_sds.append(self.model_config.process_clip_state_dict_for_saving(clip_state_dict)) + if vae_state_dict is not None: 
+ extra_sds.append(self.model_config.process_vae_state_dict_for_saving(vae_state_dict)) + if clip_vision_state_dict is not None: + extra_sds.append(self.model_config.process_clip_vision_state_dict_for_saving(clip_vision_state_dict)) + + unet_state_dict = self.diffusion_model.state_dict() + + if self.model_config.scaled_fp8 is not None: + unet_state_dict["scaled_fp8"] = torch.tensor([], dtype=self.model_config.scaled_fp8) + + unet_state_dict = self.model_config.process_unet_state_dict_for_saving(unet_state_dict) + + if self.model_type == ModelType.V_PREDICTION: + unet_state_dict["v_pred"] = torch.tensor([]) + + for sd in extra_sds: + unet_state_dict.update(sd) + + return unet_state_dict + + def set_inpaint(self): + self.concat_keys = ("mask", "masked_image") + def blank_inpaint_image_like(latent_image): + blank_image = torch.ones_like(latent_image) + # these are the values for "zero" in pixel space translated to latent space + blank_image[:,0] *= 0.8223 + blank_image[:,1] *= -0.6876 + blank_image[:,2] *= 0.6364 + blank_image[:,3] *= 0.1380 + return blank_image + self.blank_inpaint_image_like = blank_inpaint_image_like + + def memory_required(self, input_shape): + if comfy.model_management.xformers_enabled() or comfy.model_management.pytorch_attention_flash_attention(): + dtype = self.get_dtype() + if self.manual_cast_dtype is not None: + dtype = self.manual_cast_dtype + #TODO: this needs to be tweaked + area = input_shape[0] * math.prod(input_shape[2:]) + return (area * comfy.model_management.dtype_size(dtype) * 0.01 * self.memory_usage_factor) * (1024 * 1024) + else: + #TODO: this formula might be too aggressive since I tweaked the sub-quad and split algorithms to use less memory. + area = input_shape[0] * math.prod(input_shape[2:]) + return (area * 0.15 * self.memory_usage_factor) * (1024 * 1024) + + +def unclip_adm(unclip_conditioning, device, noise_augmentor, noise_augment_merge=0.0, seed=None): + adm_inputs = [] + weights = [] + noise_aug = [] + for unclip_cond in unclip_conditioning: + for adm_cond in unclip_cond["clip_vision_output"].image_embeds: + weight = unclip_cond["strength"] + noise_augment = unclip_cond["noise_augmentation"] + noise_level = round((noise_augmentor.max_noise_level - 1) * noise_augment) + c_adm, noise_level_emb = noise_augmentor(adm_cond.to(device), noise_level=torch.tensor([noise_level], device=device), seed=seed) + adm_out = torch.cat((c_adm, noise_level_emb), 1) * weight + weights.append(weight) + noise_aug.append(noise_augment) + adm_inputs.append(adm_out) + + if len(noise_aug) > 1: + adm_out = torch.stack(adm_inputs).sum(0) + noise_augment = noise_augment_merge + noise_level = round((noise_augmentor.max_noise_level - 1) * noise_augment) + c_adm, noise_level_emb = noise_augmentor(adm_out[:, :noise_augmentor.time_embed.dim], noise_level=torch.tensor([noise_level], device=device)) + adm_out = torch.cat((c_adm, noise_level_emb), 1) + + return adm_out + +class SD21UNCLIP(BaseModel): + def __init__(self, model_config, noise_aug_config, model_type=ModelType.V_PREDICTION, device=None): + super().__init__(model_config, model_type, device=device) + self.noise_augmentor = CLIPEmbeddingNoiseAugmentation(**noise_aug_config) + + def encode_adm(self, **kwargs): + unclip_conditioning = kwargs.get("unclip_conditioning", None) + device = kwargs["device"] + if unclip_conditioning is None: + return torch.zeros((1, self.adm_channels)) + else: + return unclip_adm(unclip_conditioning, device, self.noise_augmentor, kwargs.get("unclip_noise_augment_merge", 0.05), kwargs.get("seed", 
0) - 10) + +def sdxl_pooled(args, noise_augmentor): + if "unclip_conditioning" in args: + return unclip_adm(args.get("unclip_conditioning", None), args["device"], noise_augmentor, seed=args.get("seed", 0) - 10)[:,:1280] + else: + return args["pooled_output"] + +class SDXLRefiner(BaseModel): + def __init__(self, model_config, model_type=ModelType.EPS, device=None): + super().__init__(model_config, model_type, device=device) + self.embedder = Timestep(256) + self.noise_augmentor = CLIPEmbeddingNoiseAugmentation(**{"noise_schedule_config": {"timesteps": 1000, "beta_schedule": "squaredcos_cap_v2"}, "timestep_dim": 1280}) + + def encode_adm(self, **kwargs): + clip_pooled = sdxl_pooled(kwargs, self.noise_augmentor) + width = kwargs.get("width", 768) + height = kwargs.get("height", 768) + crop_w = kwargs.get("crop_w", 0) + crop_h = kwargs.get("crop_h", 0) + + if kwargs.get("prompt_type", "") == "negative": + aesthetic_score = kwargs.get("aesthetic_score", 2.5) + else: + aesthetic_score = kwargs.get("aesthetic_score", 6) + + out = [] + out.append(self.embedder(torch.Tensor([height]))) + out.append(self.embedder(torch.Tensor([width]))) + out.append(self.embedder(torch.Tensor([crop_h]))) + out.append(self.embedder(torch.Tensor([crop_w]))) + out.append(self.embedder(torch.Tensor([aesthetic_score]))) + flat = torch.flatten(torch.cat(out)).unsqueeze(dim=0).repeat(clip_pooled.shape[0], 1) + return torch.cat((clip_pooled.to(flat.device), flat), dim=1) + +class SDXL(BaseModel): + def __init__(self, model_config, model_type=ModelType.EPS, device=None): + super().__init__(model_config, model_type, device=device) + self.embedder = Timestep(256) + self.noise_augmentor = CLIPEmbeddingNoiseAugmentation(**{"noise_schedule_config": {"timesteps": 1000, "beta_schedule": "squaredcos_cap_v2"}, "timestep_dim": 1280}) + + def encode_adm(self, **kwargs): + clip_pooled = sdxl_pooled(kwargs, self.noise_augmentor) + width = kwargs.get("width", 768) + height = kwargs.get("height", 768) + crop_w = kwargs.get("crop_w", 0) + crop_h = kwargs.get("crop_h", 0) + target_width = kwargs.get("target_width", width) + target_height = kwargs.get("target_height", height) + + out = [] + out.append(self.embedder(torch.Tensor([height]))) + out.append(self.embedder(torch.Tensor([width]))) + out.append(self.embedder(torch.Tensor([crop_h]))) + out.append(self.embedder(torch.Tensor([crop_w]))) + out.append(self.embedder(torch.Tensor([target_height]))) + out.append(self.embedder(torch.Tensor([target_width]))) + flat = torch.flatten(torch.cat(out)).unsqueeze(dim=0).repeat(clip_pooled.shape[0], 1) + return torch.cat((clip_pooled.to(flat.device), flat), dim=1) + + +class SVD_img2vid(BaseModel): + def __init__(self, model_config, model_type=ModelType.V_PREDICTION_EDM, device=None): + super().__init__(model_config, model_type, device=device) + self.embedder = Timestep(256) + + def encode_adm(self, **kwargs): + fps_id = kwargs.get("fps", 6) - 1 + motion_bucket_id = kwargs.get("motion_bucket_id", 127) + augmentation = kwargs.get("augmentation_level", 0) + + out = [] + out.append(self.embedder(torch.Tensor([fps_id]))) + out.append(self.embedder(torch.Tensor([motion_bucket_id]))) + out.append(self.embedder(torch.Tensor([augmentation]))) + + flat = torch.flatten(torch.cat(out)).unsqueeze(dim=0) + return flat + + def extra_conds(self, **kwargs): + out = {} + adm = self.encode_adm(**kwargs) + if adm is not None: + out['y'] = comfy.conds.CONDRegular(adm) + + latent_image = kwargs.get("concat_latent_image", None) + noise = kwargs.get("noise", None) + device = 
kwargs["device"] + + if latent_image is None: + latent_image = torch.zeros_like(noise) + + if latent_image.shape[1:] != noise.shape[1:]: + latent_image = utils.common_upscale(latent_image, noise.shape[-1], noise.shape[-2], "bilinear", "center") + + latent_image = utils.resize_to_batch_size(latent_image, noise.shape[0]) + + out['c_concat'] = comfy.conds.CONDNoiseShape(latent_image) + + cross_attn = kwargs.get("cross_attn", None) + if cross_attn is not None: + out['c_crossattn'] = comfy.conds.CONDCrossAttn(cross_attn) + + if "time_conditioning" in kwargs: + out["time_context"] = comfy.conds.CONDCrossAttn(kwargs["time_conditioning"]) + + out['num_video_frames'] = comfy.conds.CONDConstant(noise.shape[0]) + return out + +class SV3D_u(SVD_img2vid): + def encode_adm(self, **kwargs): + augmentation = kwargs.get("augmentation_level", 0) + + out = [] + out.append(self.embedder(torch.flatten(torch.Tensor([augmentation])))) + + flat = torch.flatten(torch.cat(out)).unsqueeze(dim=0) + return flat + +class SV3D_p(SVD_img2vid): + def __init__(self, model_config, model_type=ModelType.V_PREDICTION_EDM, device=None): + super().__init__(model_config, model_type, device=device) + self.embedder_512 = Timestep(512) + + def encode_adm(self, **kwargs): + augmentation = kwargs.get("augmentation_level", 0) + elevation = kwargs.get("elevation", 0) #elevation and azimuth are in degrees here + azimuth = kwargs.get("azimuth", 0) + noise = kwargs.get("noise", None) + + out = [] + out.append(self.embedder(torch.flatten(torch.Tensor([augmentation])))) + out.append(self.embedder_512(torch.deg2rad(torch.fmod(torch.flatten(90 - torch.Tensor([elevation])), 360.0)))) + out.append(self.embedder_512(torch.deg2rad(torch.fmod(torch.flatten(torch.Tensor([azimuth])), 360.0)))) + + out = list(map(lambda a: utils.resize_to_batch_size(a, noise.shape[0]), out)) + return torch.cat(out, dim=1) + + +class Stable_Zero123(BaseModel): + def __init__(self, model_config, model_type=ModelType.EPS, device=None, cc_projection_weight=None, cc_projection_bias=None): + super().__init__(model_config, model_type, device=device) + self.cc_projection = comfy.ops.manual_cast.Linear(cc_projection_weight.shape[1], cc_projection_weight.shape[0], dtype=self.get_dtype(), device=device) + self.cc_projection.weight.copy_(cc_projection_weight) + self.cc_projection.bias.copy_(cc_projection_bias) + + def extra_conds(self, **kwargs): + out = {} + + latent_image = kwargs.get("concat_latent_image", None) + noise = kwargs.get("noise", None) + + if latent_image is None: + latent_image = torch.zeros_like(noise) + + if latent_image.shape[1:] != noise.shape[1:]: + latent_image = utils.common_upscale(latent_image, noise.shape[-1], noise.shape[-2], "bilinear", "center") + + latent_image = utils.resize_to_batch_size(latent_image, noise.shape[0]) + + out['c_concat'] = comfy.conds.CONDNoiseShape(latent_image) + + cross_attn = kwargs.get("cross_attn", None) + if cross_attn is not None: + if cross_attn.shape[-1] != 768: + cross_attn = self.cc_projection(cross_attn) + out['c_crossattn'] = comfy.conds.CONDCrossAttn(cross_attn) + return out + +class SD_X4Upscaler(BaseModel): + def __init__(self, model_config, model_type=ModelType.V_PREDICTION, device=None): + super().__init__(model_config, model_type, device=device) + self.noise_augmentor = ImageConcatWithNoiseAugmentation(noise_schedule_config={"linear_start": 0.0001, "linear_end": 0.02}, max_noise_level=350) + + def extra_conds(self, **kwargs): + out = {} + + image = kwargs.get("concat_image", None) + noise = kwargs.get("noise", 
None) + noise_augment = kwargs.get("noise_augmentation", 0.0) + device = kwargs["device"] + seed = kwargs["seed"] - 10 + + noise_level = round((self.noise_augmentor.max_noise_level) * noise_augment) + + if image is None: + image = torch.zeros_like(noise)[:,:3] + + if image.shape[1:] != noise.shape[1:]: + image = utils.common_upscale(image.to(device), noise.shape[-1], noise.shape[-2], "bilinear", "center") + + noise_level = torch.tensor([noise_level], device=device) + if noise_augment > 0: + image, noise_level = self.noise_augmentor(image.to(device), noise_level=noise_level, seed=seed) + + image = utils.resize_to_batch_size(image, noise.shape[0]) + + out['c_concat'] = comfy.conds.CONDNoiseShape(image) + out['y'] = comfy.conds.CONDRegular(noise_level) + return out + +class IP2P: + def extra_conds(self, **kwargs): + out = {} + + image = kwargs.get("concat_latent_image", None) + noise = kwargs.get("noise", None) + device = kwargs["device"] + + if image is None: + image = torch.zeros_like(noise) + + if image.shape[1:] != noise.shape[1:]: + image = utils.common_upscale(image.to(device), noise.shape[-1], noise.shape[-2], "bilinear", "center") + + image = utils.resize_to_batch_size(image, noise.shape[0]) + + out['c_concat'] = comfy.conds.CONDNoiseShape(self.process_ip2p_image_in(image)) + adm = self.encode_adm(**kwargs) + if adm is not None: + out['y'] = comfy.conds.CONDRegular(adm) + return out + +class SD15_instructpix2pix(IP2P, BaseModel): + def __init__(self, model_config, model_type=ModelType.EPS, device=None): + super().__init__(model_config, model_type, device=device) + self.process_ip2p_image_in = lambda image: image + +class SDXL_instructpix2pix(IP2P, SDXL): + def __init__(self, model_config, model_type=ModelType.EPS, device=None): + super().__init__(model_config, model_type, device=device) + if model_type == ModelType.V_PREDICTION_EDM: + self.process_ip2p_image_in = lambda image: comfy.latent_formats.SDXL().process_in(image) #cosxl ip2p + else: + self.process_ip2p_image_in = lambda image: image #diffusers ip2p + + +class StableCascade_C(BaseModel): + def __init__(self, model_config, model_type=ModelType.STABLE_CASCADE, device=None): + super().__init__(model_config, model_type, device=device, unet_model=StageC) + self.diffusion_model.eval().requires_grad_(False) + + def extra_conds(self, **kwargs): + out = {} + clip_text_pooled = kwargs["pooled_output"] + if clip_text_pooled is not None: + out['clip_text_pooled'] = comfy.conds.CONDRegular(clip_text_pooled) + + if "unclip_conditioning" in kwargs: + embeds = [] + for unclip_cond in kwargs["unclip_conditioning"]: + weight = unclip_cond["strength"] + embeds.append(unclip_cond["clip_vision_output"].image_embeds.unsqueeze(0) * weight) + clip_img = torch.cat(embeds, dim=1) + else: + clip_img = torch.zeros((1, 1, 768)) + out["clip_img"] = comfy.conds.CONDRegular(clip_img) + out["sca"] = comfy.conds.CONDRegular(torch.zeros((1,))) + out["crp"] = comfy.conds.CONDRegular(torch.zeros((1,))) + + cross_attn = kwargs.get("cross_attn", None) + if cross_attn is not None: + out['clip_text'] = comfy.conds.CONDCrossAttn(cross_attn) + return out + + +class StableCascade_B(BaseModel): + def __init__(self, model_config, model_type=ModelType.STABLE_CASCADE, device=None): + super().__init__(model_config, model_type, device=device, unet_model=StageB) + self.diffusion_model.eval().requires_grad_(False) + + def extra_conds(self, **kwargs): + out = {} + noise = kwargs.get("noise", None) + + clip_text_pooled = kwargs["pooled_output"] + if clip_text_pooled is not None: 
+ out['clip'] = comfy.conds.CONDRegular(clip_text_pooled) + + #size of prior doesn't really matter if zeros because it gets resized but I still want it to get batched + prior = kwargs.get("stable_cascade_prior", torch.zeros((1, 16, (noise.shape[2] * 4) // 42, (noise.shape[3] * 4) // 42), dtype=noise.dtype, layout=noise.layout, device=noise.device)) + + out["effnet"] = comfy.conds.CONDRegular(prior) + out["sca"] = comfy.conds.CONDRegular(torch.zeros((1,))) + return out + + +class SD3(BaseModel): + def __init__(self, model_config, model_type=ModelType.FLOW, device=None): + super().__init__(model_config, model_type, device=device, unet_model=OpenAISignatureMMDITWrapper) + + def encode_adm(self, **kwargs): + return kwargs["pooled_output"] + + def extra_conds(self, **kwargs): + out = super().extra_conds(**kwargs) + cross_attn = kwargs.get("cross_attn", None) + if cross_attn is not None: + out['c_crossattn'] = comfy.conds.CONDRegular(cross_attn) + return out + + +class AuraFlow(BaseModel): + def __init__(self, model_config, model_type=ModelType.FLOW, device=None): + super().__init__(model_config, model_type, device=device, unet_model=comfy.ldm.aura.mmdit.MMDiT) + + def extra_conds(self, **kwargs): + out = super().extra_conds(**kwargs) + cross_attn = kwargs.get("cross_attn", None) + if cross_attn is not None: + out['c_crossattn'] = comfy.conds.CONDRegular(cross_attn) + return out + + +class StableAudio1(BaseModel): + def __init__(self, model_config, seconds_start_embedder_weights, seconds_total_embedder_weights, model_type=ModelType.V_PREDICTION_CONTINUOUS, device=None): + super().__init__(model_config, model_type, device=device, unet_model=comfy.ldm.audio.dit.AudioDiffusionTransformer) + self.seconds_start_embedder = comfy.ldm.audio.embedders.NumberConditioner(768, min_val=0, max_val=512) + self.seconds_total_embedder = comfy.ldm.audio.embedders.NumberConditioner(768, min_val=0, max_val=512) + self.seconds_start_embedder.load_state_dict(seconds_start_embedder_weights) + self.seconds_total_embedder.load_state_dict(seconds_total_embedder_weights) + + def extra_conds(self, **kwargs): + out = {} + + noise = kwargs.get("noise", None) + device = kwargs["device"] + + seconds_start = kwargs.get("seconds_start", 0) + seconds_total = kwargs.get("seconds_total", int(noise.shape[-1] / 21.53)) + + seconds_start_embed = self.seconds_start_embedder([seconds_start])[0].to(device) + seconds_total_embed = self.seconds_total_embedder([seconds_total])[0].to(device) + + global_embed = torch.cat([seconds_start_embed, seconds_total_embed], dim=-1).reshape((1, -1)) + out['global_embed'] = comfy.conds.CONDRegular(global_embed) + + cross_attn = kwargs.get("cross_attn", None) + if cross_attn is not None: + cross_attn = torch.cat([cross_attn.to(device), seconds_start_embed.repeat((cross_attn.shape[0], 1, 1)), seconds_total_embed.repeat((cross_attn.shape[0], 1, 1))], dim=1) + out['c_crossattn'] = comfy.conds.CONDRegular(cross_attn) + return out + + def state_dict_for_saving(self, clip_state_dict=None, vae_state_dict=None, clip_vision_state_dict=None): + sd = super().state_dict_for_saving(clip_state_dict=clip_state_dict, vae_state_dict=vae_state_dict, clip_vision_state_dict=clip_vision_state_dict) + d = {"conditioner.conditioners.seconds_start.": self.seconds_start_embedder.state_dict(), "conditioner.conditioners.seconds_total.": self.seconds_total_embedder.state_dict()} + for k in d: + s = d[k] + for l in s: + sd["{}{}".format(k, l)] = s[l] + return sd + +class HunyuanDiT(BaseModel): + def __init__(self, model_config, 
model_type=ModelType.V_PREDICTION, device=None): + super().__init__(model_config, model_type, device=device, unet_model=comfy.ldm.hydit.models.HunYuanDiT) + + def extra_conds(self, **kwargs): + out = super().extra_conds(**kwargs) + cross_attn = kwargs.get("cross_attn", None) + if cross_attn is not None: + out['c_crossattn'] = comfy.conds.CONDRegular(cross_attn) + + attention_mask = kwargs.get("attention_mask", None) + if attention_mask is not None: + out['text_embedding_mask'] = comfy.conds.CONDRegular(attention_mask) + + conditioning_mt5xl = kwargs.get("conditioning_mt5xl", None) + if conditioning_mt5xl is not None: + out['encoder_hidden_states_t5'] = comfy.conds.CONDRegular(conditioning_mt5xl) + + attention_mask_mt5xl = kwargs.get("attention_mask_mt5xl", None) + if attention_mask_mt5xl is not None: + out['text_embedding_mask_t5'] = comfy.conds.CONDRegular(attention_mask_mt5xl) + + width = kwargs.get("width", 768) + height = kwargs.get("height", 768) + crop_w = kwargs.get("crop_w", 0) + crop_h = kwargs.get("crop_h", 0) + target_width = kwargs.get("target_width", width) + target_height = kwargs.get("target_height", height) + + out['image_meta_size'] = comfy.conds.CONDRegular(torch.FloatTensor([[height, width, target_height, target_width, 0, 0]])) + return out + +class Flux(BaseModel): + def __init__(self, model_config, model_type=ModelType.FLUX, device=None): + super().__init__(model_config, model_type, device=device, unet_model=comfy.ldm.flux.model.Flux) + + def encode_adm(self, **kwargs): + return kwargs["pooled_output"] + + def extra_conds(self, **kwargs): + out = super().extra_conds(**kwargs) + cross_attn = kwargs.get("cross_attn", None) + if cross_attn is not None: + out['c_crossattn'] = comfy.conds.CONDRegular(cross_attn) + out['guidance'] = comfy.conds.CONDRegular(torch.FloatTensor([kwargs.get("guidance", 3.5)])) + return out + +class GenmoMochi(BaseModel): + def __init__(self, model_config, model_type=ModelType.FLOW, device=None): + super().__init__(model_config, model_type, device=device, unet_model=comfy.ldm.genmo.joint_model.asymm_models_joint.AsymmDiTJoint) + + def extra_conds(self, **kwargs): + out = super().extra_conds(**kwargs) + attention_mask = kwargs.get("attention_mask", None) + if attention_mask is not None: + out['attention_mask'] = comfy.conds.CONDRegular(attention_mask) + out['num_tokens'] = comfy.conds.CONDConstant(max(1, torch.sum(attention_mask).item())) + cross_attn = kwargs.get("cross_attn", None) + if cross_attn is not None: + out['c_crossattn'] = comfy.conds.CONDRegular(cross_attn) + return out diff --git a/src/comfyui/comfy/model_detection.py b/src/comfyui/comfy/model_detection.py new file mode 100644 index 0000000000000000000000000000000000000000..229fe499d02f0e840a64e1be4de55367d2becfed --- /dev/null +++ b/src/comfyui/comfy/model_detection.py @@ -0,0 +1,592 @@ +import comfy.supported_models +import comfy.supported_models_base +import comfy.utils +import math +import logging +import torch + +def count_blocks(state_dict_keys, prefix_string): + count = 0 + while True: + c = False + for k in state_dict_keys: + if k.startswith(prefix_string.format(count)): + c = True + break + if c == False: + break + count += 1 + return count + +def calculate_transformer_depth(prefix, state_dict_keys, state_dict): + context_dim = None + use_linear_in_transformer = False + + transformer_prefix = prefix + "1.transformer_blocks." 
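+    # The "1." in the prefix points at the SpatialTransformer that follows each
+    # ResBlock in a block, so counting its transformer_blocks gives the depth at
+    # this level; the weight shapes probed below recover context_dim and whether
+    # proj_in is a linear layer (2D weight) or a 1x1 conv (4D weight).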
+ transformer_keys = sorted(list(filter(lambda a: a.startswith(transformer_prefix), state_dict_keys))) + if len(transformer_keys) > 0: + last_transformer_depth = count_blocks(state_dict_keys, transformer_prefix + '{}') + context_dim = state_dict['{}0.attn2.to_k.weight'.format(transformer_prefix)].shape[1] + use_linear_in_transformer = len(state_dict['{}1.proj_in.weight'.format(prefix)].shape) == 2 + time_stack = '{}1.time_stack.0.attn1.to_q.weight'.format(prefix) in state_dict or '{}1.time_mix_blocks.0.attn1.to_q.weight'.format(prefix) in state_dict + time_stack_cross = '{}1.time_stack.0.attn2.to_q.weight'.format(prefix) in state_dict or '{}1.time_mix_blocks.0.attn2.to_q.weight'.format(prefix) in state_dict + return last_transformer_depth, context_dim, use_linear_in_transformer, time_stack, time_stack_cross + return None + +def detect_unet_config(state_dict, key_prefix): + state_dict_keys = list(state_dict.keys()) + + if '{}joint_blocks.0.context_block.attn.qkv.weight'.format(key_prefix) in state_dict_keys: #mmdit model + unet_config = {} + unet_config["in_channels"] = state_dict['{}x_embedder.proj.weight'.format(key_prefix)].shape[1] + patch_size = state_dict['{}x_embedder.proj.weight'.format(key_prefix)].shape[2] + unet_config["patch_size"] = patch_size + final_layer = '{}final_layer.linear.weight'.format(key_prefix) + if final_layer in state_dict: + unet_config["out_channels"] = state_dict[final_layer].shape[0] // (patch_size * patch_size) + + unet_config["depth"] = state_dict['{}x_embedder.proj.weight'.format(key_prefix)].shape[0] // 64 + unet_config["input_size"] = None + y_key = '{}y_embedder.mlp.0.weight'.format(key_prefix) + if y_key in state_dict_keys: + unet_config["adm_in_channels"] = state_dict[y_key].shape[1] + + context_key = '{}context_embedder.weight'.format(key_prefix) + if context_key in state_dict_keys: + in_features = state_dict[context_key].shape[1] + out_features = state_dict[context_key].shape[0] + unet_config["context_embedder_config"] = {"target": "torch.nn.Linear", "params": {"in_features": in_features, "out_features": out_features}} + num_patches_key = '{}pos_embed'.format(key_prefix) + if num_patches_key in state_dict_keys: + num_patches = state_dict[num_patches_key].shape[1] + unet_config["num_patches"] = num_patches + unet_config["pos_embed_max_size"] = round(math.sqrt(num_patches)) + + rms_qk = '{}joint_blocks.0.context_block.attn.ln_q.weight'.format(key_prefix) + if rms_qk in state_dict_keys: + unet_config["qk_norm"] = "rms" + + unet_config["pos_embed_scaling_factor"] = None #unused for inference + context_processor = '{}context_processor.layers.0.attn.qkv.weight'.format(key_prefix) + if context_processor in state_dict_keys: + unet_config["context_processor_layers"] = count_blocks(state_dict_keys, '{}context_processor.layers.'.format(key_prefix) + '{}.') + unet_config["x_block_self_attn_layers"] = [] + for key in state_dict_keys: + if key.startswith('{}joint_blocks.'.format(key_prefix)) and key.endswith('.x_block.attn2.qkv.weight'): + layer = key[len('{}joint_blocks.'.format(key_prefix)):-len('.x_block.attn2.qkv.weight')] + unet_config["x_block_self_attn_layers"].append(int(layer)) + return unet_config + + if '{}clf.1.weight'.format(key_prefix) in state_dict_keys: #stable cascade + unet_config = {} + text_mapper_name = '{}clip_txt_mapper.weight'.format(key_prefix) + if text_mapper_name in state_dict_keys: + unet_config['stable_cascade_stage'] = 'c' + w = state_dict[text_mapper_name] + if w.shape[0] == 1536: #stage c lite + unet_config['c_cond'] = 1536 + 
unet_config['c_hidden'] = [1536, 1536] + unet_config['nhead'] = [24, 24] + unet_config['blocks'] = [[4, 12], [12, 4]] + elif w.shape[0] == 2048: #stage c full + unet_config['c_cond'] = 2048 + elif '{}clip_mapper.weight'.format(key_prefix) in state_dict_keys: + unet_config['stable_cascade_stage'] = 'b' + w = state_dict['{}down_blocks.1.0.channelwise.0.weight'.format(key_prefix)] + if w.shape[-1] == 640: + unet_config['c_hidden'] = [320, 640, 1280, 1280] + unet_config['nhead'] = [-1, -1, 20, 20] + unet_config['blocks'] = [[2, 6, 28, 6], [6, 28, 6, 2]] + unet_config['block_repeat'] = [[1, 1, 1, 1], [3, 3, 2, 2]] + elif w.shape[-1] == 576: #stage b lite + unet_config['c_hidden'] = [320, 576, 1152, 1152] + unet_config['nhead'] = [-1, 9, 18, 18] + unet_config['blocks'] = [[2, 4, 14, 4], [4, 14, 4, 2]] + unet_config['block_repeat'] = [[1, 1, 1, 1], [2, 2, 2, 2]] + return unet_config + + if '{}transformer.rotary_pos_emb.inv_freq'.format(key_prefix) in state_dict_keys: #stable audio dit + unet_config = {} + unet_config["audio_model"] = "dit1.0" + return unet_config + + if '{}double_layers.0.attn.w1q.weight'.format(key_prefix) in state_dict_keys: #aura flow dit + unet_config = {} + unet_config["max_seq"] = state_dict['{}positional_encoding'.format(key_prefix)].shape[1] + unet_config["cond_seq_dim"] = state_dict['{}cond_seq_linear.weight'.format(key_prefix)].shape[1] + double_layers = count_blocks(state_dict_keys, '{}double_layers.'.format(key_prefix) + '{}.') + single_layers = count_blocks(state_dict_keys, '{}single_layers.'.format(key_prefix) + '{}.') + unet_config["n_double_layers"] = double_layers + unet_config["n_layers"] = double_layers + single_layers + return unet_config + + if '{}mlp_t5.0.weight'.format(key_prefix) in state_dict_keys: #Hunyuan DiT + unet_config = {} + unet_config["image_model"] = "hydit" + unet_config["depth"] = count_blocks(state_dict_keys, '{}blocks.'.format(key_prefix) + '{}.') + unet_config["hidden_size"] = state_dict['{}x_embedder.proj.weight'.format(key_prefix)].shape[0] + if unet_config["hidden_size"] == 1408 and unet_config["depth"] == 40: #DiT-g/2 + unet_config["mlp_ratio"] = 4.3637 + if state_dict['{}extra_embedder.0.weight'.format(key_prefix)].shape[1] == 3968: + unet_config["size_cond"] = True + unet_config["use_style_cond"] = True + unet_config["image_model"] = "hydit1" + return unet_config + + if '{}double_blocks.0.img_attn.norm.key_norm.scale'.format(key_prefix) in state_dict_keys: #Flux + dit_config = {} + dit_config["image_model"] = "flux" + dit_config["in_channels"] = 16 + dit_config["vec_in_dim"] = 768 + dit_config["context_in_dim"] = 4096 + dit_config["hidden_size"] = 3072 + dit_config["mlp_ratio"] = 4.0 + dit_config["num_heads"] = 24 + dit_config["depth"] = count_blocks(state_dict_keys, '{}double_blocks.'.format(key_prefix) + '{}.') + dit_config["depth_single_blocks"] = count_blocks(state_dict_keys, '{}single_blocks.'.format(key_prefix) + '{}.') + dit_config["axes_dim"] = [16, 56, 56] + dit_config["theta"] = 10000 + dit_config["qkv_bias"] = True + dit_config["guidance_embed"] = "{}guidance_in.in_layer.weight".format(key_prefix) in state_dict_keys + return dit_config + + if '{}t5_yproj.weight'.format(key_prefix) in state_dict_keys: #Genmo mochi preview + dit_config = {} + dit_config["image_model"] = "mochi_preview" + dit_config["depth"] = 48 + dit_config["patch_size"] = 2 + dit_config["num_heads"] = 24 + dit_config["hidden_size_x"] = 3072 + dit_config["hidden_size_y"] = 1536 + dit_config["mlp_ratio_x"] = 4.0 + dit_config["mlp_ratio_y"] = 4.0 + 
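# Everything past the t5_yproj marker-key check is hardcoded: the Mochi preview
# release ships a single known architecture, so unlike the UNet path below
# there is nothing that needs to be probed from the weight shapes.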
dit_config["learn_sigma"] = False + dit_config["in_channels"] = 12 + dit_config["qk_norm"] = True + dit_config["qkv_bias"] = False + dit_config["out_bias"] = True + dit_config["attn_drop"] = 0.0 + dit_config["patch_embed_bias"] = True + dit_config["posenc_preserve_area"] = True + dit_config["timestep_mlp_bias"] = True + dit_config["attend_to_padding"] = False + dit_config["timestep_scale"] = 1000.0 + dit_config["use_t5"] = True + dit_config["t5_feat_dim"] = 4096 + dit_config["t5_token_length"] = 256 + dit_config["rope_theta"] = 10000.0 + return dit_config + + + if '{}input_blocks.0.0.weight'.format(key_prefix) not in state_dict_keys: + return None + + unet_config = { + "use_checkpoint": False, + "image_size": 32, + "use_spatial_transformer": True, + "legacy": False + } + + y_input = '{}label_emb.0.0.weight'.format(key_prefix) + if y_input in state_dict_keys: + unet_config["num_classes"] = "sequential" + unet_config["adm_in_channels"] = state_dict[y_input].shape[1] + else: + unet_config["adm_in_channels"] = None + + model_channels = state_dict['{}input_blocks.0.0.weight'.format(key_prefix)].shape[0] + in_channels = state_dict['{}input_blocks.0.0.weight'.format(key_prefix)].shape[1] + + out_key = '{}out.2.weight'.format(key_prefix) + if out_key in state_dict: + out_channels = state_dict[out_key].shape[0] + else: + out_channels = 4 + + num_res_blocks = [] + channel_mult = [] + attention_resolutions = [] + transformer_depth = [] + transformer_depth_output = [] + context_dim = None + use_linear_in_transformer = False + + video_model = False + video_model_cross = False + + current_res = 1 + count = 0 + + last_res_blocks = 0 + last_channel_mult = 0 + + input_block_count = count_blocks(state_dict_keys, '{}input_blocks'.format(key_prefix) + '.{}.') + for count in range(input_block_count): + prefix = '{}input_blocks.{}.'.format(key_prefix, count) + prefix_output = '{}output_blocks.{}.'.format(key_prefix, input_block_count - count - 1) + + block_keys = sorted(list(filter(lambda a: a.startswith(prefix), state_dict_keys))) + if len(block_keys) == 0: + break + + block_keys_output = sorted(list(filter(lambda a: a.startswith(prefix_output), state_dict_keys))) + + if "{}0.op.weight".format(prefix) in block_keys: #new layer + num_res_blocks.append(last_res_blocks) + channel_mult.append(last_channel_mult) + + current_res *= 2 + last_res_blocks = 0 + last_channel_mult = 0 + out = calculate_transformer_depth(prefix_output, state_dict_keys, state_dict) + if out is not None: + transformer_depth_output.append(out[0]) + else: + transformer_depth_output.append(0) + else: + res_block_prefix = "{}0.in_layers.0.weight".format(prefix) + if res_block_prefix in block_keys: + last_res_blocks += 1 + last_channel_mult = state_dict["{}0.out_layers.3.weight".format(prefix)].shape[0] // model_channels + + out = calculate_transformer_depth(prefix, state_dict_keys, state_dict) + if out is not None: + transformer_depth.append(out[0]) + if context_dim is None: + context_dim = out[1] + use_linear_in_transformer = out[2] + video_model = out[3] + video_model_cross = out[4] + else: + transformer_depth.append(0) + + res_block_prefix = "{}0.in_layers.0.weight".format(prefix_output) + if res_block_prefix in block_keys_output: + out = calculate_transformer_depth(prefix_output, state_dict_keys, state_dict) + if out is not None: + transformer_depth_output.append(out[0]) + else: + transformer_depth_output.append(0) + + + num_res_blocks.append(last_res_blocks) + channel_mult.append(last_channel_mult) + if 
"{}middle_block.1.proj_in.weight".format(key_prefix) in state_dict_keys: + transformer_depth_middle = count_blocks(state_dict_keys, '{}middle_block.1.transformer_blocks.'.format(key_prefix) + '{}') + elif "{}middle_block.0.in_layers.0.weight".format(key_prefix) in state_dict_keys: + transformer_depth_middle = -1 + else: + transformer_depth_middle = -2 + + unet_config["in_channels"] = in_channels + unet_config["out_channels"] = out_channels + unet_config["model_channels"] = model_channels + unet_config["num_res_blocks"] = num_res_blocks + unet_config["transformer_depth"] = transformer_depth + unet_config["transformer_depth_output"] = transformer_depth_output + unet_config["channel_mult"] = channel_mult + unet_config["transformer_depth_middle"] = transformer_depth_middle + unet_config['use_linear_in_transformer'] = use_linear_in_transformer + unet_config["context_dim"] = context_dim + + if video_model: + unet_config["extra_ff_mix_layer"] = True + unet_config["use_spatial_context"] = True + unet_config["merge_strategy"] = "learned_with_images" + unet_config["merge_factor"] = 0.0 + unet_config["video_kernel_size"] = [3, 1, 1] + unet_config["use_temporal_resblock"] = True + unet_config["use_temporal_attention"] = True + unet_config["disable_temporal_crossattention"] = not video_model_cross + else: + unet_config["use_temporal_resblock"] = False + unet_config["use_temporal_attention"] = False + + return unet_config + +def model_config_from_unet_config(unet_config, state_dict=None): + for model_config in comfy.supported_models.models: + if model_config.matches(unet_config, state_dict): + return model_config(unet_config) + + logging.error("no match {}".format(unet_config)) + return None + +def model_config_from_unet(state_dict, unet_key_prefix, use_base_if_no_match=False): + unet_config = detect_unet_config(state_dict, unet_key_prefix) + if unet_config is None: + return None + model_config = model_config_from_unet_config(unet_config, state_dict) + if model_config is None and use_base_if_no_match: + model_config = comfy.supported_models_base.BASE(unet_config) + + scaled_fp8_weight = state_dict.get("{}scaled_fp8".format(unet_key_prefix), None) + if scaled_fp8_weight is not None: + model_config.scaled_fp8 = scaled_fp8_weight.dtype + if model_config.scaled_fp8 == torch.float32: + model_config.scaled_fp8 = torch.float8_e4m3fn + + return model_config + +def unet_prefix_from_state_dict(state_dict): + candidates = ["model.diffusion_model.", #ldm/sgm models + "model.model.", #audio models + ] + counts = {k: 0 for k in candidates} + for k in state_dict: + for c in candidates: + if k.startswith(c): + counts[c] += 1 + break + + top = max(counts, key=counts.get) + if counts[top] > 5: + return top + else: + return "model." 
#aura flow and others + + +def convert_config(unet_config): + new_config = unet_config.copy() + num_res_blocks = new_config.get("num_res_blocks", None) + channel_mult = new_config.get("channel_mult", None) + + if isinstance(num_res_blocks, int): + num_res_blocks = len(channel_mult) * [num_res_blocks] + + if "attention_resolutions" in new_config: + attention_resolutions = new_config.pop("attention_resolutions") + transformer_depth = new_config.get("transformer_depth", None) + transformer_depth_middle = new_config.get("transformer_depth_middle", None) + + if isinstance(transformer_depth, int): + transformer_depth = len(channel_mult) * [transformer_depth] + if transformer_depth_middle is None: + transformer_depth_middle = transformer_depth[-1] + t_in = [] + t_out = [] + s = 1 + for i in range(len(num_res_blocks)): + res = num_res_blocks[i] + d = 0 + if s in attention_resolutions: + d = transformer_depth[i] + + t_in += [d] * res + t_out += [d] * (res + 1) + s *= 2 + transformer_depth = t_in + transformer_depth_output = t_out + new_config["transformer_depth"] = t_in + new_config["transformer_depth_output"] = t_out + new_config["transformer_depth_middle"] = transformer_depth_middle + + new_config["num_res_blocks"] = num_res_blocks + return new_config + + +def unet_config_from_diffusers_unet(state_dict, dtype=None): + match = {} + transformer_depth = [] + + attn_res = 1 + down_blocks = count_blocks(state_dict, "down_blocks.{}") + for i in range(down_blocks): + attn_blocks = count_blocks(state_dict, "down_blocks.{}.attentions.".format(i) + '{}') + res_blocks = count_blocks(state_dict, "down_blocks.{}.resnets.".format(i) + '{}') + for ab in range(attn_blocks): + transformer_count = count_blocks(state_dict, "down_blocks.{}.attentions.{}.transformer_blocks.".format(i, ab) + '{}') + transformer_depth.append(transformer_count) + if transformer_count > 0: + match["context_dim"] = state_dict["down_blocks.{}.attentions.{}.transformer_blocks.0.attn2.to_k.weight".format(i, ab)].shape[1] + + attn_res *= 2 + if attn_blocks == 0: + for i in range(res_blocks): + transformer_depth.append(0) + + match["transformer_depth"] = transformer_depth + + match["model_channels"] = state_dict["conv_in.weight"].shape[0] + match["in_channels"] = state_dict["conv_in.weight"].shape[1] + match["adm_in_channels"] = None + if "class_embedding.linear_1.weight" in state_dict: + match["adm_in_channels"] = state_dict["class_embedding.linear_1.weight"].shape[1] + elif "add_embedding.linear_1.weight" in state_dict: + match["adm_in_channels"] = state_dict["add_embedding.linear_1.weight"].shape[1] + + SDXL = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 10, 10], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 10, + 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 2, 2, 2, 10, 10, 10], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SDXL_refiner = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2560, 'dtype': dtype, 'in_channels': 4, 'model_channels': 384, + 'num_res_blocks': [2, 2, 2, 2], 'transformer_depth': [0, 0, 4, 4, 4, 4, 0, 0], 'channel_mult': [1, 2, 4, 4], 
'transformer_depth_middle': 4, + 'use_linear_in_transformer': True, 'context_dim': 1280, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 4, 4, 4, 4, 4, 4, 0, 0, 0], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SD21 = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'adm_in_channels': None, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 'num_res_blocks': [2, 2, 2, 2], + 'transformer_depth': [1, 1, 1, 1, 1, 1, 0, 0], 'channel_mult': [1, 2, 4, 4], 'transformer_depth_middle': 1, 'use_linear_in_transformer': True, + 'context_dim': 1024, 'num_head_channels': 64, 'transformer_depth_output': [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SD21_uncliph = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2048, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2, 2], 'transformer_depth': [1, 1, 1, 1, 1, 1, 0, 0], 'channel_mult': [1, 2, 4, 4], 'transformer_depth_middle': 1, + 'use_linear_in_transformer': True, 'context_dim': 1024, 'num_head_channels': 64, 'transformer_depth_output': [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SD21_unclipl = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 1536, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2, 2], 'transformer_depth': [1, 1, 1, 1, 1, 1, 0, 0], 'channel_mult': [1, 2, 4, 4], 'transformer_depth_middle': 1, + 'use_linear_in_transformer': True, 'context_dim': 1024, 'num_head_channels': 64, 'transformer_depth_output': [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SD15 = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'adm_in_channels': None, + 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 'num_res_blocks': [2, 2, 2, 2], 'transformer_depth': [1, 1, 1, 1, 1, 1, 0, 0], + 'channel_mult': [1, 2, 4, 4], 'transformer_depth_middle': 1, 'use_linear_in_transformer': False, 'context_dim': 768, 'num_heads': 8, + 'transformer_depth_output': [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SDXL_mid_cnet = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 0, 0, 1, 1], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 1, + 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 0, 0, 0, 1, 1, 1], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SDXL_small_cnet = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 0, 0, 0, 0], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 0, + 
'use_linear_in_transformer': True, 'num_head_channels': 64, 'context_dim': 1, 'transformer_depth_output': [0, 0, 0, 0, 0, 0, 0, 0, 0], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SDXL_diffusers_inpaint = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 9, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 10, 10], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 10, + 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 2, 2, 2, 10, 10, 10], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SDXL_diffusers_ip2p = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 8, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 10, 10], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 10, + 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 0, 2, 2, 2, 10, 10, 10], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SSD_1B = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 2, 2, 4, 4], 'transformer_depth_output': [0, 0, 0, 1, 1, 2, 10, 4, 4], + 'channel_mult': [1, 2, 4], 'transformer_depth_middle': -1, 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + Segmind_Vega = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [2, 2, 2], 'transformer_depth': [0, 0, 1, 1, 2, 2], 'transformer_depth_output': [0, 0, 0, 1, 1, 1, 2, 2, 2], + 'channel_mult': [1, 2, 4], 'transformer_depth_middle': -1, 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + KOALA_700M = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [1, 1, 1], 'transformer_depth': [0, 2, 5], 'transformer_depth_output': [0, 0, 2, 2, 5, 5], + 'channel_mult': [1, 2, 4], 'transformer_depth_middle': -2, 'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + KOALA_1B = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'num_classes': 'sequential', 'adm_in_channels': 2816, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, + 'num_res_blocks': [1, 1, 1], 'transformer_depth': [0, 2, 6], 'transformer_depth_output': [0, 0, 2, 2, 6, 6], + 'channel_mult': [1, 2, 4], 'transformer_depth_middle': 6, 
'use_linear_in_transformer': True, 'context_dim': 2048, 'num_head_channels': 64, + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SD09_XS = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'adm_in_channels': None, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 'num_res_blocks': [1, 1, 1], + 'transformer_depth': [1, 1, 1], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': -2, 'use_linear_in_transformer': True, + 'context_dim': 1024, 'num_head_channels': 64, 'transformer_depth_output': [1, 1, 1, 1, 1, 1], + 'use_temporal_attention': False, 'use_temporal_resblock': False, 'disable_self_attentions': [True, False, False]} + + SD_XS = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, + 'adm_in_channels': None, 'dtype': dtype, 'in_channels': 4, 'model_channels': 320, 'num_res_blocks': [1, 1, 1], + 'transformer_depth': [0, 1, 1], 'channel_mult': [1, 2, 4], 'transformer_depth_middle': -2, 'use_linear_in_transformer': False, + 'context_dim': 768, 'num_head_channels': 64, 'transformer_depth_output': [0, 0, 1, 1, 1, 1], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + SD15_diffusers_inpaint = {'use_checkpoint': False, 'image_size': 32, 'out_channels': 4, 'use_spatial_transformer': True, 'legacy': False, 'adm_in_channels': None, + 'dtype': dtype, 'in_channels': 9, 'model_channels': 320, 'num_res_blocks': [2, 2, 2, 2], 'transformer_depth': [1, 1, 1, 1, 1, 1, 0, 0], + 'channel_mult': [1, 2, 4, 4], 'transformer_depth_middle': 1, 'use_linear_in_transformer': False, 'context_dim': 768, 'num_heads': 8, + 'transformer_depth_output': [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0], + 'use_temporal_attention': False, 'use_temporal_resblock': False} + + + supported_models = [SDXL, SDXL_refiner, SD21, SD15, SD21_uncliph, SD21_unclipl, SDXL_mid_cnet, SDXL_small_cnet, SDXL_diffusers_inpaint, SSD_1B, Segmind_Vega, KOALA_700M, KOALA_1B, SD09_XS, SD_XS, SDXL_diffusers_ip2p, SD15_diffusers_inpaint] + + for unet_config in supported_models: + matches = True + for k in match: + if match[k] != unet_config[k]: + matches = False + break + if matches: + return convert_config(unet_config) + return None + +def model_config_from_diffusers_unet(state_dict): + unet_config = unet_config_from_diffusers_unet(state_dict) + if unet_config is not None: + return model_config_from_unet_config(unet_config) + return None + +def convert_diffusers_mmdit(state_dict, output_prefix=""): + out_sd = {} + + if 'joint_transformer_blocks.0.attn.add_k_proj.weight' in state_dict: #AuraFlow + num_joint = count_blocks(state_dict, 'joint_transformer_blocks.{}.') + num_single = count_blocks(state_dict, 'single_transformer_blocks.{}.') + sd_map = comfy.utils.auraflow_to_diffusers({"n_double_layers": num_joint, "n_layers": num_joint + num_single}, output_prefix=output_prefix) + elif 'x_embedder.weight' in state_dict: #Flux + depth = count_blocks(state_dict, 'transformer_blocks.{}.') + depth_single_blocks = count_blocks(state_dict, 'single_transformer_blocks.{}.') + hidden_size = state_dict["x_embedder.bias"].shape[0] + sd_map = comfy.utils.flux_to_diffusers({"depth": depth, "depth_single_blocks": depth_single_blocks, "hidden_size": hidden_size}, output_prefix=output_prefix) + elif 'transformer_blocks.0.attn.add_q_proj.weight' in state_dict: #SD3 + num_blocks = count_blocks(state_dict, 'transformer_blocks.{}.') + depth = state_dict["pos_embed.proj.weight"].shape[0] // 64 + sd_map = 
comfy.utils.mmdit_to_diffusers({"depth": depth, "num_blocks": num_blocks}, output_prefix=output_prefix) + else: + return None + + for k in sd_map: + weight = state_dict.get(k, None) + if weight is not None: + t = sd_map[k] + + if not isinstance(t, str): + if len(t) > 2: + fun = t[2] + else: + fun = lambda a: a + offset = t[1] + if offset is not None: + old_weight = out_sd.get(t[0], None) + if old_weight is None: + old_weight = torch.empty_like(weight) + if old_weight.shape[offset[0]] < offset[1] + offset[2]: + exp = list(weight.shape) + exp[offset[0]] = offset[1] + offset[2] + new = torch.empty(exp, device=weight.device, dtype=weight.dtype) + new[:old_weight.shape[0]] = old_weight + old_weight = new + + w = old_weight.narrow(offset[0], offset[1], offset[2]) + else: + old_weight = weight + w = weight + w[:] = fun(weight) + t = t[0] + out_sd[t] = old_weight + else: + out_sd[t] = weight + state_dict.pop(k) + + return out_sd diff --git a/src/comfyui/comfy/model_management.py b/src/comfyui/comfy/model_management.py new file mode 100644 index 0000000000000000000000000000000000000000..fd493aff0782574936cb2f70229ec020820e6a8a --- /dev/null +++ b/src/comfyui/comfy/model_management.py @@ -0,0 +1,1131 @@ +""" + This file is part of ComfyUI. + Copyright (C) 2024 Comfy + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . +""" + +import psutil +import logging +from enum import Enum +from comfy.cli_args import args +import torch +import sys +import platform + +class VRAMState(Enum): + DISABLED = 0 #No vram present: no need to move models to vram + NO_VRAM = 1 #Very low vram: enable all the options to save vram + LOW_VRAM = 2 + NORMAL_VRAM = 3 + HIGH_VRAM = 4 + SHARED = 5 #No dedicated vram: memory shared between CPU and GPU but models still need to be moved between both. + +class CPUState(Enum): + GPU = 0 + CPU = 1 + MPS = 2 + +# Determine VRAM State +vram_state = VRAMState.NORMAL_VRAM +set_vram_to = VRAMState.NORMAL_VRAM +cpu_state = CPUState.GPU + +total_vram = 0 + +xpu_available = False +torch_version = "" +try: + torch_version = torch.version.__version__ + xpu_available = (int(torch_version[0]) < 2 or (int(torch_version[0]) == 2 and int(torch_version[2]) <= 4)) and torch.xpu.is_available() +except: + pass + +lowvram_available = True +if args.deterministic: + logging.info("Using deterministic algorithms for pytorch") + torch.use_deterministic_algorithms(True, warn_only=True) + +directml_enabled = False +if args.directml is not None: + import torch_directml + directml_enabled = True + device_index = args.directml + if device_index < 0: + directml_device = torch_directml.device() + else: + directml_device = torch_directml.device(device_index) + logging.info("Using directml with device: {}".format(torch_directml.device_name(device_index))) + # torch_directml.disable_tiled_resources(True) + lowvram_available = False #TODO: need to find a way to get free memory in directml before this can be enabled by default. 
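The version gate above indexes torch_version character by character, so torch_version[0] and torch_version[2] are the major and minor digits of a string like "2.4.1"; that stops working once either field reaches two digits (e.g. "2.10.0"). A minimal sketch of a sturdier parse, where the helper name parse_torch_version is illustrative and not part of this diff:

import torch

def parse_torch_version(version_string):
    # "2.4.1+cu121" -> (2, 4): drop any local build suffix, split on dots.
    parts = version_string.split("+")[0].split(".")
    return int(parts[0]), int(parts[1])

major, minor = parse_torch_version(torch.version.__version__)
xpu_version_ok = major < 2 or (major == 2 and minor <= 4)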
+ +try: + import intel_extension_for_pytorch as ipex + _ = torch.xpu.device_count() + xpu_available = torch.xpu.is_available() +except: + xpu_available = xpu_available or (hasattr(torch, "xpu") and torch.xpu.is_available()) + +try: + if torch.backends.mps.is_available(): + cpu_state = CPUState.MPS + import torch.mps +except: + pass + +if args.cpu: + cpu_state = CPUState.CPU + +def is_intel_xpu(): + global cpu_state + global xpu_available + if cpu_state == CPUState.GPU: + if xpu_available: + return True + return False + +def get_torch_device(): + global directml_enabled + global cpu_state + if directml_enabled: + global directml_device + return directml_device + if cpu_state == CPUState.MPS: + return torch.device("mps") + if cpu_state == CPUState.CPU: + return torch.device("cpu") + else: + if is_intel_xpu(): + return torch.device("xpu", torch.xpu.current_device()) + else: + return torch.device(torch.cuda.current_device()) + +def get_total_memory(dev=None, torch_total_too=False): + global directml_enabled + if dev is None: + dev = get_torch_device() + + if hasattr(dev, 'type') and (dev.type == 'cpu' or dev.type == 'mps'): + mem_total = psutil.virtual_memory().total + mem_total_torch = mem_total + else: + if directml_enabled: + mem_total = 1024 * 1024 * 1024 #TODO + mem_total_torch = mem_total + elif is_intel_xpu(): + stats = torch.xpu.memory_stats(dev) + mem_reserved = stats['reserved_bytes.all.current'] + mem_total_torch = mem_reserved + mem_total = torch.xpu.get_device_properties(dev).total_memory + else: + stats = torch.cuda.memory_stats(dev) + mem_reserved = stats['reserved_bytes.all.current'] + _, mem_total_cuda = torch.cuda.mem_get_info(dev) + mem_total_torch = mem_reserved + mem_total = mem_total_cuda + + if torch_total_too: + return (mem_total, mem_total_torch) + else: + return mem_total + +total_vram = get_total_memory(get_torch_device()) / (1024 * 1024) +total_ram = psutil.virtual_memory().total / (1024 * 1024) +logging.info("Total VRAM {:0.0f} MB, total RAM {:0.0f} MB".format(total_vram, total_ram)) + +try: + logging.info("pytorch version: {}".format(torch_version)) +except: + pass + +try: + OOM_EXCEPTION = torch.cuda.OutOfMemoryError +except: + OOM_EXCEPTION = Exception + +XFORMERS_VERSION = "" +XFORMERS_ENABLED_VAE = True +if args.disable_xformers: + XFORMERS_IS_AVAILABLE = False +else: + try: + import xformers + import xformers.ops + XFORMERS_IS_AVAILABLE = True + try: + XFORMERS_IS_AVAILABLE = xformers._has_cpp_library + except: + pass + try: + XFORMERS_VERSION = xformers.version.__version__ + logging.info("xformers version: {}".format(XFORMERS_VERSION)) + if XFORMERS_VERSION.startswith("0.0.18"): + logging.warning("\nWARNING: This version of xformers has a major bug where you will get black images when generating high resolution images.") + logging.warning("Please downgrade or upgrade xformers to a different version.\n") + XFORMERS_ENABLED_VAE = False + except: + pass + except: + XFORMERS_IS_AVAILABLE = False + +def is_nvidia(): + global cpu_state + if cpu_state == CPUState.GPU: + if torch.version.cuda: + return True + return False + +ENABLE_PYTORCH_ATTENTION = False +if args.use_pytorch_cross_attention: + ENABLE_PYTORCH_ATTENTION = True + XFORMERS_IS_AVAILABLE = False + +VAE_DTYPES = [torch.float32] + +try: + if is_nvidia(): + if int(torch_version[0]) >= 2: + if ENABLE_PYTORCH_ATTENTION == False and args.use_split_cross_attention == False and args.use_quad_cross_attention == False: + ENABLE_PYTORCH_ATTENTION = True + if torch.cuda.is_bf16_supported() and 
torch.cuda.get_device_properties(torch.cuda.current_device()).major >= 8: + VAE_DTYPES = [torch.bfloat16] + VAE_DTYPES + if is_intel_xpu(): + if args.use_split_cross_attention == False and args.use_quad_cross_attention == False: + ENABLE_PYTORCH_ATTENTION = True +except: + pass + +if is_intel_xpu(): + VAE_DTYPES = [torch.bfloat16] + VAE_DTYPES + +if args.cpu_vae: + VAE_DTYPES = [torch.float32] + + +if ENABLE_PYTORCH_ATTENTION: + torch.backends.cuda.enable_math_sdp(True) + torch.backends.cuda.enable_flash_sdp(True) + torch.backends.cuda.enable_mem_efficient_sdp(True) + +if args.lowvram: + set_vram_to = VRAMState.LOW_VRAM + lowvram_available = True +elif args.novram: + set_vram_to = VRAMState.NO_VRAM +elif args.highvram or args.gpu_only: + vram_state = VRAMState.HIGH_VRAM + +FORCE_FP32 = False +FORCE_FP16 = False +if args.force_fp32: + logging.info("Forcing FP32, if this improves things please report it.") + FORCE_FP32 = True + +if args.force_fp16: + logging.info("Forcing FP16.") + FORCE_FP16 = True + +if lowvram_available: + if set_vram_to in (VRAMState.LOW_VRAM, VRAMState.NO_VRAM): + vram_state = set_vram_to + + +if cpu_state != CPUState.GPU: + vram_state = VRAMState.DISABLED + +if cpu_state == CPUState.MPS: + vram_state = VRAMState.SHARED + +logging.info(f"Set vram state to: {vram_state.name}") + +DISABLE_SMART_MEMORY = args.disable_smart_memory + +if DISABLE_SMART_MEMORY: + logging.info("Disabling smart memory management") + +def get_torch_device_name(device): + if hasattr(device, 'type'): + if device.type == "cuda": + try: + allocator_backend = torch.cuda.get_allocator_backend() + except: + allocator_backend = "" + return "{} {} : {}".format(device, torch.cuda.get_device_name(device), allocator_backend) + else: + return "{}".format(device.type) + elif is_intel_xpu(): + return "{} {}".format(device, torch.xpu.get_device_name(device)) + else: + return "CUDA {}: {}".format(device, torch.cuda.get_device_name(device)) + +try: + logging.info("Device: {}".format(get_torch_device_name(get_torch_device()))) +except: + logging.warning("Could not pick default device.") + + +current_loaded_models = [] + +def module_size(module): + module_mem = 0 + sd = module.state_dict() + for k in sd: + t = sd[k] + module_mem += t.nelement() * t.element_size() + return module_mem + +class LoadedModel: + def __init__(self, model): + self.model = model + self.device = model.load_device + self.weights_loaded = False + self.real_model = None + self.currently_used = True + + def model_memory(self): + return self.model.model_size() + + def model_offloaded_memory(self): + return self.model.model_size() - self.model.loaded_size() + + def model_memory_required(self, device): + if device == self.model.current_loaded_device(): + return self.model_offloaded_memory() + else: + return self.model_memory() + + def model_load(self, lowvram_model_memory=0, force_patch_weights=False): + patch_model_to = self.device + + self.model.model_patches_to(self.device) + self.model.model_patches_to(self.model.model_dtype()) + + load_weights = not self.weights_loaded + + if self.model.loaded_size() > 0: + use_more_vram = lowvram_model_memory + if use_more_vram == 0: + use_more_vram = 1e32 + self.model_use_more_vram(use_more_vram) + else: + try: + self.real_model = self.model.patch_model(device_to=patch_model_to, lowvram_model_memory=lowvram_model_memory, load_weights=load_weights, force_patch_weights=force_patch_weights) + except Exception as e: + self.model.unpatch_model(self.model.offload_device) + self.model_unload() + raise e + + if 
is_intel_xpu() and not args.disable_ipex_optimize and 'ipex' in globals() and self.real_model is not None: + with torch.no_grad(): + self.real_model = ipex.optimize(self.real_model.eval(), inplace=True, graph_mode=True, concat_linear=True) + + self.weights_loaded = True + return self.real_model + + def should_reload_model(self, force_patch_weights=False): + if force_patch_weights and self.model.lowvram_patch_counter() > 0: + return True + return False + + def model_unload(self, memory_to_free=None, unpatch_weights=True): + if memory_to_free is not None: + if memory_to_free < self.model.loaded_size(): + freed = self.model.partially_unload(self.model.offload_device, memory_to_free) + if freed >= memory_to_free: + return False + self.model.unpatch_model(self.model.offload_device, unpatch_weights=unpatch_weights) + self.model.model_patches_to(self.model.offload_device) + self.weights_loaded = self.weights_loaded and not unpatch_weights + self.real_model = None + return True + + def model_use_more_vram(self, extra_memory): + return self.model.partially_load(self.device, extra_memory) + + def __eq__(self, other): + return self.model is other.model + +def use_more_memory(extra_memory, loaded_models, device): + for m in loaded_models: + if m.device == device: + extra_memory -= m.model_use_more_vram(extra_memory) + if extra_memory <= 0: + break + +def offloaded_memory(loaded_models, device): + offloaded_mem = 0 + for m in loaded_models: + if m.device == device: + offloaded_mem += m.model_offloaded_memory() + return offloaded_mem + +WINDOWS = any(platform.win32_ver()) + +EXTRA_RESERVED_VRAM = 400 * 1024 * 1024 +if WINDOWS: + EXTRA_RESERVED_VRAM = 600 * 1024 * 1024 #Windows is higher because of the shared vram issue + +if args.reserve_vram is not None: + EXTRA_RESERVED_VRAM = args.reserve_vram * 1024 * 1024 * 1024 + logging.debug("Reserving {}MB vram for other applications.".format(EXTRA_RESERVED_VRAM / (1024 * 1024))) + +def extra_reserved_memory(): + return EXTRA_RESERVED_VRAM + +def minimum_inference_memory(): + return (1024 * 1024 * 1024) * 0.8 + extra_reserved_memory() + +def unload_model_clones(model, unload_weights_only=True, force_unload=True): + to_unload = [] + for i in range(len(current_loaded_models)): + if model.is_clone(current_loaded_models[i].model): + to_unload = [i] + to_unload + + if len(to_unload) == 0: + return True + + same_weights = 0 + for i in to_unload: + if model.clone_has_same_weights(current_loaded_models[i].model): + same_weights += 1 + + if same_weights == len(to_unload): + unload_weight = False + else: + unload_weight = True + + if not force_unload: + if unload_weights_only and unload_weight == False: + return None + else: + unload_weight = True + + for i in to_unload: + logging.debug("unload clone {} {}".format(i, unload_weight)) + current_loaded_models.pop(i).model_unload(unpatch_weights=unload_weight) + + return unload_weight + +def free_memory(memory_required, device, keep_loaded=[]): + unloaded_model = [] + can_unload = [] + unloaded_models = [] + + for i in range(len(current_loaded_models) -1, -1, -1): + shift_model = current_loaded_models[i] + if shift_model.device == device: + if shift_model not in keep_loaded: + can_unload.append((-shift_model.model_offloaded_memory(), sys.getrefcount(shift_model.model), shift_model.model_memory(), i)) + shift_model.currently_used = False + + for x in sorted(can_unload): + i = x[-1] + memory_to_free = None + if not DISABLE_SMART_MEMORY: + free_mem = get_free_memory(device) + if free_mem > memory_required: + break + 
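# At this point enough memory is already free, so eviction stops early;
# otherwise the shortfall (memory_required - free_mem) computed below is
# passed to model_unload, which may satisfy it by only partially unloading.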
memory_to_free = memory_required - free_mem + logging.debug(f"Unloading {current_loaded_models[i].model.model.__class__.__name__}") + if current_loaded_models[i].model_unload(memory_to_free): + unloaded_model.append(i) + + for i in sorted(unloaded_model, reverse=True): + unloaded_models.append(current_loaded_models.pop(i)) + + if len(unloaded_model) > 0: + soft_empty_cache() + else: + if vram_state != VRAMState.HIGH_VRAM: + mem_free_total, mem_free_torch = get_free_memory(device, torch_free_too=True) + if mem_free_torch > mem_free_total * 0.25: + soft_empty_cache() + return unloaded_models + +def load_models_gpu(models, memory_required=0, force_patch_weights=False, minimum_memory_required=None, force_full_load=False): + global vram_state + + inference_memory = minimum_inference_memory() + extra_mem = max(inference_memory, memory_required + extra_reserved_memory()) + if minimum_memory_required is None: + minimum_memory_required = extra_mem + else: + minimum_memory_required = max(inference_memory, minimum_memory_required + extra_reserved_memory()) + + models = set(models) + + models_to_load = [] + models_already_loaded = [] + for x in models: + loaded_model = LoadedModel(x) + loaded = None + + try: + loaded_model_index = current_loaded_models.index(loaded_model) + except: + loaded_model_index = None + + if loaded_model_index is not None: + loaded = current_loaded_models[loaded_model_index] + if loaded.should_reload_model(force_patch_weights=force_patch_weights): #TODO: cleanup this model reload logic + current_loaded_models.pop(loaded_model_index).model_unload(unpatch_weights=True) + loaded = None + else: + loaded.currently_used = True + models_already_loaded.append(loaded) + + if loaded is None: + if hasattr(x, "model"): + logging.info(f"Requested to load {x.model.__class__.__name__}") + models_to_load.append(loaded_model) + + if len(models_to_load) == 0: + devs = set(map(lambda a: a.device, models_already_loaded)) + for d in devs: + if d != torch.device("cpu"): + free_memory(extra_mem + offloaded_memory(models_already_loaded, d), d, models_already_loaded) + free_mem = get_free_memory(d) + if free_mem < minimum_memory_required: + logging.info("Unloading models for lowram load.") #TODO: partial model unloading when this case happens, also handle the opposite case where models can be unlowvramed. 
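# free_memory() returns the LoadedModel entries it evicted, so the assignment
# below queues exactly those models to be re-loaded under the lower budget.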
+ models_to_load = free_memory(minimum_memory_required, d) + logging.info("{} models unloaded.".format(len(models_to_load))) + else: + use_more_memory(free_mem - minimum_memory_required, models_already_loaded, d) + if len(models_to_load) == 0: + return + + logging.info(f"Loading {len(models_to_load)} new model{'s' if len(models_to_load) > 1 else ''}") + + total_memory_required = {} + for loaded_model in models_to_load: + unload_model_clones(loaded_model.model, unload_weights_only=True, force_unload=False) #unload clones where the weights are different + total_memory_required[loaded_model.device] = total_memory_required.get(loaded_model.device, 0) + loaded_model.model_memory_required(loaded_model.device) + + for loaded_model in models_already_loaded: + total_memory_required[loaded_model.device] = total_memory_required.get(loaded_model.device, 0) + loaded_model.model_memory_required(loaded_model.device) + + for loaded_model in models_to_load: + weights_unloaded = unload_model_clones(loaded_model.model, unload_weights_only=False, force_unload=False) #unload the rest of the clones where the weights can stay loaded + if weights_unloaded is not None: + loaded_model.weights_loaded = not weights_unloaded + + for device in total_memory_required: + if device != torch.device("cpu"): + free_memory(total_memory_required[device] * 1.1 + extra_mem, device, models_already_loaded) + + for loaded_model in models_to_load: + model = loaded_model.model + torch_dev = model.load_device + if is_device_cpu(torch_dev): + vram_set_state = VRAMState.DISABLED + else: + vram_set_state = vram_state + lowvram_model_memory = 0 + if lowvram_available and (vram_set_state == VRAMState.LOW_VRAM or vram_set_state == VRAMState.NORMAL_VRAM) and not force_full_load: + model_size = loaded_model.model_memory_required(torch_dev) + current_free_mem = get_free_memory(torch_dev) + lowvram_model_memory = max(64 * (1024 * 1024), (current_free_mem - minimum_memory_required), min(current_free_mem * 0.4, current_free_mem - minimum_inference_memory())) + if model_size <= lowvram_model_memory: #only switch to lowvram if really necessary + lowvram_model_memory = 0 + + if vram_set_state == VRAMState.NO_VRAM: + lowvram_model_memory = 64 * 1024 * 1024 + + cur_loaded_model = loaded_model.model_load(lowvram_model_memory, force_patch_weights=force_patch_weights) + current_loaded_models.insert(0, loaded_model) + + + devs = set(map(lambda a: a.device, models_already_loaded)) + for d in devs: + if d != torch.device("cpu"): + free_mem = get_free_memory(d) + if free_mem > minimum_memory_required: + use_more_memory(free_mem - minimum_memory_required, models_already_loaded, d) + return + + +def load_model_gpu(model): + return load_models_gpu([model]) + +def loaded_models(only_currently_used=False): + output = [] + for m in current_loaded_models: + if only_currently_used: + if not m.currently_used: + continue + + output.append(m.model) + return output + +def cleanup_models(keep_clone_weights_loaded=False): + to_delete = [] + for i in range(len(current_loaded_models)): + #TODO: very fragile function needs improvement + num_refs = sys.getrefcount(current_loaded_models[i].model) + if num_refs <= 2: + if not keep_clone_weights_loaded: + to_delete = [i] + to_delete + #TODO: find a less fragile way to do this. 
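# num_refs <= 2 above means only the current_loaded_models list and
# getrefcount's own argument still reference the model, i.e. no caller
# holds it anymore.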
+ elif sys.getrefcount(current_loaded_models[i].real_model) <= 3: #references from .real_model + the .model + to_delete = [i] + to_delete + + for i in to_delete: + x = current_loaded_models.pop(i) + x.model_unload() + del x + +def dtype_size(dtype): + dtype_size = 4 + if dtype == torch.float16 or dtype == torch.bfloat16: + dtype_size = 2 + elif dtype == torch.float32: + dtype_size = 4 + else: + try: + dtype_size = dtype.itemsize + except: #Old pytorch doesn't have .itemsize + pass + return dtype_size + +def unet_offload_device(): + if vram_state == VRAMState.HIGH_VRAM: + return get_torch_device() + else: + return torch.device("cpu") + +def unet_inital_load_device(parameters, dtype): + torch_dev = get_torch_device() + if vram_state == VRAMState.HIGH_VRAM: + return torch_dev + + cpu_dev = torch.device("cpu") + if DISABLE_SMART_MEMORY: + return cpu_dev + + model_size = dtype_size(dtype) * parameters + + mem_dev = get_free_memory(torch_dev) + mem_cpu = get_free_memory(cpu_dev) + if mem_dev > mem_cpu and model_size < mem_dev: + return torch_dev + else: + return cpu_dev + +def maximum_vram_for_weights(device=None): + return (get_total_memory(device) * 0.88 - minimum_inference_memory()) + +def unet_dtype(device=None, model_params=0, supported_dtypes=[torch.float16, torch.bfloat16, torch.float32]): + if model_params < 0: + model_params = 1000000000000000000000 + if args.bf16_unet: + return torch.bfloat16 + if args.fp16_unet: + return torch.float16 + if args.fp8_e4m3fn_unet: + return torch.float8_e4m3fn + if args.fp8_e5m2_unet: + return torch.float8_e5m2 + + fp8_dtype = None + try: + for dtype in [torch.float8_e4m3fn, torch.float8_e5m2]: + if dtype in supported_dtypes: + fp8_dtype = dtype + break + except: + pass + + if fp8_dtype is not None: + if supports_fp8_compute(device): #if fp8 compute is supported the casting is most likely not expensive + return fp8_dtype + + free_model_memory = maximum_vram_for_weights(device) + if model_params * 2 > free_model_memory: + return fp8_dtype + + for dt in supported_dtypes: + if dt == torch.float16 and should_use_fp16(device=device, model_params=model_params): + if torch.float16 in supported_dtypes: + return torch.float16 + if dt == torch.bfloat16 and should_use_bf16(device, model_params=model_params): + if torch.bfloat16 in supported_dtypes: + return torch.bfloat16 + + for dt in supported_dtypes: + if dt == torch.float16 and should_use_fp16(device=device, model_params=model_params, manual_cast=True): + if torch.float16 in supported_dtypes: + return torch.float16 + if dt == torch.bfloat16 and should_use_bf16(device, model_params=model_params, manual_cast=True): + if torch.bfloat16 in supported_dtypes: + return torch.bfloat16 + + return torch.float32 + +# None means no manual cast +def unet_manual_cast(weight_dtype, inference_device, supported_dtypes=[torch.float16, torch.bfloat16, torch.float32]): + if weight_dtype == torch.float32: + return None + + fp16_supported = should_use_fp16(inference_device, prioritize_performance=False) + if fp16_supported and weight_dtype == torch.float16: + return None + + bf16_supported = should_use_bf16(inference_device) + if bf16_supported and weight_dtype == torch.bfloat16: + return None + + fp16_supported = should_use_fp16(inference_device, prioritize_performance=True) + for dt in supported_dtypes: + if dt == torch.float16 and fp16_supported: + return torch.float16 + if dt == torch.bfloat16 and bf16_supported: + return torch.bfloat16 + + return torch.float32 + +def text_encoder_offload_device(): + if args.gpu_only: + return 
get_torch_device() + else: + return torch.device("cpu") + +def text_encoder_device(): + if args.gpu_only: + return get_torch_device() + elif vram_state == VRAMState.HIGH_VRAM or vram_state == VRAMState.NORMAL_VRAM: + if should_use_fp16(prioritize_performance=False): + return get_torch_device() + else: + return torch.device("cpu") + else: + return torch.device("cpu") + +def text_encoder_initial_device(load_device, offload_device, model_size=0): + if load_device == offload_device or model_size <= 1024 * 1024 * 1024: + return offload_device + + if is_device_mps(load_device): + return offload_device + + mem_l = get_free_memory(load_device) + mem_o = get_free_memory(offload_device) + if mem_l > (mem_o * 0.5) and model_size * 1.2 < mem_l: + return load_device + else: + return offload_device + +def text_encoder_dtype(device=None): + if args.fp8_e4m3fn_text_enc: + return torch.float8_e4m3fn + elif args.fp8_e5m2_text_enc: + return torch.float8_e5m2 + elif args.fp16_text_enc: + return torch.float16 + elif args.fp32_text_enc: + return torch.float32 + + if is_device_cpu(device): + return torch.float16 + + return torch.float16 + + +def intermediate_device(): + if args.gpu_only: + return get_torch_device() + else: + return torch.device("cpu") + +def vae_device(): + if args.cpu_vae: + return torch.device("cpu") + return get_torch_device() + +def vae_offload_device(): + if args.gpu_only: + return get_torch_device() + else: + return torch.device("cpu") + +def vae_dtype(device=None, allowed_dtypes=[]): + global VAE_DTYPES + if args.fp16_vae: + return torch.float16 + elif args.bf16_vae: + return torch.bfloat16 + elif args.fp32_vae: + return torch.float32 + + for d in allowed_dtypes: + if d == torch.float16 and should_use_fp16(device, prioritize_performance=False): + return d + if d in VAE_DTYPES: + return d + + return VAE_DTYPES[0] + +def get_autocast_device(dev): + if hasattr(dev, 'type'): + return dev.type + return "cuda" + +def supports_dtype(device, dtype): #TODO + if dtype == torch.float32: + return True + if is_device_cpu(device): + return False + if dtype == torch.float16: + return True + if dtype == torch.bfloat16: + return True + return False + +def supports_cast(device, dtype): #TODO + if dtype == torch.float32: + return True + if dtype == torch.float16: + return True + if directml_enabled: #TODO: test this + return False + if dtype == torch.bfloat16: + return True + if is_device_mps(device): + return False + if dtype == torch.float8_e4m3fn: + return True + if dtype == torch.float8_e5m2: + return True + return False + +def pick_weight_dtype(dtype, fallback_dtype, device=None): + if dtype is None: + dtype = fallback_dtype + elif dtype_size(dtype) > dtype_size(fallback_dtype): + dtype = fallback_dtype + + if not supports_cast(device, dtype): + dtype = fallback_dtype + + return dtype + +def device_supports_non_blocking(device): + if is_device_mps(device): + return False #pytorch bug? 
mps doesn't support non blocking + if is_intel_xpu(): + return False + if args.deterministic: #TODO: figure out why deterministic breaks non blocking from gpu to cpu (previews) + return False + if directml_enabled: + return False + return True + +def device_should_use_non_blocking(device): + if not device_supports_non_blocking(device): + return False + return False + # return True #TODO: figure out why this causes memory issues on Nvidia and possibly others + +def force_channels_last(): + if args.force_channels_last: + return True + + #TODO + return False + +def cast_to(weight, dtype=None, device=None, non_blocking=False, copy=False): + if device is None or weight.device == device: + if not copy: + if dtype is None or weight.dtype == dtype: + return weight + return weight.to(dtype=dtype, copy=copy) + + r = torch.empty_like(weight, dtype=dtype, device=device) + r.copy_(weight, non_blocking=non_blocking) + return r + +def cast_to_device(tensor, device, dtype, copy=False): + non_blocking = device_supports_non_blocking(device) + return cast_to(tensor, dtype=dtype, device=device, non_blocking=non_blocking, copy=copy) + + +def xformers_enabled(): + global directml_enabled + global cpu_state + if cpu_state != CPUState.GPU: + return False + if is_intel_xpu(): + return False + if directml_enabled: + return False + return XFORMERS_IS_AVAILABLE + + +def xformers_enabled_vae(): + enabled = xformers_enabled() + if not enabled: + return False + + return XFORMERS_ENABLED_VAE + +def pytorch_attention_enabled(): + global ENABLE_PYTORCH_ATTENTION + return ENABLE_PYTORCH_ATTENTION + +def pytorch_attention_flash_attention(): + global ENABLE_PYTORCH_ATTENTION + if ENABLE_PYTORCH_ATTENTION: + #TODO: more reliable way of checking for flash attention? + if is_nvidia(): #pytorch flash attention only works on Nvidia + return True + if is_intel_xpu(): + return True + return False + +def force_upcast_attention_dtype(): + upcast = args.force_upcast_attention + try: + macos_version = tuple(int(n) for n in platform.mac_ver()[0].split(".")) + if (14, 5) <= macos_version <= (15, 2): # black image bug on recent versions of macOS + upcast = True + except: + pass + if upcast: + return torch.float32 + else: + return None + +def get_free_memory(dev=None, torch_free_too=False): + global directml_enabled + if dev is None: + dev = get_torch_device() + + if hasattr(dev, 'type') and (dev.type == 'cpu' or dev.type == 'mps'): + mem_free_total = psutil.virtual_memory().available + mem_free_torch = mem_free_total + else: + if directml_enabled: + mem_free_total = 1024 * 1024 * 1024 #TODO + mem_free_torch = mem_free_total + elif is_intel_xpu(): + stats = torch.xpu.memory_stats(dev) + mem_active = stats['active_bytes.all.current'] + mem_reserved = stats['reserved_bytes.all.current'] + mem_free_torch = mem_reserved - mem_active + mem_free_xpu = torch.xpu.get_device_properties(dev).total_memory - mem_reserved + mem_free_total = mem_free_xpu + mem_free_torch + else: + stats = torch.cuda.memory_stats(dev) + mem_active = stats['active_bytes.all.current'] + mem_reserved = stats['reserved_bytes.all.current'] + mem_free_cuda, _ = torch.cuda.mem_get_info(dev) + mem_free_torch = mem_reserved - mem_active + mem_free_total = mem_free_cuda + mem_free_torch + + if torch_free_too: + return (mem_free_total, mem_free_torch) + else: + return mem_free_total + +def cpu_mode(): + global cpu_state + return cpu_state == CPUState.CPU + +def mps_mode(): + global cpu_state + return cpu_state == CPUState.MPS + +def is_device_type(device, type): + if hasattr(device, 
'type'): + if (device.type == type): + return True + return False + +def is_device_cpu(device): + return is_device_type(device, 'cpu') + +def is_device_mps(device): + return is_device_type(device, 'mps') + +def is_device_cuda(device): + return is_device_type(device, 'cuda') + +def should_use_fp16(device=None, model_params=0, prioritize_performance=True, manual_cast=False): + global directml_enabled + + if device is not None: + if is_device_cpu(device): + return False + + if FORCE_FP16: + return True + + if device is not None: + if is_device_mps(device): + return True + + if FORCE_FP32: + return False + + if directml_enabled: + return False + + if mps_mode(): + return True + + if cpu_mode(): + return False + + if is_intel_xpu(): + return True + + if torch.version.hip: + return True + + props = torch.cuda.get_device_properties(device) + if props.major >= 8: + return True + + if props.major < 6: + return False + + #FP16 is confirmed working on a 1080 (GP104) and on latest pytorch actually seems faster than fp32 + nvidia_10_series = ["1080", "1070", "titan x", "p3000", "p3200", "p4000", "p4200", "p5000", "p5200", "p6000", "1060", "1050", "p40", "p100", "p6", "p4"] + for x in nvidia_10_series: + if x in props.name.lower(): + if WINDOWS or manual_cast: + return True + else: + return False #weird linux behavior where fp32 is faster + + if manual_cast: + free_model_memory = maximum_vram_for_weights(device) + if (not prioritize_performance) or model_params * 4 > free_model_memory: + return True + + if props.major < 7: + return False + + #FP16 is just broken on these cards + nvidia_16_series = ["1660", "1650", "1630", "T500", "T550", "T600", "MX550", "MX450", "CMP 30HX", "T2000", "T1000", "T1200"] + for x in nvidia_16_series: + if x in props.name: + return False + + return True + +def should_use_bf16(device=None, model_params=0, prioritize_performance=True, manual_cast=False): + if device is not None: + if is_device_cpu(device): #TODO ? 
bf16 works on CPU but is extremely slow + return False + + if device is not None: + if is_device_mps(device): + return True + + if FORCE_FP32: + return False + + if directml_enabled: + return False + + if mps_mode(): + return True + + if cpu_mode(): + return False + + if is_intel_xpu(): + return True + + props = torch.cuda.get_device_properties(device) + if props.major >= 8: + return True + + bf16_works = torch.cuda.is_bf16_supported() + + if bf16_works or manual_cast: + free_model_memory = maximum_vram_for_weights(device) + if (not prioritize_performance) or model_params * 4 > free_model_memory: + return True + + return False + +def supports_fp8_compute(device=None): + if not is_nvidia(): + return False + + props = torch.cuda.get_device_properties(device) + if props.major >= 9: + return True + if props.major < 8: + return False + if props.minor < 9: + return False + + if int(torch_version[0]) < 2 or (int(torch_version[0]) == 2 and int(torch_version[2]) < 3): + return False + + if WINDOWS: + if (int(torch_version[0]) == 2 and int(torch_version[2]) < 4): + return False + + return True + +def soft_empty_cache(force=False): + global cpu_state + if cpu_state == CPUState.MPS: + torch.mps.empty_cache() + elif is_intel_xpu(): + torch.xpu.empty_cache() + elif torch.cuda.is_available(): + if force or is_nvidia(): #This seems to make things worse on ROCm so I only do it for cuda + torch.cuda.empty_cache() + torch.cuda.ipc_collect() + +def unload_all_models(): + free_memory(1e30, get_torch_device()) + + +def resolve_lowvram_weight(weight, model, key): #TODO: remove + print("WARNING: The comfy.model_management.resolve_lowvram_weight function will be removed soon, please stop using it.") + return weight + +#TODO: might be cleaner to put this somewhere else +import threading + +class InterruptProcessingException(Exception): + pass + +interrupt_processing_mutex = threading.RLock() + +interrupt_processing = False +def interrupt_current_processing(value=True): + global interrupt_processing + global interrupt_processing_mutex + with interrupt_processing_mutex: + interrupt_processing = value + +def processing_interrupted(): + global interrupt_processing + global interrupt_processing_mutex + with interrupt_processing_mutex: + return interrupt_processing + +def throw_exception_if_processing_interrupted(): + global interrupt_processing + global interrupt_processing_mutex + with interrupt_processing_mutex: + if interrupt_processing: + interrupt_processing = False + raise InterruptProcessingException() diff --git a/src/comfyui/comfy/model_patcher.py b/src/comfyui/comfy/model_patcher.py new file mode 100644 index 0000000000000000000000000000000000000000..3bba217acb99f8f8c483c0b331ef126297ff47d8 --- /dev/null +++ b/src/comfyui/comfy/model_patcher.py @@ -0,0 +1,572 @@ +""" + This file is part of ComfyUI. + Copyright (C) 2024 Comfy + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . 
+""" + +import torch +import copy +import inspect +import logging +import uuid +import collections +import math + +import comfy.utils +import comfy.float +import comfy.model_management +import comfy.lora +from comfy.comfy_types import UnetWrapperFunction + +def string_to_seed(data): + crc = 0xFFFFFFFF + for byte in data: + if isinstance(byte, str): + byte = ord(byte) + crc ^= byte + for _ in range(8): + if crc & 1: + crc = (crc >> 1) ^ 0xEDB88320 + else: + crc >>= 1 + return crc ^ 0xFFFFFFFF + +def set_model_options_patch_replace(model_options, patch, name, block_name, number, transformer_index=None): + to = model_options["transformer_options"].copy() + + if "patches_replace" not in to: + to["patches_replace"] = {} + else: + to["patches_replace"] = to["patches_replace"].copy() + + if name not in to["patches_replace"]: + to["patches_replace"][name] = {} + else: + to["patches_replace"][name] = to["patches_replace"][name].copy() + + if transformer_index is not None: + block = (block_name, number, transformer_index) + else: + block = (block_name, number) + to["patches_replace"][name][block] = patch + model_options["transformer_options"] = to + return model_options + +def set_model_options_post_cfg_function(model_options, post_cfg_function, disable_cfg1_optimization=False): + model_options["sampler_post_cfg_function"] = model_options.get("sampler_post_cfg_function", []) + [post_cfg_function] + if disable_cfg1_optimization: + model_options["disable_cfg1_optimization"] = True + return model_options + +def set_model_options_pre_cfg_function(model_options, pre_cfg_function, disable_cfg1_optimization=False): + model_options["sampler_pre_cfg_function"] = model_options.get("sampler_pre_cfg_function", []) + [pre_cfg_function] + if disable_cfg1_optimization: + model_options["disable_cfg1_optimization"] = True + return model_options + +def wipe_lowvram_weight(m): + if hasattr(m, "prev_comfy_cast_weights"): + m.comfy_cast_weights = m.prev_comfy_cast_weights + del m.prev_comfy_cast_weights + m.weight_function = None + m.bias_function = None + +class LowVramPatch: + def __init__(self, key, patches): + self.key = key + self.patches = patches + def __call__(self, weight): + intermediate_dtype = weight.dtype + if intermediate_dtype not in [torch.float32, torch.float16, torch.bfloat16]: #intermediate_dtype has to be one that is supported in math ops + intermediate_dtype = torch.float32 + return comfy.float.stochastic_rounding(comfy.lora.calculate_weight(self.patches[self.key], weight.to(intermediate_dtype), self.key, intermediate_dtype=intermediate_dtype), weight.dtype, seed=string_to_seed(self.key)) + + return comfy.lora.calculate_weight(self.patches[self.key], weight, self.key, intermediate_dtype=intermediate_dtype) + +def get_key_weight(model, key): + set_func = None + convert_func = None + op_keys = key.rsplit('.', 1) + if len(op_keys) < 2: + weight = comfy.utils.get_attr(model, key) + else: + op = comfy.utils.get_attr(model, op_keys[0]) + try: + set_func = getattr(op, "set_{}".format(op_keys[1])) + except AttributeError: + pass + + try: + convert_func = getattr(op, "convert_{}".format(op_keys[1])) + except AttributeError: + pass + + weight = getattr(op, op_keys[1]) + if convert_func is not None: + weight = comfy.utils.get_attr(model, key) + + return weight, set_func, convert_func + +class ModelPatcher: + def __init__(self, model, load_device, offload_device, size=0, weight_inplace_update=False): + self.size = size + self.model = model + if not hasattr(self.model, 'device'): + logging.debug("Model doesn't 
have a device attribute.") + self.model.device = offload_device + elif self.model.device is None: + self.model.device = offload_device + + self.patches = {} + self.backup = {} + self.object_patches = {} + self.object_patches_backup = {} + self.model_options = {"transformer_options":{}} + self.model_size() + self.load_device = load_device + self.offload_device = offload_device + self.weight_inplace_update = weight_inplace_update + self.patches_uuid = uuid.uuid4() + + if not hasattr(self.model, 'model_loaded_weight_memory'): + self.model.model_loaded_weight_memory = 0 + + if not hasattr(self.model, 'lowvram_patch_counter'): + self.model.lowvram_patch_counter = 0 + + if not hasattr(self.model, 'model_lowvram'): + self.model.model_lowvram = False + + def model_size(self): + if self.size > 0: + return self.size + self.size = comfy.model_management.module_size(self.model) + return self.size + + def loaded_size(self): + return self.model.model_loaded_weight_memory + + def lowvram_patch_counter(self): + return self.model.lowvram_patch_counter + + def clone(self): + n = ModelPatcher(self.model, self.load_device, self.offload_device, self.size, weight_inplace_update=self.weight_inplace_update) + n.patches = {} + for k in self.patches: + n.patches[k] = self.patches[k][:] + n.patches_uuid = self.patches_uuid + + n.object_patches = self.object_patches.copy() + n.model_options = copy.deepcopy(self.model_options) + n.backup = self.backup + n.object_patches_backup = self.object_patches_backup + return n + + def is_clone(self, other): + if hasattr(other, 'model') and self.model is other.model: + return True + return False + + def clone_has_same_weights(self, clone): + if not self.is_clone(clone): + return False + + if len(self.patches) == 0 and len(clone.patches) == 0: + return True + + if self.patches_uuid == clone.patches_uuid: + if len(self.patches) != len(clone.patches): + logging.warning("WARNING: something went wrong, same patch uuid but different length of patches.") + else: + return True + + def memory_required(self, input_shape): + return self.model.memory_required(input_shape=input_shape) + + def set_model_sampler_cfg_function(self, sampler_cfg_function, disable_cfg1_optimization=False): + if len(inspect.signature(sampler_cfg_function).parameters) == 3: + self.model_options["sampler_cfg_function"] = lambda args: sampler_cfg_function(args["cond"], args["uncond"], args["cond_scale"]) #Old way + else: + self.model_options["sampler_cfg_function"] = sampler_cfg_function + if disable_cfg1_optimization: + self.model_options["disable_cfg1_optimization"] = True + + def set_model_sampler_post_cfg_function(self, post_cfg_function, disable_cfg1_optimization=False): + self.model_options = set_model_options_post_cfg_function(self.model_options, post_cfg_function, disable_cfg1_optimization) + + def set_model_sampler_pre_cfg_function(self, pre_cfg_function, disable_cfg1_optimization=False): + self.model_options = set_model_options_pre_cfg_function(self.model_options, pre_cfg_function, disable_cfg1_optimization) + + def set_model_unet_function_wrapper(self, unet_wrapper_function: UnetWrapperFunction): + self.model_options["model_function_wrapper"] = unet_wrapper_function + + def set_model_denoise_mask_function(self, denoise_mask_function): + self.model_options["denoise_mask_function"] = denoise_mask_function + + def set_model_patch(self, patch, name): + to = self.model_options["transformer_options"] + if "patches" not in to: + to["patches"] = {} + to["patches"][name] = to["patches"].get(name, []) + [patch] + + 
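A hedged usage sketch for the setters above, assuming an existing ModelPatcher instance named patcher (the hook body and names are illustrative, not part of this diff): callers typically clone() first so the original patcher's options and patches stay untouched, then register hooks on the clone.

def my_post_cfg(args):
    # The sampler passes a dict; "denoised" holds the post-CFG output tensor.
    return args["denoised"]

m = patcher.clone()  # weights are shared; per-key patch lists are copied
m.set_model_sampler_post_cfg_function(my_post_cfg)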
def set_model_patch_replace(self, patch, name, block_name, number, transformer_index=None): + self.model_options = set_model_options_patch_replace(self.model_options, patch, name, block_name, number, transformer_index=transformer_index) + + def set_model_attn1_patch(self, patch): + self.set_model_patch(patch, "attn1_patch") + + def set_model_attn2_patch(self, patch): + self.set_model_patch(patch, "attn2_patch") + + def set_model_attn1_replace(self, patch, block_name, number, transformer_index=None): + self.set_model_patch_replace(patch, "attn1", block_name, number, transformer_index) + + def set_model_attn2_replace(self, patch, block_name, number, transformer_index=None): + self.set_model_patch_replace(patch, "attn2", block_name, number, transformer_index) + + def set_model_attn1_output_patch(self, patch): + self.set_model_patch(patch, "attn1_output_patch") + + def set_model_attn2_output_patch(self, patch): + self.set_model_patch(patch, "attn2_output_patch") + + def set_model_input_block_patch(self, patch): + self.set_model_patch(patch, "input_block_patch") + + def set_model_input_block_patch_after_skip(self, patch): + self.set_model_patch(patch, "input_block_patch_after_skip") + + def set_model_output_block_patch(self, patch): + self.set_model_patch(patch, "output_block_patch") + + def add_object_patch(self, name, obj): + self.object_patches[name] = obj + + def get_model_object(self, name): + if name in self.object_patches: + return self.object_patches[name] + else: + if name in self.object_patches_backup: + return self.object_patches_backup[name] + else: + return comfy.utils.get_attr(self.model, name) + + def model_patches_to(self, device): + to = self.model_options["transformer_options"] + if "patches" in to: + patches = to["patches"] + for name in patches: + patch_list = patches[name] + for i in range(len(patch_list)): + if hasattr(patch_list[i], "to"): + patch_list[i] = patch_list[i].to(device) + if "patches_replace" in to: + patches = to["patches_replace"] + for name in patches: + patch_list = patches[name] + for k in patch_list: + if hasattr(patch_list[k], "to"): + patch_list[k] = patch_list[k].to(device) + if "model_function_wrapper" in self.model_options: + wrap_func = self.model_options["model_function_wrapper"] + if hasattr(wrap_func, "to"): + self.model_options["model_function_wrapper"] = wrap_func.to(device) + + def model_dtype(self): + if hasattr(self.model, "get_dtype"): + return self.model.get_dtype() + + def add_patches(self, patches, strength_patch=1.0, strength_model=1.0): + p = set() + model_sd = self.model.state_dict() + for k in patches: + offset = None + function = None + if isinstance(k, str): + key = k + else: + offset = k[1] + key = k[0] + if len(k) > 2: + function = k[2] + + if key in model_sd: + p.add(k) + current_patches = self.patches.get(key, []) + current_patches.append((strength_patch, patches[k], strength_model, offset, function)) + self.patches[key] = current_patches + + self.patches_uuid = uuid.uuid4() + return list(p) + + def get_key_patches(self, filter_prefix=None): + model_sd = self.model_state_dict() + p = {} + for k in model_sd: + if filter_prefix is not None: + if not k.startswith(filter_prefix): + continue + bk = self.backup.get(k, None) + weight, set_func, convert_func = get_key_weight(self.model, k) + if bk is not None: + weight = bk.weight + if convert_func is None: + convert_func = lambda a, **kwargs: a + + if k in self.patches: + p[k] = [(weight, convert_func)] + self.patches[k] + else: + p[k] = [(weight, convert_func)] + return p + + def 
model_state_dict(self, filter_prefix=None): + sd = self.model.state_dict() + keys = list(sd.keys()) + if filter_prefix is not None: + for k in keys: + if not k.startswith(filter_prefix): + sd.pop(k) + return sd + + def patch_weight_to_device(self, key, device_to=None, inplace_update=False): + if key not in self.patches: + return + + weight, set_func, convert_func = get_key_weight(self.model, key) + inplace_update = self.weight_inplace_update or inplace_update + + if key not in self.backup: + self.backup[key] = collections.namedtuple('Dimension', ['weight', 'inplace_update'])(weight.to(device=self.offload_device, copy=inplace_update), inplace_update) + + if device_to is not None: + temp_weight = comfy.model_management.cast_to_device(weight, device_to, torch.float32, copy=True) + else: + temp_weight = weight.to(torch.float32, copy=True) + if convert_func is not None: + temp_weight = convert_func(temp_weight, inplace=True) + + out_weight = comfy.lora.calculate_weight(self.patches[key], temp_weight, key) + if set_func is None: + out_weight = comfy.float.stochastic_rounding(out_weight, weight.dtype, seed=string_to_seed(key)) + if inplace_update: + comfy.utils.copy_to_param(self.model, key, out_weight) + else: + comfy.utils.set_attr_param(self.model, key, out_weight) + else: + set_func(out_weight, inplace_update=inplace_update, seed=string_to_seed(key)) + + def load(self, device_to=None, lowvram_model_memory=0, force_patch_weights=False, full_load=False): + mem_counter = 0 + patch_counter = 0 + lowvram_counter = 0 + loading = [] + for n, m in self.model.named_modules(): + if hasattr(m, "comfy_cast_weights") or hasattr(m, "weight"): + loading.append((comfy.model_management.module_size(m), n, m)) + + load_completely = [] + loading.sort(reverse=True) + for x in loading: + n = x[1] + m = x[2] + module_mem = x[0] + + lowvram_weight = False + + if not full_load and hasattr(m, "comfy_cast_weights"): + if mem_counter + module_mem >= lowvram_model_memory: + lowvram_weight = True + lowvram_counter += 1 + if hasattr(m, "prev_comfy_cast_weights"): #Already lowvramed + continue + + weight_key = "{}.weight".format(n) + bias_key = "{}.bias".format(n) + + if lowvram_weight: + if weight_key in self.patches: + if force_patch_weights: + self.patch_weight_to_device(weight_key) + else: + m.weight_function = LowVramPatch(weight_key, self.patches) + patch_counter += 1 + if bias_key in self.patches: + if force_patch_weights: + self.patch_weight_to_device(bias_key) + else: + m.bias_function = LowVramPatch(bias_key, self.patches) + patch_counter += 1 + + m.prev_comfy_cast_weights = m.comfy_cast_weights + m.comfy_cast_weights = True + else: + if hasattr(m, "comfy_cast_weights"): + if m.comfy_cast_weights: + wipe_lowvram_weight(m) + + if hasattr(m, "weight"): + mem_counter += module_mem + load_completely.append((module_mem, n, m)) + + load_completely.sort(reverse=True) + for x in load_completely: + n = x[1] + m = x[2] + weight_key = "{}.weight".format(n) + bias_key = "{}.bias".format(n) + if hasattr(m, "comfy_patched_weights"): + if m.comfy_patched_weights == True: + continue + + self.patch_weight_to_device(weight_key, device_to=device_to) + self.patch_weight_to_device(bias_key, device_to=device_to) + logging.debug("lowvram: loaded module regularly {} {}".format(n, m)) + m.comfy_patched_weights = True + + for x in load_completely: + x[2].to(device_to) + + if lowvram_counter > 0: + logging.info("loaded partially {} {} {}".format(lowvram_model_memory / (1024 * 1024), mem_counter / (1024 * 1024), patch_counter)) + 
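# The three values logged above: the low-vram budget in MB, the weight memory
# that actually ended up resident in MB, and how many LowVramPatch wrappers
# were attached in place of fully patched weights.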
self.model.model_lowvram = True + else: + logging.info("loaded completely {} {} {}".format(lowvram_model_memory / (1024 * 1024), mem_counter / (1024 * 1024), full_load)) + self.model.model_lowvram = False + if full_load: + self.model.to(device_to) + mem_counter = self.model_size() + + self.model.lowvram_patch_counter += patch_counter + self.model.device = device_to + self.model.model_loaded_weight_memory = mem_counter + + def patch_model(self, device_to=None, lowvram_model_memory=0, load_weights=True, force_patch_weights=False): + for k in self.object_patches: + old = comfy.utils.set_attr(self.model, k, self.object_patches[k]) + if k not in self.object_patches_backup: + self.object_patches_backup[k] = old + + if lowvram_model_memory == 0: + full_load = True + else: + full_load = False + + if load_weights: + self.load(device_to, lowvram_model_memory=lowvram_model_memory, force_patch_weights=force_patch_weights, full_load=full_load) + return self.model + + def unpatch_model(self, device_to=None, unpatch_weights=True): + if unpatch_weights: + if self.model.model_lowvram: + for m in self.model.modules(): + wipe_lowvram_weight(m) + + self.model.model_lowvram = False + self.model.lowvram_patch_counter = 0 + + keys = list(self.backup.keys()) + + for k in keys: + bk = self.backup[k] + if bk.inplace_update: + comfy.utils.copy_to_param(self.model, k, bk.weight) + else: + comfy.utils.set_attr_param(self.model, k, bk.weight) + + self.backup.clear() + + if device_to is not None: + self.model.to(device_to) + self.model.device = device_to + self.model.model_loaded_weight_memory = 0 + + for m in self.model.modules(): + if hasattr(m, "comfy_patched_weights"): + del m.comfy_patched_weights + + keys = list(self.object_patches_backup.keys()) + for k in keys: + comfy.utils.set_attr(self.model, k, self.object_patches_backup[k]) + + self.object_patches_backup.clear() + + def partially_unload(self, device_to, memory_to_free=0): + memory_freed = 0 + patch_counter = 0 + unload_list = [] + + for n, m in self.model.named_modules(): + shift_lowvram = False + if hasattr(m, "comfy_cast_weights"): + module_mem = comfy.model_management.module_size(m) + unload_list.append((module_mem, n, m)) + + unload_list.sort() + for unload in unload_list: + if memory_to_free < memory_freed: + break + module_mem = unload[0] + n = unload[1] + m = unload[2] + weight_key = "{}.weight".format(n) + bias_key = "{}.bias".format(n) + + if hasattr(m, "comfy_patched_weights") and m.comfy_patched_weights == True: + for key in [weight_key, bias_key]: + bk = self.backup.get(key, None) + if bk is not None: + if bk.inplace_update: + comfy.utils.copy_to_param(self.model, key, bk.weight) + else: + comfy.utils.set_attr_param(self.model, key, bk.weight) + self.backup.pop(key) + + m.to(device_to) + if weight_key in self.patches: + m.weight_function = LowVramPatch(weight_key, self.patches) + patch_counter += 1 + if bias_key in self.patches: + m.bias_function = LowVramPatch(bias_key, self.patches) + patch_counter += 1 + + m.prev_comfy_cast_weights = m.comfy_cast_weights + m.comfy_cast_weights = True + m.comfy_patched_weights = False + memory_freed += module_mem + logging.debug("freed {}".format(n)) + + self.model.model_lowvram = True + self.model.lowvram_patch_counter += patch_counter + self.model.model_loaded_weight_memory -= memory_freed + return memory_freed + + def partially_load(self, device_to, extra_memory=0): + self.unpatch_model(unpatch_weights=False) + self.patch_model(load_weights=False) + full_load = False + if self.model.model_lowvram == False: 
+ return 0 + if self.model.model_loaded_weight_memory + extra_memory > self.model_size(): + full_load = True + current_used = self.model.model_loaded_weight_memory + self.load(device_to, lowvram_model_memory=current_used + extra_memory, full_load=full_load) + return self.model.model_loaded_weight_memory - current_used + + def current_loaded_device(self): + return self.model.device + + def calculate_weight(self, patches, weight, key, intermediate_dtype=torch.float32): + print("WARNING the ModelPatcher.calculate_weight function is deprecated, please use: comfy.lora.calculate_weight instead") + return comfy.lora.calculate_weight(patches, weight, key, intermediate_dtype=intermediate_dtype) diff --git a/src/comfyui/comfy/model_sampling.py b/src/comfyui/comfy/model_sampling.py new file mode 100644 index 0000000000000000000000000000000000000000..4a0f2db607235d87e19ae5eb01ace993e20cf464 --- /dev/null +++ b/src/comfyui/comfy/model_sampling.py @@ -0,0 +1,314 @@ +import torch +from comfy.ldm.modules.diffusionmodules.util import make_beta_schedule +import math + +class EPS: + def calculate_input(self, sigma, noise): + sigma = sigma.view(sigma.shape[:1] + (1,) * (noise.ndim - 1)) + return noise / (sigma ** 2 + self.sigma_data ** 2) ** 0.5 + + def calculate_denoised(self, sigma, model_output, model_input): + sigma = sigma.view(sigma.shape[:1] + (1,) * (model_output.ndim - 1)) + return model_input - model_output * sigma + + def noise_scaling(self, sigma, noise, latent_image, max_denoise=False): + if max_denoise: + noise = noise * torch.sqrt(1.0 + sigma ** 2.0) + else: + noise = noise * sigma + + noise += latent_image + return noise + + def inverse_noise_scaling(self, sigma, latent): + return latent + +class V_PREDICTION(EPS): + def calculate_denoised(self, sigma, model_output, model_input): + sigma = sigma.view(sigma.shape[:1] + (1,) * (model_output.ndim - 1)) + return model_input * self.sigma_data ** 2 / (sigma ** 2 + self.sigma_data ** 2) - model_output * sigma * self.sigma_data / (sigma ** 2 + self.sigma_data ** 2) ** 0.5 + +class EDM(V_PREDICTION): + def calculate_denoised(self, sigma, model_output, model_input): + sigma = sigma.view(sigma.shape[:1] + (1,) * (model_output.ndim - 1)) + return model_input * self.sigma_data ** 2 / (sigma ** 2 + self.sigma_data ** 2) + model_output * sigma * self.sigma_data / (sigma ** 2 + self.sigma_data ** 2) ** 0.5 + +class CONST: + def calculate_input(self, sigma, noise): + return noise + + def calculate_denoised(self, sigma, model_output, model_input): + sigma = sigma.view(sigma.shape[:1] + (1,) * (model_output.ndim - 1)) + return model_input - model_output * sigma + + def noise_scaling(self, sigma, noise, latent_image, max_denoise=False): + return sigma * noise + (1.0 - sigma) * latent_image + + def inverse_noise_scaling(self, sigma, latent): + return latent / (1.0 - sigma) + +class ModelSamplingDiscrete(torch.nn.Module): + def __init__(self, model_config=None): + super().__init__() + + if model_config is not None: + sampling_settings = model_config.sampling_settings + else: + sampling_settings = {} + + beta_schedule = sampling_settings.get("beta_schedule", "linear") + linear_start = sampling_settings.get("linear_start", 0.00085) + linear_end = sampling_settings.get("linear_end", 0.012) + timesteps = sampling_settings.get("timesteps", 1000) + + self._register_schedule(given_betas=None, beta_schedule=beta_schedule, timesteps=timesteps, linear_start=linear_start, linear_end=linear_end, cosine_s=8e-3) + self.sigma_data = 1.0 + + def _register_schedule(self, 
given_betas=None, beta_schedule="linear", timesteps=1000, + linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3): + if given_betas is not None: + betas = given_betas + else: + betas = make_beta_schedule(beta_schedule, timesteps, linear_start=linear_start, linear_end=linear_end, cosine_s=cosine_s) + alphas = 1. - betas + alphas_cumprod = torch.cumprod(alphas, dim=0) + + timesteps, = betas.shape + self.num_timesteps = int(timesteps) + self.linear_start = linear_start + self.linear_end = linear_end + + # self.register_buffer('betas', torch.tensor(betas, dtype=torch.float32)) + # self.register_buffer('alphas_cumprod', torch.tensor(alphas_cumprod, dtype=torch.float32)) + # self.register_buffer('alphas_cumprod_prev', torch.tensor(alphas_cumprod_prev, dtype=torch.float32)) + + sigmas = ((1 - alphas_cumprod) / alphas_cumprod) ** 0.5 + self.set_sigmas(sigmas) + + def set_sigmas(self, sigmas): + self.register_buffer('sigmas', sigmas.float()) + self.register_buffer('log_sigmas', sigmas.log().float()) + + @property + def sigma_min(self): + return self.sigmas[0] + + @property + def sigma_max(self): + return self.sigmas[-1] + + def timestep(self, sigma): + log_sigma = sigma.log() + dists = log_sigma.to(self.log_sigmas.device) - self.log_sigmas[:, None] + return dists.abs().argmin(dim=0).view(sigma.shape).to(sigma.device) + + def sigma(self, timestep): + t = torch.clamp(timestep.float().to(self.log_sigmas.device), min=0, max=(len(self.sigmas) - 1)) + low_idx = t.floor().long() + high_idx = t.ceil().long() + w = t.frac() + log_sigma = (1 - w) * self.log_sigmas[low_idx] + w * self.log_sigmas[high_idx] + return log_sigma.exp().to(timestep.device) + + def percent_to_sigma(self, percent): + if percent <= 0.0: + return 999999999.9 + if percent >= 1.0: + return 0.0 + percent = 1.0 - percent + return self.sigma(torch.tensor(percent * 999.0)).item() + +class ModelSamplingDiscreteEDM(ModelSamplingDiscrete): + def timestep(self, sigma): + return 0.25 * sigma.log() + + def sigma(self, timestep): + return (timestep / 0.25).exp() + +class ModelSamplingContinuousEDM(torch.nn.Module): + def __init__(self, model_config=None): + super().__init__() + if model_config is not None: + sampling_settings = model_config.sampling_settings + else: + sampling_settings = {} + + sigma_min = sampling_settings.get("sigma_min", 0.002) + sigma_max = sampling_settings.get("sigma_max", 120.0) + sigma_data = sampling_settings.get("sigma_data", 1.0) + self.set_parameters(sigma_min, sigma_max, sigma_data) + + def set_parameters(self, sigma_min, sigma_max, sigma_data): + self.sigma_data = sigma_data + sigmas = torch.linspace(math.log(sigma_min), math.log(sigma_max), 1000).exp() + + self.register_buffer('sigmas', sigmas) #for compatibility with some schedulers + self.register_buffer('log_sigmas', sigmas.log()) + + @property + def sigma_min(self): + return self.sigmas[0] + + @property + def sigma_max(self): + return self.sigmas[-1] + + def timestep(self, sigma): + return 0.25 * sigma.log() + + def sigma(self, timestep): + return (timestep / 0.25).exp() + + def percent_to_sigma(self, percent): + if percent <= 0.0: + return 999999999.9 + if percent >= 1.0: + return 0.0 + percent = 1.0 - percent + + log_sigma_min = math.log(self.sigma_min) + return math.exp((math.log(self.sigma_max) - log_sigma_min) * percent + log_sigma_min) + + +class ModelSamplingContinuousV(ModelSamplingContinuousEDM): + def timestep(self, sigma): + return sigma.atan() / math.pi * 2 + + def sigma(self, timestep): + return (timestep * math.pi / 2).tan() + + +def 
time_snr_shift(alpha, t): + if alpha == 1.0: + return t + return alpha * t / (1 + (alpha - 1) * t) + +class ModelSamplingDiscreteFlow(torch.nn.Module): + def __init__(self, model_config=None): + super().__init__() + if model_config is not None: + sampling_settings = model_config.sampling_settings + else: + sampling_settings = {} + + self.set_parameters(shift=sampling_settings.get("shift", 1.0), multiplier=sampling_settings.get("multiplier", 1000)) + + def set_parameters(self, shift=1.0, timesteps=1000, multiplier=1000): + self.shift = shift + self.multiplier = multiplier + ts = self.sigma((torch.arange(1, timesteps + 1, 1) / timesteps) * multiplier) + self.register_buffer('sigmas', ts) + + @property + def sigma_min(self): + return self.sigmas[0] + + @property + def sigma_max(self): + return self.sigmas[-1] + + def timestep(self, sigma): + return sigma * self.multiplier + + def sigma(self, timestep): + return time_snr_shift(self.shift, timestep / self.multiplier) + + def percent_to_sigma(self, percent): + if percent <= 0.0: + return 1.0 + if percent >= 1.0: + return 0.0 + return 1.0 - percent + +class StableCascadeSampling(ModelSamplingDiscrete): + def __init__(self, model_config=None): + super().__init__() + + if model_config is not None: + sampling_settings = model_config.sampling_settings + else: + sampling_settings = {} + + self.set_parameters(sampling_settings.get("shift", 1.0)) + + def set_parameters(self, shift=1.0, cosine_s=8e-3): + self.shift = shift + self.cosine_s = torch.tensor(cosine_s) + self._init_alpha_cumprod = torch.cos(self.cosine_s / (1 + self.cosine_s) * torch.pi * 0.5) ** 2 + + #This part is just for compatibility with some schedulers in the codebase + self.num_timesteps = 10000 + sigmas = torch.empty((self.num_timesteps), dtype=torch.float32) + for x in range(self.num_timesteps): + t = (x + 1) / self.num_timesteps + sigmas[x] = self.sigma(t) + + self.set_sigmas(sigmas) + + def sigma(self, timestep): + alpha_cumprod = (torch.cos((timestep + self.cosine_s) / (1 + self.cosine_s) * torch.pi * 0.5) ** 2 / self._init_alpha_cumprod) + + if self.shift != 1.0: + var = alpha_cumprod + logSNR = (var/(1-var)).log() + logSNR += 2 * torch.log(1.0 / torch.tensor(self.shift)) + alpha_cumprod = logSNR.sigmoid() + + alpha_cumprod = alpha_cumprod.clamp(0.0001, 0.9999) + return ((1 - alpha_cumprod) / alpha_cumprod) ** 0.5 + + def timestep(self, sigma): + var = 1 / ((sigma * sigma) + 1) + var = var.clamp(0, 1.0) + s, min_var = self.cosine_s.to(var.device), self._init_alpha_cumprod.to(var.device) + t = (((var * min_var) ** 0.5).acos() / (torch.pi * 0.5)) * (1 + s) - s + return t + + def percent_to_sigma(self, percent): + if percent <= 0.0: + return 999999999.9 + if percent >= 1.0: + return 0.0 + + percent = 1.0 - percent + return self.sigma(torch.tensor(percent)) + + +def flux_time_shift(mu: float, sigma: float, t): + return math.exp(mu) / (math.exp(mu) + (1 / t - 1) ** sigma) + +class ModelSamplingFlux(torch.nn.Module): + def __init__(self, model_config=None): + super().__init__() + if model_config is not None: + sampling_settings = model_config.sampling_settings + else: + sampling_settings = {} + + self.set_parameters(shift=sampling_settings.get("shift", 1.15)) + + def set_parameters(self, shift=1.15, timesteps=10000): + self.shift = shift + ts = self.sigma((torch.arange(1, timesteps + 1, 1) / timesteps)) + self.register_buffer('sigmas', ts) + + @property + def sigma_min(self): + return self.sigmas[0] + + @property + def sigma_max(self): + return self.sigmas[-1] + + def timestep(self, 
sigma):
+        return sigma
+
+    def sigma(self, timestep):
+        return flux_time_shift(self.shift, 1.0, timestep)
+
+    def percent_to_sigma(self, percent):
+        if percent <= 0.0:
+            return 1.0
+        if percent >= 1.0:
+            return 0.0
+        return 1.0 - percent
diff --git a/src/comfyui/comfy/ops.py b/src/comfyui/comfy/ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..3c5ba0124b5ad59fc72f31e557c558f8eaf1c23e
--- /dev/null
+++ b/src/comfyui/comfy/ops.py
@@ -0,0 +1,366 @@
+"""
+    This file is part of ComfyUI.
+    Copyright (C) 2024 Stability AI
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program. If not, see <https://www.gnu.org/licenses/>.
+"""
+
+import torch
+import comfy.model_management
+from comfy.cli_args import args
+import comfy.float
+
+cast_to = comfy.model_management.cast_to #TODO: remove once no more references
+
+def cast_to_input(weight, input, non_blocking=False, copy=True):
+    return comfy.model_management.cast_to(weight, input.dtype, input.device, non_blocking=non_blocking, copy=copy)
+
+def cast_bias_weight(s, input=None, dtype=None, device=None, bias_dtype=None):
+    if input is not None:
+        if dtype is None:
+            dtype = input.dtype
+        if bias_dtype is None:
+            bias_dtype = dtype
+        if device is None:
+            device = input.device
+
+    bias = None
+    non_blocking = comfy.model_management.device_supports_non_blocking(device)
+    if s.bias is not None:
+        has_function = s.bias_function is not None
+        bias = comfy.model_management.cast_to(s.bias, bias_dtype, device, non_blocking=non_blocking, copy=has_function)
+        if has_function:
+            bias = s.bias_function(bias)
+
+    has_function = s.weight_function is not None
+    weight = comfy.model_management.cast_to(s.weight, dtype, device, non_blocking=non_blocking, copy=has_function)
+    if has_function:
+        weight = s.weight_function(weight)
+    return weight, bias
+
+class CastWeightBiasOp:
+    comfy_cast_weights = False
+    weight_function = None
+    bias_function = None
+
+class disable_weight_init:
+    class Linear(torch.nn.Linear, CastWeightBiasOp):
+        def reset_parameters(self):
+            return None
+
+        def forward_comfy_cast_weights(self, input):
+            weight, bias = cast_bias_weight(self, input)
+            return torch.nn.functional.linear(input, weight, bias)
+
+        def forward(self, *args, **kwargs):
+            if self.comfy_cast_weights:
+                return self.forward_comfy_cast_weights(*args, **kwargs)
+            else:
+                return super().forward(*args, **kwargs)
+
+    class Conv1d(torch.nn.Conv1d, CastWeightBiasOp):
+        def reset_parameters(self):
+            return None
+
+        def forward_comfy_cast_weights(self, input):
+            weight, bias = cast_bias_weight(self, input)
+            return self._conv_forward(input, weight, bias)
+
+        def forward(self, *args, **kwargs):
+            if self.comfy_cast_weights:
+                return self.forward_comfy_cast_weights(*args, **kwargs)
+            else:
+                return super().forward(*args, **kwargs)
+
+    class Conv2d(torch.nn.Conv2d, CastWeightBiasOp):
+        def reset_parameters(self):
+            return None
+
+        def forward_comfy_cast_weights(self, input):
+            weight, bias = cast_bias_weight(self, input)
+            return self._conv_forward(input, weight, bias)
+
+        def 
forward(self, *args, **kwargs): + if self.comfy_cast_weights: + return self.forward_comfy_cast_weights(*args, **kwargs) + else: + return super().forward(*args, **kwargs) + + class Conv3d(torch.nn.Conv3d, CastWeightBiasOp): + def reset_parameters(self): + return None + + def forward_comfy_cast_weights(self, input): + weight, bias = cast_bias_weight(self, input) + return self._conv_forward(input, weight, bias) + + def forward(self, *args, **kwargs): + if self.comfy_cast_weights: + return self.forward_comfy_cast_weights(*args, **kwargs) + else: + return super().forward(*args, **kwargs) + + class GroupNorm(torch.nn.GroupNorm, CastWeightBiasOp): + def reset_parameters(self): + return None + + def forward_comfy_cast_weights(self, input): + weight, bias = cast_bias_weight(self, input) + return torch.nn.functional.group_norm(input, self.num_groups, weight, bias, self.eps) + + def forward(self, *args, **kwargs): + if self.comfy_cast_weights: + return self.forward_comfy_cast_weights(*args, **kwargs) + else: + return super().forward(*args, **kwargs) + + + class LayerNorm(torch.nn.LayerNorm, CastWeightBiasOp): + def reset_parameters(self): + return None + + def forward_comfy_cast_weights(self, input): + if self.weight is not None: + weight, bias = cast_bias_weight(self, input) + else: + weight = None + bias = None + return torch.nn.functional.layer_norm(input, self.normalized_shape, weight, bias, self.eps) + + def forward(self, *args, **kwargs): + if self.comfy_cast_weights: + return self.forward_comfy_cast_weights(*args, **kwargs) + else: + return super().forward(*args, **kwargs) + + class ConvTranspose2d(torch.nn.ConvTranspose2d, CastWeightBiasOp): + def reset_parameters(self): + return None + + def forward_comfy_cast_weights(self, input, output_size=None): + num_spatial_dims = 2 + output_padding = self._output_padding( + input, output_size, self.stride, self.padding, self.kernel_size, + num_spatial_dims, self.dilation) + + weight, bias = cast_bias_weight(self, input) + return torch.nn.functional.conv_transpose2d( + input, weight, bias, self.stride, self.padding, + output_padding, self.groups, self.dilation) + + def forward(self, *args, **kwargs): + if self.comfy_cast_weights: + return self.forward_comfy_cast_weights(*args, **kwargs) + else: + return super().forward(*args, **kwargs) + + class ConvTranspose1d(torch.nn.ConvTranspose1d, CastWeightBiasOp): + def reset_parameters(self): + return None + + def forward_comfy_cast_weights(self, input, output_size=None): + num_spatial_dims = 1 + output_padding = self._output_padding( + input, output_size, self.stride, self.padding, self.kernel_size, + num_spatial_dims, self.dilation) + + weight, bias = cast_bias_weight(self, input) + return torch.nn.functional.conv_transpose1d( + input, weight, bias, self.stride, self.padding, + output_padding, self.groups, self.dilation) + + def forward(self, *args, **kwargs): + if self.comfy_cast_weights: + return self.forward_comfy_cast_weights(*args, **kwargs) + else: + return super().forward(*args, **kwargs) + + class Embedding(torch.nn.Embedding, CastWeightBiasOp): + def reset_parameters(self): + self.bias = None + return None + + def forward_comfy_cast_weights(self, input, out_dtype=None): + output_dtype = out_dtype + if self.weight.dtype == torch.float16 or self.weight.dtype == torch.bfloat16: + out_dtype = None + weight, bias = cast_bias_weight(self, device=input.device, dtype=out_dtype) + return torch.nn.functional.embedding(input, weight, self.padding_idx, self.max_norm, self.norm_type, self.scale_grad_by_freq, 
self.sparse).to(dtype=output_dtype) + + def forward(self, *args, **kwargs): + if self.comfy_cast_weights: + return self.forward_comfy_cast_weights(*args, **kwargs) + else: + if "out_dtype" in kwargs: + kwargs.pop("out_dtype") + return super().forward(*args, **kwargs) + + @classmethod + def conv_nd(s, dims, *args, **kwargs): + if dims == 2: + return s.Conv2d(*args, **kwargs) + elif dims == 3: + return s.Conv3d(*args, **kwargs) + else: + raise ValueError(f"unsupported dimensions: {dims}") + + +class manual_cast(disable_weight_init): + class Linear(disable_weight_init.Linear): + comfy_cast_weights = True + + class Conv1d(disable_weight_init.Conv1d): + comfy_cast_weights = True + + class Conv2d(disable_weight_init.Conv2d): + comfy_cast_weights = True + + class Conv3d(disable_weight_init.Conv3d): + comfy_cast_weights = True + + class GroupNorm(disable_weight_init.GroupNorm): + comfy_cast_weights = True + + class LayerNorm(disable_weight_init.LayerNorm): + comfy_cast_weights = True + + class ConvTranspose2d(disable_weight_init.ConvTranspose2d): + comfy_cast_weights = True + + class ConvTranspose1d(disable_weight_init.ConvTranspose1d): + comfy_cast_weights = True + + class Embedding(disable_weight_init.Embedding): + comfy_cast_weights = True + + +def fp8_linear(self, input): + dtype = self.weight.dtype + if dtype not in [torch.float8_e4m3fn]: + return None + + tensor_2d = False + if len(input.shape) == 2: + tensor_2d = True + input = input.unsqueeze(1) + + + if len(input.shape) == 3: + w, bias = cast_bias_weight(self, input, dtype=dtype, bias_dtype=input.dtype) + w = w.t() + + scale_weight = self.scale_weight + scale_input = self.scale_input + if scale_weight is None: + scale_weight = torch.ones((), device=input.device, dtype=torch.float32) + else: + scale_weight = scale_weight.to(input.device) + + if scale_input is None: + scale_input = torch.ones((), device=input.device, dtype=torch.float32) + inn = input.reshape(-1, input.shape[2]).to(dtype) + else: + scale_input = scale_input.to(input.device) + inn = (input * (1.0 / scale_input).to(input.dtype)).reshape(-1, input.shape[2]).to(dtype) + + if bias is not None: + o = torch._scaled_mm(inn, w, out_dtype=input.dtype, bias=bias, scale_a=scale_input, scale_b=scale_weight) + else: + o = torch._scaled_mm(inn, w, out_dtype=input.dtype, scale_a=scale_input, scale_b=scale_weight) + + if isinstance(o, tuple): + o = o[0] + + if tensor_2d: + return o.reshape(input.shape[0], -1) + + return o.reshape((-1, input.shape[1], self.weight.shape[0])) + + return None + +class fp8_ops(manual_cast): + class Linear(manual_cast.Linear): + def reset_parameters(self): + self.scale_weight = None + self.scale_input = None + return None + + def forward_comfy_cast_weights(self, input): + out = fp8_linear(self, input) + if out is not None: + return out + + weight, bias = cast_bias_weight(self, input) + return torch.nn.functional.linear(input, weight, bias) + +def scaled_fp8_ops(fp8_matrix_mult=False, scale_input=False, override_dtype=None): + class scaled_fp8_op(manual_cast): + class Linear(manual_cast.Linear): + def __init__(self, *args, **kwargs): + if override_dtype is not None: + kwargs['dtype'] = override_dtype + super().__init__(*args, **kwargs) + + def reset_parameters(self): + if not hasattr(self, 'scale_weight'): + self.scale_weight = torch.nn.parameter.Parameter(data=torch.ones((), device=self.weight.device, dtype=torch.float32), requires_grad=False) + + if not scale_input: + self.scale_input = None + + if not hasattr(self, 'scale_input'): + self.scale_input = 
torch.nn.parameter.Parameter(data=torch.ones((), device=self.weight.device, dtype=torch.float32), requires_grad=False) + return None + + def forward_comfy_cast_weights(self, input): + if fp8_matrix_mult: + out = fp8_linear(self, input) + if out is not None: + return out + + weight, bias = cast_bias_weight(self, input) + + if weight.numel() < input.numel(): #TODO: optimize + return torch.nn.functional.linear(input, weight * self.scale_weight.to(device=weight.device, dtype=weight.dtype), bias) + else: + return torch.nn.functional.linear(input * self.scale_weight.to(device=weight.device, dtype=weight.dtype), weight, bias) + + def convert_weight(self, weight, inplace=False, **kwargs): + if inplace: + weight *= self.scale_weight.to(device=weight.device, dtype=weight.dtype) + return weight + else: + return weight * self.scale_weight.to(device=weight.device, dtype=weight.dtype) + + def set_weight(self, weight, inplace_update=False, seed=None, **kwargs): + weight = comfy.float.stochastic_rounding(weight / self.scale_weight.to(device=weight.device, dtype=weight.dtype), self.weight.dtype, seed=seed) + if inplace_update: + self.weight.data.copy_(weight) + else: + self.weight = torch.nn.Parameter(weight, requires_grad=False) + + return scaled_fp8_op + +def pick_operations(weight_dtype, compute_dtype, load_device=None, disable_fast_fp8=False, fp8_optimizations=False, scaled_fp8=None): + fp8_compute = comfy.model_management.supports_fp8_compute(load_device) + if scaled_fp8 is not None: + return scaled_fp8_ops(fp8_matrix_mult=fp8_compute, scale_input=True, override_dtype=scaled_fp8) + + if fp8_compute and (fp8_optimizations or args.fast) and not disable_fast_fp8: + return fp8_ops + + if compute_dtype is None or weight_dtype == compute_dtype: + return disable_weight_init + + return manual_cast diff --git a/src/comfyui/comfy/options.py b/src/comfyui/comfy/options.py new file mode 100644 index 0000000000000000000000000000000000000000..f7f8af41ebd8b9669ef0ef21827ea6195bcb4752 --- /dev/null +++ b/src/comfyui/comfy/options.py @@ -0,0 +1,6 @@ + +args_parsing = False + +def enable_args_parsing(enable=True): + global args_parsing + args_parsing = enable diff --git a/src/comfyui/comfy/sample.py b/src/comfyui/comfy/sample.py new file mode 100644 index 0000000000000000000000000000000000000000..98dcaca7f38e76754bdce7fffaccf620fd0ba497 --- /dev/null +++ b/src/comfyui/comfy/sample.py @@ -0,0 +1,50 @@ +import torch +import comfy.model_management +import comfy.samplers +import comfy.utils +import numpy as np +import logging + +def prepare_noise(latent_image, seed, noise_inds=None): + """ + creates random noise given a latent image and a seed. 
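+
+    Example (illustrative sketch):
+        latent = torch.zeros([2, 4, 64, 64])
+        noise = prepare_noise(latent, seed=42)  # same seed -> identical noise
+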
+    The optional noise_inds arg selects which noise draws (by index) to keep for a given
+    seed; draws are generated in order and unselected ones are discarded, so a given
+    index always yields the same noise.
+    """
+    generator = torch.manual_seed(seed)
+    if noise_inds is None:
+        return torch.randn(latent_image.size(), dtype=latent_image.dtype, layout=latent_image.layout, generator=generator, device="cpu")
+
+    unique_inds, inverse = np.unique(noise_inds, return_inverse=True)
+    noises = []
+    for i in range(unique_inds[-1]+1):
+        noise = torch.randn([1] + list(latent_image.size())[1:], dtype=latent_image.dtype, layout=latent_image.layout, generator=generator, device="cpu")
+        if i in unique_inds:
+            noises.append(noise)
+    noises = [noises[i] for i in inverse]
+    noises = torch.cat(noises, axis=0)
+    return noises
+
+def fix_empty_latent_channels(model, latent_image):
+    latent_channels = model.get_model_object("latent_format").latent_channels #Resize the empty latent image so it has the right number of channels
+    if latent_channels != latent_image.shape[1] and torch.count_nonzero(latent_image) == 0:
+        latent_image = comfy.utils.repeat_to_batch_size(latent_image, latent_channels, dim=1)
+    return latent_image
+
+def prepare_sampling(model, noise_shape, positive, negative, noise_mask):
+    logging.warning("Warning: comfy.sample.prepare_sampling isn't used anymore and can be removed")
+    return model, positive, negative, noise_mask, []
+
+def cleanup_additional_models(models):
+    logging.warning("Warning: comfy.sample.cleanup_additional_models isn't used anymore and can be removed")
+
+def sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=1.0, disable_noise=False, start_step=None, last_step=None, force_full_denoise=False, noise_mask=None, sigmas=None, callback=None, disable_pbar=False, seed=None):
+    sampler = comfy.samplers.KSampler(model, steps=steps, device=model.load_device, sampler=sampler_name, scheduler=scheduler, denoise=denoise, model_options=model.model_options)
+
+    samples = sampler.sample(noise, positive, negative, cfg=cfg, latent_image=latent_image, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise, denoise_mask=noise_mask, sigmas=sigmas, callback=callback, disable_pbar=disable_pbar, seed=seed)
+    samples = samples.to(comfy.model_management.intermediate_device())
+    return samples
+
+def sample_custom(model, noise, cfg, sampler, sigmas, positive, negative, latent_image, noise_mask=None, callback=None, disable_pbar=False, seed=None):
+    samples = comfy.samplers.sample(model, noise, positive, negative, cfg, model.load_device, sampler, sigmas, model_options=model.model_options, latent_image=latent_image, denoise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed)
+    samples = samples.to(comfy.model_management.intermediate_device())
+    return samples
diff --git a/src/comfyui/comfy/sampler_helpers.py b/src/comfyui/comfy/sampler_helpers.py
new file mode 100644
index 0000000000000000000000000000000000000000..4a2ec123b5ccdaed92ee7a6a42c4c6bfbfbfb265
--- /dev/null
+++ b/src/comfyui/comfy/sampler_helpers.py
@@ -0,0 +1,78 @@
+import torch
+import comfy.model_management
+import comfy.conds
+import comfy.utils
+
+def prepare_mask(noise_mask, shape, device):
+    """ensures noise mask is of proper dimensions"""
+    noise_mask = torch.nn.functional.interpolate(noise_mask.reshape((-1, 1, noise_mask.shape[-2], noise_mask.shape[-1])), size=(shape[2], shape[3]), mode="bilinear")
+    noise_mask = torch.cat([noise_mask] * shape[1], dim=1)
+    noise_mask = comfy.utils.repeat_to_batch_size(noise_mask, shape[0])
+    noise_mask = noise_mask.to(device)
+    return 
noise_mask + +def get_models_from_cond(cond, model_type): + models = [] + for c in cond: + if model_type in c: + models += [c[model_type]] + return models + +def convert_cond(cond): + out = [] + for c in cond: + temp = c[1].copy() + model_conds = temp.get("model_conds", {}) + if c[0] is not None: + model_conds["c_crossattn"] = comfy.conds.CONDCrossAttn(c[0]) #TODO: remove + temp["cross_attn"] = c[0] + temp["model_conds"] = model_conds + out.append(temp) + return out + +def get_additional_models(conds, dtype): + """loads additional models in conditioning""" + cnets = [] + gligen = [] + + for k in conds: + cnets += get_models_from_cond(conds[k], "control") + gligen += get_models_from_cond(conds[k], "gligen") + + control_nets = set(cnets) + + inference_memory = 0 + control_models = [] + for m in control_nets: + control_models += m.get_models() + inference_memory += m.inference_memory_requirements(dtype) + + gligen = [x[1] for x in gligen] + models = control_models + gligen + return models, inference_memory + +def cleanup_additional_models(models): + """cleanup additional models that were loaded""" + for m in models: + if hasattr(m, 'cleanup'): + m.cleanup() + + +def prepare_sampling(model, noise_shape, conds): + device = model.load_device + real_model = None + models, inference_memory = get_additional_models(conds, model.model_dtype()) + memory_required = model.memory_required([noise_shape[0] * 2] + list(noise_shape[1:])) + inference_memory + minimum_memory_required = model.memory_required([noise_shape[0]] + list(noise_shape[1:])) + inference_memory + comfy.model_management.load_models_gpu([model] + models, memory_required=memory_required, minimum_memory_required=minimum_memory_required) + real_model = model.model + + return real_model, conds, models + +def cleanup_models(conds, models): + cleanup_additional_models(models) + + control_cleanup = [] + for k in conds: + control_cleanup += get_models_from_cond(conds[k], "control") + + cleanup_additional_models(set(control_cleanup)) diff --git a/src/comfyui/comfy/samplers.py b/src/comfyui/comfy/samplers.py new file mode 100644 index 0000000000000000000000000000000000000000..94cba03b88fd1b40ff60b99e04998868077948b1 --- /dev/null +++ b/src/comfyui/comfy/samplers.py @@ -0,0 +1,855 @@ +from .k_diffusion import sampling as k_diffusion_sampling +from .extra_samplers import uni_pc +import torch +import collections +from comfy import model_management +import math +import logging +import comfy.sampler_helpers +import scipy.stats +import numpy + +def get_area_and_mult(conds, x_in, timestep_in): + dims = tuple(x_in.shape[2:]) + area = None + strength = 1.0 + + if 'timestep_start' in conds: + timestep_start = conds['timestep_start'] + if timestep_in[0] > timestep_start: + return None + if 'timestep_end' in conds: + timestep_end = conds['timestep_end'] + if timestep_in[0] < timestep_end: + return None + if 'area' in conds: + area = list(conds['area']) + if 'strength' in conds: + strength = conds['strength'] + + input_x = x_in + if area is not None: + for i in range(len(dims)): + area[i] = min(input_x.shape[i + 2] - area[len(dims) + i], area[i]) + input_x = input_x.narrow(i + 2, area[len(dims) + i], area[i]) + + if 'mask' in conds: + # Scale the mask to the size of the input + # The mask should have been resized as we began the sampling process + mask_strength = 1.0 + if "mask_strength" in conds: + mask_strength = conds["mask_strength"] + mask = conds['mask'] + assert(mask.shape[1:] == x_in.shape[2:]) + + mask = mask[:input_x.shape[0]] + if area is not None: + 
for i in range(len(dims)): + mask = mask.narrow(i + 1, area[len(dims) + i], area[i]) + + mask = mask * mask_strength + mask = mask.unsqueeze(1).repeat(input_x.shape[0] // mask.shape[0], input_x.shape[1], 1, 1) + else: + mask = torch.ones_like(input_x) + mult = mask * strength + + if 'mask' not in conds and area is not None: + rr = 8 + for i in range(len(dims)): + if area[len(dims) + i] != 0: + for t in range(rr): + m = mult.narrow(i + 2, t, 1) + m *= ((1.0/rr) * (t + 1)) + if (area[i] + area[len(dims) + i]) < x_in.shape[i + 2]: + for t in range(rr): + m = mult.narrow(i + 2, area[i] - 1 - t, 1) + m *= ((1.0/rr) * (t + 1)) + + conditioning = {} + model_conds = conds["model_conds"] + for c in model_conds: + conditioning[c] = model_conds[c].process_cond(batch_size=x_in.shape[0], device=x_in.device, area=area) + + control = conds.get('control', None) + + patches = None + if 'gligen' in conds: + gligen = conds['gligen'] + patches = {} + gligen_type = gligen[0] + gligen_model = gligen[1] + if gligen_type == "position": + gligen_patch = gligen_model.model.set_position(input_x.shape, gligen[2], input_x.device) + else: + gligen_patch = gligen_model.model.set_empty(input_x.shape, input_x.device) + + patches['middle_patch'] = [gligen_patch] + + cond_obj = collections.namedtuple('cond_obj', ['input_x', 'mult', 'conditioning', 'area', 'control', 'patches']) + return cond_obj(input_x, mult, conditioning, area, control, patches) + +def cond_equal_size(c1, c2): + if c1 is c2: + return True + if c1.keys() != c2.keys(): + return False + for k in c1: + if not c1[k].can_concat(c2[k]): + return False + return True + +def can_concat_cond(c1, c2): + if c1.input_x.shape != c2.input_x.shape: + return False + + def objects_concatable(obj1, obj2): + if (obj1 is None) != (obj2 is None): + return False + if obj1 is not None: + if obj1 is not obj2: + return False + return True + + if not objects_concatable(c1.control, c2.control): + return False + + if not objects_concatable(c1.patches, c2.patches): + return False + + return cond_equal_size(c1.conditioning, c2.conditioning) + +def cond_cat(c_list): + c_crossattn = [] + c_concat = [] + c_adm = [] + crossattn_max_len = 0 + + temp = {} + for x in c_list: + for k in x: + cur = temp.get(k, []) + cur.append(x[k]) + temp[k] = cur + + out = {} + for k in temp: + conds = temp[k] + out[k] = conds[0].concat(conds[1:]) + + return out + +def calc_cond_batch(model, conds, x_in, timestep, model_options): + out_conds = [] + out_counts = [] + to_run = [] + + for i in range(len(conds)): + out_conds.append(torch.zeros_like(x_in)) + out_counts.append(torch.ones_like(x_in) * 1e-37) + + cond = conds[i] + if cond is not None: + for x in cond: + p = get_area_and_mult(x, x_in, timestep) + if p is None: + continue + + to_run += [(p, i)] + + while len(to_run) > 0: + first = to_run[0] + first_shape = first[0][0].shape + to_batch_temp = [] + for x in range(len(to_run)): + if can_concat_cond(to_run[x][0], first[0]): + to_batch_temp += [x] + + to_batch_temp.reverse() + to_batch = to_batch_temp[:1] + + free_memory = model_management.get_free_memory(x_in.device) + for i in range(1, len(to_batch_temp) + 1): + batch_amount = to_batch_temp[:len(to_batch_temp)//i] + input_shape = [len(batch_amount) * first_shape[0]] + list(first_shape)[1:] + if model.memory_required(input_shape) * 1.5 < free_memory: + to_batch = batch_amount + break + + input_x = [] + mult = [] + c = [] + cond_or_uncond = [] + area = [] + control = None + patches = None + for x in to_batch: + o = to_run.pop(x) + p = o[0] + 
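+            # inputs, multipliers, conditioning and areas are appended in lockstep so the
+            # chunked outputs below can be scattered back to the matching cond/uncond entry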
input_x.append(p.input_x) + mult.append(p.mult) + c.append(p.conditioning) + area.append(p.area) + cond_or_uncond.append(o[1]) + control = p.control + patches = p.patches + + batch_chunks = len(cond_or_uncond) + input_x = torch.cat(input_x) + c = cond_cat(c) + timestep_ = torch.cat([timestep] * batch_chunks) + + if control is not None: + c['control'] = control.get_control(input_x, timestep_, c, len(cond_or_uncond)) + + transformer_options = {} + if 'transformer_options' in model_options: + transformer_options = model_options['transformer_options'].copy() + + if patches is not None: + if "patches" in transformer_options: + cur_patches = transformer_options["patches"].copy() + for p in patches: + if p in cur_patches: + cur_patches[p] = cur_patches[p] + patches[p] + else: + cur_patches[p] = patches[p] + transformer_options["patches"] = cur_patches + else: + transformer_options["patches"] = patches + + transformer_options["cond_or_uncond"] = cond_or_uncond[:] + transformer_options["sigmas"] = timestep + + c['transformer_options'] = transformer_options + + if 'model_function_wrapper' in model_options: + output = model_options['model_function_wrapper'](model.apply_model, {"input": input_x, "timestep": timestep_, "c": c, "cond_or_uncond": cond_or_uncond}).chunk(batch_chunks) + else: + output = model.apply_model(input_x, timestep_, **c).chunk(batch_chunks) + + for o in range(batch_chunks): + cond_index = cond_or_uncond[o] + a = area[o] + if a is None: + out_conds[cond_index] += output[o] * mult[o] + out_counts[cond_index] += mult[o] + else: + out_c = out_conds[cond_index] + out_cts = out_counts[cond_index] + dims = len(a) // 2 + for i in range(dims): + out_c = out_c.narrow(i + 2, a[i + dims], a[i]) + out_cts = out_cts.narrow(i + 2, a[i + dims], a[i]) + out_c += output[o] * mult[o] + out_cts += mult[o] + + for i in range(len(out_conds)): + out_conds[i] /= out_counts[i] + + return out_conds + +def calc_cond_uncond_batch(model, cond, uncond, x_in, timestep, model_options): #TODO: remove + logging.warning("WARNING: The comfy.samplers.calc_cond_uncond_batch function is deprecated please use the calc_cond_batch one instead.") + return tuple(calc_cond_batch(model, [cond, uncond], x_in, timestep, model_options)) + +def cfg_function(model, cond_pred, uncond_pred, cond_scale, x, timestep, model_options={}, cond=None, uncond=None): + if "sampler_cfg_function" in model_options: + args = {"cond": x - cond_pred, "uncond": x - uncond_pred, "cond_scale": cond_scale, "timestep": timestep, "input": x, "sigma": timestep, + "cond_denoised": cond_pred, "uncond_denoised": uncond_pred, "model": model, "model_options": model_options} + cfg_result = x - model_options["sampler_cfg_function"](args) + else: + cfg_result = uncond_pred + (cond_pred - uncond_pred) * cond_scale + + for fn in model_options.get("sampler_post_cfg_function", []): + args = {"denoised": cfg_result, "cond": cond, "uncond": uncond, "model": model, "uncond_denoised": uncond_pred, "cond_denoised": cond_pred, + "sigma": timestep, "model_options": model_options, "input": x} + cfg_result = fn(args) + + return cfg_result + +#The main sampling function shared by all the samplers +#Returns denoised +def sampling_function(model, x, timestep, uncond, cond, cond_scale, model_options={}, seed=None): + if math.isclose(cond_scale, 1.0) and model_options.get("disable_cfg1_optimization", False) == False: + uncond_ = None + else: + uncond_ = uncond + + conds = [cond, uncond_] + out = calc_cond_batch(model, conds, x, timestep, model_options) + + for fn in 
model_options.get("sampler_pre_cfg_function", []): + args = {"conds":conds, "conds_out": out, "cond_scale": cond_scale, "timestep": timestep, + "input": x, "sigma": timestep, "model": model, "model_options": model_options} + out = fn(args) + + return cfg_function(model, out[0], out[1], cond_scale, x, timestep, model_options=model_options, cond=cond, uncond=uncond_) + + +class KSamplerX0Inpaint: + def __init__(self, model, sigmas): + self.inner_model = model + self.sigmas = sigmas + def __call__(self, x, sigma, denoise_mask, model_options={}, seed=None): + if denoise_mask is not None: + if "denoise_mask_function" in model_options: + denoise_mask = model_options["denoise_mask_function"](sigma, denoise_mask, extra_options={"model": self.inner_model, "sigmas": self.sigmas}) + latent_mask = 1. - denoise_mask + x = x * denoise_mask + self.inner_model.inner_model.model_sampling.noise_scaling(sigma.reshape([sigma.shape[0]] + [1] * (len(self.noise.shape) - 1)), self.noise, self.latent_image) * latent_mask + out = self.inner_model(x, sigma, model_options=model_options, seed=seed) + if denoise_mask is not None: + out = out * denoise_mask + self.latent_image * latent_mask + return out + +def simple_scheduler(model_sampling, steps): + s = model_sampling + sigs = [] + ss = len(s.sigmas) / steps + for x in range(steps): + sigs += [float(s.sigmas[-(1 + int(x * ss))])] + sigs += [0.0] + return torch.FloatTensor(sigs) + +def ddim_scheduler(model_sampling, steps): + s = model_sampling + sigs = [] + x = 1 + if math.isclose(float(s.sigmas[x]), 0, abs_tol=0.00001): + steps += 1 + sigs = [] + else: + sigs = [0.0] + + ss = max(len(s.sigmas) // steps, 1) + while x < len(s.sigmas): + sigs += [float(s.sigmas[x])] + x += ss + sigs = sigs[::-1] + return torch.FloatTensor(sigs) + +def normal_scheduler(model_sampling, steps, sgm=False, floor=False): + s = model_sampling + start = s.timestep(s.sigma_max) + end = s.timestep(s.sigma_min) + + append_zero = True + if sgm: + timesteps = torch.linspace(start, end, steps + 1)[:-1] + else: + if math.isclose(float(s.sigma(end)), 0, abs_tol=0.00001): + steps += 1 + append_zero = False + timesteps = torch.linspace(start, end, steps) + + sigs = [] + for x in range(len(timesteps)): + ts = timesteps[x] + sigs.append(float(s.sigma(ts))) + + if append_zero: + sigs += [0.0] + + return torch.FloatTensor(sigs) + +# Implemented based on: https://arxiv.org/abs/2407.12173 +def beta_scheduler(model_sampling, steps, alpha=0.6, beta=0.6): + total_timesteps = (len(model_sampling.sigmas) - 1) + ts = 1 - numpy.linspace(0, 1, steps, endpoint=False) + ts = numpy.rint(scipy.stats.beta.ppf(ts, alpha, beta) * total_timesteps) + + sigs = [] + last_t = -1 + for t in ts: + if t != last_t: + sigs += [float(model_sampling.sigmas[int(t)])] + last_t = t + sigs += [0.0] + return torch.FloatTensor(sigs) + +# from: https://github.com/genmoai/models/blob/main/src/mochi_preview/infer.py#L41 +def linear_quadratic_schedule(model_sampling, steps, threshold_noise=0.025, linear_steps=None): + if steps == 1: + sigma_schedule = [1.0, 0.0] + else: + if linear_steps is None: + linear_steps = steps // 2 + linear_sigma_schedule = [i * threshold_noise / linear_steps for i in range(linear_steps)] + threshold_noise_step_diff = linear_steps - threshold_noise * steps + quadratic_steps = steps - linear_steps + quadratic_coef = threshold_noise_step_diff / (linear_steps * quadratic_steps ** 2) + linear_coef = threshold_noise / linear_steps - 2 * threshold_noise_step_diff / (quadratic_steps ** 2) + const = quadratic_coef * 
(linear_steps ** 2) + quadratic_sigma_schedule = [ + quadratic_coef * (i ** 2) + linear_coef * i + const + for i in range(linear_steps, steps) + ] + sigma_schedule = linear_sigma_schedule + quadratic_sigma_schedule + [1.0] + sigma_schedule = [1.0 - x for x in sigma_schedule] + return torch.FloatTensor(sigma_schedule) * model_sampling.sigma_max.cpu() + +def get_mask_aabb(masks): + if masks.numel() == 0: + return torch.zeros((0, 4), device=masks.device, dtype=torch.int) + + b = masks.shape[0] + + bounding_boxes = torch.zeros((b, 4), device=masks.device, dtype=torch.int) + is_empty = torch.zeros((b), device=masks.device, dtype=torch.bool) + for i in range(b): + mask = masks[i] + if mask.numel() == 0: + continue + if torch.max(mask != 0) == False: + is_empty[i] = True + continue + y, x = torch.where(mask) + bounding_boxes[i, 0] = torch.min(x) + bounding_boxes[i, 1] = torch.min(y) + bounding_boxes[i, 2] = torch.max(x) + bounding_boxes[i, 3] = torch.max(y) + + return bounding_boxes, is_empty + +def resolve_areas_and_cond_masks_multidim(conditions, dims, device): + # We need to decide on an area outside the sampling loop in order to properly generate opposite areas of equal sizes. + # While we're doing this, we can also resolve the mask device and scaling for performance reasons + for i in range(len(conditions)): + c = conditions[i] + if 'area' in c: + area = c['area'] + if area[0] == "percentage": + modified = c.copy() + a = area[1:] + a_len = len(a) // 2 + area = () + for d in range(len(dims)): + area += (max(1, round(a[d] * dims[d])),) + for d in range(len(dims)): + area += (round(a[d + a_len] * dims[d]),) + + modified['area'] = area + c = modified + conditions[i] = c + + if 'mask' in c: + mask = c['mask'] + mask = mask.to(device=device) + modified = c.copy() + if len(mask.shape) == len(dims): + mask = mask.unsqueeze(0) + if mask.shape[1:] != dims: + mask = torch.nn.functional.interpolate(mask.unsqueeze(1), size=dims, mode='bilinear', align_corners=False).squeeze(1) + + if modified.get("set_area_to_bounds", False): #TODO: handle dim != 2 + bounds = torch.max(torch.abs(mask),dim=0).values.unsqueeze(0) + boxes, is_empty = get_mask_aabb(bounds) + if is_empty[0]: + # Use the minimum possible size for efficiency reasons. 
(Since the mask is all-0, this becomes a noop anyway) + modified['area'] = (8, 8, 0, 0) + else: + box = boxes[0] + H, W, Y, X = (box[3] - box[1] + 1, box[2] - box[0] + 1, box[1], box[0]) + H = max(8, H) + W = max(8, W) + area = (int(H), int(W), int(Y), int(X)) + modified['area'] = area + + modified['mask'] = mask + conditions[i] = modified + +def resolve_areas_and_cond_masks(conditions, h, w, device): + logging.warning("WARNING: The comfy.samplers.resolve_areas_and_cond_masks function is deprecated please use the resolve_areas_and_cond_masks_multidim one instead.") + return resolve_areas_and_cond_masks_multidim(conditions, [h, w], device) + +def create_cond_with_same_area_if_none(conds, c): #TODO: handle dim != 2 + if 'area' not in c: + return + + c_area = c['area'] + smallest = None + for x in conds: + if 'area' in x: + a = x['area'] + if c_area[2] >= a[2] and c_area[3] >= a[3]: + if a[0] + a[2] >= c_area[0] + c_area[2]: + if a[1] + a[3] >= c_area[1] + c_area[3]: + if smallest is None: + smallest = x + elif 'area' not in smallest: + smallest = x + else: + if smallest['area'][0] * smallest['area'][1] > a[0] * a[1]: + smallest = x + else: + if smallest is None: + smallest = x + if smallest is None: + return + if 'area' in smallest: + if smallest['area'] == c_area: + return + + out = c.copy() + out['model_conds'] = smallest['model_conds'].copy() #TODO: which fields should be copied? + conds += [out] + +def calculate_start_end_timesteps(model, conds): + s = model.model_sampling + for t in range(len(conds)): + x = conds[t] + + timestep_start = None + timestep_end = None + if 'start_percent' in x: + timestep_start = s.percent_to_sigma(x['start_percent']) + if 'end_percent' in x: + timestep_end = s.percent_to_sigma(x['end_percent']) + + if (timestep_start is not None) or (timestep_end is not None): + n = x.copy() + if (timestep_start is not None): + n['timestep_start'] = timestep_start + if (timestep_end is not None): + n['timestep_end'] = timestep_end + conds[t] = n + +def pre_run_control(model, conds): + s = model.model_sampling + for t in range(len(conds)): + x = conds[t] + + timestep_start = None + timestep_end = None + percent_to_timestep_function = lambda a: s.percent_to_sigma(a) + if 'control' in x: + x['control'].pre_run(model, percent_to_timestep_function) + +def apply_empty_x_to_equal_area(conds, uncond, name, uncond_fill_func): + cond_cnets = [] + cond_other = [] + uncond_cnets = [] + uncond_other = [] + for t in range(len(conds)): + x = conds[t] + if 'area' not in x: + if name in x and x[name] is not None: + cond_cnets.append(x[name]) + else: + cond_other.append((x, t)) + for t in range(len(uncond)): + x = uncond[t] + if 'area' not in x: + if name in x and x[name] is not None: + uncond_cnets.append(x[name]) + else: + uncond_other.append((x, t)) + + if len(uncond_cnets) > 0: + return + + for x in range(len(cond_cnets)): + temp = uncond_other[x % len(uncond_other)] + o = temp[0] + if name in o and o[name] is not None: + n = o.copy() + n[name] = uncond_fill_func(cond_cnets, x) + uncond += [n] + else: + n = o.copy() + n[name] = uncond_fill_func(cond_cnets, x) + uncond[temp[1]] = n + +def encode_model_conds(model_function, conds, noise, device, prompt_type, **kwargs): + for t in range(len(conds)): + x = conds[t] + params = x.copy() + params["device"] = device + params["noise"] = noise + default_width = None + if len(noise.shape) >= 4: #TODO: 8 multiple should be set by the model + default_width = noise.shape[3] * 8 + params["width"] = params.get("width", default_width) + params["height"] 
= params.get("height", noise.shape[2] * 8) + params["prompt_type"] = params.get("prompt_type", prompt_type) + for k in kwargs: + if k not in params: + params[k] = kwargs[k] + + out = model_function(**params) + x = x.copy() + model_conds = x['model_conds'].copy() + for k in out: + model_conds[k] = out[k] + x['model_conds'] = model_conds + conds[t] = x + return conds + +class Sampler: + def sample(self): + pass + + def max_denoise(self, model_wrap, sigmas): + max_sigma = float(model_wrap.inner_model.model_sampling.sigma_max) + sigma = float(sigmas[0]) + return math.isclose(max_sigma, sigma, rel_tol=1e-05) or sigma > max_sigma + +KSAMPLER_NAMES = ["euler", "euler_cfg_pp", "euler_ancestral", "euler_ancestral_cfg_pp", "heun", "heunpp2","dpm_2", "dpm_2_ancestral", + "lms", "dpm_fast", "dpm_adaptive", "dpmpp_2s_ancestral", "dpmpp_2s_ancestral_cfg_pp", "dpmpp_sde", "dpmpp_sde_gpu", + "dpmpp_2m", "dpmpp_2m_cfg_pp", "dpmpp_2m_sde", "dpmpp_2m_sde_gpu", "dpmpp_3m_sde", "dpmpp_3m_sde_gpu", "ddpm", "lcm", + "ipndm", "ipndm_v", "deis"] + +class KSAMPLER(Sampler): + def __init__(self, sampler_function, extra_options={}, inpaint_options={}): + self.sampler_function = sampler_function + self.extra_options = extra_options + self.inpaint_options = inpaint_options + + def sample(self, model_wrap, sigmas, extra_args, callback, noise, latent_image=None, denoise_mask=None, disable_pbar=False): + extra_args["denoise_mask"] = denoise_mask + model_k = KSamplerX0Inpaint(model_wrap, sigmas) + model_k.latent_image = latent_image + if self.inpaint_options.get("random", False): #TODO: Should this be the default? + generator = torch.manual_seed(extra_args.get("seed", 41) + 1) + model_k.noise = torch.randn(noise.shape, generator=generator, device="cpu").to(noise.dtype).to(noise.device) + else: + model_k.noise = noise + + noise = model_wrap.inner_model.model_sampling.noise_scaling(sigmas[0], noise, latent_image, self.max_denoise(model_wrap, sigmas)) + + k_callback = None + total_steps = len(sigmas) - 1 + if callback is not None: + k_callback = lambda x: callback(x["i"], x["denoised"], x["x"], total_steps) + + samples = self.sampler_function(model_k, noise, sigmas, extra_args=extra_args, callback=k_callback, disable=disable_pbar, **self.extra_options) + samples = model_wrap.inner_model.model_sampling.inverse_noise_scaling(sigmas[-1], samples) + return samples + + +def ksampler(sampler_name, extra_options={}, inpaint_options={}): + if sampler_name == "dpm_fast": + def dpm_fast_function(model, noise, sigmas, extra_args, callback, disable): + if len(sigmas) <= 1: + return noise + + sigma_min = sigmas[-1] + if sigma_min == 0: + sigma_min = sigmas[-2] + total_steps = len(sigmas) - 1 + return k_diffusion_sampling.sample_dpm_fast(model, noise, sigma_min, sigmas[0], total_steps, extra_args=extra_args, callback=callback, disable=disable) + sampler_function = dpm_fast_function + elif sampler_name == "dpm_adaptive": + def dpm_adaptive_function(model, noise, sigmas, extra_args, callback, disable, **extra_options): + if len(sigmas) <= 1: + return noise + + sigma_min = sigmas[-1] + if sigma_min == 0: + sigma_min = sigmas[-2] + return k_diffusion_sampling.sample_dpm_adaptive(model, noise, sigma_min, sigmas[0], extra_args=extra_args, callback=callback, disable=disable, **extra_options) + sampler_function = dpm_adaptive_function + else: + sampler_function = getattr(k_diffusion_sampling, "sample_{}".format(sampler_name)) + + return KSAMPLER(sampler_function, extra_options, inpaint_options) + + +def process_conds(model, noise, conds, device, 
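+                  # latent_image and denoise_mask are forwarded to the model's extra_conds hook
+                  # (consumed by e.g. inpainting-style models); seed is passed through as well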
latent_image=None, denoise_mask=None, seed=None): + for k in conds: + conds[k] = conds[k][:] + resolve_areas_and_cond_masks_multidim(conds[k], noise.shape[2:], device) + + for k in conds: + calculate_start_end_timesteps(model, conds[k]) + + if hasattr(model, 'extra_conds'): + for k in conds: + conds[k] = encode_model_conds(model.extra_conds, conds[k], noise, device, k, latent_image=latent_image, denoise_mask=denoise_mask, seed=seed) + + #make sure each cond area has an opposite one with the same area + for k in conds: + for c in conds[k]: + for kk in conds: + if k != kk: + create_cond_with_same_area_if_none(conds[kk], c) + + for k in conds: + pre_run_control(model, conds[k]) + + if "positive" in conds: + positive = conds["positive"] + for k in conds: + if k != "positive": + apply_empty_x_to_equal_area(list(filter(lambda c: c.get('control_apply_to_uncond', False) == True, positive)), conds[k], 'control', lambda cond_cnets, x: cond_cnets[x]) + apply_empty_x_to_equal_area(positive, conds[k], 'gligen', lambda cond_cnets, x: cond_cnets[x]) + + return conds + +class CFGGuider: + def __init__(self, model_patcher): + self.model_patcher = model_patcher + self.model_options = model_patcher.model_options + self.original_conds = {} + self.cfg = 1.0 + + def set_conds(self, positive, negative): + self.inner_set_conds({"positive": positive, "negative": negative}) + + def set_cfg(self, cfg): + self.cfg = cfg + + def inner_set_conds(self, conds): + for k in conds: + self.original_conds[k] = comfy.sampler_helpers.convert_cond(conds[k]) + + def __call__(self, *args, **kwargs): + return self.predict_noise(*args, **kwargs) + + def predict_noise(self, x, timestep, model_options={}, seed=None): + return sampling_function(self.inner_model, x, timestep, self.conds.get("negative", None), self.conds.get("positive", None), self.cfg, model_options=model_options, seed=seed) + + def inner_sample(self, noise, latent_image, device, sampler, sigmas, denoise_mask, callback, disable_pbar, seed): + if latent_image is not None and torch.count_nonzero(latent_image) > 0: #Don't shift the empty latent image. 
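+            # process_latent_in applies the model's latent_format scale/shift; skipping an
+            # all-zero latent keeps "empty" meaning pure noise rather than shifted zeros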
+ latent_image = self.inner_model.process_latent_in(latent_image) + + self.conds = process_conds(self.inner_model, noise, self.conds, device, latent_image, denoise_mask, seed) + + extra_args = {"model_options": self.model_options, "seed":seed} + + samples = sampler.sample(self, sigmas, extra_args, callback, noise, latent_image, denoise_mask, disable_pbar) + return self.inner_model.process_latent_out(samples.to(torch.float32)) + + def sample(self, noise, latent_image, sampler, sigmas, denoise_mask=None, callback=None, disable_pbar=False, seed=None): + if sigmas.shape[-1] == 0: + return latent_image + + self.conds = {} + for k in self.original_conds: + self.conds[k] = list(map(lambda a: a.copy(), self.original_conds[k])) + + self.inner_model, self.conds, self.loaded_models = comfy.sampler_helpers.prepare_sampling(self.model_patcher, noise.shape, self.conds) + device = self.model_patcher.load_device + + if denoise_mask is not None: + denoise_mask = comfy.sampler_helpers.prepare_mask(denoise_mask, noise.shape, device) + + noise = noise.to(device) + latent_image = latent_image.to(device) + sigmas = sigmas.to(device) + + output = self.inner_sample(noise, latent_image, device, sampler, sigmas, denoise_mask, callback, disable_pbar, seed) + + comfy.sampler_helpers.cleanup_models(self.conds, self.loaded_models) + del self.inner_model + del self.conds + del self.loaded_models + return output + + +def sample(model, noise, positive, negative, cfg, device, sampler, sigmas, model_options={}, latent_image=None, denoise_mask=None, callback=None, disable_pbar=False, seed=None): + cfg_guider = CFGGuider(model) + cfg_guider.set_conds(positive, negative) + cfg_guider.set_cfg(cfg) + return cfg_guider.sample(noise, latent_image, sampler, sigmas, denoise_mask, callback, disable_pbar, seed) + + +SCHEDULER_NAMES = ["normal", "karras", "exponential", "sgm_uniform", "simple", "ddim_uniform", "beta", "linear_quadratic"] +SAMPLER_NAMES = KSAMPLER_NAMES + ["ddim", "uni_pc", "uni_pc_bh2"] + +def calculate_sigmas(model_sampling, scheduler_name, steps): + if scheduler_name == "karras": + sigmas = k_diffusion_sampling.get_sigmas_karras(n=steps, sigma_min=float(model_sampling.sigma_min), sigma_max=float(model_sampling.sigma_max)) + elif scheduler_name == "exponential": + sigmas = k_diffusion_sampling.get_sigmas_exponential(n=steps, sigma_min=float(model_sampling.sigma_min), sigma_max=float(model_sampling.sigma_max)) + elif scheduler_name == "normal": + sigmas = normal_scheduler(model_sampling, steps) + elif scheduler_name == "simple": + sigmas = simple_scheduler(model_sampling, steps) + elif scheduler_name == "ddim_uniform": + sigmas = ddim_scheduler(model_sampling, steps) + elif scheduler_name == "sgm_uniform": + sigmas = normal_scheduler(model_sampling, steps, sgm=True) + elif scheduler_name == "beta": + sigmas = beta_scheduler(model_sampling, steps) + elif scheduler_name == "linear_quadratic": + sigmas = linear_quadratic_schedule(model_sampling, steps) + else: + logging.error("error invalid scheduler {}".format(scheduler_name)) + return sigmas + +def sampler_object(name): + if name == "uni_pc": + sampler = KSAMPLER(uni_pc.sample_unipc) + elif name == "uni_pc_bh2": + sampler = KSAMPLER(uni_pc.sample_unipc_bh2) + elif name == "ddim": + sampler = ksampler("euler", inpaint_options={"random": True}) + else: + sampler = ksampler(name) + return sampler + +class KSampler: + SCHEDULERS = SCHEDULER_NAMES + SAMPLERS = SAMPLER_NAMES + DISCARD_PENULTIMATE_SIGMA_SAMPLERS = set(('dpm_2', 'dpm_2_ancestral', 'uni_pc', 'uni_pc_bh2')) + + 
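+    # Illustrative note: with steps=20 and denoise=0.5, set_steps() below computes sigmas
+    # for int(20 / 0.5) = 40 steps and keeps only the last 21 values, so sampling runs 20
+    # steps over just the low-noise tail of the schedule (img2img-style partial denoising).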
def __init__(self, model, steps, device, sampler=None, scheduler=None, denoise=None, model_options={}): + self.model = model + self.device = device + if scheduler not in self.SCHEDULERS: + scheduler = self.SCHEDULERS[0] + if sampler not in self.SAMPLERS: + sampler = self.SAMPLERS[0] + self.scheduler = scheduler + self.sampler = sampler + self.set_steps(steps, denoise) + self.denoise = denoise + self.model_options = model_options + + def calculate_sigmas(self, steps): + sigmas = None + + discard_penultimate_sigma = False + if self.sampler in self.DISCARD_PENULTIMATE_SIGMA_SAMPLERS: + steps += 1 + discard_penultimate_sigma = True + + sigmas = calculate_sigmas(self.model.get_model_object("model_sampling"), self.scheduler, steps) + + if discard_penultimate_sigma: + sigmas = torch.cat([sigmas[:-2], sigmas[-1:]]) + return sigmas + + def set_steps(self, steps, denoise=None): + self.steps = steps + if denoise is None or denoise > 0.9999: + self.sigmas = self.calculate_sigmas(steps).to(self.device) + else: + if denoise <= 0.0: + self.sigmas = torch.FloatTensor([]) + else: + new_steps = int(steps/denoise) + sigmas = self.calculate_sigmas(new_steps).to(self.device) + self.sigmas = sigmas[-(steps + 1):] + + def sample(self, noise, positive, negative, cfg, latent_image=None, start_step=None, last_step=None, force_full_denoise=False, denoise_mask=None, sigmas=None, callback=None, disable_pbar=False, seed=None): + if sigmas is None: + sigmas = self.sigmas + + if last_step is not None and last_step < (len(sigmas) - 1): + sigmas = sigmas[:last_step + 1] + if force_full_denoise: + sigmas[-1] = 0 + + if start_step is not None: + if start_step < (len(sigmas) - 1): + sigmas = sigmas[start_step:] + else: + if latent_image is not None: + return latent_image + else: + return torch.zeros_like(noise) + + sampler = sampler_object(self.sampler) + + return sample(self.model, noise, positive, negative, cfg, self.device, sampler, sigmas, self.model_options, latent_image=latent_image, denoise_mask=denoise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed) diff --git a/src/comfyui/comfy/sd.py b/src/comfyui/comfy/sd.py new file mode 100644 index 0000000000000000000000000000000000000000..e9a3579c98b61ef6acbe741f466585f2a51b9933 --- /dev/null +++ b/src/comfyui/comfy/sd.py @@ -0,0 +1,763 @@ +import torch +from enum import Enum +import logging + +from comfy import model_management +from .ldm.models.autoencoder import AutoencoderKL, AutoencodingEngine +from .ldm.cascade.stage_a import StageA +from .ldm.cascade.stage_c_coder import StageC_coder +from .ldm.audio.autoencoder import AudioOobleckVAE +import comfy.ldm.genmo.vae.model +import yaml + +import comfy.utils + +from . import clip_vision +from . import gligen +from . import diffusers_convert +from . import model_detection + +from . import sd1_clip +from . 
import sdxl_clip +import comfy.text_encoders.sd2_clip +import comfy.text_encoders.sd3_clip +import comfy.text_encoders.sa_t5 +import comfy.text_encoders.aura_t5 +import comfy.text_encoders.hydit +import comfy.text_encoders.flux +import comfy.text_encoders.long_clipl +import comfy.text_encoders.genmo + +import comfy.model_patcher +import comfy.lora +import comfy.t2i_adapter.adapter +import comfy.taesd.taesd + +def load_lora_for_models(model, clip, lora, strength_model, strength_clip): + key_map = {} + if model is not None: + key_map = comfy.lora.model_lora_keys_unet(model.model, key_map) + if clip is not None: + key_map = comfy.lora.model_lora_keys_clip(clip.cond_stage_model, key_map) + + loaded = comfy.lora.load_lora(lora, key_map) + if model is not None: + new_modelpatcher = model.clone() + k = new_modelpatcher.add_patches(loaded, strength_model) + else: + k = () + new_modelpatcher = None + + if clip is not None: + new_clip = clip.clone() + k1 = new_clip.add_patches(loaded, strength_clip) + else: + k1 = () + new_clip = None + k = set(k) + k1 = set(k1) + for x in loaded: + if (x not in k) and (x not in k1): + logging.warning("NOT LOADED {}".format(x)) + + return (new_modelpatcher, new_clip) + + +class CLIP: + def __init__(self, target=None, embedding_directory=None, no_init=False, tokenizer_data={}, parameters=0, model_options={}): + if no_init: + return + params = target.params.copy() + clip = target.clip + tokenizer = target.tokenizer + + load_device = model_options.get("load_device", model_management.text_encoder_device()) + offload_device = model_options.get("offload_device", model_management.text_encoder_offload_device()) + dtype = model_options.get("dtype", None) + if dtype is None: + dtype = model_management.text_encoder_dtype(load_device) + + params['dtype'] = dtype + params['device'] = model_options.get("initial_device", model_management.text_encoder_initial_device(load_device, offload_device, parameters * model_management.dtype_size(dtype))) + params['model_options'] = model_options + + self.cond_stage_model = clip(**(params)) + + for dt in self.cond_stage_model.dtypes: + if not model_management.supports_cast(load_device, dt): + load_device = offload_device + if params['device'] != offload_device: + self.cond_stage_model.to(offload_device) + logging.warning("Had to shift TE back.") + + self.tokenizer = tokenizer(embedding_directory=embedding_directory, tokenizer_data=tokenizer_data) + self.patcher = comfy.model_patcher.ModelPatcher(self.cond_stage_model, load_device=load_device, offload_device=offload_device) + if params['device'] == load_device: + model_management.load_models_gpu([self.patcher], force_full_load=True) + self.layer_idx = None + logging.debug("CLIP model load device: {}, offload device: {}, current: {}".format(load_device, offload_device, params['device'])) + + def clone(self): + n = CLIP(no_init=True) + n.patcher = self.patcher.clone() + n.cond_stage_model = self.cond_stage_model + n.tokenizer = self.tokenizer + n.layer_idx = self.layer_idx + return n + + def add_patches(self, patches, strength_patch=1.0, strength_model=1.0): + return self.patcher.add_patches(patches, strength_patch, strength_model) + + def clip_layer(self, layer_idx): + self.layer_idx = layer_idx + + def tokenize(self, text, return_word_ids=False): + return self.tokenizer.tokenize_with_weights(text, return_word_ids) + + def encode_from_tokens(self, tokens, return_pooled=False, return_dict=False): + self.cond_stage_model.reset_clip_options() + + if self.layer_idx is not None: + 
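+            # layer_idx implements "clip skip": e.g. clip.clip_layer(-2) encodes with the
+            # penultimate hidden layer instead of the final one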
self.cond_stage_model.set_clip_options({"layer": self.layer_idx}) + + if return_pooled == "unprojected": + self.cond_stage_model.set_clip_options({"projected_pooled": False}) + + self.load_model() + o = self.cond_stage_model.encode_token_weights(tokens) + cond, pooled = o[:2] + if return_dict: + out = {"cond": cond, "pooled_output": pooled} + if len(o) > 2: + for k in o[2]: + out[k] = o[2][k] + return out + + if return_pooled: + return cond, pooled + return cond + + def encode(self, text): + tokens = self.tokenize(text) + return self.encode_from_tokens(tokens) + + def load_sd(self, sd, full_model=False): + if full_model: + return self.cond_stage_model.load_state_dict(sd, strict=False) + else: + return self.cond_stage_model.load_sd(sd) + + def get_sd(self): + sd_clip = self.cond_stage_model.state_dict() + sd_tokenizer = self.tokenizer.state_dict() + for k in sd_tokenizer: + sd_clip[k] = sd_tokenizer[k] + return sd_clip + + def load_model(self): + model_management.load_model_gpu(self.patcher) + return self.patcher + + def get_key_patches(self): + return self.patcher.get_key_patches() + +class VAE: + def __init__(self, sd=None, device=None, config=None, dtype=None): + if 'decoder.up_blocks.0.resnets.0.norm1.weight' in sd.keys(): #diffusers format + sd = diffusers_convert.convert_vae_state_dict(sd) + + self.memory_used_encode = lambda shape, dtype: (1767 * shape[2] * shape[3]) * model_management.dtype_size(dtype) #These are for AutoencoderKL and need tweaking (should be lower) + self.memory_used_decode = lambda shape, dtype: (2178 * shape[2] * shape[3] * 64) * model_management.dtype_size(dtype) + self.downscale_ratio = 8 + self.upscale_ratio = 8 + self.latent_channels = 4 + self.output_channels = 3 + self.process_input = lambda image: image * 2.0 - 1.0 + self.process_output = lambda image: torch.clamp((image + 1.0) / 2.0, min=0.0, max=1.0) + self.working_dtypes = [torch.bfloat16, torch.float32] + + if config is None: + if "decoder.mid.block_1.mix_factor" in sd: + encoder_config = {'double_z': True, 'z_channels': 4, 'resolution': 256, 'in_channels': 3, 'out_ch': 3, 'ch': 128, 'ch_mult': [1, 2, 4, 4], 'num_res_blocks': 2, 'attn_resolutions': [], 'dropout': 0.0} + decoder_config = encoder_config.copy() + decoder_config["video_kernel_size"] = [3, 1, 1] + decoder_config["alpha"] = 0.0 + self.first_stage_model = AutoencodingEngine(regularizer_config={'target': "comfy.ldm.models.autoencoder.DiagonalGaussianRegularizer"}, + encoder_config={'target': "comfy.ldm.modules.diffusionmodules.model.Encoder", 'params': encoder_config}, + decoder_config={'target': "comfy.ldm.modules.temporal_ae.VideoDecoder", 'params': decoder_config}) + elif "taesd_decoder.1.weight" in sd: + self.latent_channels = sd["taesd_decoder.1.weight"].shape[1] + self.first_stage_model = comfy.taesd.taesd.TAESD(latent_channels=self.latent_channels) + elif "vquantizer.codebook.weight" in sd: #VQGan: stage a of stable cascade + self.first_stage_model = StageA() + self.downscale_ratio = 4 + self.upscale_ratio = 4 + #TODO + #self.memory_used_encode + #self.memory_used_decode + self.process_input = lambda image: image + self.process_output = lambda image: image + elif "backbone.1.0.block.0.1.num_batches_tracked" in sd: #effnet: encoder for stage c latent of stable cascade + self.first_stage_model = StageC_coder() + self.downscale_ratio = 32 + self.latent_channels = 16 + new_sd = {} + for k in sd: + new_sd["encoder.{}".format(k)] = sd[k] + sd = new_sd + elif "blocks.11.num_batches_tracked" in sd: #previewer: decoder for stage c latent of 
stable cascade + self.first_stage_model = StageC_coder() + self.latent_channels = 16 + new_sd = {} + for k in sd: + new_sd["previewer.{}".format(k)] = sd[k] + sd = new_sd + elif "encoder.backbone.1.0.block.0.1.num_batches_tracked" in sd: #combined effnet and previewer for stable cascade + self.first_stage_model = StageC_coder() + self.downscale_ratio = 32 + self.latent_channels = 16 + elif "decoder.conv_in.weight" in sd: + #default SD1.x/SD2.x VAE parameters + ddconfig = {'double_z': True, 'z_channels': 4, 'resolution': 256, 'in_channels': 3, 'out_ch': 3, 'ch': 128, 'ch_mult': [1, 2, 4, 4], 'num_res_blocks': 2, 'attn_resolutions': [], 'dropout': 0.0} + + if 'encoder.down.2.downsample.conv.weight' not in sd and 'decoder.up.3.upsample.conv.weight' not in sd: #Stable diffusion x4 upscaler VAE + ddconfig['ch_mult'] = [1, 2, 4] + self.downscale_ratio = 4 + self.upscale_ratio = 4 + + self.latent_channels = ddconfig['z_channels'] = sd["decoder.conv_in.weight"].shape[1] + if 'quant_conv.weight' in sd: + self.first_stage_model = AutoencoderKL(ddconfig=ddconfig, embed_dim=4) + else: + self.first_stage_model = AutoencodingEngine(regularizer_config={'target': "comfy.ldm.models.autoencoder.DiagonalGaussianRegularizer"}, + encoder_config={'target': "comfy.ldm.modules.diffusionmodules.model.Encoder", 'params': ddconfig}, + decoder_config={'target': "comfy.ldm.modules.diffusionmodules.model.Decoder", 'params': ddconfig}) + elif "decoder.layers.1.layers.0.beta" in sd: + self.first_stage_model = AudioOobleckVAE() + self.memory_used_encode = lambda shape, dtype: (1000 * shape[2]) * model_management.dtype_size(dtype) + self.memory_used_decode = lambda shape, dtype: (1000 * shape[2] * 2048) * model_management.dtype_size(dtype) + self.latent_channels = 64 + self.output_channels = 2 + self.upscale_ratio = 2048 + self.downscale_ratio = 2048 + self.process_output = lambda audio: audio + self.process_input = lambda audio: audio + self.working_dtypes = [torch.float16, torch.bfloat16, torch.float32] + elif "blocks.2.blocks.3.stack.5.weight" in sd or "decoder.blocks.2.blocks.3.stack.5.weight" in sd: #genmo mochi vae + if "blocks.2.blocks.3.stack.5.weight" in sd: + sd = comfy.utils.state_dict_prefix_replace(sd, {"": "decoder."}) + self.first_stage_model = comfy.ldm.genmo.vae.model.VideoVAE() + self.latent_channels = 12 + self.memory_used_decode = lambda shape, dtype: (1000 * shape[2] * shape[3] * shape[4] * (6 * 8 * 8)) * model_management.dtype_size(dtype) + self.upscale_ratio = (lambda a: max(0, a * 6 - 5), 8, 8) + else: + logging.warning("WARNING: No VAE weights detected, VAE not initalized.") + self.first_stage_model = None + return + else: + self.first_stage_model = AutoencoderKL(**(config['params'])) + self.first_stage_model = self.first_stage_model.eval() + + m, u = self.first_stage_model.load_state_dict(sd, strict=False) + if len(m) > 0: + logging.warning("Missing VAE keys {}".format(m)) + + if len(u) > 0: + logging.debug("Leftover VAE keys {}".format(u)) + + if device is None: + device = model_management.vae_device() + self.device = device + offload_device = model_management.vae_offload_device() + if dtype is None: + dtype = model_management.vae_dtype(self.device, self.working_dtypes) + self.vae_dtype = dtype + self.first_stage_model.to(self.vae_dtype) + self.output_device = model_management.intermediate_device() + + self.patcher = comfy.model_patcher.ModelPatcher(self.first_stage_model, load_device=self.device, offload_device=offload_device) + logging.debug("VAE load device: {}, offload device: {}, dtype: 
{}".format(self.device, offload_device, self.vae_dtype)) + + def vae_encode_crop_pixels(self, pixels): + dims = pixels.shape[1:-1] + for d in range(len(dims)): + x = (dims[d] // self.downscale_ratio) * self.downscale_ratio + x_offset = (dims[d] % self.downscale_ratio) // 2 + if x != dims[d]: + pixels = pixels.narrow(d + 1, x_offset, x) + return pixels + + def decode_tiled_(self, samples, tile_x=64, tile_y=64, overlap = 16): + steps = samples.shape[0] * comfy.utils.get_tiled_scale_steps(samples.shape[3], samples.shape[2], tile_x, tile_y, overlap) + steps += samples.shape[0] * comfy.utils.get_tiled_scale_steps(samples.shape[3], samples.shape[2], tile_x // 2, tile_y * 2, overlap) + steps += samples.shape[0] * comfy.utils.get_tiled_scale_steps(samples.shape[3], samples.shape[2], tile_x * 2, tile_y // 2, overlap) + pbar = comfy.utils.ProgressBar(steps) + + decode_fn = lambda a: self.first_stage_model.decode(a.to(self.vae_dtype).to(self.device)).float() + output = self.process_output( + (comfy.utils.tiled_scale(samples, decode_fn, tile_x // 2, tile_y * 2, overlap, upscale_amount = self.upscale_ratio, output_device=self.output_device, pbar = pbar) + + comfy.utils.tiled_scale(samples, decode_fn, tile_x * 2, tile_y // 2, overlap, upscale_amount = self.upscale_ratio, output_device=self.output_device, pbar = pbar) + + comfy.utils.tiled_scale(samples, decode_fn, tile_x, tile_y, overlap, upscale_amount = self.upscale_ratio, output_device=self.output_device, pbar = pbar)) + / 3.0) + return output + + def decode_tiled_1d(self, samples, tile_x=128, overlap=32): + decode_fn = lambda a: self.first_stage_model.decode(a.to(self.vae_dtype).to(self.device)).float() + return comfy.utils.tiled_scale_multidim(samples, decode_fn, tile=(tile_x,), overlap=overlap, upscale_amount=self.upscale_ratio, out_channels=self.output_channels, output_device=self.output_device) + + def decode_tiled_3d(self, samples, tile_t=999, tile_x=32, tile_y=32, overlap=(1, 8, 8)): + decode_fn = lambda a: self.first_stage_model.decode(a.to(self.vae_dtype).to(self.device)).float() + return self.process_output(comfy.utils.tiled_scale_multidim(samples, decode_fn, tile=(tile_t, tile_x, tile_y), overlap=overlap, upscale_amount=self.upscale_ratio, out_channels=self.output_channels, output_device=self.output_device)) + + def encode_tiled_(self, pixel_samples, tile_x=512, tile_y=512, overlap = 64): + steps = pixel_samples.shape[0] * comfy.utils.get_tiled_scale_steps(pixel_samples.shape[3], pixel_samples.shape[2], tile_x, tile_y, overlap) + steps += pixel_samples.shape[0] * comfy.utils.get_tiled_scale_steps(pixel_samples.shape[3], pixel_samples.shape[2], tile_x // 2, tile_y * 2, overlap) + steps += pixel_samples.shape[0] * comfy.utils.get_tiled_scale_steps(pixel_samples.shape[3], pixel_samples.shape[2], tile_x * 2, tile_y // 2, overlap) + pbar = comfy.utils.ProgressBar(steps) + + encode_fn = lambda a: self.first_stage_model.encode((self.process_input(a)).to(self.vae_dtype).to(self.device)).float() + samples = comfy.utils.tiled_scale(pixel_samples, encode_fn, tile_x, tile_y, overlap, upscale_amount = (1/self.downscale_ratio), out_channels=self.latent_channels, output_device=self.output_device, pbar=pbar) + samples += comfy.utils.tiled_scale(pixel_samples, encode_fn, tile_x * 2, tile_y // 2, overlap, upscale_amount = (1/self.downscale_ratio), out_channels=self.latent_channels, output_device=self.output_device, pbar=pbar) + samples += comfy.utils.tiled_scale(pixel_samples, encode_fn, tile_x // 2, tile_y * 2, overlap, upscale_amount = 
(1/self.downscale_ratio), out_channels=self.latent_channels, output_device=self.output_device, pbar=pbar) + samples /= 3.0 + return samples + + def encode_tiled_1d(self, samples, tile_x=128 * 2048, overlap=32 * 2048): + encode_fn = lambda a: self.first_stage_model.encode((self.process_input(a)).to(self.vae_dtype).to(self.device)).float() + return comfy.utils.tiled_scale_multidim(samples, encode_fn, tile=(tile_x,), overlap=overlap, upscale_amount=(1/self.downscale_ratio), out_channels=self.latent_channels, output_device=self.output_device) + + def decode(self, samples_in): + pixel_samples = None + try: + memory_used = self.memory_used_decode(samples_in.shape, self.vae_dtype) + model_management.load_models_gpu([self.patcher], memory_required=memory_used) + free_memory = model_management.get_free_memory(self.device) + batch_number = int(free_memory / memory_used) + batch_number = max(1, batch_number) + + for x in range(0, samples_in.shape[0], batch_number): + samples = samples_in[x:x+batch_number].to(self.vae_dtype).to(self.device) + out = self.process_output(self.first_stage_model.decode(samples).to(self.output_device).float()) + if pixel_samples is None: + pixel_samples = torch.empty((samples_in.shape[0],) + tuple(out.shape[1:]), device=self.output_device) + pixel_samples[x:x+batch_number] = out + except model_management.OOM_EXCEPTION as e: + logging.warning("Warning: Ran out of memory when regular VAE decoding, retrying with tiled VAE decoding.") + dims = samples_in.ndim - 2 + if dims == 1: + pixel_samples = self.decode_tiled_1d(samples_in) + elif dims == 2: + pixel_samples = self.decode_tiled_(samples_in) + elif dims == 3: + pixel_samples = self.decode_tiled_3d(samples_in) + + pixel_samples = pixel_samples.to(self.output_device).movedim(1,-1) + return pixel_samples + + def decode_tiled(self, samples, tile_x=64, tile_y=64, overlap = 16): + model_management.load_model_gpu(self.patcher) + output = self.decode_tiled_(samples, tile_x, tile_y, overlap) + return output.movedim(1,-1) + + def encode(self, pixel_samples): + pixel_samples = self.vae_encode_crop_pixels(pixel_samples) + pixel_samples = pixel_samples.movedim(-1,1) + try: + memory_used = self.memory_used_encode(pixel_samples.shape, self.vae_dtype) + model_management.load_models_gpu([self.patcher], memory_required=memory_used) + free_memory = model_management.get_free_memory(self.device) + batch_number = int(free_memory / max(1, memory_used)) + batch_number = max(1, batch_number) + samples = torch.empty((pixel_samples.shape[0], self.latent_channels) + tuple(map(lambda a: a // self.downscale_ratio, pixel_samples.shape[2:])), device=self.output_device) + for x in range(0, pixel_samples.shape[0], batch_number): + pixels_in = self.process_input(pixel_samples[x:x+batch_number]).to(self.vae_dtype).to(self.device) + samples[x:x+batch_number] = self.first_stage_model.encode(pixels_in).to(self.output_device).float() + + except model_management.OOM_EXCEPTION as e: + logging.warning("Warning: Ran out of memory when regular VAE encoding, retrying with tiled VAE encoding.") + if len(pixel_samples.shape) == 3: + samples = self.encode_tiled_1d(pixel_samples) + else: + samples = self.encode_tiled_(pixel_samples) + + return samples + + def encode_tiled(self, pixel_samples, tile_x=512, tile_y=512, overlap = 64): + pixel_samples = self.vae_encode_crop_pixels(pixel_samples) + model_management.load_model_gpu(self.patcher) + pixel_samples = pixel_samples.movedim(-1,1) + samples = self.encode_tiled_(pixel_samples, tile_x=tile_x, tile_y=tile_y, overlap=overlap) 
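
A note on the tiled codec paths above: decode_tiled_ and encode_tiled_ each run the VAE over three tile layouts (tall, wide, and square) and average the three results, so the seams one layout produces fall where the other two are seam-free. A minimal sketch of that averaging, with run_tiles standing in for comfy.utils.tiled_scale (the name and signature here are illustrative, not the real helper):

    def average_three_layouts(latent, run_tiles, tile=64, overlap=16):
        # Each call processes the whole latent under one tiling layout.
        tall = run_tiles(latent, tile // 2, tile * 2, overlap)
        wide = run_tiles(latent, tile * 2, tile // 2, overlap)
        square = run_tiles(latent, tile, tile, overlap)
        # Seams fall in different places per layout; the mean suppresses them.
        return (tall + wide + square) / 3.0
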
+ return samples + + def get_sd(self): + return self.first_stage_model.state_dict() + +class StyleModel: + def __init__(self, model, device="cpu"): + self.model = model + + def get_cond(self, input): + return self.model(input.last_hidden_state) + + +def load_style_model(ckpt_path): + model_data = comfy.utils.load_torch_file(ckpt_path, safe_load=True) + keys = model_data.keys() + if "style_embedding" in keys: + model = comfy.t2i_adapter.adapter.StyleAdapter(width=1024, context_dim=768, num_head=8, n_layes=3, num_token=8) + else: + raise Exception("invalid style model {}".format(ckpt_path)) + model.load_state_dict(model_data) + return StyleModel(model) + +class CLIPType(Enum): + STABLE_DIFFUSION = 1 + STABLE_CASCADE = 2 + SD3 = 3 + STABLE_AUDIO = 4 + HUNYUAN_DIT = 5 + FLUX = 6 + MOCHI = 7 + +def load_clip(ckpt_paths, embedding_directory=None, clip_type=CLIPType.STABLE_DIFFUSION, model_options={}): + clip_data = [] + for p in ckpt_paths: + clip_data.append(comfy.utils.load_torch_file(p, safe_load=True)) + return load_text_encoder_state_dicts(clip_data, embedding_directory=embedding_directory, clip_type=clip_type, model_options=model_options) + + +class TEModel(Enum): + CLIP_L = 1 + CLIP_H = 2 + CLIP_G = 3 + T5_XXL = 4 + T5_XL = 5 + T5_BASE = 6 + +def detect_te_model(sd): + if "text_model.encoder.layers.30.mlp.fc1.weight" in sd: + return TEModel.CLIP_G + if "text_model.encoder.layers.22.mlp.fc1.weight" in sd: + return TEModel.CLIP_H + if "text_model.encoder.layers.0.mlp.fc1.weight" in sd: + return TEModel.CLIP_L + if "encoder.block.23.layer.1.DenseReluDense.wi_1.weight" in sd: + weight = sd["encoder.block.23.layer.1.DenseReluDense.wi_1.weight"] + if weight.shape[-1] == 4096: + return TEModel.T5_XXL + elif weight.shape[-1] == 2048: + return TEModel.T5_XL + if "encoder.block.0.layer.0.SelfAttention.k.weight" in sd: + return TEModel.T5_BASE + return None + + +def t5xxl_detect(clip_data): + weight_name = "encoder.block.23.layer.1.DenseReluDense.wi_1.weight" + + for sd in clip_data: + if weight_name in sd: + return comfy.text_encoders.sd3_clip.t5_xxl_detect(sd) + + return {} + + +def load_text_encoder_state_dicts(state_dicts=[], embedding_directory=None, clip_type=CLIPType.STABLE_DIFFUSION, model_options={}): + clip_data = state_dicts + + class EmptyClass: + pass + + for i in range(len(clip_data)): + if "transformer.resblocks.0.ln_1.weight" in clip_data[i]: + clip_data[i] = comfy.utils.clip_text_transformers_convert(clip_data[i], "", "") + else: + if "text_projection" in clip_data[i]: + clip_data[i]["text_projection.weight"] = clip_data[i]["text_projection"].transpose(0, 1) #old models saved with the CLIPSave node + + clip_target = EmptyClass() + clip_target.params = {} + if len(clip_data) == 1: + te_model = detect_te_model(clip_data[0]) + if te_model == TEModel.CLIP_G: + if clip_type == CLIPType.STABLE_CASCADE: + clip_target.clip = sdxl_clip.StableCascadeClipModel + clip_target.tokenizer = sdxl_clip.StableCascadeTokenizer + elif clip_type == CLIPType.SD3: + clip_target.clip = comfy.text_encoders.sd3_clip.sd3_clip(clip_l=False, clip_g=True, t5=False) + clip_target.tokenizer = comfy.text_encoders.sd3_clip.SD3Tokenizer + else: + clip_target.clip = sdxl_clip.SDXLRefinerClipModel + clip_target.tokenizer = sdxl_clip.SDXLTokenizer + elif te_model == TEModel.CLIP_H: + clip_target.clip = comfy.text_encoders.sd2_clip.SD2ClipModel + clip_target.tokenizer = comfy.text_encoders.sd2_clip.SD2Tokenizer + elif te_model == TEModel.T5_XXL: + if clip_type == CLIPType.SD3: + clip_target.clip = 
comfy.text_encoders.sd3_clip.sd3_clip(clip_l=False, clip_g=False, t5=True, **t5xxl_detect(clip_data)) + clip_target.tokenizer = comfy.text_encoders.sd3_clip.SD3Tokenizer + else: #CLIPType.MOCHI + clip_target.clip = comfy.text_encoders.genmo.mochi_te(**t5xxl_detect(clip_data)) + clip_target.tokenizer = comfy.text_encoders.genmo.MochiT5Tokenizer + elif te_model == TEModel.T5_XL: + clip_target.clip = comfy.text_encoders.aura_t5.AuraT5Model + clip_target.tokenizer = comfy.text_encoders.aura_t5.AuraT5Tokenizer + elif te_model == TEModel.T5_BASE: + clip_target.clip = comfy.text_encoders.sa_t5.SAT5Model + clip_target.tokenizer = comfy.text_encoders.sa_t5.SAT5Tokenizer + else: + if clip_type == CLIPType.SD3: + clip_target.clip = comfy.text_encoders.sd3_clip.sd3_clip(clip_l=True, clip_g=False, t5=False) + clip_target.tokenizer = comfy.text_encoders.sd3_clip.SD3Tokenizer + else: + clip_target.clip = sd1_clip.SD1ClipModel + clip_target.tokenizer = sd1_clip.SD1Tokenizer + elif len(clip_data) == 2: + if clip_type == CLIPType.SD3: + te_models = [detect_te_model(clip_data[0]), detect_te_model(clip_data[1])] + clip_target.clip = comfy.text_encoders.sd3_clip.sd3_clip(clip_l=TEModel.CLIP_L in te_models, clip_g=TEModel.CLIP_G in te_models, t5=TEModel.T5_XXL in te_models, **t5xxl_detect(clip_data)) + clip_target.tokenizer = comfy.text_encoders.sd3_clip.SD3Tokenizer + elif clip_type == CLIPType.HUNYUAN_DIT: + clip_target.clip = comfy.text_encoders.hydit.HyditModel + clip_target.tokenizer = comfy.text_encoders.hydit.HyditTokenizer + elif clip_type == CLIPType.FLUX: + clip_target.clip = comfy.text_encoders.flux.flux_clip(**t5xxl_detect(clip_data)) + clip_target.tokenizer = comfy.text_encoders.flux.FluxTokenizer + else: + clip_target.clip = sdxl_clip.SDXLClipModel + clip_target.tokenizer = sdxl_clip.SDXLTokenizer + elif len(clip_data) == 3: + clip_target.clip = comfy.text_encoders.sd3_clip.sd3_clip(**t5xxl_detect(clip_data)) + clip_target.tokenizer = comfy.text_encoders.sd3_clip.SD3Tokenizer + + parameters = 0 + tokenizer_data = {} + for c in clip_data: + parameters += comfy.utils.calculate_parameters(c) + tokenizer_data, model_options = comfy.text_encoders.long_clipl.model_options_long_clip(c, tokenizer_data, model_options) + + clip = CLIP(clip_target, embedding_directory=embedding_directory, parameters=parameters, tokenizer_data=tokenizer_data, model_options=model_options) + for c in clip_data: + m, u = clip.load_sd(c) + if len(m) > 0: + logging.warning("clip missing: {}".format(m)) + + if len(u) > 0: + logging.debug("clip unexpected: {}".format(u)) + return clip + +def load_gligen(ckpt_path): + data = comfy.utils.load_torch_file(ckpt_path, safe_load=True) + model = gligen.load_gligen(data) + if model_management.should_use_fp16(): + model = model.half() + return comfy.model_patcher.ModelPatcher(model, load_device=model_management.get_torch_device(), offload_device=model_management.unet_offload_device()) + +def load_checkpoint(config_path=None, ckpt_path=None, output_vae=True, output_clip=True, embedding_directory=None, state_dict=None, config=None): + logging.warning("Warning: The load checkpoint with config function is deprecated and will eventually be removed, please use the other one.") + model, clip, vae, _ = load_checkpoint_guess_config(ckpt_path, output_vae=output_vae, output_clip=output_clip, output_clipvision=False, embedding_directory=embedding_directory, output_model=True) + #TODO: this function is a mess and should be removed eventually + if config is None: + with open(config_path, 'r') as stream: + 
config = yaml.safe_load(stream) + model_config_params = config['model']['params'] + clip_config = model_config_params['cond_stage_config'] + scale_factor = model_config_params['scale_factor'] + + if "parameterization" in model_config_params: + if model_config_params["parameterization"] == "v": + m = model.clone() + class ModelSamplingAdvanced(comfy.model_sampling.ModelSamplingDiscrete, comfy.model_sampling.V_PREDICTION): + pass + m.add_object_patch("model_sampling", ModelSamplingAdvanced(model.model.model_config)) + model = m + + layer_idx = clip_config.get("params", {}).get("layer_idx", None) + if layer_idx is not None: + clip.clip_layer(layer_idx) + + return (model, clip, vae) + +def load_checkpoint_guess_config(ckpt_path, output_vae=True, output_clip=True, output_clipvision=False, embedding_directory=None, output_model=True, model_options={}, te_model_options={}): + sd = comfy.utils.load_torch_file(ckpt_path) + out = load_state_dict_guess_config(sd, output_vae, output_clip, output_clipvision, embedding_directory, output_model, model_options, te_model_options=te_model_options) + if out is None: + raise RuntimeError("ERROR: Could not detect model type of: {}".format(ckpt_path)) + return out + +def load_state_dict_guess_config(sd, output_vae=True, output_clip=True, output_clipvision=False, embedding_directory=None, output_model=True, model_options={}, te_model_options={}): + clip = None + clipvision = None + vae = None + model = None + model_patcher = None + + diffusion_model_prefix = model_detection.unet_prefix_from_state_dict(sd) + parameters = comfy.utils.calculate_parameters(sd, diffusion_model_prefix) + weight_dtype = comfy.utils.weight_dtype(sd, diffusion_model_prefix) + load_device = model_management.get_torch_device() + + model_config = model_detection.model_config_from_unet(sd, diffusion_model_prefix) + if model_config is None: + return None + + unet_weight_dtype = list(model_config.supported_inference_dtypes) + if weight_dtype is not None and model_config.scaled_fp8 is None: + unet_weight_dtype.append(weight_dtype) + + model_config.custom_operations = model_options.get("custom_operations", None) + unet_dtype = model_options.get("dtype", model_options.get("weight_dtype", None)) + + if unet_dtype is None: + unet_dtype = model_management.unet_dtype(model_params=parameters, supported_dtypes=unet_weight_dtype) + + manual_cast_dtype = model_management.unet_manual_cast(unet_dtype, load_device, model_config.supported_inference_dtypes) + model_config.set_inference_dtype(unet_dtype, manual_cast_dtype) + + if model_config.clip_vision_prefix is not None: + if output_clipvision: + clipvision = clip_vision.load_clipvision_from_sd(sd, model_config.clip_vision_prefix, True) + + if output_model: + inital_load_device = model_management.unet_inital_load_device(parameters, unet_dtype) + model = model_config.get_model(sd, diffusion_model_prefix, device=inital_load_device) + model.load_model_weights(sd, diffusion_model_prefix) + + if output_vae: + vae_sd = comfy.utils.state_dict_prefix_replace(sd, {k: "" for k in model_config.vae_key_prefix}, filter_keys=True) + vae_sd = model_config.process_vae_state_dict(vae_sd) + vae = VAE(sd=vae_sd) + + if output_clip: + clip_target = model_config.clip_target(state_dict=sd) + if clip_target is not None: + clip_sd = model_config.process_clip_state_dict(sd) + if len(clip_sd) > 0: + parameters = comfy.utils.calculate_parameters(clip_sd) + clip = CLIP(clip_target, embedding_directory=embedding_directory, tokenizer_data=clip_sd, parameters=parameters, 
model_options=te_model_options) + m, u = clip.load_sd(clip_sd, full_model=True) + if len(m) > 0: + m_filter = list(filter(lambda a: ".logit_scale" not in a and ".transformer.text_projection.weight" not in a, m)) + if len(m_filter) > 0: + logging.warning("clip missing: {}".format(m)) + else: + logging.debug("clip missing: {}".format(m)) + + if len(u) > 0: + logging.debug("clip unexpected {}:".format(u)) + else: + logging.warning("no CLIP/text encoder weights in checkpoint, the text encoder model will not be loaded.") + + left_over = sd.keys() + if len(left_over) > 0: + logging.debug("left over keys: {}".format(left_over)) + + if output_model: + model_patcher = comfy.model_patcher.ModelPatcher(model, load_device=load_device, offload_device=model_management.unet_offload_device()) + if inital_load_device != torch.device("cpu"): + logging.info("loaded straight to GPU") + model_management.load_models_gpu([model_patcher], force_full_load=True) + + return (model_patcher, clip, vae, clipvision) + + +def load_diffusion_model_state_dict(sd, model_options={}): #load unet in diffusers or regular format + dtype = model_options.get("dtype", None) + + #Allow loading unets from checkpoint files + diffusion_model_prefix = model_detection.unet_prefix_from_state_dict(sd) + temp_sd = comfy.utils.state_dict_prefix_replace(sd, {diffusion_model_prefix: ""}, filter_keys=True) + if len(temp_sd) > 0: + sd = temp_sd + + parameters = comfy.utils.calculate_parameters(sd) + weight_dtype = comfy.utils.weight_dtype(sd) + + load_device = model_management.get_torch_device() + model_config = model_detection.model_config_from_unet(sd, "") + + if model_config is not None: + new_sd = sd + else: + new_sd = model_detection.convert_diffusers_mmdit(sd, "") + if new_sd is not None: #diffusers mmdit + model_config = model_detection.model_config_from_unet(new_sd, "") + if model_config is None: + return None + else: #diffusers unet + model_config = model_detection.model_config_from_diffusers_unet(sd) + if model_config is None: + return None + + diffusers_keys = comfy.utils.unet_to_diffusers(model_config.unet_config) + + new_sd = {} + for k in diffusers_keys: + if k in sd: + new_sd[diffusers_keys[k]] = sd.pop(k) + else: + logging.warning("{} {}".format(diffusers_keys[k], k)) + + offload_device = model_management.unet_offload_device() + unet_weight_dtype = list(model_config.supported_inference_dtypes) + if weight_dtype is not None and model_config.scaled_fp8 is None: + unet_weight_dtype.append(weight_dtype) + + if dtype is None: + unet_dtype = model_management.unet_dtype(model_params=parameters, supported_dtypes=unet_weight_dtype) + else: + unet_dtype = dtype + + manual_cast_dtype = model_management.unet_manual_cast(unet_dtype, load_device, model_config.supported_inference_dtypes) + model_config.set_inference_dtype(unet_dtype, manual_cast_dtype) + model_config.custom_operations = model_options.get("custom_operations", model_config.custom_operations) + if model_options.get("fp8_optimizations", False): + model_config.optimizations["fp8"] = True + + model = model_config.get_model(new_sd, "") + model = model.to(offload_device) + model.load_model_weights(new_sd, "") + left_over = sd.keys() + if len(left_over) > 0: + logging.info("left over keys in unet: {}".format(left_over)) + return comfy.model_patcher.ModelPatcher(model, load_device=load_device, offload_device=offload_device) + + +def load_diffusion_model(unet_path, model_options={}): + sd = comfy.utils.load_torch_file(unet_path) + model = load_diffusion_model_state_dict(sd, 
model_options=model_options) + if model is None: + logging.error("ERROR UNSUPPORTED UNET {}".format(unet_path)) + raise RuntimeError("ERROR: Could not detect model type of: {}".format(unet_path)) + return model + +def load_unet(unet_path, dtype=None): + print("WARNING: the load_unet function has been deprecated and will be removed please switch to: load_diffusion_model") + return load_diffusion_model(unet_path, model_options={"dtype": dtype}) + +def load_unet_state_dict(sd, dtype=None): + print("WARNING: the load_unet_state_dict function has been deprecated and will be removed please switch to: load_diffusion_model_state_dict") + return load_diffusion_model_state_dict(sd, model_options={"dtype": dtype}) + +def save_checkpoint(output_path, model, clip=None, vae=None, clip_vision=None, metadata=None, extra_keys={}): + clip_sd = None + load_models = [model] + if clip is not None: + load_models.append(clip.load_model()) + clip_sd = clip.get_sd() + vae_sd = None + if vae is not None: + vae_sd = vae.get_sd() + + model_management.load_models_gpu(load_models, force_patch_weights=True) + clip_vision_sd = clip_vision.get_sd() if clip_vision is not None else None + sd = model.model.state_dict_for_saving(clip_sd, vae_sd, clip_vision_sd) + for k in extra_keys: + sd[k] = extra_keys[k] + + for k in sd: + t = sd[k] + if not t.is_contiguous(): + sd[k] = t.contiguous() + + comfy.utils.save_torch_file(sd, output_path, metadata=metadata) diff --git a/src/comfyui/comfy/sd1_clip.py b/src/comfyui/comfy/sd1_clip.py new file mode 100644 index 0000000000000000000000000000000000000000..a454f3bb3f1a6f9f5895bef039524e3867666361 --- /dev/null +++ b/src/comfyui/comfy/sd1_clip.py @@ -0,0 +1,602 @@ +import os + +from transformers import CLIPTokenizer +import comfy.ops +import torch +import traceback +import zipfile +from . 
import model_management +import comfy.clip_model +import json +import logging +import numbers + +def gen_empty_tokens(special_tokens, length): + start_token = special_tokens.get("start", None) + end_token = special_tokens.get("end", None) + pad_token = special_tokens.get("pad") + output = [] + if start_token is not None: + output.append(start_token) + if end_token is not None: + output.append(end_token) + output += [pad_token] * (length - len(output)) + return output + +class ClipTokenWeightEncoder: + def encode_token_weights(self, token_weight_pairs): + to_encode = list() + max_token_len = 0 + has_weights = False + for x in token_weight_pairs: + tokens = list(map(lambda a: a[0], x)) + max_token_len = max(len(tokens), max_token_len) + has_weights = has_weights or not all(map(lambda a: a[1] == 1.0, x)) + to_encode.append(tokens) + + sections = len(to_encode) + if has_weights or sections == 0: + to_encode.append(gen_empty_tokens(self.special_tokens, max_token_len)) + + o = self.encode(to_encode) + out, pooled = o[:2] + + if pooled is not None: + first_pooled = pooled[0:1].to(model_management.intermediate_device()) + else: + first_pooled = pooled + + output = [] + for k in range(0, sections): + z = out[k:k+1] + if has_weights: + z_empty = out[-1] + for i in range(len(z)): + for j in range(len(z[i])): + weight = token_weight_pairs[k][j][1] + if weight != 1.0: + z[i][j] = (z[i][j] - z_empty[j]) * weight + z_empty[j] + output.append(z) + + if (len(output) == 0): + r = (out[-1:].to(model_management.intermediate_device()), first_pooled) + else: + r = (torch.cat(output, dim=-2).to(model_management.intermediate_device()), first_pooled) + + if len(o) > 2: + extra = {} + for k in o[2]: + v = o[2][k] + if k == "attention_mask": + v = v[:sections].flatten().unsqueeze(dim=0).to(model_management.intermediate_device()) + extra[k] = v + + r = r + (extra,) + return r + +class SDClipModel(torch.nn.Module, ClipTokenWeightEncoder): + LAYERS = [ + "last", + "pooled", + "hidden" + ] + def __init__(self, device="cpu", max_length=77, + freeze=True, layer="last", layer_idx=None, textmodel_json_config=None, dtype=None, model_class=comfy.clip_model.CLIPTextModel, + special_tokens={"start": 49406, "end": 49407, "pad": 49407}, layer_norm_hidden_state=True, enable_attention_masks=False, zero_out_masked=False, + return_projected_pooled=True, return_attention_masks=False, model_options={}): # clip-vit-base-patch32 + super().__init__() + assert layer in self.LAYERS + + if textmodel_json_config is None: + textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "sd1_clip_config.json") + + with open(textmodel_json_config) as f: + config = json.load(f) + + operations = model_options.get("custom_operations", None) + scaled_fp8 = None + + if operations is None: + scaled_fp8 = model_options.get("scaled_fp8", None) + if scaled_fp8 is not None: + operations = comfy.ops.scaled_fp8_ops(fp8_matrix_mult=False, override_dtype=scaled_fp8) + else: + operations = comfy.ops.manual_cast + + self.operations = operations + self.transformer = model_class(config, dtype, device, self.operations) + if scaled_fp8 is not None: + self.transformer.scaled_fp8 = torch.nn.Parameter(torch.tensor([], dtype=scaled_fp8)) + + self.num_layers = self.transformer.num_layers + + self.max_length = max_length + if freeze: + self.freeze() + self.layer = layer + self.layer_idx = None + self.special_tokens = special_tokens + + self.logit_scale = torch.nn.Parameter(torch.tensor(4.6055)) + self.enable_attention_masks = enable_attention_masks + 
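
The weighting rule in ClipTokenWeightEncoder above is worth a gloss: a token weight w interpolates (or, for w > 1, extrapolates) between the encoding of an empty prompt and the real encoding, z' = z_empty + w * (z - z_empty), so w = 1.0 is a no-op and w = 0.0 collapses the token to the empty-prompt baseline. A self-contained illustration, with tensor shapes picked for a CLIP-L style model:

    import torch

    def apply_token_weight(z, z_empty, w):
        # w = 1.0 leaves z unchanged; w = 0.0 returns the baseline;
        # w > 1.0 pushes away from it (the "(word:1.3)" prompt syntax).
        return (z - z_empty) * w + z_empty

    z = torch.randn(77, 768)        # per-token embeddings of the prompt
    z_empty = torch.randn(77, 768)  # embeddings of the empty prompt
    boosted = apply_token_weight(z, z_empty, 1.3)
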
self.zero_out_masked = zero_out_masked + + self.layer_norm_hidden_state = layer_norm_hidden_state + self.return_projected_pooled = return_projected_pooled + self.return_attention_masks = return_attention_masks + + if layer == "hidden": + assert layer_idx is not None + assert abs(layer_idx) < self.num_layers + self.set_clip_options({"layer": layer_idx}) + self.options_default = (self.layer, self.layer_idx, self.return_projected_pooled) + + def freeze(self): + self.transformer = self.transformer.eval() + #self.train = disabled_train + for param in self.parameters(): + param.requires_grad = False + + def set_clip_options(self, options): + layer_idx = options.get("layer", self.layer_idx) + self.return_projected_pooled = options.get("projected_pooled", self.return_projected_pooled) + if layer_idx is None or abs(layer_idx) > self.num_layers: + self.layer = "last" + else: + self.layer = "hidden" + self.layer_idx = layer_idx + + def reset_clip_options(self): + self.layer = self.options_default[0] + self.layer_idx = self.options_default[1] + self.return_projected_pooled = self.options_default[2] + + def set_up_textual_embeddings(self, tokens, current_embeds): + out_tokens = [] + next_new_token = token_dict_size = current_embeds.weight.shape[0] + embedding_weights = [] + + for x in tokens: + tokens_temp = [] + for y in x: + if isinstance(y, numbers.Integral): + tokens_temp += [int(y)] + else: + if y.shape[0] == current_embeds.weight.shape[1]: + embedding_weights += [y] + tokens_temp += [next_new_token] + next_new_token += 1 + else: + logging.warning("WARNING: shape mismatch when trying to apply embedding, embedding will be ignored {} != {}".format(y.shape[0], current_embeds.weight.shape[1])) + while len(tokens_temp) < len(x): + tokens_temp += [self.special_tokens["pad"]] + out_tokens += [tokens_temp] + + n = token_dict_size + if len(embedding_weights) > 0: + new_embedding = self.operations.Embedding(next_new_token + 1, current_embeds.weight.shape[1], device=current_embeds.weight.device, dtype=current_embeds.weight.dtype) + new_embedding.weight[:token_dict_size] = current_embeds.weight + for x in embedding_weights: + new_embedding.weight[n] = x + n += 1 + self.transformer.set_input_embeddings(new_embedding) + + processed_tokens = [] + for x in out_tokens: + processed_tokens += [list(map(lambda a: n if a == -1 else a, x))] #The EOS token should always be the largest one + + return processed_tokens + + def forward(self, tokens): + backup_embeds = self.transformer.get_input_embeddings() + device = backup_embeds.weight.device + tokens = self.set_up_textual_embeddings(tokens, backup_embeds) + tokens = torch.LongTensor(tokens).to(device) + + attention_mask = None + if self.enable_attention_masks or self.zero_out_masked or self.return_attention_masks: + attention_mask = torch.zeros_like(tokens) + end_token = self.special_tokens.get("end", -1) + for x in range(attention_mask.shape[0]): + for y in range(attention_mask.shape[1]): + attention_mask[x, y] = 1 + if tokens[x, y] == end_token: + break + + attention_mask_model = None + if self.enable_attention_masks: + attention_mask_model = attention_mask + + outputs = self.transformer(tokens, attention_mask_model, intermediate_output=self.layer_idx, final_layer_norm_intermediate=self.layer_norm_hidden_state, dtype=torch.float32) + self.transformer.set_input_embeddings(backup_embeds) + + if self.layer == "last": + z = outputs[0].float() + else: + z = outputs[1].float() + + if self.zero_out_masked: + z *= attention_mask.unsqueeze(-1).float() + + pooled_output = None 
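
The nested loop that builds attention_mask in forward above flags each position as valid until it has passed the first end token in the row. A vectorized equivalent, offered only as a sketch (it assumes every row contains an end token, which the tokenizer's padding logic guarantees):

    import torch

    def build_attention_mask(tokens, end_token):
        # argmax returns the first occurrence, i.e. the first end_token per row;
        # mark everything up to and including it, as the loop above does.
        end_pos = (tokens == end_token).int().argmax(dim=1)
        positions = torch.arange(tokens.shape[1], device=tokens.device)
        return (positions.unsqueeze(0) <= end_pos.unsqueeze(1)).int()
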
+ if len(outputs) >= 3: + if not self.return_projected_pooled and len(outputs) >= 4 and outputs[3] is not None: + pooled_output = outputs[3].float() + elif outputs[2] is not None: + pooled_output = outputs[2].float() + + extra = {} + if self.return_attention_masks: + extra["attention_mask"] = attention_mask + + if len(extra) > 0: + return z, pooled_output, extra + + return z, pooled_output + + def encode(self, tokens): + return self(tokens) + + def load_sd(self, sd): + return self.transformer.load_state_dict(sd, strict=False) + +def parse_parentheses(string): + result = [] + current_item = "" + nesting_level = 0 + for char in string: + if char == "(": + if nesting_level == 0: + if current_item: + result.append(current_item) + current_item = "(" + else: + current_item = "(" + else: + current_item += char + nesting_level += 1 + elif char == ")": + nesting_level -= 1 + if nesting_level == 0: + result.append(current_item + ")") + current_item = "" + else: + current_item += char + else: + current_item += char + if current_item: + result.append(current_item) + return result + +def token_weights(string, current_weight): + a = parse_parentheses(string) + out = [] + for x in a: + weight = current_weight + if len(x) >= 2 and x[-1] == ')' and x[0] == '(': + x = x[1:-1] + xx = x.rfind(":") + weight *= 1.1 + if xx > 0: + try: + weight = float(x[xx+1:]) + x = x[:xx] + except: + pass + out += token_weights(x, weight) + else: + out += [(x, current_weight)] + return out + +def escape_important(text): + text = text.replace("\\)", "\0\1") + text = text.replace("\\(", "\0\2") + return text + +def unescape_important(text): + text = text.replace("\0\1", ")") + text = text.replace("\0\2", "(") + return text + +def safe_load_embed_zip(embed_path): + with zipfile.ZipFile(embed_path) as myzip: + names = list(filter(lambda a: "data/" in a, myzip.namelist())) + names.reverse() + for n in names: + with myzip.open(n) as myfile: + data = myfile.read() + number = len(data) // 4 + length_embed = 1024 #sd2.x + if number < 768: + continue + if number % 768 == 0: + length_embed = 768 #sd1.x + num_embeds = number // length_embed + embed = torch.frombuffer(data, dtype=torch.float) + out = embed.reshape((num_embeds, length_embed)).clone() + del embed + return out + +def expand_directory_list(directories): + dirs = set() + for x in directories: + dirs.add(x) + for root, subdir, file in os.walk(x, followlinks=True): + dirs.add(root) + return list(dirs) + +def bundled_embed(embed, prefix, suffix): #bundled embedding in lora format + i = 0 + out_list = [] + for k in embed: + if k.startswith(prefix) and k.endswith(suffix): + out_list.append(embed[k]) + if len(out_list) == 0: + return None + + return torch.cat(out_list, dim=0) + +def load_embed(embedding_name, embedding_directory, embedding_size, embed_key=None): + if isinstance(embedding_directory, str): + embedding_directory = [embedding_directory] + + embedding_directory = expand_directory_list(embedding_directory) + + valid_file = None + for embed_dir in embedding_directory: + embed_path = os.path.abspath(os.path.join(embed_dir, embedding_name)) + embed_dir = os.path.abspath(embed_dir) + try: + if os.path.commonpath((embed_dir, embed_path)) != embed_dir: + continue + except: + continue + if not os.path.isfile(embed_path): + extensions = ['.safetensors', '.pt', '.bin'] + for x in extensions: + t = embed_path + x + if os.path.isfile(t): + valid_file = t + break + else: + valid_file = embed_path + if valid_file is not None: + break + + if valid_file is None: + return None + + 
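
One subtlety of token_weights above: an explicit ":number" suffix replaces the accumulated weight outright, while bare parentheses multiply it by 1.1 per nesting level, so "((cat))" weighs cat at 1.21 but "(cat:0.5)" pins it to 0.5 at any depth. For example (output rounded):

    token_weights("a (red:1.4) ((cat))", 1.0)
    # -> [('a ', 1.0), ('red', 1.4), (' ', 1.0), ('cat', 1.21)]
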
embed_path = valid_file + + embed_out = None + + try: + if embed_path.lower().endswith(".safetensors"): + import safetensors.torch + embed = safetensors.torch.load_file(embed_path, device="cpu") + else: + if 'weights_only' in torch.load.__code__.co_varnames: + try: + embed = torch.load(embed_path, weights_only=True, map_location="cpu") + except: + embed_out = safe_load_embed_zip(embed_path) + else: + embed = torch.load(embed_path, map_location="cpu") + except Exception as e: + logging.warning("{}\n\nerror loading embedding, skipping loading: {}".format(traceback.format_exc(), embedding_name)) + return None + + if embed_out is None: + if 'string_to_param' in embed: + values = embed['string_to_param'].values() + embed_out = next(iter(values)) + elif isinstance(embed, list): + out_list = [] + for x in range(len(embed)): + for k in embed[x]: + t = embed[x][k] + if t.shape[-1] != embedding_size: + continue + out_list.append(t.reshape(-1, t.shape[-1])) + embed_out = torch.cat(out_list, dim=0) + elif embed_key is not None and embed_key in embed: + embed_out = embed[embed_key] + else: + embed_out = bundled_embed(embed, 'bundle_emb.', '.string_to_param.*') + if embed_out is None: + embed_out = bundled_embed(embed, 'bundle_emb.', '.{}'.format(embed_key)) + if embed_out is None: + values = embed.values() + embed_out = next(iter(values)) + return embed_out + +class SDTokenizer: + def __init__(self, tokenizer_path=None, max_length=77, pad_with_end=True, embedding_directory=None, embedding_size=768, embedding_key='clip_l', tokenizer_class=CLIPTokenizer, has_start_token=True, pad_to_max_length=True, min_length=None, pad_token=None, tokenizer_data={}): + if tokenizer_path is None: + tokenizer_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "sd1_tokenizer") + self.tokenizer = tokenizer_class.from_pretrained(tokenizer_path) + self.max_length = max_length + self.min_length = min_length + + empty = self.tokenizer('')["input_ids"] + if has_start_token: + self.tokens_start = 1 + self.start_token = empty[0] + self.end_token = empty[1] + else: + self.tokens_start = 0 + self.start_token = None + self.end_token = empty[0] + + if pad_token is not None: + self.pad_token = pad_token + elif pad_with_end: + self.pad_token = self.end_token + else: + self.pad_token = 0 + + self.pad_with_end = pad_with_end + self.pad_to_max_length = pad_to_max_length + + vocab = self.tokenizer.get_vocab() + self.inv_vocab = {v: k for k, v in vocab.items()} + self.embedding_directory = embedding_directory + self.max_word_length = 8 + self.embedding_identifier = "embedding:" + self.embedding_size = embedding_size + self.embedding_key = embedding_key + + def _try_get_embedding(self, embedding_name:str): + ''' + Takes a potential embedding name and tries to retrieve it. + Returns a Tuple consisting of the embedding and any leftover string, embedding can be None. + ''' + embed = load_embed(embedding_name, self.embedding_directory, self.embedding_size, self.embedding_key) + if embed is None: + stripped = embedding_name.strip(',') + if len(stripped) < len(embedding_name): + embed = load_embed(stripped, self.embedding_directory, self.embedding_size, self.embedding_key) + return (embed, embedding_name[len(stripped):]) + return (embed, "") + + + def tokenize_with_weights(self, text:str, return_word_ids=False): + ''' + Takes a prompt and converts it to a list of (token, weight, word id) elements. + Tokens can both be integer tokens and pre computed CLIP tensors. 
+ Word id values are unique per word and embedding, where the id 0 is reserved for non word tokens. + Returned list has the dimensions NxM where M is the input size of CLIP + ''' + + text = escape_important(text) + parsed_weights = token_weights(text, 1.0) + + #tokenize words + tokens = [] + for weighted_segment, weight in parsed_weights: + to_tokenize = unescape_important(weighted_segment).replace("\n", " ").split(' ') + to_tokenize = [x for x in to_tokenize if x != ""] + for word in to_tokenize: + #if we find an embedding, deal with the embedding + if word.startswith(self.embedding_identifier) and self.embedding_directory is not None: + embedding_name = word[len(self.embedding_identifier):].strip('\n') + embed, leftover = self._try_get_embedding(embedding_name) + if embed is None: + logging.warning(f"warning, embedding:{embedding_name} does not exist, ignoring") + else: + if len(embed.shape) == 1: + tokens.append([(embed, weight)]) + else: + tokens.append([(embed[x], weight) for x in range(embed.shape[0])]) + #if we accidentally have leftover text, continue parsing using leftover, else move on to next word + if leftover != "": + word = leftover + else: + continue + #parse word + tokens.append([(t, weight) for t in self.tokenizer(word)["input_ids"][self.tokens_start:-1]]) + + #reshape token array to CLIP input size + batched_tokens = [] + batch = [] + if self.start_token is not None: + batch.append((self.start_token, 1.0, 0)) + batched_tokens.append(batch) + for i, t_group in enumerate(tokens): + #determine if we're going to try and keep the tokens in a single batch + is_large = len(t_group) >= self.max_word_length + + while len(t_group) > 0: + if len(t_group) + len(batch) > self.max_length - 1: + remaining_length = self.max_length - len(batch) - 1 + #break word in two and add end token + if is_large: + batch.extend([(t,w,i+1) for t,w in t_group[:remaining_length]]) + batch.append((self.end_token, 1.0, 0)) + t_group = t_group[remaining_length:] + #add end token and pad + else: + batch.append((self.end_token, 1.0, 0)) + if self.pad_to_max_length: + batch.extend([(self.pad_token, 1.0, 0)] * (remaining_length)) + #start new batch + batch = [] + if self.start_token is not None: + batch.append((self.start_token, 1.0, 0)) + batched_tokens.append(batch) + else: + batch.extend([(t,w,i+1) for t,w in t_group]) + t_group = [] + + #fill last batch + batch.append((self.end_token, 1.0, 0)) + if self.pad_to_max_length: + batch.extend([(self.pad_token, 1.0, 0)] * (self.max_length - len(batch))) + if self.min_length is not None and len(batch) < self.min_length: + batch.extend([(self.pad_token, 1.0, 0)] * (self.min_length - len(batch))) + + if not return_word_ids: + batched_tokens = [[(t, w) for t, w,_ in x] for x in batched_tokens] + + return batched_tokens + + + def untokenize(self, token_weight_pair): + return list(map(lambda a: (a, self.inv_vocab[a[0]]), token_weight_pair)) + + def state_dict(self): + return {} + +class SD1Tokenizer: + def __init__(self, embedding_directory=None, tokenizer_data={}, clip_name="l", tokenizer=SDTokenizer): + self.clip_name = clip_name + self.clip = "clip_{}".format(self.clip_name) + tokenizer = tokenizer_data.get("{}_tokenizer_class".format(self.clip), tokenizer) + setattr(self, self.clip, tokenizer(embedding_directory=embedding_directory, tokenizer_data=tokenizer_data)) + + def tokenize_with_weights(self, text:str, return_word_ids=False): + out = {} + out[self.clip_name] = getattr(self, self.clip).tokenize_with_weights(text, return_word_ids) + return out + + def 
untokenize(self, token_weight_pair): + return getattr(self, self.clip).untokenize(token_weight_pair) + + def state_dict(self): + return {} + +class SD1CheckpointClipModel(SDClipModel): + def __init__(self, device="cpu", dtype=None, model_options={}): + super().__init__(device=device, return_projected_pooled=False, dtype=dtype, model_options=model_options) + +class SD1ClipModel(torch.nn.Module): + def __init__(self, device="cpu", dtype=None, model_options={}, clip_name="l", clip_model=SD1CheckpointClipModel, name=None, **kwargs): + super().__init__() + + if name is not None: + self.clip_name = name + self.clip = "{}".format(self.clip_name) + else: + self.clip_name = clip_name + self.clip = "clip_{}".format(self.clip_name) + + clip_model = model_options.get("{}_class".format(self.clip), clip_model) + setattr(self, self.clip, clip_model(device=device, dtype=dtype, model_options=model_options, **kwargs)) + + self.dtypes = set() + if dtype is not None: + self.dtypes.add(dtype) + + def set_clip_options(self, options): + getattr(self, self.clip).set_clip_options(options) + + def reset_clip_options(self): + getattr(self, self.clip).reset_clip_options() + + def encode_token_weights(self, token_weight_pairs): + token_weight_pairs = token_weight_pairs[self.clip_name] + out = getattr(self, self.clip).encode_token_weights(token_weight_pairs) + return out + + def load_sd(self, sd): + return getattr(self, self.clip).load_sd(sd) diff --git a/src/comfyui/comfy/sd1_clip_config.json b/src/comfyui/comfy/sd1_clip_config.json new file mode 100644 index 0000000000000000000000000000000000000000..3ba8c6b5bc3d6389fb6c9e2c8231729ad9d663a4 --- /dev/null +++ b/src/comfyui/comfy/sd1_clip_config.json @@ -0,0 +1,25 @@ +{ + "_name_or_path": "openai/clip-vit-large-patch14", + "architectures": [ + "CLIPTextModel" + ], + "attention_dropout": 0.0, + "bos_token_id": 0, + "dropout": 0.0, + "eos_token_id": 49407, + "hidden_act": "quick_gelu", + "hidden_size": 768, + "initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "max_position_embeddings": 77, + "model_type": "clip_text_model", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "pad_token_id": 1, + "projection_dim": 768, + "torch_dtype": "float32", + "transformers_version": "4.24.0", + "vocab_size": 49408 +} diff --git a/src/comfyui/comfy/sd1_tokenizer/merges.txt b/src/comfyui/comfy/sd1_tokenizer/merges.txt new file mode 100644 index 0000000000000000000000000000000000000000..76e821f1b6f0a9709293c3b6b51ed90980b3166b --- /dev/null +++ b/src/comfyui/comfy/sd1_tokenizer/merges.txt @@ -0,0 +1,48895 @@ +#version: 0.2 +i n +t h +a n +r e +a r +e r +th e +in g +o u +o n +s t +o r +e n +o n +a l +a t +e r +i t +i n +t o +r o +i s +l e +i c +a t +an d +e d +o f +c h +o r +e s +i l +e l +s t +a c +o m +a m +l o +a n +a y +s h +r i +l i +t i +f or +n e +ð Ł +r a +h a +d e +o l +v e +s i +u r +a l +s e +' s +u n +d i +b e +l a +w h +o o +d ay +e n +m a +n o +l e +t o +ou r +i r +g h +w it +i t +y o +a s +s p +th is +t s +at i +yo u +wit h +a d +i s +a b +l y +w e +th e +t e +a s +a g +v i +p p +s u +h o +m y +. . +b u +c om +s e +er s +m e +m e +al l +c on +m o +k e +g e +ou t +en t +c o +f e +v er +a r +f ro +a u +p o +c e +gh t +ar e +s s +fro m +c h +t r +ou n +on e +b y +d o +t h +w or +er e +k e +p ro +f or +d s +b o +t a +w e +g o +h e +t er +in g +d e +b e +ati on +m or +a y +e x +il l +p e +k s +s c +l u +f u +q u +v er +ðŁ ĺ +j u +m u +at e +an d +v e +k ing +m ar +o p +h i +.. . 
[... the remaining merge pairs in merges.txt are elided here; the file continues in the same "token token" format through all 48,895 lines ...]
+te mp +f l +st e +on a +sing le +wedne sday +un ited +in o +@ _ +ne l +celebr ate +en ding +de al +j i +can ada +hu ge +tr ack +âĢ ¢ +f y +fan ta +an g +yor k +rele ase +p un +ep iso +wor ds +t our +p ack +i gh +classi c +perfor mance +ke t +after noon +recor d +win s +pro ble +âĿ ¤ +f our +b ed +ban k +d ance +s la +cal led +mi ght +a p +pa st +ðŁ ļ +diffe rent +it e +gi ft +ssi ve +chur ch +c us +pro gram +ho tel +ic e +ma d +secur ity +en ge +d c +en ough +st a +e ty +de ad +g un +he ar +m ir +hu man +gre ss +oun ds +pi ece +bre aking +gar den +fi ght +vie ws +f ish +star ted +run ning +gre en +ser i +s m +as k +d or +de ath +e conom +er i +ir d +s er +l unch +âģ ¦ +bo x +nat u +ba se +b an +f al +glo bal +wil d +wo w +out side +mo ve +le ad +an al +muse um +on g +ha w +pow er +than k +b ac +char ac +cam pa +dig ital +r o +op er +de v +w ol +p ati +f a +m ale +pap er +ill ing +c s +â ĥ +educ ation +ta ken +e ffe +m ou +s ad +" . +bas ed +staf f +inclu ding +li ving +a c +ch ina +mo b +stor m +lu ck +ph il +o o +y n +tra vel +k el +ti al +pr ice +boo k +import ant +bi o +p ool +ny c +f ab +lo ad +? ! +chall enge +cr y +ser ve +we ar +bu s +ta in +nu mber +ro r +k at +i z +th ough +ho sp +m m +fa ir +ut es +ho t +po p +fi ed +cam p +develop ment +li br +c ali +em s +âģ¦ @ +b ol +is ed +stand ing +mo del +it a +g le +bro wn +ima ge +ve red +for ce +o il +par tic +sh u +da ily +la w +se c +cla ss +cam p +holi day +cl in +k ers +pres ent +gam e +incre di +er ship +inter view +b ill +du e +and y +ab o +in nov +ke y +ac ade +p il +mo der +st ars +br and +f er +wee ks +con si +pr e +sa fe +wr it +di um +la unch +marke ting +ann ual +as si +cour t +la dy +c ted +and a +in side +chil d +opp or +sm ith +centr e +gu e +âģ © +f ren +st y +for t +ent ly +is n +ke ep +to ber +on y +bo y +al d +col la +de mo +le vel +com pet +ad o +b our +fanta stic +m ate +s u +sou th +oppor tun +vers ary +lat er +bu d +face book +la un +ster n +p it +! " +ma j +gr am +tb t +fi re +happ y +a ks +wh ole +actu ally +ill er +ell a +lo ts +al ex +an ge +lan ds +ðŁĺ Ń +en ter +r ou +episo de +p ed +in ten +sh ire +wh o +pl an +h o +ca ke +we st +mag az +fre sh +c c +n ar +ch ris +wr iting +w er +n om +l o +mi dd +dre am +o l +ti onal +de b +> > +be come +s i +gr and +all ing +hi stor +ri de +i red +saf e +que en +ci l +in tro +vi l +d ani +.. . +ar tic +st at +sh ort +or ing +sel fi +mis si +do c +b it +g all +b om +i re +se lec +d ition +ðŁĶ ¥ +fri end +be at +gh ting +ðŁĺ Ĭ +pe ace +ex hi +ant a +ab ility +il lu +j on +qu ality +tri bu +m es +play ers +fa ir +cu t +c ab +suc cess +b i +su s +pro mo +sch e +an ge +ic o +comm it +cat ch +ill a +kin d +feel ing +qu o +s ay +anni versary +spo t +mo ther +an e +p end +your self +op s +app le +min utes +p o +gr and +ri es +ha ha +care er +ed ition +de c +ric k +am i +concer t +iti ve +ge ous +d ly +t te +adv ent +i g +li ghts +ak er +sk y +âĥ £ +r ay +fini shed +w ay +s d +ac coun +ðŁĴ ķ +ck y +ch el +lit er +pain ting +lo s +st un +techno logy +n as +ma r +b il +afric a +ki e +ey es +gol f +plu s +ni a +it ec +serv ices +wed ding +kno wn +te le +.. ... 
+star ts +pa ren +w ants +ati onal +mon ths +win do +fav our +er t +magaz ine +ex clu +re ve +b c +origin al +e ss +n al +an ti +st ro +t ice +stu dy +à ¤ +v ac +nation al +fi ve +ra in +ve ment +u te +ver se +em er +ar my +possi ble +gue ss +val ley +ther n +cro w +m r +col or +on to +pic k +cle ar +dar k +t ac +wan ted +it ting +can cer +govern ment +di e +ri se +z ing +col d +f oun +stu dio +str ation +bro ther +a head +sh el +mic ro +ic ally +d au +sig ned +vi ol +a x +as se +i o +w re +spl ay +ch ick +augu st +pl at +ti ps +sp i +hu man +e asy +lo gi +mi ke +gro w +ag re +w w +sh ad +mo tiv +wi de +tur ns +om g +v ar +de fin +su g +j im +ðŁĶ ¥ +t d +campa ign +nam ed +re tweet +co p +t v +le av +k is +dou ble +s mar +issu e +vil la +in formation +li es +sto ck +n t +di stric +sh or +mi x +er o +se p +me x +see ing +li ve +re min +co de +g ur +s c +wil d +l un +h ood +spo t +fa ther +fore ver +up d +tra f +f ly +ne ed +gra du +tra in +ma ke +s ab +be y +si ze +lead er +tal ks +e u +lo g +fo x +gor geous +le ss +le ts +sur pri +my self +no te +li ves +f ru +lo ved +se ver +de m +j i +so c +h old +do gs +n i +â ŀ +lea ve +air port +ben ef +ex pl +shi ps +comple te +ach i +gre at +vin tage +j ack +ro c +woo d +pri v +off er +ey e +ver sion +te a +co ach +off ic +w ell +g en +s at +h h +you th +o x +? " +m t +mi x +g g +d le +natu ral +buil d +break fast +thin king +theat re +mo on +ber g +go als +geor ge +en e +exc ell +il ing +tun e +y ed +g ate +m it +net work +jo e +h ello +f b +tu be +we aring +ath le +stru c +har d +gla ss +g ers +thro w +g es +b t +indu stry +manag ement +ali st +go al +stre am +y el +a vi +ici ous +o thers +s ki +chri sti +bir d +e sc +m in +tr o +l t +j an +im p +ri ghts +sh a +or gan +cent ral +ar a +ro ll +favour ite +che ster +el se +p ay +car s +m ine +ste p +prac tice +maj or +h ang +ðŁĺ ĺ +n on +v ari +eng ine +vol un +di a +i led +arch itec +p ink +d s +th y +wa sh +web site +ba g +contro l +el li +f ra +an sw +d ence +y u +r on +ol a +g in +dr in +li c +cou ple +sp ar +g on +cre ate +c t +celebr ating +de ep +e at +te e +vo ice +dro p +vis it +at ors +sta dium +f t +w is +ro l +gra de +fam il +po ints +re pre +w as +traf fic +jap an +or g +hon or +tex as +man u +âĻ ¥ +safe ty +re r +b ag +em plo +rele ased +re gu +ak a +n av +ro le +sen ior +spec t +cro ss +lin es +be st +p ack +s in +ti e +mis sing +sun set +li ber +is ing +j ay +sk i +champion ship +ac tiv +la dies +play ed +y y +pu bl +al o +pri de +s r +pa ki +lu x +sur vi +ck ed +e ts +cho col +austr alia +par is +mi les +h at +ment al +al a +me an +mob ile +en a +in si +f ound +chi ef +t ag +incredi ble +re turn +à © +goo gle +fren ch +cre w +hal lo +ali an +j az +ch er +sil ver +nor th +eng lish +base ball +c af +lim ited +follow ing +app reci +ear th +k ir +ve mber +w ed +p tion +g ed +oc tober +fl ori +c r +en cy +ga ve +lor d +stu ff +ber ry +po st +sm ile +bro ad +st ate +gg er +me ans +ic y +gu n +y o +ma ster +bur g +han ds +ni e +/ / +uni on +brit ish +big gest +distric t +am ing +h il +o ce +per son +pas s +en vir +scho ols +arri ved +anc es +insp ired +ex pla +be n +libr ary +bo tt +am p +ste ph +cont act +b ang +m s +cali for +t old +batt le +b b +chic ago +âľ ¨ +str ate +sh i +de ce +- ) +ad d +la b +j ones +leg end +cast le +ing er +st ance +be l +ur a +re fu +lead ers +po t +se x +h ic +artic le +ki d +fr ance +x x +ex e +gui de +volun te +pr int +al i +ce o +twee ts +w x +scen e +vol u +ant i +h an +as soci +shar ing +ro se +mini ster +sh er +in ste +cle an +demo cr +po ster +sk in +p 
sy +pro per +cra zy +i am +o re +in i +any thing +po d +mo ving +cl ick +ex plo +com b +cra ft +f i +bloo d +is ra +publ ic +d ent +ol ym +eng land +a si +ch er +fac t +envir on +har ry +g one +me dic +enjo ying +just ice +j r +indi an +wi fe +s ound +t es +dra wing +p al +ide a +cr it +ju li +il er +war m +cl ar +thou ghts +def en +coun cil +intro duc +di ed +jan u +an i +s end +li er +m l +intere sting +tra de +win d +b ay +s ac +anc y +sour ce +b es +org ani +ar ly +lar ge +ff ici +ta g +u t +de sp +o es +tit le +sy m +pic tures +op en +wom en +sho wing +ri a +le ast +lead ership +cur rent +elec tr +val ent +list ening +c key +gener al +de ser +du ce +; ) +c ent +ðŁĺį ðŁĺį +sco tt +po or +selfi e +ev ents +i on +wr ong +de v +h ill +sep te +cul ture +l ine +sor ry +s ent +si ster +ce pt +k ri +no vember +ar i +announ ce +z ation +br an +g ent +d u +l en +per s +f m +mart in +o p +e mb +om e +midd le +suc cess +pe ter +janu ary +f lu +rac ing +d av +bi ke +ðŁı » +pe t +shoo t +profe ssi +feat uring +septe mber +now playing +sta ur +z a +on ic +qu ick +bas ke +spe aking +mil it +z er +chick en +b ell +s ad +co ast +lo ving +y ers +d j +pan el +ver age +s wit +ic ks +b ou +califor nia +s am +paren ts +er o +k illed +ph ys +jo bs +mi gr +an th +e mo +hallo ween +and er +c m +compet ition +e ag +s ket +sp ir +may be +exclu sive +app e +jour ney +scre en +for d +i o +h ate +u g +sou l +her o +soci ety +sy n +gu it +n h +d j +as es +im pre +ti me +sal es +d d +f ts +summ it +stun ning +om s +tur ned +cle an +sof t +be at +re staur +de red +en ces +ma gic +di o +sh ine +gu est +health y +exhi b +stor ies +po pu +n is +el a +bel ow +fun ny +resul ts +s ne +cur rently +ar d +down load +f light +m al +f ine +p ad +ch u +ent ed +h at +ðŁij ı +ste ve +j o +mar k +r at +b all +p c +p on +b by +o li +ar ts +as ure +bow l +att ack +mi c +de ar +ran ge +en ter +chocol ate +br illi +ac cess +, " +? ?? +ch ap +con st +t n +mat ter +blu e +gall ery +em p +work shop +lead ing +y ours +baske tball +w anna +th u +_ _ +mar ri +sle ep +bi a +ch e +ma d +imp act +o wn +si r +chan nel +euro pe +e sp +k itch +hosp ital +w ra +roy al +f s +ne u +qu ar +ne y +ac ks +ch ase +pp y +st al +at ely +ti m +dece mber +r are +per form +cre am +we ight +ch oo +ni ght +ha ven +fr anc +kh an +buil t +hel ping +tru st +ty pe +gol den +ta x +s now +s wi +di sa +questi ons +ve y +li ght +c n +cl oud +thom as +ag ed +sh ou +te ams +gr an +re ason +a a +you tube +v p +pi zz +manag er +bur y +cre dit +tre at +ma x +i k +ma in +g ing +de ad +pro bab +ye ah +ã Ĥ +br and +so li +pl ant +ta yl +gir l +ðŁĺ Ń +nam ent +au to +mess age +ko re +n ur +ter r +ag u +ma p +sen ting +lo ves +gi ves +g ab +z en +ro bert +con fir +w ars +o m +sta in +cam era +and er +won der +a b +ca p +s old +su it +wal king +contin ue +effe c +dau ghter +d anc +cha in +mul ti +ki d +y an +champi on +v o +ta ins +ho st +min i +mis sed +re sc +ly n +fin ish +del icious +s as +tayl or +i b +pro mis +produc ts +moun tain +flori da +regi ster +tre at +rec ent +fe male +boo th +mat t +ve hic +s op +mo tor +suppor ting +phi c +ex tre +dr ink +lan e +th ird +p s +con stru +ce re +far m +ðŁİ ī +tu red +ðŁij ī +c ats +a j +gi e +shoo ting +as ked +paki stan +am e +m b +g il +leg al +squ are +in vol +dra w +oo oo +!! !! 
+opportun ity +p y +e i +b ts +teach er +charac ter +john son +br on +ly wood +ch ine +c ing +c ine +d ge +gam ing +russi a +ci a +quo te +ric h +go v +flow ers +sp iri +st in +grow th +ðŁı ¼ +comm er +j uni +mu m +r an +s na +a ren +c b +ac tor +col or +si t +pa ir +ch i +bo w +acade my +hel d +r ang +me tal +y l +ac tive +probab ly +t ch +need ed +spe e +cho ice +ital y +ry an +ðŁĩ º +flow er +v it +m n +found ation +b ak +si ons +ne igh +f loo +he ard +re mo +fre sh +ing ing +re f +to wn +cl ou +je sus +spiri t +cou ldn +z es +ðŁĴ Ļ +willi ams +pro ce +moder n +pro cess +sho es +cre ated +tri c +issu es +ann e +att en +de but +h r +n it +sti g +a po +e ps +z u +ã Ģ +si x +car ds +lan gu +fam ous +tour nament +se l +e bay +y n +st on +k ick +announ ced +k am +vo c +brilli ant +hou se +che ese +war ri +mus ic +ho ckey +ðŁĺĤ ðŁĺĤ +sk ills +au tom +smar t +med ical +mon y +e x +gu ar +gi ve +pers onal +ven tion +al li +pre ss +flo or +m c +victor y +hi m +simp le +th or +ðŁĩº ðŁĩ +ta il +lu cky +ale x +qu ite +bo t +ssi ons +chall eng +c ann +amaz on +h ell +b ought +) : +ed y +secre t +produc tion +inde pend +de fe +ad ded +p r +p ag +be d +gre atest +with in +j ay +ðŁ ¥ +ire land +re ly +s d +te xt +dri ving +pro gram +spe ed +col um +str on +à © +fore st +â ĸ +mach ine +co in +sc ar +oun t +bi e +¡ ï¸ı +por tra +comm on +wre st +recei ved +kno w +inve st +pl ans +ac cor +ad op +ter y +re ali +p p +k al +art work +me an +go d +inste ad +an ci +motiv ation +as ing +inspir ation +up coming +polit ical +euro pe +m ers +heav y +ðŁij į +fe bru +scot land +ou gh +b t +bo ss +sche du +spe ak +n ick +u red +in o +e k +ri sk +tor y +pres ents +b on +ru g +st ates +exhib ition +il o +m ill +br ought +: -) +tou ri +com e +offici ally +champi ons +do ors +re p +po se +ex tra +k ings +soc cer +squ ad +app lic +at a +some times +t ari +excell ent +ðŁĺ ĺ +stra ight +car ol +ri p +âĢ į +gra phic +m ol +elec tion +febru ary +as ons +l i +di r +m t +n ick +u su +m rs +com ics +inst itu +cor por +v i +ðŁĻ ı +tu ral +di se +ac ci +we are +am ong +sho pping +t ill +wh at +cha ir +sp an +chine se +innov ation +jo y +k it +cent ury +ob ama +ph ili +f c +re ach +c iti +ul ous +n on +d ang +happ ening +bur n +p el +or ange +d v +k ick +cla im +ing ham +ph y +no v +pod cast +wh i +ni ghts +ear lier +be ar +la h +exc iting +or a +gi ven +s lo +memor ies +contin ues +produc t +gh o +c d +kno ws +ðŁİ ī +publi shed +discu ss +y ard +i phone +tri es +w all +fe b +are n +tru th +win ners +tu re +diti onal +milit ary +proble m +m and +do g +lo ss +c ric +can adi +ve ter +villa ge +" , +y r +un g +don ald +ag ing +bir ds +sci enti +le s +th is +regi on +tic al +itt en +il a +ðŁĺ İ +d ad +di am +abo ve +st ren +li t +p ir +la b +fo cus +bus y +d ur +app ly +s ma +auth or +ac i +exe cu +dom in +re la +jack son +at o +wash ington +ðŁĻ Į +k ill +popu lar +ce ment +ro ad +e ating +loc ation +v ent +ar re +n an +cu sto +advent ure +or din +spor t +ul t +lo ck +questi on +dri ver +land sc +on i +k ins +p d +jor dan +te red +k k +a f +chil d +s p +just in +en i +s elling +z o +wh it +bo ston +partic ip +sig ning +happ ened +he at +m am +dre ams +lo ws +gra ph +the day +head ing +br o +ble ssed +vi c +ve gas +h d +in ning +ro man +and ro +den ti +u se +c it +pro gress +writ er +bo b +ff s +gro wing +b ly +aw are +ex am +sp ent +be t +sc ore +bey ond +do cu +ad el +s f +cou ra +colla bor +in c +priv ate +bo at +* * +z one +p ha +b ill +to tal +plan ning +to wards +plac es +pre view +cre ative +dam n +ide as +se ems +po ten +say ing 
+di splay +s w +a qu +lou is +by e +li l +e mail +we stern +ger many +ell er +re s +f ant +ment ary +de als +ric hard +jer sey +stren g +ra d +pizz a +mon d +w are +l ac +g i +ar chi +c d +yel low +rec ently +re ach +à ¹ +kitch en +desig ned +tr y +g al +restaur ant +at ure +w w +j as +l ma +ðŁij Į +pa in +av o +min ute +sch ol +ther ap +tic ket +d ry +jap an +diti ons +ter ri +sel ves +happ en +t up +ma g +cop y +sh er +free dom +f ile +speci ally +tor onto +lo ad +g ary +re y +answ er +lo y +cau ght +pri ze +u ne +fic ation +ni ger +sy d +tou ch +feat ure +jaz z +recor ds +him self +di sh +ro ber +spot ted +ma ster +wa ve +fin als +bu ll +for um +al d +re comm +ch a +a e +d oo +inst ru +tru ly +l g +in k +bro thers +de st +j im +m it +clo sed +is on +tri ed +s anta +af fe +w an +hor se +g row +camp us +rel ation +nati ve +jour n +go v +o ct +k it +b ound +part ner +re ma +crow d +! ) +c alls +ra il +qu ali +solu tion +con test +con vers +sn ap +b ase +in iti +ta x +y e +ent repre +it or +constru ction +foo d +present ed +n ings +cli mate +k m +mo del +b j +blo ck +present ation +dre am +fi x +c alling +bus ine +con gress +under stand +we b +val ue +ï¸ı âĥ£ +mex ico +it ely +ki m +char ity +ref lec +bl an +fl ying +anal y +famil ies +b and +reci pe +celebr ation +ac cep +ar y +to t +g b +intere sted +cap tain +âĻ ¥ +ti p +ab sol +bra z +inve stig +o logy +de c +tru ck +ver ing +c lear +don t +go tta +ad vis +beg ins +ma ss +de scri +blo ck +k im +davi d +son gs +memor ial +feat ures +su stain +' . +gra b +jo se +v a +con serv +se ts +man chester +fi ghting +de gre +ag a +in d +sle ep +pos ition +ha ir +sig ns +pol icy +it o +al ert +st am +sp end +w y +absol ut +d m +anim al +my ster +success ful +proble ms +ro bo +k ay +gar den +p d +may or +d ale +t ol +off ers +vis iting +friend ly +tre es +offic er +accoun t +ke vin +ðŁij į +gi ant +contin u +con su +tr act +n fl +ðŁĺ Ĭ +h q +b ility +a ar +dis ney +te en +on ed +wh ite +tra iler +de dic +al one +absolut ely +dig ital +willi am +in ation +s wa +e e +enti re +ger man +ro ll +h its +co st +st ay +th a +ali ve +accor ding +co t +liter ally +her it +re ti +haha ha +exper i +li kes +g t +ste el +__ __ +ch air +christi an +to wer +diffe rence +m d +tre ss +mi d +prin ce +afric an +fe der +foo t +car ri +ser ved +r ice +sh all +feat ured +ck er +rec ru +po e +sen se +ni fic +com edy +cont ent +f at +po sted +con tribu +tim ate +li ver +mb le +inter net +ag e +europe an +cl ing +gla d +ff ic +sc o +ak es +el le +ter min +ton y +p ale +col our +seri ous +pat ri +movi es +b m +professi onal +ad o +al u +br inging +f alls +isra el +ter m +langu age +bro ok +man n +commun ic +can not +ac ti +p he +y an +entrepre ne +tur key +log ical +lon g +ar m +ur s +work ers +ing ly +gg s +ri c +tu al +recei ve +op ens +ge ar +soci al +fe et +c king +ad ver +fin an +fe els +sp la +h r +ea ster +bra in +ã ģ +fi g +le dge +ne arly +prote ct +ma ssive +e th +aw a +ðŁĺ ģ +y rs +aware ness +defin itely +k n +imag ine +k u +syste ms +ðŁij ı +f as +li k +provi de +am o +disco ver +inf lu +ma ker +g az +fit ness +stre et +er s +te d +w c +ys is +pos itive +hel ped +que st +andre w +bra d +b in +hang ing +l ing +bri ght +se ction +ma ss +ðŁĻ Į +follow ers +ho sting +tem por +fla g +a ve +let ter +k ur +re qui +of ten +cry p +su ff +âļ ½ +russi an +treat ment +al le +ha y +l an +keep ing +hol y +power ful +pre dic +fun d +e specially +windo w +je wel +il y +ðŁĴ ľ +gener ation +app a +seri ously +o d +ðŁĺĤðŁĺĤ ðŁĺĤ +cer ti +iri sh +ðŁij Į +mi ami +be th +v ity +se cu 
+che f +cri me +graph y +ma x +arti sts +re volu +gu ard +spee ch +u c +upd ates +fac es +st ant +chang ed +repor ts +low er +pe ar +n c +k il +loo ked +spe aker +s f +re spect +ok ay +oce an +s itting +architec ture +tra il +se at +i ra +le g +japan ese +d am +u lar +sw im +polit ics +finan cial +ol d +mou th +at temp +de stin +fi shing +atten tion +me m +chang es +deci ded +reli gi +g in +c av +z z +ad am +ma c +wr ite +beg in +sc ul +al ter +is s +ath on +imag es +m oo +jo ined +ðŁĺ ī +âŀ ¡ï¸ı +pas sed +mu sli +h ir +lar gest +cam er +com ic +gh ted +rug by +bur gh +gg ing +te sting +pre par +lau gh +al ed +impro ve +beli ev +adv ice +sha res +he art +tur ning +s b +t el +caf e +n es +dani el +pat ter +t z +se tt +par k +c and +st ick +happ ens +bri an +ne west +e pic +ad or +ki es +war ning +anim als +custo m +ar c +di an +gol d +cor e +t f +c ity +pan ts +re ality +con fi +in ju +fo x +gu il +k new +âĺ º +cor rec +itu de +d den +. # +re duc +pas s +f on +y a +ow ner +re turns +n c +e ast +ap ol +in sur +th o +si m +juni or +be e +ang el +att le +elec tric +hor ror +cra sh +e ye +pat h +sou thern +emplo ye +ge o +t an +ha z +r ally +ðŁı » +proper ty +was n +enjo yed +gre y +g as +bre w +nor thern +hol ding +g p +ta ke +ch art +ly n +dr ama +z o +pa id +throw back +cu p +discu ssion +down town +w ill +le w +b is +t ary +bre ad +up on +r ate +teach ers +it ation +anc ed +cy cle +choo se +d c +ir an +co w +da ve +ra ise +prin cess +fa ith +- > +indu stri +sp ain +guit ar +fac ts +m n +sp en +cour te +go tt +projec ts +au di +o sc +pe ter +s and +intere st +happ iness +ven ue +sol di +surpri se +poten tial +per io +custom er +i i +g ni +manu fac +e co +bro ken +sing er +vel s +wal es +hu s +in j +f our +tal ent +d ying +mat the +fil m +jo ining +s ell +j ar +lma o +sur ger +bb c +sour ces +au stin +ni k +char les +f am +prin ci +ange l +cas h +lo t +o red +pla ys +pl ate +don e +memor y +br ings +n ba +solu tions +teach ing +gr ace +cir cu +hel ps +foun der +mar y +expl ore +de cor +par ts +ch o +inte gr +ha u +is es +pu tting +in er +r it +v y +mic hel +blu es +every day +for ms +bi o +ye ar +p in +t ter +spr ing +) ) +po t +al ing +perform ing +sh an +plan et +mus ical +head s +it alian +stru gg +âĢį âĻ +w ings +pu mp +h h +tr ou +a id +pri me +ear th +pa int +mon t +am y +bb c +fab ulous +fru it +andro id +bour ne +cere mony +enti al +? ? +deb ate +on ing +dra ft +sol ar +t x +j am +cor n +!! !!! +bro o +mil k +po sed +o hi +mo vement +b ren +part ner +p g +et te +ar ies +sh out +n g +leav ing +t ells +sen s +ta ste +kel ly +wor l +gy m +ric h +e gy +pi d +ma s +â Ĥ +courte sy +fran k +incre ase +wr itten +pp ers +re l +ha i +s as +s ound +tt i +w ich +ri ver +.. ." +a g +fel low +ro me +sm all +gen cy +ic an +lux ury +pro of +me t +wild life +mom ents +ra ther +cor ner +com pe +canadi an +lik ely +therap y +li am +econom ic +indi e +rou te +fi ght +ho pe +se tting +ant ly +cro ss +fant asy +de e +sket ch +comp li +ym i +ru les +engine ering +fig ure +ro w +. 
, +f w +syd ney +w ou +t ation +dre w +us es +the re +sp read +struc ture +pat rick +appa rently +ro s +h ills +w we +ann y +com mission +di v +f ying +con sul +anal ysis +ex i +ten nis +vehic le +ðŁĺŃ ðŁĺŃ +as s +high ly +op ened +b ann +ðŁĴ Ļ +mp h +wi shing +v or +fi f +give away +r r +ra y +je ss +g at +ic ymi +x it +high est +yor k +pi e +invol ved +high er +ri e +mal ay +int elli +desp ite +che e +sar ah +be an +reco gni +ar sen +tal ented +pas sion +ic h +ab c +lead s +dise ase +v is +se c +pre senting +m illi +hol e +sho ts +de part +surger y +gov t +b in +du al +e vi +lon ger +ev ol +scre en +portra it +et c +lo se +ch at +p en +p i +om a +s ick +er c +compan ies +en try +plan e +gr y +ven e +liver pool +premi ere +sha red +a red +fil ms +ir a +holi days +cric ket +ici an +v ing +. ) +ul timate +di vision +con duc +se pt +for ces +mon t +s mart +disa pp +sun shine +in d +b less +ma de +col ors +fran k +ir on +bott le +s go +m ood +j ason +er ic +bir th +te en +respon se +tar get +state ment +fe ar +th el +al um +ar ab +bl in +direc tion +ste ps +er ial +wor ked +at l +ðŁĴ ķ +fel t +pol i +scen es +hom es +b ell +e at +ate ful +t in +l ace +fol ks +p se +an n +wis dom +fa v +but ter +s r +are as +sm oo +bi z +dg es +app o +mo re +the m +effe ct +windo ws +sun ny +cap ital +tot ally +c ities +gr ant +mb ers +s low +au tu +il ities +w ro +ri sing +st ics +viol ence +i gh +qu ot +h it +t c +herit age +bu ff +ne s +z ar +den tial +ex ac +ed ge +de ep +aren a +be came +benef its +mar ks +mb er +a z +am es +pre ci +dra gon +re g +d ings +do s +ðŁĴ ª +n el +s ity +me al +di st +leg end +pur chase +pic al +st ick +f at +du ba +profe ss +car to +pro f +coun tries +respon si +se qu +fa b +tribu te +hon ored +prac tic +pur ple +an ton +pa red +t ough +summ er +environ ment +s ons +ðŁĻ ı +m ps +gi es +her oes +t elling +hen ry +f en +know ledge +Ģ ï¸ı +f r +ne g +u re +ac king +hear ts +s oo +hol lywood +ju mp +sau ce +schedu le +tur n +yo ga +cre ating +c ket +cre ek +â Ń +custom ers +ma dri +gu l +asse mb +moun t +c ell +to p +st al +dav is +t wi +sig n +premi er +iti ons +he aring +un k +pati ents +app ear +heav en +al ty +doc tor +a e +plat form +je ff +ðŁĵ · +regi onal +bi d +box ing +ex ten +or ity +a w +w ise +il le +sever al +bi e +s itu +sy ria +âľ ħ +remin der +enter tain +li on +part ners +in n +ph ar +f au +pl s +expe cted +sug ar +deci sion +s b +ch ron +associ ation +leav es +vis ited +sh ap +ðŁĴ ĸ +fur ther +h ann +w i +run s +l er +fun ding +fil led +.. .... 
+tin y +han g +or g +co ol +se min +ðŁı Ĩ +spon s +nav y +sa int +dru g +d al +r oun +co vered +tra ditional +invest ment +de te +al ism +f low +n is +sun rise +fe at +f ted +we ird +je re +ve gan +medic ine +an o +ac cu +deli very +temp le +chang ing +wil son +phili pp +re fe +n d +is er +g ay +r and +ati ves +t ely +p and +intelli g +g are +am bas +de mon +commit tee +strate gy +refu ge +bud get +prote c +pi er +ex press +nom in +econom y +al low +ic on +gal ax +o h +indi vi +dem and +vir gin +lu ke +ali sts +man i +s mi +ju dge +ent y +mic hi +resul t +am ed +spe aks +' , +hou ston +sh in +b ing +fl y +ch em +au to +v as +ge t +ar m +thank s +d in +gan g +x x +si on +loc ated +p l +jo sh +in fo +jo ins +adver ti +ot d +el d +si e +re asons +v ent +ðŁĩºðŁĩ ¸ +â ł +convers ation +stu di +ðŁĶ¥ ðŁĶ¥ +go s +s ounds +un it +mu sc +ge l +ack ed +pac i +co s +de re +u u +a o +la m +inspir ing +ar ms +tw are +mat ters +ad dic +du de +ex t +cri sis +b ath +me et +sing h +expe ct +del hi +resc ue +wor st +au g +shi pping +ser ving +st o +dar k +ac es +histor ic +landsc ape +desig ner +b illion +gr ateful +wa ke +e ve +m iller +hou sing +dy nam +is co +be ha +sh op +pr ou +e as +a sia +e ding +k on +depart ment +aw ar +mar ine +in ci +photograph er +ta pe +lo go +r ings +d it +-- -- +vin yl +w c +vo ting +se ven +ambas sad +dal las +t u +com ment +k ra +b les +w ag +u d +au dio +stri ke +offici al +o ts +me tho +to ols +ra di +al an +hun t +wat ched +a ke +fa ke +drin king +mer ry +m l +b day +ri o +ni ke +c ant +re pe +co stu +mur der +ak ers +ch ers +ou ts +beg inning +so s +ad es +n in +not es +wro te +sol o +c i +li ghting +ur ban +bre xit +att end +shir ts +pla yo +ac tress +pl ic +stand ard +quot es +par ade +anci ent + © +tur ing +re e +pri mary +fla sh +citi z +mat es +ste in +z i +clin ton +sk in +gen e +hu m +g ar +t le +y i +fo cu +de an +pl ants +cy ber +b u +om e +ho p +ad dress +ti x +gi fts +relation ship +sub scri +fe ed +exac tly +haw ks +ex o +stre ss +s n +arre sted +an e +sof tware +z ero +the me +mu mb +im migr +mi a +make up +ple asure +uni vers +har b +eng ine +ap er +r in +br a +institu te +le ather +al th +sing ing +co s +gh ty +me as +st ic +si de +insur ance +co t +pit ch +moun tains +cri min +su pre +valent ine +at er +wou ldn +sc ale +rel ated +re gar +star tup +pack ed +mi ke +week ly +p ts +coun t +ha r +gott en +min d +ber lin +con ditions +swit ch +cor n +sa ve +g li +emer gency +tun ed +sto ck +discu ssing +every body +s day +whe ther +wrest ling +ec es +gen der +ch en +ðŁij Ģ +madri d +mar athon +e gg +i er +th x +as king +kore a +wol f +ay a +g m +g au +at ory +v r +gra ss +k illing +b ble +ur o +un i +e th +sh ore +th en +re ale +bot tom +ex erc +k ar +or ies +ad ri +san ds +se x +. ' +volunte ers +per form +par liam +inclu de +deli ghted +execu tive +fu el +kis s +ã ħ +char ge +h u +ca kes +ve t +g lu +agre e +pr ices +n au +h l +g ru +ra j +streng th +b ic +sp ending +al es +av en +b last +: ( +yo f +nor mal +si x +qu ick +se a +d aw +mee ts +lo vers +upd ated +po tat +comple ted +coo k +opportun ities +p ure +organ ic +tem per +c am +avo id +par king +duba i +and o +di stri +to y +comple tely +don ald +tri al +bas s +b oun +back ground +v as +mar vel +lu m +ru s +t ool +com missi +throw back +fin ding +is lam +! ? 
+st op +e vil +or al +resi dents +i denti +o ak +ðŁİ ¶ +l il +span ish +chap ter +sto pped +direc t +ho sted +pic ked +lab our +lew is +defen se +à ® +health care +wh is +mat h +pe ak +ra ised +fi x +bu ll +th ir +chel sea +fol k +tr e +can di +pau l +ei ther +ad am +poe try +jewel ry +ðŁ ¦ +pr ay +Ø § +g c +o z +wi shes +fore ign +sun g +lear ned +en e +n ing +micha el +illu stration +legend ary +w av +b au +ðŁļ ¨ +cal end +stre ets +â Ĩ +mon ster +bu ck +g r +scho ol +ba th +wa ste +ne ck +ha wa +be ach +re plac +jec t +on er +fac tory +coun t +ðŁĵ ¸ +mor gan +der ing +se an +steph en +de p +no vel +vide os +ic al +press ure +arsen al +ex pre +ir s +tren ding +ss a +fla sh +re sear +thr ough +profess or +scul p +to s +gg ed +mm a +be e +a pe +hun ter +am i +he i +pla stic +bu cks +uni verse +le gen +niger ia +ple ased +ri s +thin ks +autu mn +i ds +d is +anth ony +ðŁı ½ +ak ed +gla sses +fin ance +z er +k as +con tract +nu mbers +sh aw +partner ship +t il +laun ched +s al +victor ia +theat er +usu al +nam es +perio d +eli za +i th +bar cel +ro cks +bag s +mat e +distri bu +j on +di ffic +ali zed +cur ren +sco red +b ha +du blin +ro se +in ted +soli d +beha vi +wal ker +simp ly +garden s +head ed +in i +ohi o +we ap +f o +gl en +e state +ran dom +th under +thr u +k ill +jac ket +it i +entertain ment +thanks giving +ent al +en coura +el o +a ther +tan k +high lights +f ting +ru le +model s +bor der +bj p +hus band +in done +ken ya +be ars +al o +n inten +pi x +str o +or ders +sal ad +ro ads +n or +l ation +sop hi +ðŁı ¼ +pi eces +b one +min s +inclu des +nu tr +phi l +s ent +fun dra +ga in +bor ough +n ad +mon day +activ ity +it ems +be coming +ken ne +de tro +car di +gue sts +u x +world wide +sever e +new s +thank ful +fic tion +ve ge +m all +si an +er al +inj ury +le e +men u +danc ing +scot ti +exam ple +( # +na i +studi os +ba i +ðŁĴ Ľ +j av +diam ond +vin ce +ric k +prote ction +lin col +cham ps +appro ach +d ar +m ile +clou ds +je ff +in fin +l ers +p les +pe ace +go p +âĻ ¡ +tech n +str a +a verage +ef fort +introduc ing +di versity +austr alian +am p +boo st +s ke +pati ent +appreci ate +ici ans +pu r +f ell +woo ds +illu str +ðŁ ĸ +ag ency +ac tions +brit ain +under way +se attle +el and +ag o +f ill +stre aming +pro test +challeng es +ky o +et sy +coo king +exper t +ru ss +rain bow +commer cial +sp in +be ats +c ry +val u +el i +th row +gr ams +le vels +michi gan +c ad +ador able +const itu +w s +pu b +mid night +th at +net fli +braz il +die go +regu lar +jo y +âĤ ¬ +li qu +ea stern +k ni +fl at +n p +bro wn +w er +se y +tt ers +ac ting +v anc +cy cling +program me +ra w +comple x +tat too +throwback thursday +se ssions +ro oms +si ght +speci es +bom b +lau gh +ke eps +mo on +offic ers +con ver +t r +ha sh +t ack +ri ous +ad ap +a j +reco gn +ex po +sug ge +confir med +rol ling +dre ssing +ic t +fri day +ph ones +ri dge +con cept +ro y +ke ys +ef for +c ate +k ne +ev en +l ay +commun ities +mo d +n az +every where +al ab +bit coin +ban ks +out door +feder al +sto res +h p +c al +m ely +sig nific +be ar +re public +clo ser +al lah +pic k +x d +pal ace +ch ill +b am +er ous +un a +al len +out standing +olym pic +supp ly +fi gu +v au +l p +char lie +un es +> >> +legen ds +ici al +co ast +benef it +mul ti +f its +far mers +am ount +si sters +har ve +hon ey +que en +b ers +pl ann +âŃ IJ +m u +barcel ona +al ber +stat us +re main +ex tra +c andy +vi ous +âľ Į +o v +warri ors +-- > +ju mp +am ar +x mas +stu dies +i ors +k or +don ate +pre p +fi sh +im a +pain ted +ad mini +co splay 
+spor ts +dro ps +fi ghter +evi dence +ðŁĴ ª +la ke +ro b +cine ma +pro file +à ± +stan ds +leg acy +sh ape +ro of +ci vil +i ans +sy l +sh am +vo ted +re tail +ph illi +li sted +du ty +n b +th es +f are +au ction +ffici al +stor ms +d p +l oun +sh ops +al y +ani me +multi ple +ðŁĺį ðŁĺį +psy cho +je an +ap art +candi date +gg y +con f +jose ph +w ick +me at +fr ame +c l +for got +ph y +f ing +li ed +re p +se ed +f all +u fc +nu t +lin d +mo de +fiel ds +en ce +s ley +ðŁ¤ Ķ +ch ill +follow ed +announ ces +cor ru +tro phy +them selves +ac le +al du +k ong +l on +s v +bro ke +ander son +ta i +stor y +tempor ary +activ ities +k ati +ari z +cry stal +spo ke +extre mely +tra ding +ðŁĴ ļ +à ¼ +in ch +ed in +out fit +equ ip +ma di +form ed +be ef +po p +ti ger +this day +ti red +neigh b +re tro +is a +un t +t as +kan sas +de st +secon ds +ta y +hur ric +o u +galax y +dad dy +bro w +bur ger +en ced +de sk +ac cur +secre tary +el ite +k ab +ch in +touri sm +bud dy +ici de +dre ssed +u d +vac ation +che ers +com for +charac ters +j et +bu ying +l ins +n ap +reale state +li e +af c +i ii +f ame +n r +b at +ag ent +ma kers +âĢ ¼ +sec tor +op ti +le on +di et +pra yer +hi p +mi r +le x +br y +an a +pas sing +w en +reco very +ak i +po pul +res ort +mar ia +stu ck +read s +ti er +perfe c +netfli x +p oo +cham p +o c +re duce +we red +comm ents +cla im +acci dent +s ag +h ack +sal t +kin da +k iller +i os +z y +ex change +lec ture +eng er +ic king +t au +reve als +pri son +z om +gh an +u l +jour nal +i ot +tr in +jon a +govern or +cap e +quar ter +spec tive +impre ssive +bab ies +t x +m ill +o y +har ri +jo int +su e +collabor ation +tren d +revolu tion +re new +alum ni +ge tt +sh ell +sun day +ent u +ni c +donald trump +block chain +paci fic +expla ins +sp y +ad voc +par adi +to f +star ring +p av +fe ed +br ac +smo ke +ham p +y am +to kyo +si mon +d h +e ffici +phys ical +n j +ell i +s low +gradu ate +americ ans +ti fy +f red +ap ore +fin ds +rob in +we t +not ice +se mi +un ve +k om +pil ot +scre ening +da ily +ðŁĴ Ĺ +roy al +sp a +vo tes +n ag +wh ate +att ending +exper im +ad dition +k ate +sto l +m ali +foo t +chri st +ch an +de e +lic en +glo bal +mo ore +ti a +bri gh +myster y +y ay +âĿ¤ï¸ı âĿ¤ï¸ı +cre ati +me chan +clo ck +di c +âĢ Ķ +pp er +al ph +through out +al low +re sources +selec tion +ham il +bb q +aa aa +virgin ia +dis ney +en g +so red +drin ks +f ancy +consi der +end a +jan e +hand made +du l +on tari +i us +s ville +color ado +whate ver +whe el +promis e +ne ver +desig ns +ab ly +sex ual +vanc ou +at i +con vention +cul tural +sing apore +pro mo +load ed +gla sgo +pp l +n oo +ke e +ste m +men tion +i do +cru ise +ri ding +be comes +be y +âļ½ ï¸ı +tw in +dedic ated +na sh +de si +work out +jen ni +i v +grou ps +rela x +pho eni +li ft +mix ed +m ck +p c +mu st +me tro +ci es +y ar +a im +ang er +i e +rec y +marri ed +dro pped +eng ag +le st +ambassad or +op h +de s +w ick +assi stant +nat ur +fa il +l td +shor t +k ap +sha w +bi gger +rema ins +crit ical +sur vey +co verage +er son +win d +n b +bil ly +let es +ac ts +jim my +at lan +al and +t c +import ance +dam age +f g +stor age +tw t +bon d +bal ance +cr ying +pu ppy +vo te +pu sh +ðŁĴ ľ +pol y +me l +lon don +terr ori +effec tive +corpor ate +atl anta +jac o +nas a +gre ek +sen ate +i sh +ev a +intellig ence +effor ts +al co +k un +h all +di ag +claim s +fir st +h b +ba e +v ul +pu ll + ° +se par +spe ed +vic ti +on thisday +audi ence +r ates +te ach +fil ming +bu sh +son g +y um +br un +ra ine +aw a +par ks +ð Ŀ +ra bb +ra ch +ra 
id +reach ed +ra il +mo ves +selec ted +fr i +ra ising +om y +st ones +su k +franc isco +cas es +cap it +con fu +w tf +po ke +equip ment +gre g +ess ential +off ering +ne x +pi es +be c +cre ation +chair man +cro wn +w al +john ny +shi ft +ne ck +ban g +bir d +ðŁĺ ı +du ck +re serve +de pu +ma sters +over all +no tic +ju ice +sne ak +che er +cla sses +eag les +n ca +car pet +ci vil +coach es +har ris +u ps +b alls +dec or +mar tin +ro s +v ice +announ cement +who se +ti gers +ste red +c ts +dr am +ste el +youn g +inst all +supp o +recor ding +de ck +se ats +l der +ang le +bo t +sty les +elec tions +for tun +n ab +but ter +ari an +ka sh +in ner +ou red +be ast +we i +ic onic +exper ts +ne cess +b eng +jam es +li a +gre ece +ðŁĵ · +ðŁĺ ģ +good bye +m itch +tw ice +mumb ai +ste am +ru sh +med al +ne tt +fashi on +t ar +r s +sav ing +ric ul +l m +sleep ing +brook lyn +mis s +sen ding +disco vered +sp here +of theday +k icks +missi ons +w right +er n +ght ly +i ous +mel bourne +star tu +mo ved +car ry +d ak +ag ues +bel gi +e ma +way ne +do t +er ie +pe l +it unes +matthe w +no body +est ab +cal m +win ds +lu c +prep are +tren ds +exerc ise +adv ant +ðŁĴ ¯ +athle tics +app s +c tions +adv ance +laun ches +litt le +real donaldtrump +eliza beth +carol ina +hu b +hi dden +n w +us er +pol l +great er +mo st +f ed +p at +life style +s ati +sco res +marri age +l r +aven ue +de serve +ri f +ðŁ Ĺ +wat ch +champion ships +gr ay +en ni +cot ton +g om +whe re +pack age +su m +ab solu +new ly +foo ds +ty ler +assemb ly +musli m +ban k +re memb +op tions +produc er +land o +fun ds +u pper +shad ow +pro gre +co p +ing e +leg s +detro it +hill ary +jo se +gi ants +sou p +sustain able +t us +clo thes +roc king +n z +min ne +mat eri +bru ce +ear t +ca sting +independ ent +thou sands +ta h +de cl +veter ans +li ons +wra p +âĢ ¦ +de ss +bl ing +st ine +e ggs +o on +clo sing +z ay +at t +bac on +fa il +ariz ona +de pre +gho st +new sp +w ers +vi p +li ked +id ent +volunte er +ad ult +pu pp +cir cle +mat erial +degre e +gro wn +boo m +calend ar +su r +vie wing +ath letes +ch and +re ll +asi an +en tr +vol ley +victi ms +bo dy +m ama +trans fer +ge ek +in dic +sav ed +ma i +g ent +it s +loun ge +k ol +the ory +situ ation +is lands +ar th +z oo +floo d +vi ously +show ed +parliam ent +ch ev +el ine +at trac +ab ad +ta il +h rs +lu s +por tu +gor y +provi des +to ys +de ath +in fe +an ce +g le +li am +lo ver +hu d +dv d +reve aled +g w +re ment +ca the +l ying +ra dio +der by +stor s +che mi +hosp it +âľ ¨ +' : +ilo ve +le mon +re public +s ni +ne ss +do or +re action +pre gn +fla v +schol ar +spo tify +is ation +vis ual +aw are +spon sored +jo ke +less ons +leg is +lo ck +si mil +ðŁĺ ĭ +kin d +la y +ma h +ho ping +vancou ver +as er +clean ing +gal a +thre at +la p +ach e +ro mance +ex pen +re post +z am +e pi +mir ror +o ak +ad ul +bat man +s lu +l c +vie wed +re views +d ates +indone sia +acti vi +off en +lea f +i si +ag ricul +costu me +s ites +spir itu +appear ance +ir y +st air +applic ation +spec tac +ic ity +ski es +hand le +pun k +paradi se +t n +de al +provi ding +do c +recei ving +bre w +micro soft +à ¶ +fer r +me tro +th ail +y um +car ter +à ¡ +gent le +bre aks +coo per +show case +cu tting +egy pt +bab y +semin ar +gl ori +ss on +fa ve +re hear +lo tte +la dy +al as +pre p +deli vered +nu clear +ir o +engag ement +at ta +con ven +z an +gl ory +hol ds +busine sses +str ange +sch e +it self +gra d +mar kets +f alling +st ats +ge on +bu dd +li s +she et +thi si +co lo +deser t +regi stration +ig n +expla in 
+inter ior +la ws +writ ers +spr ings +k r +fri ed +blo om +inf ra +a o +cre d +pa st +line up +bo o +bre a +boo ts +celebr ity +att acks +bro ok +ev es +ex cu +cher ry +oo p +fas cin +boy friend +se as +n ine +effec ts +po wered +k ha +ðŁĺ Ģ +sh out +con dition +i j +her o +enter pri +win ter +applic ations +sho e +g el +batt le +pro grams +w art +ðŁĴ ¥ +ra p +ho l +dang erous +di a +coun ter +ric s +i or +k night +co at +emo tional +at ures +d as +whe el +fore cast +tran sport +glasgo w +king dom +prepar ing +im medi +ff in +awar ded +prin ting +ro man +fight ers +any more +bel t +p ine +win e +x i +employe es +logi es +al led +de mo +birth day +ange les +lo g +dri vers +neck lace +k ath +s it +athle te +ef s +s burg +pur pose +resi stance +rele ases +t is +vari ous +deli ver +ch al +s anc +opp o +cra w +neu ro +dr a +suppor ters +sna p +diffic ult +swe ar +logi st +pa th +attemp t +à ¥ +swim ming +ste ve +hur t +inclu ded +b ap +wa re +ðŁĴ ĭ +end ers +ja ke +le eds +cli mb +l b +im ple +li sa +clo thing +ðŁĺ İ +d t +com pla +sw ing +stra w +v als +k le +us ers +stor m +cu ts +ontari o +p an +hand some +i ow +ar gu +chec king +scotti sh +Ķ ï¸ı +si er +em ma +po d +patter n +de sh +en h +ed ward +t ing +k h +hal f +lincol n +mo ther +al leg +r c +volley ball +d n +g ay +all y +le ton +gro ve +l oud +adv anced +re spec +cli ent +supre me +thail and +ho w +gi g +to i +do t +dol lar +ðŁij ĩ +p it +r b +h n +produc ed +gg ers +âĨ Ĵ +ml b +can vas +fin eart +us d +in the +p son +actu al +s l +t b +ip ad +en sure +u mb +w d +sk a +mar s +k end +f eli +th ing +count down +absolu te +r out +dra l +p y +inju red +min t +hun ting +mm er +s age +li gh +ac ity +ex pan +mur ray +ar o +sec ure +four th +eag le +reli ef +st akes +industri al +clar k +under standing +see m +pl enty +sil ver +cla u +thre at +sa il +pro duce +ab str +is is +b r +eng ers +wor ry +bie ber +s j +just in +reali ze +ky le +esp n +fil ter +s ch +ty pes +game dev +d ing +twit ter +soldi ers +p om +car bon +y ards +child hood +ri ed +ke l +ele ph +t ons +key note +qui et +wi re +po sting +is sa +repre senting +bac ks +alex ander +celebr ates +ta ining +| | +ch or +esc ape +pe ek +ti ves +fiel d +ssi e +im pac +spons or +r c +we dd +cann ab +si des +trac ks +com par +con trac +techn ical +bi ble +expl oring +sh are +tra v +n ate +ill o +sc ru +m ingham +gun s +of the +sh ame +se es +ca tho +ac cess +ce l +repor ted + » +mari o +p ad +hope fully +ou se +y on +disapp o +ol o +p itt +pa c +ga p +cru sh +s g +k le +ge m +emp ire +dir ty +a is +avi ation +ze aland +fac ing +high way +d anny +spi der +ot ta +ðŁĺ Ħ +w y +col ours +in fl +co sts +olym pics +au s +h m +ho ward +pas ses +lau ren +mu sh +op in +r ho +disc ount +oper ation +em ily +mm m +cham ber +d il +to yo +shi p +sam u +pic tured +un ic +po l +keep er +carto on +st en +ig nor +n ations +n l +ta sting +deta il +offici als +mo tor +franc is +ed itor +ðŁij ĩ +pe ts +rang ers +t g +r n +w ri +nic hol +i se +spo ts +ani e +chec k +tri ple +ku mar +spe akers +ic ing +pre pared +ab use +friend ship +mon th +swi m +air e +sc ent +hamil ton +indi an +j es +yum my +te ars +da wn +i zed +worl ds +ðŁ ķ +b illi +st one +n hs +ba sic +p or +st le +ir on +ol der +cle vel +e ing +ðŁĺįðŁĺį ðŁĺį +prin ts +fir m +air craft +fin est +devel op +aar on +t z +gra ham +own ers +fo li +less on +qu es +bab e +cra ft +ph en +ju n +bir mingham +v ine +ll er +i an +fineart america +evol u +st ab +im per +war d +com ic +wi z +inv ited +du ke +mat ch +por ts +ro ger +diag no +ke pt +te st +vis u +r hy 
+so c +to x +b aker +sur face +co vers +man s +b its +x box +ff le +n an +gar d +h art +wat ers +v illa +re tro +light ning +catho lic +democr acy +neigh bor +pen n +cr an +jona than +la ura +vi bes +su b +coach ing +clear ly +uk raine +bra ve +commit ment +t all +mar t +ra p +mo di +sco tt +bro s +show er +ðŁı ¾ +âĺº ï¸ı +cou sin +appro ach +br e +com pos +hil ari +phil ly +g ad +quick ly +ri an +t m +vir tual +hou ses +k t +phoeni x +w ire +ff y +b unch +anc ing +tal e +snap chat +star ter +h t +k icking +ap art +th y +) ! +blo gger +it z +com fort +ang els +w ash +" : +ar gent +re quest +hon est +mi ghty +bo bby +k g +ro l +thou se +ex po +h c +tab les +mag ical +po sts +de m +n w +or lando +ab er +* ** +ðŁĺ ľ +environ mental +trans formation +mi le +w ic +hir ing +ma ine +bo ar +r ying +ti s +nit ure +twee ted +anton io +opin ion +fin ale +di y +f is +th in +trou ble +le go +fi les +qu art +sp a +curren cy +cli mate +fan art +rail way +sp ace +ban ds +dani el +mo tion +l eng +hol der +oc cu +mar ie +cathe dral +bu zz +bi es +nas car +bm w +bat tery +char lotte +doc tor +zz le +se ven +in san +d dy +st en +lab or +thr illed +se ren +docu mentary +wav es +cer tain +can did +allow ed +ninten do +star wars +ta p +home made +d les +ther ing +bre e +emp ty +pi ano +pos iti +coun try +por k +pu ts +per ry +m atic +spot light +ti st +or ities +we alth +c p +bar bar +commit ted +as sau +pro fit +e ight +hu l +fini shing +run ner +ss o +insp ec +char ged +christ op +lo sing +co al +ho o +ele v +de le +mo ham +don ation +c able +clin ic +j in +manag ed +ter ing +â ¬ +ur ban +depu ty +bb er +bur n +acade mic +o tt +sta ke +it er +sto wn +ack er +advent ures +ad ams +gre g +pro m +vo l +ac qu +con gre +pa int +citiz ens +c all +af ford +v c +as ks +the tic +independ ence +â Ľ +h itting +bl on +fu ture +â ı +in no +gen e +bo ards +di stance +se t +re mem +th al +pre vent +l ang +ob jec +su sp +mat t +in duc +bor o +pi one +re di +vir tu +prin ted +sco pe +shar k +suc ce +a stron +il legal +j ag +c ting +ine e +at o +rob in +nutr ition +b f +du tch +b n +fur niture +for gotten +at ar +ru p +hy per +bran ch +communic ation +degre es +on ia +un cle +promo te +or che +wi i +j s +but ton +ma jor +c bs +bri stol +premi um +ordin ary +e dit +m g +we ed +st even +: ' +gu s +te s +cap tured +dru gs +do w +wr ites +bi shop +whe els +ali zation +disco very +w r +rach el +ne il +hy dr +cu test +entreprene ur +kore an +ore gon +ul ty +perfec tly +suppor ted +histor ical +t wins +ell y +we l +de vil +in come +scienti sts +de leg +h en +on i +ic ed +gi o +cur ry +reve al +e g +buff alo +n ol +op era +camer on +haha haha +j ab +gradu ation +cra ig +r al +i f +organi zation +le ge +g ang +su d +edin burgh +l ack +fli es +g ate +thr ones +q b +the real +e leg +pp in +c les +jam ie +tn am +cryp to +ou l +p ages +a se +roo ts +stu pid +a did +boo t +prote in +s ap +si um +su s +end or +fun ction +don t +en na +ch y +squ e +wor ker +m tv +e a +k an +ðŁĴ ļ +mu s +professi on +t to +oper ations +al lo +c tor +inv ite +sc and +ou th +z im +lin ks +cli ents +sam sung +discu sses +n ell +ul tra +some where +ste wart +ine t +de z +b out +fac tor +ti an +tr ans +jere my +d b +ðŁĩ ¬ +or n +develop ing +spo l +coo per +ma u +rememb ering +tre k +famil y +sen iors +fo ster +att ended +w ing +trans form +ele mentary +hor iz +li sting +malay sia +it ch +warri or +philipp ines +russ ell +m end +initi ative +cre ep +to ps +br iti +a ur +shar p +adverti sing +ug ly +achi ev +materi als +bu g +dev ice +bon us +fac ility +col e +nh l +y 
as +plann ed +pol e +excell ence +tr ick +con fl +r p +achi eve +lo an +swa g +jess ica +ho we +p our +sc u +z oo +r ated +dre sses +re bel +mex ican +co ordin +me ss +atlan tic +t l +osc ar +wal ks +phar mac +investig ation +... # +cc i +eas ily +monday motivation +y ment +au ti +for ced +ar med +colle agues +pap ers +pro per +sha ke +bu c +le an +exhi bit +e vement +co tt +bi z +sp er +k ent +sw an +/ @ +girl friend +haw k +âĺ Ģï¸ı +mon o +ðŁĴ Ľ +stat ue +ðŁĺ ³ +ra s +te eth +preci ous +t ile +p am +swi ft +v ali +no se +dr unk +experi ences +come back +gen ius +wor se +sh ef +ra d +ed it +hon our +au spol +lar ry +h ire +gor don +achi evement +.... .... +su icide +alter native +su p +sur roun +sha ke +ke ith +pe pper +tur k +crimin al +be ck +su m +w alls +cn n +an tic +of fe +col li +win es +high light +hawa ii +emb ar +l fc +ðŁĩ ® +m v +> > +at mo +wor d +car l +shout out +bre wing +ì Ŀ +do f +s ic +hot test +col on +hh h +shu t +low ing +volu me +apart ment +agre ement +de stro +we e +religi ous +iow a +ro d +land ing +re present +ðŁĵ· : +la s +usu ally +h l +c ac +sal v +al ong +laugh ing +be ans +remin ds +pha se +some body +ma sk +ran ked +dest roy +sc i +â̼ ï¸ı +gab ri +le o +ro a +fa iled +si l +refuge es +re vi +r ing +ber ries +coo kies +y y +conserv ation +sh ab +human s +de termin +a in +ni all +as su +mb a +fro m +extre me +vic es +commer ce +ght ful +or dered +suppor ts +re cap +v or +dro pping +correc t +pay ing +mean ing +n j +qui z +" # +busine ss +ðŁĩ® ðŁĩ +indi gen +du st +box es +bl ind +x xx +zz y +ðŁĩ¬ ðŁĩ +ss els +s ant +dd le +hilari ous +desig n +wonder ing +vehic les +k re +ju d +rece ption +par ker +Ã Ń +pri vi +hy dro +sof tball +pol lu +lo cked +ba h +e ar +scri pt +di vi +br ace +geor ge +the ast +bel o +j al +tion ary +dent al +roc ket +pur ch +sh ak +manufac turing +e z +it is +con cep +tb all +ch s +direc ted +pra yers +oo k +phil os +vari ety +che ss +ser ver +g and +bal ti +ðŁĵ ¸ +sel y +cru z +spectac ular +bur ning +re present +i z +t one +mer ce +h ell +bed room +estab li +bo l +com mon +ãĥ » +ab or +kit ty +hei ghts +re pair +willi am +qu ake +alab ama +popul ation +re v +re tt +i sts +n ite +le m +a ha +clevel and +r m +po ver +ob se +mon tre +man ia + ® +con ne +car ni +sh ah +f y +u a +sc or +strugg le +bo b +' ' +appro pri +deci de +ff ed +ca ster +s ort +hun gry +dra g +ا Ù +gr ounds +d w +sli ghtly +car din +dead line +bron ze +web in +bar ry +sil ence +e uro +op tion +ear n +ðŁĴ ĸ +howe ver +na ren +na ils +bath room +v ine +ph d +min ing +gar age +( ) +shou lder +defe at +di r +o v +liber ty +ple as +x on +com pre +a v +j in +ab les +sil ent +fam ili +vis its +di pl +ha bit +milli ons +regar ding +innov ative +sen ator +r ts +v on +k l +wh il +requi red +âĿ Ħ +lu v +presi dential +po cket +hun dre +sho wn +fro zen +to ward +fa st +confi dence +r ough +indivi dual +qu et +ðŁı ½ +dom e +fi fa +engine er +z en +re mix +ðŁĺ ĥ +pl ant +min or +robin son +as y +pul led +cer tain +potat o +( : +pre s +oc ca +w it +it em +si e +d ating +thom pson +own ed +an u +vi e +te dly +good night +ex cept +ðŁĮ Ł +ira q +ki e +ren ces +li p +simil ar +sau di +vi g +arth ur +pic ks +mil an +hon da +ma xi +o g +ste st +ar ch +analy tics +ba sti +pear l +ter ry +hor se +ast ro +ac ce +laun ching +inter national +s no +ta sty +den ver +ir l +pe te +tor n +advant age +var sity +" " +sol e +g c +lan g +demon str +ol ds +un ity +ne ts +insp ire +cre te +nash ville +nel son +e ter +wal k +hy un +m ack +tre as +see king +ra ge +bru sh +ab and +whil st +co con +h ong 
+shel ter +i p +possi bly +so o +it ed +â Ħ +rac es +war ming +qu in +tele vision +mat ches +ra pi +ment al +pal m +jenni fer +rol ls +indi ana +b ars +cat ching +resc u +candid ates +fa re +âł Ģ +se o +vie tnam +alph a +michel le +visi ble +re gre +wn ed +app le +li p +f fe +li z +york shire +ha il +se asons +be gan +m d +k c +la p +fascin ating +hel p +ur y +u ms +nu ts +se m +along side +bri dge +ori al +o ve +world cup +briti sh +comfor table +i ve +hot els +fair s +hor ri +so x +d ining +stre am +bar ri +ss y +w im +ter ms +v u +pe re +l ens +wal ked +r or +l ars +shi eld +dou bt +pro to +cro ssing +me ant +medi um +ad ding +e b +che ap +fun c +pap er +bran ds +ry an +feed back +col lins +un known +tro pical +sand wich +fal len +for mu +selec t +lo ads +answ ers +or i +mag a +d or +du o +ali e +dru m +ur i +de er +sou l +sh ut +âĺ º +sto len +don ated +bu zz +patri ots +ha l +na sty +nomin ated +mon te +ki a +th ri +ing u +te sts +pe tro +ðŁij ij +ho sts +ne st +to pic +pat ch +m my +hu gh +ab ilities +ma the +s miles +g b +ag enda +insi ghts +chi p +ph an +fail ure +dg ers +ha i +signific ant +sho ck +ru ral +gl am +figu res +pot us +o ta +mini stry +appe ars +fe ar +r h +americ an +h att +son y +fi res +e di +n ou +e qui +wh en +univers al +mad ness +i x +sculp ture +b ach +t to +swe den +et a +en to +develop ed +month ly +ma ps +ra h +le d +del ta +sa ints +is lam +ben ch +fif th +v ard +so cks +wel coming +j e +tur ner +v b +ad i +nor way +ad y +hurric ane +por sche +tra dition +ex am +newsp aper +lu ci +a ver +ide al +d na +madi son +ðŁ § +wit ness +ac ou +insi ght +si mon +robo t +sna ke +n bc +ac o +ro ss +sh ment +religi on +ch ann +in su +camp bell +inst alled +we ather +hor ses +ol i +rober t +k az +ðŁı Ģ +veter an +th read +quar ter +ea sier +cap ture +hi pho +law rence +roman tic +pas sion +cl ay +ox ford +th ai +stu dying +fi a +elec ted +most ly +c b +tu mb +âĢįâĻ Ĥ +x l +sh an +fa ster +ev ans +sli de +sh ri +see k +mi es +chemi stry +pump kin +tu m +, , +ro om +fi red +li ps +pres ence +af f +brew ery +arri ve +sw ag +photo graph +pen gu +chi ps +at tor +val ues +accur ate +con temporary +princi pal +cannab is +ari o +any where +gi a +democr ats +buil dings +li ved +ap s +neg ative +m are +bal lo +li on +diam on +loo k +re form +tom my +il la +tre ats +hundre ds +port land +wor thy +ex cep +ar ia +ido l +be er +cd n +y u +aw k +ðŁĩ ¨ +c ells +à ³ +ident ity +dra wn +de vil +f inger +th am +ðŁij Ĭ +ear ned +fin tech +dol ph +twee ting +evolu tion +ðŁĵ į +est im +m vp +n one +ðŁĩºðŁĩ ¸ +toyo ta +au x +mar in +b old +l bs +ste ak +mur phy +it able +lou is +sol ve +pi a +sk ir +ill ino +webin ar +ban ana +lo v +th on +vo ters +afford able +defe ated +lm fa +air lines +super b +any way +deb t +bo red +ver si +me tal +responsi ble +m k +s se +f ay +cau sed +f p +recomm end +pla za +spor ting +alli ance +au stri +n n +t ours +surpri sed +arti f +th under +sur ve +wor e +bri ef +necess ary +z ie +ash ley +dra ke +r t +kni fe +im mun +char ges +a the +bri de +rep ly +g av +broad cast +pu er +brace let +cap acity +harve st +id k +perfor man +d ding +il ers +par a +jam a +pro vince +ch in +id ers +har i +te aser +ch en +re stor +r at +fl at +col om +ðŁĴ ŀ +ðŁĩ¨ ðŁĩ +smoo th +r t +p itch +stay ing +isra eli +t cot +per spective +do ck +open er +lo vel +x o +class room +l ington +go al +kenne dy +sh am +sp aces +mitch ell +home coming +uk i +claim ed +recru it +ing o +mu fc +mon it +g roo +resi dent +per cent +per man +otta wa +int ment +an xi +stand ards +wor ship +sche me +f x 
+pot ter +bi an +athle tic +af gh +s se +sat ell +par ties +âĿ¤ âĿ¤ +infra structure +rela x +mo du +wor n +smo king +y ach +practic es +wc w +am b +dome stic +tay lor +k entu +provi ded +mo di +ve g +" ... +ob serv +ðŁĺ © +be ard +m our +an gry +ðŁĺ ± +startu ps +woo den +di ve +na il +anti que +ro ses +torn ado +m at +^ ^ +su spect +far m +de vices +me ga +tu l +scholar ship +ge e +disa ster +arri val +po in +mar c +kati e +bb ed +fal se +deser ves +ric hard +ju ana +fre y +tion ed +hy bri +r w +sar ah +ach i +c ure +o le +mor ris +ch ic +broad way +la bel +pa k +pover ty +gol f +e red +f u +er ies +be es +alo gue +st el +wire less +je wish +ti de +blo cked +life time +b har +sp lit +am ster +th i +jo shu +br unch +ha ps +s for +oo ps +ka poor +hi king +suppo sed +ro of +re as +tra in +ti ght +tru mp +bas ically +r r +ea red +see ds +entr ance +c p +wi e +son ic +vic tim +he re +e h +ear rings +sal mon +arc tic +an ne +dou gla +corru ption +hann ah +ha sn +vo ices +con ce +att a +fle et +clin ical +democr atic +ton y +st ood +le f +twit ch +a il +honest ly +incre ased +dro me +don na +accep ted +visit ors +ap ar +ad or +p ar +jer ry +ra i +brand on +ab u +!! !!!! +me me +in gh +glori ous +b hu +pu mp +j ol +li ke +fi sher +ma z +ag an +destin ation +play list +le tters +gen u +br ace +celebr ated +bann er +r he +dra gon +ðŁĺ ħ +sig nature +gre y +âľ Ķï¸ı +al ice +be red +ph er +ber n +ca th +ga thering +sc oring +influ ence +sm iling +de pt +lo cal +a x +ac u +reti rement +hon or +her self +chem ical +asse ss +y all +fre qu +appreci ation +ac a +cho ir +cu z +so il +c il +repor ting +u h +enterpri se +gr at +jaco b +ru m +fe e +j ak +sp in +bi kes +phi a +ste re +p is +bloo d +t att +ra ft +war ren +sh eri +back stage +mar sh +hash tag +ther ine +re in +game day +guar an +reci pes +min ds +stron ger +issu ed +bic y +n ak +ment ed +sc ary +u x +pre vious +tt le +th ats +ac tors +u ma +tin a +bun ny +promo tion +u ss +oli ver +montre al +what s +appreci ated +la kes +excu se +kno wing +pri zes +musc le +shad es +sco t +ing redi +electr onic +ju an +comb at +s ri +e h +turk ish +l om +stri kes +pri son +re e +po pe +vi d +ol dest +dol l +sw iss +certi fied +cli p +re turning +lat or +le igh +tt es +wat son +heal ing +el im +per haps +ha ss +k au +d der +mou se +new castle +indigen ous +wel comes +co le +tau ght +no ise +appe ar +jo e +can on +wedne sday +u tah +c tive +dri ven +i v +c ell +stri p +ac c +focu sed +ar rest +sto cks +wo o +â Ĺ +notic ed +shad o +di spla +ter ror +bor ne +secon d +que ens +wo ke +ja il +no tt +cam bridge +har t +se af +fa x +ac cept +âĺ ħ +goo ds +k at +t win +h s +thou sand +s ins +su ite +amp ton +ar n +rele v +ric har +hoo ps +n bc +class ic +p ab +soldi er +de plo +le ans +install ation +cla sh +le ban +ee e +ti re +belo ved +fu sion +travel ing +ne i +coo kie +glo be +phys ics +s q +co l +wol ves +d l +ex it +" - +foo tball +le af +ster ling +hi de +minne so +fresh man +natu re +indi e +supp lies +bri s +iri sh +ink tober +doo dle +ic op +mess ages +adul ts +recor ded +fix ed +ar do +offe red +under ground +dr one +p ine +ma inten +and re +ham mer +s x +r ound +hi ke +bra d +ro me +fu ll +on ey +ro ws +colum bia +archi ves +appro ved +bat ch +illino is +recogn ition +shou ldn +fo g +nca a +ke vin +human ity +al though +pow ers +p ou +s ar +pe st +alco hol +con sci +phil adel +en o +t m +ok la +cate gory +particip ate +accu sed +bri ef +po em +clu bs +consul t +ja b +big data +amster dam +ac ing +certi fic +n u +d at +impro ved +and y +campa ig +pale stin 
+p ace +mo bi +feel ings +wol f +bra in +pro pos +inter active +prin ce +inde x +c is +cha e +peace ful +co vering +ac o +cour ses +mon key +re place +b l +bloo dy +tal es +brigh ton +neighbor hood +g ates +spiritu al +af raid +bre ast +b ones +ðŁij ī +vide o +w au +tou ch +inju ries +car l +ri x +une x +âĢ ¢ +fre d +consi dered +thu si +an ch +on y +u sa +graph ics +ac re +ðŁĺ © +com memor +com mod +go ti +guar dian +star bucks +pre vention +haha haha +admini stration +portu gal +fac ulty +bet a +ul a +al bert +bre ath +er i +le tting +tr ic +ment ation +incredi bly +ten nes +v d +ðŁĻ Ī +ed die +br ick +gr ill +bt w +wat ches +resear chers +t ney +ni e +p as +a ster +vi br +poke mon +ch rome +go at +pitt s +il ly +festi ve +y d +can al +ðŁ Ĩ +fi es +car los +re que +partic i +tra ins +sam ple +temper ature +sym ph +pic king +in door +z ers +playo ffs +____ ____ +ap es +ly rics +islam ic +performan ces +d ick +spar k +se as +hom a +gr ound +disc i +employe e +com mu +alas ka +al an +fe ast +dg ing +ban king +manu el +slow ly +tru cks +mc car +oo o +sc rat +orche stra +indivi du +m x +bre ath +stair s +equ ality +bla ke +loc ations +cocon ut +balti more +aa a +l c +ðŁı Ĩ +har vey +resi st +immigr ation +adid as +fil i +re f +lg bt +mo s +pp i +ken ny +terr or +ban e +apol is +s g +social media +ka i +hon est +as sas +bol lywood +âĢįâĻ Ģï¸ı +ferr ari +hor n +cryp to +bo om +mainten ance +i di +s man +w l +ext ended +in sul +ve s +go sp +tr i +pi g +tar ge +cel er +st ati +sm h +ri dic +appe al +? ) +con clu +cos me +she ep +christop her +en thusi +po lish +me ts +oun ded +sustain ability +creati vity +con crete +ra i +ali en +ble ss +te es +clu b +ro t +bo s +ex ist +perfe ction +lu ck +rock y +expen sive +mean while +happy birthday +pre t +thr iller +ca ve +playo ff +som er +l u +le x +def ence +am writing +home less +pro phe +ch et +past or +ðŁ¤ £ +land er +ww w +Ģ ï¸ı +tic a +! # +o tic +rad ar +po sters +pow der +po li +ha un +tra p +bl in +assau lt +shor ts +re y +sh y +squ ir +rac ist +gar lic +fu r +remo te +sm ell +impre ssed +fing ers +âł Ģ +din o +le ment +s nu +promo ting +str ing +produc tive +b age +ma son +ra z +direc tly +j k +ev al +ðŁij Ĭ +doc tors +co w +ri der +st v +re move +w u +na than +ro d +n r += > +affe cted +inve st +mp tion +g inger +o d +agricul ture +s que +mu g +coun ting +ke e +mag nific +coo k +ani stan +roo t +plac ed +sym po +gh ana +un d +che er +thro wing +secre ts +f illing +opti mi +butter fly +bu bb +ðŁĺ ī +terri ble +d g +sil k +obse ssed +lo u +ai de +sal ute +mon u +philadel phia +scienti fic +i st +u ae +dess ert +bott les +can yon +ðŁĺ Ī +car ib +o ther +w ich +re source +guil ty +un d +le on +e ss +kan e +el e +tra iner +he im +an te +man age +roo kie +tre ated +po ses +rs vp +cau ses +aw ak +je well +le tt +on ics +tit les +cardi ff +g aga +bu mp +use ful +? ! 
+loo se +bb ing +: : +argent ina +de bu +cy cl +wh el +dis gu +j el +k ills +bio logy +ex ter +tra sh +bo dies +tr am +circu it +expe ct +la ds +w ells +sho t +ge e +naren dr +fa stest +b ent +b ills +mar shall +h ats +intro duce +citi zen +im possible +gi b +az z +net working +r ant +thin k +in dy +st ops +f theday +bri an +* * +amo di +dom e +coura ge +pac king +af fairs +g n +si zed +ent ary +pol and +swit zer +afgh anistan +w u +ten der +subscri be +mo sco +att end +republic an +hon ey +âĢ ĭ +si mul +we ster +foo die +or o +midd le +ab t +co pies +ma je +narendr amodi +ty pical +inspir ational +vit am +wis con +cu bs +tiv ity +h ali +e ars +k ay +d are +mari juana +cu rious +an ia +tom ato +re mind +ðŁĩ · +sc ared +cou p +po et +land ed +ri d +wra pped +mor ri +climb ing +e ws +fe eding +con tra +tho logy +gri d +ti vely +read er +la ser +di ving +di g +lat in +ti ed +shake spe +o ci +ad m +show ers +chu ck +mar cus +oo s +kne e +o live +ow l +dy lan +an no +g ym +deci sions +well ness +arri ves +sati s +chri s +thur s +ðŁ¤ £ +inter views +thank you +switzer land +over night +journ alist +ser ves +vol can +.... ... +plo t +nic ol +car rying +mag ne +tre asure +ex p +be ver +ðŁĺ ¢ +mar ty +mo le +don ations +recogni zed +b h +du s +sh ann +al do +success fully +ent e +ðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤ +cab inet +cu is +tit led +d as +so l +strate gies +deli vering +ad ds +ani an +ne ther +ðŁĴ ĥ +con tain +su its +pa irs +to dd +rel la +ro pe +ci o +cro p +paint ings +su z +re jec +bu st +d h +fra ud +m h +contro l +je al +destroy ed +al lows +wo ol +minneso ta +om en +j u +sympo sium +d af +lim it +accoun ts +load ing +inter n +re solution +hol land +qu al +meet ings +gra ve +cam ping +v am +re nov +liber al +am ber +gre e +hu mb +fe ver +el ing +broo ks +à ² +be th +ad ed +al t +ro e +perform ed +jo sh +frank lin +nic ole +de ss +bb s +m g +net works +min im +al t +weap ons +gu y +jas on +g ha +harb our +at on +pra ise +kentu cky +bel fast +st icks +blo ss +ho pes +an thro +famili ar +wa it +ch ile +depre ssion +la x +je ts +le ice +recei ves +si er +an k +de x +inde ed +fle xi +fab ric +lam b +hel icop +am anda +âĢĶ âĢĶ +compe te +sn ack +techno logies +sy rian +mom s +mu ham +cho sen +an at +dev on +shar ks +re t +fundra iser +selfi es +st ations +communic ations +tennes see +tu tor +ro t +valu able +dynam ic +nur se +i ed +earth quake +deser ved +a ve +sar a +stre tch +dougla s +ne pal +à § +ob viously +d ame +ra pe +any body +k w +pat rol +hol ders +h anna +info graphic +ec o +be ating +stan ley +bo ats +ri bb +e z +wit ch +inv a +ac id +boar ding +- @ +gi l +da ve +care ers +opp os +l loy +in ter +do pe +re su +j agu +sh ade +in dy +on ist +rel ations +ag en +ab le +inci dent +me ter +shar ma +id r +pro ve +immedi ately +tro ops +am an +g low +gaz a +blo cks +person al +chron ic +all er +si d +sh r +whats app +lu cy +ar chae +ho u +journ alism +our selves +go t +the med +shap ed +we ak +cas ual +leng th +sla m +ab bey +e v +coun ter +est a +reci pi +cha pel +expan sion +sel f +suff ering +sp ice +n z +sp art +desp er +boo king +quart ers +y on +ðŁĴ Ĺ +p k +continu ed +- # +man hatt +tal ked +sh en +com bo +hybri d +je ans +liqu id +se al +re tweets +ac celer +collec tive +t as +: )) +profession als +ra w +o tt +su san +ir ing +okla homa +re ven +survi val +cre ator +tran sit +st ac +sur f +i k +ed iting +ch illing +bai ley +ste al +ra ble +pa rent +hun ger +sn app +collec t +philos oph +dedic ation +c f +c m +le ep +repe at +re ha +un fortun +a er +a ero +abstr act +mon itor +ag ents +bu l +sci 
ence +harb or +drag ons +floo ding +ac compli +d ash +juli a +the red +tues day +cy ber +b low +ta ined +le m +refe rence +pp o +ne goti +char le +con nor +au lt +access ories +commissi oner +rain y +re ar +advis ory +luc as +ma id +co al +k av +pol o +ðŁı ¾ +tran sport +mar gare +straw berry +bur ns +gre ens +ne v +partici pants +col in +belgi um +col our +in form +d ell +br on +cal y +kick off +strate gic +re union +hon ors +li b +egy p +âŃIJ ï¸ı +hy po +si zes +regi stered +bet es +relax ing +bloo m +inten se +valent ines +insan e +w wii +p x +tri o +bla de +wiscon sin +con e +plat in +ali ze +ra ven +incre asing +indi ans +il ian +bl u +rabb it +exten sion +je f +au di +fer ry +s ell +a day +us b +swe at +cham pag +metho d +mem ph +assi st +s by +ca pe +remo ved +mag n +v t +r ams +f bi +tack le +phe w +h on +motor cycle +su spec +eleph ant +sub ject +let te +da iry +whe at +awk ward +ac t +tro l +mit ted +zay n +sheri ff +ene my +con s +ke tt +bul ls +ev alu +bt c +satell ite +ho lo +por ter +dia betes +bet ter +rele asing +sur f +: - +se basti +collec ting +en cing +e thi +go ds +al ley +health y +m ills +sma sh +co pper +cr ack +read ers +sp ac +licen se +bas ket +bang la +en tic +om i +m ere +si vely +anim ation +lan es +dent ally +chill in +fi e +k aren +dep th +li pse +n g +ri p +mel o +sand y +ðŁijı ðŁijı +vin cent +nu t +hu g +who le +cre ates +? ??? +âĿ¤ï¸ı âĿ¤ï¸ı +bak ed +up grade +rober ts +har a +carib bean +auth entic +mb s +mosco w +attor ney +wi ki +ch lo +hu ll +cor k +" ! +sty lish +ðŁĵ¸ : +di ary +impro ving +ex pand +bri ght +pollu tion +k nights +person ality +chec ked +fac ilities +z el +bow ling +gu er +ðŁİ Ĥ +on going +un its +hoo k +be ck +confl ict +to dd +far ming +educ ational +k ak +cla y +stro ke +bel ly +explo re +mill enni +th m +loo p +sm s +consi st +cir ca +br yan +d ab +youn ger +soli dar +pp a +experi enced +b ella +bo ard +shef field +steph en +consu mer +sub mit +spon sor +t ang +ag gre +comb ined +trac king +sand ers +b az +survi ve +fer red +equ al +se p +re ed +str ong +priv acy +st ap +un g +ac ry +pa sta +pir ates +ag er +fair y +du p +introduc ed +wi p +let s +spr ay +ðŁĵ º +gre w +a sts +pitts burgh +new york +jo ey +lau ren +tra de +ch op +pi pe +cla ire +behavi or +v ap +cre ws +lap top +ðŁ¤ Ĺ +che ster +disci pl +d f +out doors +k s +go ver +super star +cas ino +far mer +; -) +re turned +ðŁı Ī +ma il +roa sted +co sta +v ill +pe z +gard ening +distribu tion +sh ining +inve stors +ra sp +dec ades +reali zed +bar n +p ti +st able +ut d +pan thers +m ens +b n +ca de +bu cket +yn n +when ever +wa ke +da is +ber nie +lo dge +ju lie +atmo sphere +ðŁĺĺ ðŁĺĺ +major ity +par ti +exc it +cu t +me h +musli ms +be gun +fli ghts +vene ss +ce me +po sing +so le +g ou +dark ness +pe ach +cel tic +auth ority +grand ma +ful ness +smi th +speci fic +gar cia +co ins +good ness +aldu b +recru iting +den nis +gar y +sle eve +weap on +pl z +disco ver +harri son +recruit ment +ja i +ch im +com pared +tom s +mo thers +am y +archi ve +t ask +ben jam +se g +law yer +al um +inve sting +mi e +che z +j p +a ke +fl am +wall paper +âĻ¥ ï¸ı +t ton +che st +favor ites +we igh +coo lest +r ating +relev ant +lo gan +ma ple +run ners +pri or +peop le +ma ur +terrori st +te sted +carni val +su spen +me asure +m v +cyber security +app ren +terror ism +o z +v ital +ni es +gon z +fun ded +twi st +assess ment +die sel +en for +colum n +ad dressing +ca sts +pay ment +x ton +fi er +, ' +la st +ne e +un less +clo se +sk ill +cuis ine +fun eral +ti les +a un +k ru +relation ships 
+ðŁĴ ¯ +ev ent +âĢįâĻĤ ï¸ı +kind ness +pro posed +acou stic +a es +defen der +dan ce +h tt +w at +vo y +ðŁ¤ ĺ +au s +cli ff +sear ching +beauti fully +in qu +at l +speci alist +ðŁIJ ¶ +da i +tra ils +class ics +inst ant +v ous +re venue +mar ch +kir k +fr inge +fire works +tri via +âĺ ħ +tr action +wal ter +mo to +l ily +att itude +cli mb +sc an +sav ings +c w +fa ith +cred its +ab led +gra ff +auto graph +he he +ran ch +ha d +ro gers +ðŁĮ ¹ +f in +re qu +fol k +ad ditional +lyn n +u ber +dol lars +lo gic +wor th +so m +the sis +p ound +bi c +st ur +cer am +spen cer +en tered +v amp +organi zed +âľ Ī +pp s +tr on +merce des +no ti +compet itive +do w +ous ness +vic tor +gr illed +na i +pu tin +ab ra +bl ame +alex and +anim al +dec ent +p ent +inter ior +:' ) +but ler +bal let +ðŁĴ Ķ +albu ms +down s +la d +si r +pla in +p ers +blon de +dis c +paki stan +se ment +ga a +w age +ch as +man i +co ps +terr it +lo l +lau ghter +ri vers +magnific ent +lam p +w b +new sle +char ts +ble ssing +p unch +lon gest +fl oral +cu tie +fare well +sto pping +mb b +bu d +chee se +de cla +si m +mc donald +de ter +you th +t ch +fre der +kin dle +fer n +at or +as leep +p ond +spr int +p ounds +la zy +gh e +fundra ising +dead ly +gran de +dou g +he y +lin da +consi dering +i um +gol den +vi k +auth ors +di ss +u ally +appropri ate +mor ning +y le +hon oring +foli o +be c +re bec +fin land +formu la +corn wall +sh ay +cau sing +bl end +sig nal +t ent +kash mir +nation als +har mony +sc out +acce ssi +he ight +medi eval +impro vement +ke es +prac tical +car d +de par +hu n +om ing +cal gary +ste l +bu bble +gur u +ma h +unex pe +n h +ed a +me at +i ge +si o +god dess +in ches +tun es +br itt +sti on +ra j +âĻ « +mer cy +ðŁĴ ĺ +sen ds +i est +pol ici +val e +reduc ed +as ap +vi jay +defen sive +celebr ations +ri ders +med itation +har mon +g ing + ¡ +program ming +in au +sud den +m h +replac ement +sk u +j ar +gra des +ta st +k itt +brand ing +k aw +boo t +f ought +p ays +g f +iz ation +ho p +k k +activi st +v end +coast al +cha os +ðŁĶ ´ +se me +bill board +li fting +cu mb +sc al +ðŁĸ ¤ +stru ck +l v +indie dev +beat en +jun gle +al right +destin y +m ing +k c +ch ances +om an +q atar +cra f +tra ined +pri x +char m +o tive +s mu +e c +and ers +hand ed +al ban +certain ly +arri ving +i ze +sa i +tr ack +pain ter +hu mble +appo intment +head line +manag ing +mo d +as pe +andre a +à ¤ +ethi op +un ited +exi st +bal i +k ad +n t +d red +re x +recogni ze +tam pa +be ers +ati a +he els +no te +transport ation +tur tle +re de +hipho p +sp icy +sp urs +⬠ĩ +cor p +ther n +to ast +hur ry +proper ties +ma ge +mar co +ele ments +bou ti +syn drome +ms g +develop er +gra ders +he im +re sil +off ices +del ay +di men +vin tag +barbar a +ðŁĺ ± +vene zu +cu lar +fac ed +bar n +ðŁĺ Ĩ +survi vor +wor m +confu sed +passion ate +Ø ± +identi fy +electr icity +sou ls +brad ley +repor tedly +lun ch +shel f +eli a +swee t +smoo th +emplo yment +am el +manhatt an +ste am +oun ts +ye p +li ving +un e +descri be +ca res +man ila +sha wn +ac ted +bas h +st even +re st +pet ition +div ine +wel sh +rac e +platin um +ðŁĮ ¸ +p b +extra ordinary +solidar ity +m all +on ion +schedu led +game of +fer gu +de ms +nor m +p k +tri als +polici es +publi shing +st ole +fron t +charac ter +van ia +ex ce +sti e +sc a +resi dential +sa iling +ðŁĶ¥ðŁĶ¥ ðŁĶ¥ +spons ors +th ick +champag ne +she pher +continu ing +ven ice +per th +na p +a ster +y ak +un limited +cho ices +ne o +hi v +repor ter +bru ssels +f old +dy s +se mi +la wn +it alia +wi fi +as k +em ed 
+fr ame +monit oring +ste ad +i da +gr in +is a +fli p +re stric +offen sive +atta ched +di sh +wh y +philli ps +gre et +p als +mix tape +v ou +fiel der +spar k +alber ta +g len +ca sh +s ri +u ri +ro dri +entreprene urs +climate change +p sy +d le +em ents +lin ked +nether lands +acci dentally +oppos ition +vel vet +ra ys +c w +om o +m f +lmfa o +newsle tter +: ) +toi let +liter ature +di sp +phili p +uni form +sudden ly +head er +cool er +-- - +prou d +bri g +nis san +scienti st +j ah +con centr +pac ks +appo inted +so ap +eng age +cho se +âĻ ¡ +se tup +jeal ous +har ry +g ation +tun nel +te mp +osc ars +dec ade +recomm ended +child ren +ab a +anxi ety +ve ments +sal on +pho too +organi z +mach ines +ab s +vil le +hy pe +ti ff +emer ging +av geek +[ # +contribu tion +bra dy +re sto +g mail +fit z +photo shoot +hel met +h t +eleg ant +ug anda +nur sing +or leans +pen n +na h +foo tage +em a +w o +w ad +concer ns +ve re +re mark +who ever +str ang +p t +qu it +sh ang +histor y +s ick +perman ent +ill ness +col d +visi on +he m +ar row +con vic +pin k +oc cup +bal d +ex hau +u of +am o +on t +ãĥ » +adop t +la id +smo ked +inter pre +ess enti +associ ated +b d +bb y +fi er +inst all +dipl om +con diti +c f +w ak +any a +gr aci +fi sher +s ss +ap r +il it +mus ician +symph ony +cor d +h ack +le gi +l v +bless ings +hum or +sc ra +e ti +min ster +trav elling +bu sh +jewell ery +li me +!! ! +pregn ant +pe e +lo b +cap ital +ip a +pen cil +la bor +duc ks +prou dly +wedd ing +dere k +m w +pe g +valent ine +an gu +re treat +pro spect +dang er +vul ner +up set +, # +sr k +x im +thur sday +n fl +kis ses +re ds +cr ack +re ward +c u +ko k +me te +aband oned +it t +me als +sp ell +stan bul +del ays +ru m +le op +gu m +no va +super man +ch ick +m is +dram atic +inno cent +r ounds +re c +auti sm +bangla desh +mor al +mo vie +sp oo +k la +âĥ £ +ou ting +mess i +ab road +loo kin +a im +q i +st ack +colla ge +à ¯ +hud son +sc an +ho e +ch au +oc cur +comm ander +ho les +ðŁİ Ħ +bi as +v on +stick er +ma k +responsi bility +colum bus +sa int +ed mon +rac ism +far ms +w en +gul f +may o +!!!! !!!! +corpor ation +ba chel +el a +inter nal +je ep +fol lows +di alogue +de rer +smart phone +he len +rich mond +equ ity +s land +b g +ne ar +av i +memph is +we ir +discu ssed +bad ge +p up +mi stake +phen omen +un ite +ðŁ Ľ +de pic +ri des +in augu +n at +sof twitter +comb ination +gosp el +âļ ¾ +ad mission +retro gaming +ðŁIJ ¾ +sch u +mb o +jun ction +al arm +à ¦ +gr ac +kh ali +k ul +m ale +cap tion +wi sh +te re +cor ps +ru bber +play station +er in +effici ent +l or +jo kes +in ary +nor man +lu is +inaugu ral +ch ed +âļ½ ï¸ı +di p +to e +str at +aa c +am u +pi er +co tt +comm and +tt en +sn oo +cu be +clo ses +class ical +s word +expre ssion +reach ing +n app +co st +affe ct +ric o +gi f +brea the +tri be +or tho +h ay +l g +fri es +n m +hi ding +richar ds +en de +mic ro +capit ol +cop y +ro m +regi me +mary land +tax i +di al +embar ra +un believ +ch t +v s +elim in +o dd +pen ny +sound track +l ings +trans ition +rema ining +a is +mali k +? !? 
+rand om +def end +ul tra +tru m +danc er +st ol +dri ve +a ver +ro ast +defin ition +se an +excit ement +partic ul +su rely +sh av +ber y +di shes +com m +is ol +i am +ob li +gho st +hugh es +chi efs +b as +conserv ative +speci al +fe min +sh ri +n ancy +inte l +tu ne +ðŁĩ ª +jo el +gg le +mo to +ðŁĺ Ķ +bu ck +d ag +antic ip +mont ana +gu id +fro g +ec raft +op e +dri ves +nu mer +x y +color ful +wednesday wisdom +illu min +bey on +inau gur +deep ly +pre fer +for tune +coo ked +ti ble +âĺ ķ +swe ater +it ter +tt y +u i +gi e +com plic +~ ~ +tax es +cu ps +di verse +sam anth +âłĢ âłĢ +ba king +sy mp +wa i +be half +mer cur +travel s +ðŁİī ðŁİ +or ia +eng aged +jump ing +reti red +n aked +p uni +speed way +sci ences +rehear sal +on ym +dy ou +pl ates +r ati +kri sh +jaz z +car ol +ra f +pen alty +tim eline +ru by +engine ers +ra f +bel le +do se +che on +esc ap +me g +ran k +or d +me gan +mer ch +ec lipse +âĺº ï¸ı +ple dge +kir k +per si +leice ster +sa k +w k +saf ely +yy y +je t +promis ed +j c +en ne +no ah +re no +re a +ðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤ +tra il +ðŁij Ģ +f d +soo o +ri min +w k +ภ² +i al +x ox +bis cu +d ale +fan dom +particip ating +fla g +privi lege +pe ach +mach ine +bo ston +gro ss +o g +mir acle +adop tion +u ss +mon sters +be ij +clar ke +pu shing +pra ying +ar o +d n +ell is +apol lo +od ds +refuge e +to w +b p +ðŁĩ¬ðŁĩ § +h end +app eared +memb ership +pe an +du m +viol ent +v y +potat oes +aw w +greet ings +t ts +ac on +sh ane +photograph ed +cra b +temper atures +cu ba +c fc +wel com +he l +in nings +m k +co de +kno ck +gra ss +swe dish +p ta +ick y +v at +lin ing +s q +sa p +ar c +announ cing +sk ins +cit yof +br ing +co x +gam er +it arian +i da +h d +ros se +sad ly +ge o +âļ ¡ï¸ı +tag s +fa ther +chan ge +l ance +whis key +adel aide +te c +stick ers +marke t +class y +bad ass +flo rence +lin er +fro st +k ate +ac on +scand al +es sex +ðŁĺ ı +vi vi +dr ill +blo ggers +recomm end +d ha +ac res +ro ma +bu y +gro cer +er ia +ma har +ff er +patter ns +ver i +com pu +st ev +ang a +ment or +do o +it ali +cdn poli +on ly +conduc t +elec tro +de f +wh ale +prepar ation +bicy cle +vi ral +turn out +bra ss +qu ad +hospit ality +pack aging +den cy +ceme tery +abo ard +dre aming +pic ture +t all +inv ent +ad mi +o e +tem ps +qu an +fun dam +pro mp +resi dence +mu d +sour i +âĦ ¢ +graff iti +gi f +d nd +com p +s war +pe eps +pale stine +devil s +san g +assi stance +bi ke +missi ssi +inter viewed +ne phew +dru ms +v and +gentle men +n sw +inst a +leban on +ee ee +oli via +ver y +rou gh +industri es +m ation +ðŁĺ Ĵ +bar rel +n ay +po ps +moder n +ill y +are st +on ents +protec ting +v ans +e o +vi kings +restaur ants +re ck +jac kie +andre w +w illing +he ath +citiz en +disc rimin +à¹ Ī +stu art +m ys +hi p +tran sp +" ? 
+te x +su shi +ke d +cro ssed +dist ur +pe dia +f ate +some how +mo th +proce ssing +is s +r in +u ts +yy c +ver t +lg bt +re id +on to +arab ia +habit at += = +stre ak +simp son +addic tion +wim ble +deli vers +challeng ing +ðŁİ ¶ +fran ch +e du +s me +ai ds +hur st +th am +tari an +remem bered +palestin ian +fe es +tru m +sket ch +ur u +fit ting +jes se +ðŁĶ¥ ðŁĶ¥ +---- ---- +ba ch +ici a +colo red +da h +associ ate +int el +s eller +p u +stu ffed +ac s +b s +sh in +cooper ation +certific ate +ab u +ingredi ents +re v +in ge +el der +christi an +bun dle +th ic +dir t +beij ing +comm it +ted dy +ed u +to day +s field +w yn +confir ms +lo o +j v +ene ss +al pha +vir us +ari um +gr ind +bri dges +introduc tion +pol ls +bac ter +z ach +termin al +ra iders +fla vor +zom bie +vo d +sp reading +gameof thrones +effici ency +lat ely +ale m +twee t +cri mes +cl er +de y +dg ed +hy un +pay ments +cir cus +ðŁĺŃ ðŁĺŃ +mis souri +lu b +episo des +c age +po s +mat ching +tumb lr +lin ed +ge st +am bi +nar r +ing ton +regu l +blo wn +is le +co co +on don +joshu a +tour ing +sm a +sau sage +best friend +bo eing +desi re +sav age +ra pper +de vo +te ar +take over +cow boys +po ker +par ag +pp e +h int +we ars +se th +ro les +l anc +man ga +form at +fl yer +c ay +mo or +ba ke +spla sh +v ad +ker ala +proce eds +sil ly +reflec tion +di str +wi d +su it +ci vic +yan kees +by n +migr ation +di stin +or ch +fe mini +quali fying +tu ri +o be +hun dred +cra p +wan g +mathe mat +bu re +expo sure +fergu son +seme ster +re serv +pl ym +a hu +fac ial +wa x +wor ried +ca b +vi o +as a +co d +to pics +p cs +hal o +rescu ed +horiz on +ar k +âļ ª +hol ly +el f +ul ti +pu p +quali fied +attend ance +ati vely +destro y +y c +for th +photoo ftheday +c ents +ic eland +meas ures +de sk +port folio +artic les +direc tors +dat ab +e w +creep y +oun ding +hon oured +mi st +j it +men tioned +port able +iti c +d ann +friday feeling +am id +ti ger +scri p +helicop ter +hard ware +expl or +work place +austri a +beat les +ber nar +spi der +disc o +cul t +lim its +shor tly +fin al +nin ja +lu ke +le bron +wal mart +o il +van illa +shi re +ye g +ak y +c s +bl er +collec ted +t g +rol led +speci als +b ff +pier re +sh im +vi er +flash back +restor ation +individu als +pro d +fre aking +tu rer +o a +re fre +mor oc +gre et +re yn +care ful +our ing +u sh +is d +g ill +vie w +thunder storm +b led +pic nic +guar di +pi g +ar k +syl vania +bann ed +u cl +vi jay +ori um +av engers +believ es +eu r +monu ment +concer ned +la bs +ber g +a ap +vi sh +sing les +can cel +z el +ar ab +ru th +too th +ar ta +sh af +chair s +r ack +dise ases +crow d +cl y +fle x +christ ma +artif icial +tom at +fin e +dra ws +advoc ate +fran ce +Ù Ĭ +ðŁĺ ³ +heav y +s our +compre hen +no ble +aa p +hin du +cor al +g ars +ow en +n l +st all +yel low +mar ina +in ver +suppor t +tou gh +promis es +pi e +master piece +sco re +for ce +mor tg +crypto currency +o x +r ors +rock in +pro vin +ho g +no stal +oak land +pat rick +inclu sion +tra ffic +ah med +a ha +lux ury +con secu +de mon +âĸ º +b lowing +st ag +: " +encoura ge +ben e +sku ll +do dge +bu ster +kin son +wit ne +er ror +lo west +fel low +à ° +sh re +bl ur +vir gin +compos er +sli p +mor nings +ga ins +tab le +gra in +ari st +braz ilian +w we +tu es +ribb on +an ag +di st +sac rif +em brace +entreprene ur +af fili +de o +t ali +touri st +fat al +ì Ĭ +autom atic +ðŁĩ µ +we ak +wel fare +confir m +benjam in +fi ghts +alleg ed +me ad +strugg ling +pro secu +che f +à ¨ +propos al +er n +ðŁĺ Ħ +dy k +on gs +hon g +m 
ack +mel on +on ent +ru sh +d ap +tol er +pro pag +c ze +trans lation +wal let +cott age +sa il +constitu tion +ðŁĴ Ģ +mun ici +fav or +storm hour +i h +ðŁĺ Į +approach ing +pin ned +j ed +niger ian +n ach +sh at +particul arly +mc don +camer as +anni e +admini str +he at +electr ical +char ming +gib son +bouti que +ex posed +ac tor +pil low +beach es +genu ine +margare t +ben nett +lou isi +pos itions +el y +shin y +ten tion +architec t +ren tal +ac qui +goo gle +sub way +mom ent +ðŁļ ¨ +ri m +metho ds +cy cli +nor folk +Ù Ī +over whel +ra pid +we ar +happy birthday +progre ssive +ðŁĴ ¥ +co gn +pap a +f ool +philosoph y +pol ar +jim my +wi g +ðŁĴ ĭ +oper ating +reduc tion +ph i +fla gs +to the +o di +a res +k oo +k ang +ar kansas +ash ton +wimble don +sci fi +attrac tive +mississi ppi +logi sts +ral ph +la bel +gradu ates +ma ha +home town +âľĮ ï¸ı +foun ded +on the +li z +trans l +mini mum +pre sti +ta m +gener ations +re bel +journ alists +par am +mc m +acry lic +death s +tes la +w t +bry ant +jer us +i stanbul +muham mad +ri ley +k ris +work shops +is o +coun ts +stre t +prote cted +trin ity +man ual +r hin +r il +pleas ant +le mon +ner d +har der +dar ren +bur y +ra h +bas is +mi gu +occa sion +li sts +âĿ¤ï¸ıâĿ¤ï¸ı âĿ¤ï¸ı +e b +de cre +hamp ton +ìĿ ´ +tra vis +trans form +puer to +nh l +av oc +tri ps +unexpe cted +ve t +di dyou +bar ber +st ages +m son +re presented +for t +l al +pp le +nic ely +ignor e +qu il +qu inn +h k +carri er +remin ded +am ong +pass enger +el len +gue z +sc ape +mu ral +youn gest +ma sh +d ill +rout ine +stain less +jack son +gand hi +th al +on ers +edit orial +convers ations +sd ale +autom ation +i ke +า ภ+ðŁĩ ª +hau l +la ying +men tions +am en +abor tion +i bi +coun ties +ca therine +man ds +jam e +roll er +au t +n am +o logical +cep tion +ran king +tox ic +sn acks +victor ian +bang kok +psycho logy +re g +ang ela +respon d +sty le +sophi e +dak ota +achiev ed +mar ked +imper ial +in as +glo ves +sli m +confi dent +att acked +gg er +lon ely +valentine sday +re b +craft beer +orig in +zim bab +ce iling +te ens +other wise +w b +f ers +day sof +advis or +y ah +âĻ ª +en der +republic ans +av a +skir t +pi pel +chi e +jan e +ja x +ðŁĺ ĭ +âľ Ĭ +j ays +bre tt +bal o +cru cial +d har +as is +de au +lloy d +chat ting +âĿĦ ï¸ı +rel ay +remark able +n s +we t +bris bane +ðŁĶ ´ +tion ally +f k +la yer +house hold +consecu tive +es is +pend ant +st ir +crit ic +su gar +photo shop +pa res +arti stic +do dgers +c un +cra fted +am end +bo at +âŃIJ ï¸ı +egyp tian +sa w +tra ge +small er +ox y +pa ired +nex t +i res +tac o +o y +u c +st i +a erial +: // +dr o +dot com +gg ins +r pg +ay e +le an +stri ker +lo bby +prote sts +pri ority +congre ss +am ate +inv it +r ington +mom my +th us +allow ing +pione er +enfor cement +g ori +tal k +dra g +du mb +bul let +san ge +er y +tar gets +ðŁĩ ¦ +he ather +consi der +seaf ood +ve st +ris ks +% . +p g +sac red +he ating +kick ed +tto t +. 
- +chan di +co ven +po ol +pul se +i a +ro ster +shakespe are +es a +car go +pean ut +tro op +ac tion +tab let +home work +cast le +stru ction +mus icians +free zing +bu tt +justin bieber +j j +bah rain +an them +au dit +didyou know +na vig +guid ance +âĸ ¶ +tur f +n un +fic ations +ye men +char ging +x c +bron cos +su bur +p ale +bor ing +among st +for the +em per +om fg +p j +expe cting +ðŁĴ « +st l +ad min +expect ations +sw an +shoo t +oooo o +min ent +ãĢ IJ +wall ace +stan g +satur day +adop ted +dou bles +hom ie +ome z +d han +vent ure +surroun ding +fi le +mob ility +de es +w ski +broo ke +emb ro +re members +kar a +test im +bo tan +m tv +sacrif ice +jerus alem +d l + ´ +proper ly +ili on +as i +leg it +co pe +m cla +recy cling +lar ger +ðŁĴ ĵ +pat ric +gener ous +ja red +p f +mol ly +thom as +ju dges +h b +sor ts +bl vd +o ven +enter ing +plan es +be et +integr ation +boo ked +fre ed +ver n +ash es +to pped +de pot +welcom ed +ren a +m ick +d and +see ks +gam er +ran kings +ren e +mu t +whis ky +fire fighters +gu es +ga ther +tour ney +de men +y ang +new ton +autom otive +back yard +deta iled +mi st +to bac +fi ber +un usual +grat itude +sp are +ne ys +: * +per i +flo ating +fin alist +don ating +dre ss +bro ad +be the +econom ics +tai wan +ed wards +plu g +pra iri +val en +bab a +f ad +an as +har per +dis order +app lied +p att +bi kin +li ver +cu ri +carol ine +ann er +juli an +wal king +mal col +screen shot +co ding +skin care +activi sts +myster ious +ex act +blo cking +mercur y +bat ter +du mp +âľ Į +en se +li sh +ridic ulous +prote sters +ðŁĻ Ī +lu st +swe at +as s +ali ke +co dy +re ments +win ds +as pir +vi enna +pra y +.. .@ +bo i +cand le +assi sts +te e +der son +p ony +f ence +con spir +âĺħ âĺħ +oo th +e pic +ba rely +a unt +b am +diamon ds +end less +scre ens +can cer +gr o +p st +pro spec +mo sque +help ful +ou ri +bro ther +gu jar +cri sti +ine z +to wers +ad dresses +gra y +bur ton +re tweeted +ðŁ¤ Ķ +n ity +du ck +super vis +jo an +kin der +sanc tu +pi ed +âı ° +ł ï¸ı +m ati +reven ge +ce ster +eli fe +desig ners +back ed +bo li +wei ght +cou ch +su res +s its +shri mp +la gos +auth orities +os ity +hol ly +compu ting +fac tors +ab e +pan els +ram ad +sent ence +missi on +hol m +r b +d ads +shang hai +mon ey +she ets +sk ate +thre w +cup cakes +infin ite +l is +practic ing +ess ay +ka i +as ci +mo b +u gh +hol mes +re gg +ik h +mo ck +collec tions +pe p +o va +sal t +nan dez +co y +thre ats +tex ts +cin nam +pregn ancy +pen ding +stam p +flow er +g is +agre ed +pay ne +ro ver +ph ra +sof t +f fin +fa thers +pass engers +aw ays +al a +h es +li van +in s +samu el +ingu i +h of +j j +chen nai +cat al +om ic +he ath +ni ece +pump ed +integr ated +are l +no m +produc tivity +wan ting +vis a +di ana +tw il +it v +cam ps +ro wing +d ley +black and +gu ards +b ells +re verse +vi be +ric ky +mo ss +ny t +âĺ Ģï¸ı +el le +tro y +cu dd +ev an +women s +fo to +mi stakes +wick ed +mi l +c led +me mes +co smo +schol ar +ren o +ðŁĺ Ģ +v ents +# â̦ +terrori sts +ca sey +cardin als +ðŁĺĬ ðŁĺĬ +venezu ela +bol a +liter acy +t w +en o +con tains +au stin +fin anci +ev an +har vard +origin ally +chev ro +her ald +nott ingham +manag ers +âŀ ¡ +accep ting +wal sh +tutor ial +entrepreneur ship +yach t +requi rements +glen n +pe de +unfortun ately +ach ing +dais y +gi an +night mare +âĿ Ĺ +r ina +b art +ema ils +oppo site +who m +sa ke +pu zzle +da shi +par ty +blan ket +bus es +lo re +beau ty +reas on +pun jab +winds or +func tional +exi sting +hel lo +gli mp +con vin +la k +scre aming 
+rebec ca +bli ss +north west +infin ity +cosme tics +pul ling +coffe e +pl ing +op ho +colom bia +interior design +( + +emo tions +sa c +sun glasses +sav es +d f +six th +al y +ðŁĺ » +de en +dev ast +polit icians +lac rosse +g u +pe i +jav a +comb ine +coal ition +er ts +survi v +ch ad +stri an +n n +de vi +coun c +concer n +contro ller +bre ast +j ury +tu m +introduc es +la di +mobi le +al z +ste ady +nur ses +h acking +on line +oce an +ðŁİ Ħ +a am +ju ven +ic c +louisi ana +ar te +street art +is on +wn s +fr m +p anda +no ir +main tain +del ay +symp toms +thor n +ge ome +ter n +carri ed +p ru +pan or +as sy +per u +clou d +sp ra +pe di +e ste +tag ged +ðŁĺ Ŀ +shado ws +naz i +ا٠Ħ +cor ri +âĻ¥ âĻ¥ +j ad +ðŁĩ « +form al +spo ken +ðŁĮ ŀ +enjo y +lo pez +out look +in ho +w ander +Ù ħ +ma ya +pe e +d ine +ãĢ ij +brief ing +suppor ter +ar ily +ght ers +natur ally +doctor who +j en +v ar +new year +re se +si mm +re x +con sequ +tomat oes +bur st +bra vo +bur gers +cr acking +nor theast +bi om +mush room +mar que +dou ble +ni er +v ag +tw enty +key board +win ni +jama ica +par ish +: - +mental health +ali zing +ren der +wa king +ðŁİ Ĥ +g ly +na than +wa shing +mel issa +jun g +loy al +chil i +song writer +guit arist +bo wie +neighb ors +onym ous +as set +ta i +head quarters +ðŁĮ Ī +i hear +ci gare +sur g +) " +re pl +dar ling +ðŁĻ Ħ +z ak +sa re +ãħ ĭ +mic key +ware house +mass age +ine es +did nt +i w +hur ts +eng aging +mag ic +women in +k itten +mor s +c art +tit ans +colle ague +compe ting +er an +k hal +mar ble +dem and +del ight +et ary +bli zz +lou ise +m ls +fini shes +experim ent +conduc ted +electr onics +itt ers +car ing +wh ats +sym bol +jun g +e cu +pi x +con text +char ger +ðŁĺ ĩ +re ig +fra g +ë ĭ +ch ad +tru e +ker ry +def ending +a int +au ton +check out +bar nes +less ly +d t +m me +clou dy +second ary +are z +_ : +app a +const ant +" ) +ve ts +jo b +i ent +ðŁĺŃðŁĺŃ ðŁĺŃ +m j +fren ch +di ver +davi es +hh hh +e book +๠ī +mar iti +bree ze +susp ended +mat o +vi et +ra hu +se i +bol t +en ary +le is +kar l +fr amed +expla ining +ab c +de aling +nat o +ja ke +exp and +leon ard +establi shed +du b +ar men +el led +voc al +nichol as +ori ent +k yo +illustr ated +ah h +danc ers +milli on +ge ta +po pp +as u +mur dered +gi ble +sto ked +gri ffin +maxi mum +adri an +en counter +ther o +david son +ðŁį » +holi day +ev o +asse ts +car son +memor able +âļ ½ +ob am +represent ative +cb d +tr icks +vo gue +vo ice +mm mm +sebasti an +cli f +ath y +par alle +ðŁ¤ · +pa k +ev acu +e ats +ا Ø +tou ched +organ ised +spir its +can ad +gui ded +frame work +ðŁĮ Ł +pe d +natur al +ag ar +replac ed +anch or +ti t +sha h +organ is +super ior +r n +ch ro +eric a +st ill +cor on +chu ck +loc ks +or gan +ro sen +sc am +ben ed +/ # +ke en +tre vor +vamp ire +sor ted +! 
' +af ford +in tro +gr ace +ðŁĺ ľ +sau r +kick starter +influ en +v u +y up +po c +ðŁİ ¥ +a ar +s ang +tre k +et sy +tb h +scre am +chevro let +pix el +shepher d +an or +gabri el +tw ood +sd cc +me ters +develop ers +clo sure +v w +twit ch +ì Ĺ +se oul +pr ice +ho g +n ish +hill ary +scrat ch +in cen +wag on +dis ability +pan ther +ch ats +g d +wit z +sus sex +l ate +den mark +ger ald +cancel led +net te +i x +nav al +bap tist +te t +y ad +ma th +ho y +r andy +po int +intel lec +fru its +w ool +gu in +pr on +the ft +con dem +mar ry +n ola +architec ts +cin cin +roc kets +gentle man +ex plan +t ate +do e +ra ises +wild life +w l +insi der +blan c +w p +for sale +ny c +po well +unbeliev able +pen s +goo dies +mu stang +p ens +st ays +squ ash +xox o +near by +ever ton +co co +le agu +k han +stu d +south west +con struc +s worth +cro atia +le a +su ms +aim s +e an +van ess +iti ous +pa thy +arc ade +b end +sugge sts +sac ram +roy als +ri er +em ir +in cl +an k +clar k +ri ght +vac c +ठ¾ +tan e +li b +u sc +sal es +hu h +s ally +ver a +p ga +gro ws +dru m +tre e +eth ics +sug gest +is ab +se aled +pre viously +anim ated +ab du +ri ses +glo b +pre dat +scar f +del ic +om ar +ll i +sx sw +py thon +ne bra +fun k +reflec t +pav ilion +tic ally +ch asing +bak ery +inva sion +ko h +believ ed +co hen +con qu +cra fts +nat i +cle ver +govern ance +sam ples +fa ils +â Ķ +ti mo +r itu +stri king +inclu sive +sho cking +can t +requi res +dra wings +à¸ Ń +purch ased +du m +z ach +war ner +con sole +man sion +foun tain +circu m +e sh +is land +mil k +pro fits +hali fax +ri val +âľĪ ï¸ı +jen ny +sand ra +ny e +k elly +y al +qu ad +no s +inste in +fin alists +mid fielder +cu e +excep tional +a an +sa pp +gett in +sa a +f ati +sl ice +vol k +s wal +la sting +sum mary +it as +sm o +s z +âĺ Ĩ +ip l +fl ames +ene ws +ha v +hoo die +pitch er +win dy +re vol +centr al +ton ite +ðŁİī ðŁİī +sol ved +mil wau +organiz ations +wee ts +re fin +s th +ãĥ ¼ +el in +ton a +cinnam on +ðŁİ ¨ +ðŁİ ģ +ron aldo +pen insu +ome ga +el ds +desig ning +e igh +blu et +ben z +nu g +ash a +robo ts +su dan +choo sing +en do +ser ge +clo sely +hand y +fing er +be ing +ar te +survi ved +fl ame +mile stone +gu t +d war +fu tures +é e +el o +fri dge +eli c +ou ch +u b +p v +tit an +col lar +st ation +nev ada +aur ora +r d +dun can +âģ ł +bri en +mar sh +Ð ¾ +to tal +ch ry +s ers +su ffe +ra chel +colle ge +to days +cour ts +ch it +re united +gym na +gen esis +be side +re presentation +ch ant +collec tor +ra k +ath ens +ni gh +mun ich +langu ages +fl u +particip ation +__ _ +c v +spec trum +so da +co ver +refe ren +ab bo +ap a +public ation +ed m +mon ica +ar my +ðŁļ Ģ +div or +dr y +stre ams +robo tics +ci der +bull ying +appro val +sto ke +plat forms +sier ra +ex tin +i b +ha yes +succe ed +suff er +at ically +da i +lyn ch +h ound +del ines +ack now +d ated +exclu sively +he res +fac ilit +dam aged +char ter +la kers +fal con +unve iled +wel ove +e ase +pati ence +l one +gent le +gene tic +produc ing +g our +shann on +bil ities +zimbab we +p int +dau ghters +liter ary +bel le +cl am +surroun ded +k any +ne il +pir ate +rang er +hb d +nat alie +bel ong +olym pi +emb assy +sc ol +en er +ak in +lo ren +b h +: / +di va +den im +hi pp +ðŁĩµ ðŁĩ +arn old +? 
' +we ren +em power +dis abled +man or +rasp berry +b af +aw ful +dru mmer +kar dashi +n ash +machine learning +ch u +rebel s +tim ing +mon roe +ton gue +ran ge +pup ils +re ss +amaz on +b z +har ley +pal mer +ballo on +s ings +ic ec +j b +c ers +g ps +whi st +ri se +l t +oo oo +c attle +shoo ter +vod ka +uc l +mt g +le sli +jon as +di spo +at ric +ste in +vintag e +fir ms +flo yd +cow boy +soo oo +is aac +war craft +disney land +beauti ful +be am +franch ise +bu n +k ag +an on +tur bo +swee p +made in +kar achi +dete ctive +penn sylvania +contro versi +vitam in +a side +chron ic +descri bes +remo val +ha h +ap er +ten ed +u to +bad ly +mir ac +f ry +ye a +in jec +ther mal +comp act +th or +te ed +ur gent +l ite +g illi +sop hom +ic o +che m +p m +for k +fre ak +ch ak +recipi ent +i y +ni k +model ing +c ans +ðŁı Ģ +del ux +se am +surviv ors +rad ical +investig ating +reli able +f m +tur t +ligh thouse +to ol +go wn +) ) +bo ts +auto graph +a id +bu ffe +h mm +horri ble +ssi onal +ann i +à¹ Ģ +k its +sch i +eter nal +hu ss +sens itive +r u +tast es +chec ks +im o +por tion +sk ate +e den +half time +fri ed +ri hanna +ti se +fl ick +ca in +s gt +âľ Ķ +sh au +sta ined +ra ffle +dro ve +sal man +princi ples +sh o +ar u +je ss +gu ine +gar bage +my an +jel ly +dis ru +z ia +q ld +ent ries +la v +fle w +ad mit +objec ts +comp are +ny times +cann es +p n +suff ol +ro c +d ana +e gg +hi st +coun sel +' ! +phy si +imag ination +ad just +explo sion +plym outh +hor ror +elli ott +bour ne +de x +bre ed +au dio +lob ster +disappo inted +nation wide +( ( +incre ases +austr ali +ce dar +star ing +rac ial +e is +g mt +visi ons +stay ed +discu ssions +de an +cur tis +mai den +stel lar +happ iest +h wy +pre season +car av +mon days +hospit als +glimp se +schol ars +ja i +ter race +ann a +goo se +gra ded +lot us +hun g +grocer y +stam ps +emper or +sc oop +in ser +c as +exist ence +he al +fal cons +mar vel +reduc ing +terri fic +magne tic +perfor ms +bar re +p us +tre ating +ic on +w h +decla red +tra uma +do d +come dian +nik on +bu gs +as m +mont gom +ibi za +comprehen sive +ha s +san ti +fellow ship +da sh +p sal +louis ville +sp y +fau lt +d the +fi led +vi sta +de sc +fe ars +you tu +sp s +es p +ri g +cri me +ber ger +wonder land +k ent +in formed +stev ens +my th +ast on +ir i +visit or +at ri +produc ers +al la +person ally +separ ate +agen cies +af ri +il an +spo ke +n ina +squ ad +di ves +de pend +li v +fier ce +enter taining +cha in +sc at +bor ders +pal ette +sp ro +os is +der by +tobac co +zi o +willi e +ju vent +zoo m +hol y +enti rely +af e +mart inez +be ds +pe a +bull dogs +ðŁĩª ðŁĩ +ib m +ne on +ethiop ia +team mates +plan ting +tw er +any time +for bes +ó n +run way +ner vous +ro ger +p ile +ch anc +apo caly +u w +o i +dr ought +territ ory +br ick +cre atures +go in +w aff +gre n +sou theast +je an +am bul +ed ited +stra p +c v +aar on +ãĥ» ãĥ» +t su +descri ption +kin dly +clu tch +im mer +en or +women sday +or ange +ra g +ob vious +hy der +chann els +man go +me yer +ra ining +ge tty +pil gri +coordin ator +up load +ninten do +don uts +san chez +app arel +j r +zz i +, @ +jeff erson +accessi ble +great ly +e id +initi al +budd ha +par is +ma scot +â¬ĩ ï¸ı +sch war +si ri +sp inning +mortg age +e cho +end ange +ge dly +chlo e +enh ance +kar nat +k ry +explo res +ðŁĴ ģ +af fair +ic als +all a +dar t +dolph ins +diffe rences +squir rel +au gh +dr ones +ell en +re store +pa w +un for +pi ke +hil ton +colla b +consu mers +co inci +out comes +pp p +a q +coup on +li est +si ms +k ho +av es +spo 
on +pu dding +cor byn +hat ers +ex ams +sla ve +. ! +p sa +app les +tam il +se d +co ke +zz o +lo sange +car bon +cla ir +... ) +k hu +cra ig +explor ation +sanctu ary +su e +al way +demen tia +won ders +super hero +pakistan i +brown s +bluet ooth +lo cker +mar c +ev entu +delux e +rodri guez +âĿ¤ âĿ¤ +ro bb +ðŁĴ ¦ +lin ux +ten s +intellig ent +se ed +vo ter +s ler +pe aks +inter n +teen age +peninsu la +hand ling +ti e +cou sins +wen dy +me e +à¹Ģ ภ+din o +ðŁĴ ° +ðŁĺ ĥ +ze e +s bury +trage dy +b k +bo re +z in +war ns +idi ot +tou ching +contin ental +tac os +saf ari +wa shed +po dium +morri son +fore sts +c bc +al on +partic ular +be ads +inv ented +lo ch +li ghter +where ver +i de +docu ments +a we +k r +no where +min er +st it +ro x +contribu te +har dy +cl an +ob ject +ca it +ðŁĴķ ðŁĴķ +happ ier +vege tables +t art +g ag +nom inee +heav ily +pan ic +j d +there sa +at m +u ph +s fc +su ri +drin k +n al +re vel +k l +avoc ado +nom ination +ma donna +shar on +malcol m +control led +sh ers +revi val +legis lation +shoo ts +n in +comm entary +pro s +human rights +str anger +mit ch +pipel ine +leg ally +th u +gil bert +tol l +gran ted +gh s +ir anian +refre shing +du k +ab i +pri me +jose ph +mo sa +stati stics +produc tions +mer ry +pat el +sa x +human itarian +struc tures +e missions +town s +fre el +ster ing +rat ings +alle gedly +cab in +st l +w ade +fl yers +tri m +promis ing +z u +bal lot +compar ison +free ze +ou ter +great ness +as sign +snow y +r ale +tor ies +med iter +kno ck +consult ant +cincin nati +analy st +sc oo +je ws +appro xim +pu re +portra its +cy rus +ation al +lo ans +acqu is +el u +accep table +uni on +water color +ru st +batt les +per fu +seas onal +ser ial +mind set +ri ot +fel d +enni al +clo set +pri est +tan ks +int l +scre w +bu m +ab dul +ou x +expla ined +ric a +imag ing +law yers +bu ried +ãĥ»ãĥ» ãĥ» +ear l +âĢ ķ +l ton +resto red +stri pes +fo ss +de mands +ste aling +alex is +mun d +ak er +ur us +war dro +hu gs +gen re +e go +Ù Ħ +particip ated +bab es +ban quet +ti ous +he mi +ds b +lo st +milwau kee +jen ner +ge m +ou tra +lo ses +id i +re ps +ðŁİ § +regu lation +fla w +f ang +vibr ant +ram p +ra ins +well being +so viet +vie wers +de po +libr aries +bi go +ser y +g ill +de struction +co z +c x +bri dal +al ds +plan ted +amate ur +lu d +che ering +show cas +pro file +i u +ver tical +pack ers +wiz ard +ski p +s light +be au +air ways +mu ch +re ra +ðŁĮ Ĭ +ab sor +pati o +pack ages +s ells +ment ally +ðŁĺ ¢ +reyn olds +k are +tri bun +wal t +kn it +ta ste +sur rey +boun ce +cre ature +b are +bet ting +su re +mi ley +laugh s +al ore +cy n +t l +arti st +ann ah +war mer +dynam ics +lunch time +mariti me +vulner able +ðŁĴ ĥ +wol ver +dur ham +const antly +am in +si bl +: @ +bul let +k ach +angel o +wil der +doo m +desk top +law suit +k ca +hen derson +inv iting +bet ty +ta wards +ra fa +le aked +and i +ge ms +af l +vel o +mediter ran +pro be +to tten +steph anie +sn ation +com be +q s +over come +assas sin +ra v +fil ip +winni peg +sh il +determin ed +k as +ou tre +regre t +gui des +aa a +ðŁĺ Ī +wi ves +mani fe +er ly +sm y +sh ima +x ing +pix el +jac ob +ac commod +to y +on o +po o +ti er +an swe +ðŁĴ ģ +ro sa +le ase +bel ongs +th ar +eventu ally +nei ther +go a +ski ing +at ra +ag h +broad casting +f ury +py ram +d ice +volk swag +wom ens +provi der +bom bs +miss ile +whi p +d ick +nor we +back up +el der +mat ure +concer ts +gi ous +sque e +good morning +bra ves +^ _ +au ssie +lun a +mal es +he ck +for tn +rome o +steel ers +p n +pe er +re presents + « 
+kat y +migu el +requ ire +cha ins +l ur +immedi ate +ti mber +âĸ¶ ï¸ı +advoc acy +ex port +an z +tiff any +auth or +ðŁİ Ī +du des +chil ly +hi d +har m +bu g +mon ster +terri er +tu c +story telling +ta k +in ti +immigr ants +b is +reach es +com passion +john ny +contribu tions +ðŁIJ ¶ +mechan ical +impre ssion +ran ks +ko be +men ting +bloss om +pab lo +buil der +bom bing +tw el +sul livan +om o +pe te +de mi +ku dos +w bb +t gif +mass ach +neighb or +che fs +eng ines +pun e +ga ined +phan tom +s days +ext end +gr an +cent ers +jac qu +dat asci +sleep y +el vis +answe red +s lot +con y +flexi ble +ti ally +le tics +% , +andre ws +si ble +mom ma +vin o +do x +invit ational +twil ight +j ade +ill ery +joh ns +f ou +p v +-- -> +break down +billi on +prin ter +mon d +c bc +mag gie +legi on +du b +kur t +po or +paren ting +regi ons +bikin i +be ware +si onal +au burn +kid ding +amp les +sp an +con tempor +c ic +ha bits +ak o +pre fe +bud dies +it z +em ily +person nel +moun tain +ver sus +ðŁĺ ¬ +ear ning +s ink +dar i +u u +s win +i ster +bru tal +n ac +kat a +clo th +am and +ðŁĶ Ĺ +ne o +alu min +week ends +nebra ska +co des +delay ed +brun o +pro ven +in c +i ght +fl an +or o +lam bert +regu lat +w f +massach use +kardashi an +bern ard +fi esta +volcan o +grand pa +anc a +d re +st itu +mean ing +fo am +au ck +at ed +r l +hot el +pers ons +dy nasty +ell or +ma i +am ne +sty ling +avi er +e g +vege tarian +, â̦ +foun ders +sta in +g d +cy cles +sky line +trac tor +exi sts +tra l +kid ney +mar il +inst ag +se tte +addic t +tri angle +flash back +controversi al +z on +p ins +i as +tr ay +town ship +deleg ates +sp am +h ms +cr ane +peop les +o lo +fac tion +but es +on ica +deleg ation +new profile +eli er +mc a +w and +g ely +losange les +ber ke +ti ve +dis rup +zz a +cas a +jor dan +ford shire +ga thered +ic hi +atten dees +à¸Ń ภ+pe ppers +co in +bour bon +ern ity +ro tary +behavi our +jere my +team work +compli ance +tre mend +ðŁĩ § +bu hari +cam bo +bu yers +ha gen +bu ds +bay ern +mon te +sm ells +an za +ath lon +descri bed +work force +gi ving +ap i +invest ments +da il +sel ena +datab ase +th um +mor tal +stu dent +bu yer +do ver +gar ten +att le +loy alty +gen oci +holo cau +theat ers +ru ling +ven us +pat ent +ch un +ab by +awa ke +mass acre +bang alore +break ing +simm ons +ju sti +hal e +ed chat +gg les +haw k +mar king +head lines +stro m +co ve +breath taking +med als +hair cut +christ ine +tele graph +gujar at +ju ra +can e +sho re +propag anda +mu eller +.... .... 
+sa vi +stom ach +thro ws +ta b +war m +j ong +reno wned +hi r +ra is +mush rooms +guaran teed +bo a +m j +revolu tionary +certi fication +bru ins +jo in +w es +pas sport +c g +sex u +cap able +w v +ton es +jac kets +ac compan +spin ach +fore ver +bla ir +wat ts +g l +cou ples +prairi e +newprofile pic +logi stics +massachuse tts +jagu ar +o id +we al +under water +mo z +y i +ma ths +myan mar +pre ps +suffe red +tr ace +wal i +ah hh +bor g +st itch +cu lin +real ise +infe ction +discrimin ation +sh ame +an kle +hu mid +y t +brac ket +tru ck +tri u +ea ster +commun ity +post card +invol ving +ty ler +car amel +over view +ex amples +integr ity +base ment +instru ments +ani um +at us +gh er +laun dry +achi eve +gen eva +pr icing +hyder abad +beli ef +me ta +j aw +accoun ting +lead er +cristi ano +cou ture +cy p +vis ed +, ,, +k nu +h ick +break er +br am +ra b +mo or +ham as +gradu ating +pupp ies +ak h +ta h +ach es +ri e +op ini +g ta +re ign +tra gic +re ver +p ill +pine apple +tou ches +da re +le ys +il o +inter iors +sc outs +bar t +en zie +don o +bro ck +christi ans +ense mble + · +cine mas +new port +air line +win ston +le igh +cont ents +pre scri +ur ge +tr out +fic ally +il ia +sub si +are r +âļ¾ ï¸ı +w ounded +ðŁĻ Ĥ +pe pper +ðŁĴ ŀ +fit ted +af f +re sur +thursday thoughts +z ero +archae ology +di v +je e +i on +awa iting +co zy +beauti es +bal d +dat a +gri zz +stal k +kin ds +cle ared +jess ic +regu lar +ali ens +plac e +bo s +bi zar +thisi s +ðŁĴ Ģ +totten ham +ma fia +s lam +ari ana +car roll +back pack +care y +uni v +r g +pe p +dig it +tatt oos +ag on +volunte ering +diffe ren +consu mption +ka thr +head phones +t shirt +o b +ele ment +re tail +sh ru +al gori +contain er +consci ous +fi l +com ing +ra sh +u rope +def ine +gi or +femini st +flow ing +rout es +gl aci +fer t +somer set +ant es +twee ps +$ $ +h our +endange red +year sof +ro h +po pped +bac king +ba sil +bra ke +mon aco +lgbt q +pra gue +ut ility +cas si +gate way +haun ted +sch ul +ðŁİ µ +shou ld +walking dead +comple ting +dann y +montgom ery +pengu in +ss i +mer chandi +ðŁij ij +chur ch +h ates +cap tain +brea thing +ce t +fair ly +approach es +compan ion +surpri sing +kany e +pe y +hin di +targe ted +lor ds +de ut +di gging +ger man +ru t +ener gy +close st +y un +apo logi +ภ± +s ack +ru p +dd y +port al +d ough +b ats +ðŁĵ ° +at ur +graph er +pi res +mo tors +ðŁĮ ¹ +j c +dan g +tu k +clu e +us c +pag e +d less +bro ws +ju s +ad ing +re marks +oo m +car dio +ste fan +arm strong +âĢ¢ âĢ¢ +ni est +belgi an +bi op +so y +lo f +í ĥ +q t +flashback friday +ce e +ģ ภ+wre ck +mar ines +amend ment +wardro be +vo y +bur ned +guit ars +ra inf +li fel +ssi l +oun ce +exter nal +c key +me sh +she ikh +inv itation +sugge sti +pop corn +phenomen al +an onymous +tun a +chic ago +o val +del y +loc als +( & +pro f +no vel +fin der +spar ks +la ven +in fu +nic ks +qu ant +ra e +exe c +dist ingui +st ances +mu tual +sh al +unve ils +edmon ton +zan ia +a dio +vie wer +brad ford +audit orium +qu is +re act +htt p +l ero +chee ky +impac ts +ta k +ed t +desper ate +t ay +ì Ħ +sett le +bar gain +resu me +un ite +thro wn +ke st +se ys +mar ching +am it +decl ine +sch ar +me tr +stan ford +lin ke +ber ra +dol ls +rug by +jam i +b or +road trip +dino saur +mi k +sun der +re m +b k +over seas +nau ghty +imple mentation +iam srk +lun cheon +fir ing +mi ami +pere z +the e +z on +gi fted +con version +ceram ic +¡ ï¸ı +pe dro +ì Ĩ +v ick +! 
[Diff body elided: tens of thousands of `+`-prefixed lines, each carrying one byte-level BPE merge pair (e.g. `he ed`, `docu ment`, `gradu ated`, `wiki pedia`, `black historymonth`), including byte-level emoji renderings such as `ðŁĺį`. The added file appears to be the merge table of a CLIP-style BPE tokenizer vocabulary bundled with the repository; the pairs are listed in rank order, highest merge priority first.]
+f the +women shi +spar row +blu sh +us able +sc ales +it ative +peu ge +ne eding +legg ings +glam orous +mat ur +c z +wat t +da b +tam ar +et sym +bau er +heart felt +h n +else where +bir ch +alu mini +hu ck +e me +j l +traf ford +d z +por tions +ana sta +arthr itis +esp n +ber gen +viol ation +yo shi +c z +northumber land +clo sures +ðŁĩ¯ ðŁĩ +smi ley +r w +tel ugu +inten si +gre gg +ve ga +dun geon +south bound +ba il +domin ican +semi final +chap ters +h itch +van ity +trans iti +recomm ends +sati sf +bar ca +queen s +( ( +de struc +stra it +ra vi +dess erts +in tru +har am +k os +fo e +fat ty +pais ley +magn itude +dri dge +com ey +schem es +vision ary +our t +down loaded +ðŁĻĮ ðŁı½ +gd pr +lan i +p wc +gu ad +nic est +stake holders +re ferred +george town +arvind kejriwal +schnei der +in doors +all star +strand ed +gen der +ze pp +ma sses +ðŁIJ ± +pati ently +bl dg +z ab +we arab +vi vid +he ck +d ella +sy mb +je opar +la ger +à ª +comb ines +ne c +br ay +flo p +tx wx +jo ys +pon t +pro found +sur round +mad hu +ma ble +ay r +te as +n sa +open ly +er nest +ãĥ © +to po +g na +anti oxid +ti an +e tr +c ello +ma thi +gener osity +b iting +man ic +kel sey +chee ks +ten der +w th +pron oun +ultimat ely +gu sta +ari anag +ger ry +ble ed +red dy +mic h +mitsubi shi +oper ated +sex ually +ma u +cl lr +vi ds +co c +mel ted +ðŁĮ Ī +q ld +ite ch +instru mental +end game +ðŁĵ ĸ +ener gi +brow nie +tam il +at in +domin ated +pra ises +fire place +sens ational +men a +k arti +un prece +ru pt +ori ental +mc cor +tour naments +scen ter +re eves +prescri ption +sam e +fra u +tru ffle +em bo +roman s +bla sts +techno logical +pr at +b sb +y ar +tren dy +ac l +al ad +ðŁį ģ +o hh +bankrup t +tho ven +regar ds +is er +war wick +vine yards +real m +niallo fficial +do ta +ge mini +to do +v able +¨ ¨ +la u +wre ath +ju ve +nat asha +le ver +lor i +hor ser +cc tv +air bnb +es anders +sin clair +ema biggest +high school +con test +optimi stic +t te +ðŁĴķ ðŁĴķ +ss d +ye e +hel ena +con sen +ric ks +jes se +an ic +ðŁİ ¯ +re acts +ro be +independ ence +vol tage +m ington +s ant +à¸Ļ ภ+-------- -------- +sentin el +ke tt +rehear sing +aaaa aaaa +sof the +stir ling +sear ch +wi gan +stand out +sna il +pent agon +Ä ģ +ch lor +cru st +net any +chemi st +disapp eared +ric ardo +sp iders +bo se +war ren +me ssing +bann ers +gu el +par ach +ma id +coun ted +epi le +bon fire +speech less +se tter +meas ured +rejec ts +nik ki +le ster +foren sic +fab rics +alo ha +pre served +wat ford +deta iling +dar th +bo u +car ly +... 
' +tail gate +noti fications +å ¤ +pas sive +trous ers +balo ch +ro ther +typic ally +à ¥ +sp it +wi z +sic ily +technic ally +ex pose +st age +hu bb +cre am +cap s +po ke +sle ek +ju ne +tempor arily +de z +awak ens +l ame +_ - +ji ha +tues days +advis ed +advis ors +exi sted +dis agree +news room +lo sers +world tour +dr ying +al di +har ness +foot print +hobb it +p mln +i ro +que red +asse ss +gaz e +sa b +th ian +í Ĭ +ti f +ob serve +ev il +dra wer +swee p +cor y +co dy +kyo to +cal lum +n inj +lau rent +be i +sket ching +custom ized +du r +regre ts +knox ville +ìķ Ħ +mess aging +grac ie +abun dance +bi dding +bre wed +fl ouri +therapeu tic +alt itude +ho gs +bur ner +elec tro +wonder fully +he ater +post pon +li very +r all +ad as +a ac +sau l +brook lyn +play house +âĻ¥âĻ¥ âĻ¥ +char itable +in y +z ah +compet itions +be av +plu gged +o is +do om +astron om +speci alized +max i +ta ps +cellu lar +depre ssed +folklore thursday +cri b +e mul +ë° © +fi gh +ru z +car lisle +spe ar +side walk +de i +depend ent +lac es +nh s +ðŁĮ Ļ +reali zing +net work +ric he +re gin +re fresh +st ral +pa thology +pla id +psyched elic +hin d +u ka +algori thm +lin king +progre ssi +fe y +d ade +hydr ated +b ant +fam ed +cot sw +bo ise +as c +rac ing +ja vier +ww en +mar lins +poo p +swe pt +toni ghts +we f +ani me +slo vak +âŀĸ âŀĸ +cla us +lem me +cli ppers +re ls +arianag rande +r te +ko t +thal apathy +hungar ian +zu ma +y von +is u +jour neys +clin ics +be be +ww f +n ws +super heroes +er it +sle ague +identi fication +mo tto +ba i +sour ced +ill er +ap i +pri se +unprece dented +dam as +tuni sia +dra in +undere stim +e ther +quarter ly +rewar ding +al ham +wolver ine +cab ine +hyp no +nad ine +hav ana +da e +ðŁĵ Ī +dr on +read ings +b ati +pic o +mer ci +iti an +wal kers +el ope +mi key +god zilla +bur lington +abu ja +social ism +at ility +sh ell +harry potter +g no +ab ur +re leg +fel ici +ro gen +neuro science +inst in +ath am +vou chers +j arre +fu se +def ici +monte rey +de port +mid day +pp ard +fre ed +ame ter +wil t +n ingham +pr att +liber ty +slo gan +o to +pr i +co ated +c pd +ne tt +il las +mal awi +evol ve +accessi bility +ðŁĶ¥ðŁĶ¥ ðŁĶ¥ðŁĶ¥ +or nament +b p +el is +son line +chi ro +fl ick +ib m +ar ak +en ables +gar land +san e +cu ties +tri p +rotter dam +n ys +lam ps +lu cas +bo g +ra ils +travel led +hic ks +en u +sab ha +scru b +hi er +hart ford +fo o +fer nandez +tre vor +mat tress +appo intments +ale j +fe i +o logist +saf ar +oc ta +sr c +sha un +ambi ent +dri c +bi ker +she e +must ache +h ta +bo one +her ty +car dio +bra kes +rec ital +consi sts +overwhel med +cau l +robb ins +im it +al th +ur l +bi bli +on ne +black livesmatter +diffic ulties +tel ang +tall er +ðŁĵ Ĩ +deb ating +bur rito +mo vember +strength ening +bo e +te stam +mirac les +base ball +re nee +ðŁijī ðŁı» +al fa +âĺ ĺ +unstopp able +ec s +g mo +giftide as +path way +fen cing +ðŁİ ¤ +b ham +ra s +sk o +d led +thel ast +magn um +bin ary +wil de +wil der +wh ati +barbe cue +h ism +can oe +kur di +eli ve +advant ages +mad ame +bi er +mis sing +enter tain +air force +y ama +c is +hash tags +j is +ve il +dream y +ten se +may ward +ch ateau +hunt ington +âļ ĵ +v all +up on +bl ouse +dun es +ðŁĺ ´ +fert ility +m ole +curren cies +st u +ber lin +toa sted +div as +wal t +lar k +por a +hit ter +um er +chil led +bal ancing +fa is +y in +or tiz +east enders +h ate +ur al +ap ril +tim el +à ± +per o +sto cked +respec ts +th t +best friends +giving tuesday +be ad +inv ent +im i +nap les +comb ining +tok ens +thir st +ma sc +par 
rot +sp u +dent on +* -* +t res +subur ban +wid th +si ve +con tender +siri us +lo k +troop ers +outra ge +tur bo +frag ile +me ssed +do h +disc ord +netany ahu +re sign +forgi veness +mo han +mun ch +cam ou +identi fying +enab ling +hot ter +thorn ton +jai pur +ar ya +ðŁı» âĢįâĻĢï¸ı +mu staf +maj ors +o ke +du ffy +roh ing +til t +ðŁĩ®ðŁĩ ³ +rock star +she ep +hend rix +ra v +in vention +do u +lagun a +gru mpy +sw is +im pe +) ' +you ths +bun ker +st ache +oppo se +indi es +acceler ate +ml p +ed en +w ann +k ail +akshay kumar +su pt +pol ym +midd leton +extra ordin +wil son +australi an +alumini um +way ne +alum nus +mat ics +gri m +er nie +opp a +competit ors +rand all +h ence +decla res +pre aching +sha he +can e +sustain able +stap les +le dge +ad ena +doctor al +bur gundy +decor ate +ren dered +ri sen +pr ank +di or +bee thoven +flo or +ac com +to t +ho dg +touri sm +say in +objec tive +mar kers +premi ership +en abled +camou fla +gi ant +Ñ ģ +smo key +ric ket +pan g +de pending +s ation +evol ving +inter cep +cen sus +tof the +re en +mendo za +trum pet +marke ters +an it +ðŁĻ Ĭ +north western +v la +foto gra +blackand white +che wan +wi g +tro om +ginger bread +k n +ro mero +n fc +or chi +fun ko +sour ce +f s +ra ped +o st +tar ot +ann ually +ðŁĺ ¬ +r ill +del av +.. !! +se s +can n +medic are +ph el +ape x +guardi an +rema ined +r pm +a ñ +story month +instag ood +neighb our +p ing +sem ite +my stic +as cot +mat er +hand ful +dang ers +ti d +ana heim +opol y +sh allow +nami bia +tor ia +procu rement +big bang +announ cements +prosecu tor +beng als +sal le +en roll +ga stro +sugge stion +ba k +ha ul +budd hism +berni esanders +flu te +fati gue +cyn thia +cho i +ir win +gu a +str ous +h p +ba p +satisf ying +play a +ðŁİ ¼ +inst ap +al ice +t p +irri gation +ðŁĩ¬ðŁĩ § +in tric +clu es +ple x +sa x +he pat +dump ed +signific ance +by u +medic ation +pro v +tough est +corn ish +âŀ ľ +kel ley +u v +si zz +si bling +me st +di stor +diplom atic +aun tie +b hat +son ic +bren da +pump kins +ro ch +black burn +ur ged +shi a +arrange ments +floo d +sa unders +lec turer +nou ri +popul ations +diplom acy +consist ently +ðŁ¤ Ļ +t mund +cauli flower +l ily +vocab ulary +vari eties +coo ker +up town +qu ent +mo sa +re inde +velo city +spru ce +social medi +i ber +volun tary +proce ssed +bal tic +y ang +leban ese +d p +dol ly +arrange ment +y uri +cran berry +kal yan +elev ation +cli ff +pu shes +ìĬ ¤ +sil ic +co wx +eter nity +sla ves +vine gar +glou cester +con tained +breaking news +aga inst +renov ated +norm andy +hero in +ys m +mo ds +gre ek +un di +tren ch +v h +encoura ges +head ache +gr ange +: ' +ever green +Ù Ĭ +reck on +ab used +th ru +cho ice +ti dy +col der +scho ice +ha in +bru m +li ars +bre it +yor ker +sh ack +he idi +micha els +sco pic +fasci st +play ful +ca c +yas ss +sh ad +.. ? 
+qu en +ram irez +clif ton +pr s +best fan +âģ ł +gener ating +head set +disappo intment +abstr act +bo iled +paren thood +azerbai jan +exhib iting +bom bay +oli vier +ko so +un lea +mat ernity +iz er +si ves +r hu +col l +saskat chewan +fre akin +de k +na g +stab ili +ðŁį ķ +organi zer +bo sses +ar u +u va +at able +ta un +after wards +fert ili +ver ge +az i +mor ph +๠ģภ+jer k +cosme tic +ko w +stru st +ap ache +post cards +for mul +ì ĭ +spin al +jack pot +elec tri +Ã Ń +lo y +gra der +diab lo +ar di +he sit +f w +arch ery +pa sh +the ories +repe al +re live +per cy +âĺ Ĩ +im in +syn chron +sham poo +coup ons +o to +la i +thou ght +luxembour g +mo v +ðŁĺ ¥ +ge mma +se ated +m ga +strat ford +un certainty +shi fts +est o +fo ol +fire arms +cor rie +ki ki +appa rent +p ills +olym pia +fi d +elev ated +de cks +ignor ing +av alan +ro v +whist le +p tsd +milit ants +robo tic +pac ers +quil t +bankrupt cy +lic h +per cussion +celebr ity +al s +( ; +su t +pokemon go +h g +off s +gibr altar +scre ams +billi e +gen ome +mar in +be ams +arch bishop +em in +bedro oms +g ated +ol ly +warran ty +at own +cudd les +gun na +k ic +vi ve +cy mru +nar row +pro b +le o +refe rences +manufac tured +cho pper +brun swick +sem is +don ia +r ye +man o +hur ting +? # +hol li +investig ations +c els +ðŁĵ ŀ +le ster +temp les +sto rey +mc mahon +toi lets +wo of +ï¸ İ +le verage +at om +night mares +victor ious +haun ting +custom er +ag i +yo ongi +mon ty +ver onica +w ur +inti mid +blan kets +volu tion +j m +âĺ İ +am on +jud ith +ðŁĺİ ðŁĺİ +distr acted +dri p +hurric ane +and es +revel ation +tro op +ab leg +col lin +tibet an +wor rying +inter nationally +eat er +camero on +brad or +y uk +ðŁĴĹ ðŁĴĹ +tra k +slo pes +ci er +ne a +ol er +ta ka +albi on +volcan ic +am n +a fi +ob stac +face time +ger ing +n pr +metall ica +organ ic +ðŁĴ ¡ +ki dd +d ances +pemb ro +wash er +m its +om er +emo tionally +tan go +ip o +do cks +scan ning +spec s +tho m +the ology +emer gen +om i +g pa +selec tions +un necessary +ima ge +ter s +induc ed +gi gan +rent als +supp lied +m fa +shan kar +lat er +pa jam +cla ve +Ù ģ +ma hin +carl son +avi an +ano va +kati e +aj ith +design ated +chocol ates +investig ators +gla zed +prin cess +er ry +ra gn +ou rable +hr u +sun dance +peuge ot +steam punk +gh lin +gre ase +hi res +z ap +per ce +j ill +tom e +he hehe +joy ful +mae stro +ni shed +gene alo +v ich +p its +fox es +good man +emer son +lo bes +con verse +o ats +thom son +ra him +mal ware +ah i +man kind +re sin +im g +sw ood +kin der +sc roll +ar a +sak ura +ro bbed +xi on +ny a +c ism +ce dar +be in +mour ning +tor to +heath row +done gal +bar b +hydr ation +k or +elim ination +su pdates +hill s +appe ti +star red +ko m +gw en +dd d +cra y +sc anner +personal ised +seren ity +re design +meta ph +box ed +judg ment +no se +ë ¹ +er ad +ac ne +supp liers +ener getic +v om +as ap +ðŁĶ ¸ +ir vine +hat ch +la ss +ad ren +waff les +accur ately +ici o +itt le +se un +occup y +web cam +thene w +ent es +ga i +j w +accoun table +vis or +ir rit +licen sing +hudder sfield +gen ie +ðŁİ ¾ +atmo spheric +ten sions +spart an +clif ford +ol an +north bound +ame en +cen sor +u el +ster y +$ $ +far rell +hy ster +cl t +se dan +rep lied +descri bing +micro wave +sla b +pro sp +assi sting +ru bio +e than +hh hhh +gu ay +z man +ra ise +roll ing +o e +n ile +ambro se +scar borough +hero ic +coo ks +mor t +chop ra +ðŁĮ · +to b +shav ing +stac ey +dor m +motor sports +wi ki +fol ds +sp iced +stress ful +liter al +fu dge +pe ggy +wa ite +tre sses +se sh +pr ic +ðŁİ ħ 
+fri ght +r va +mumb ai +po m +tt v +cel lar +tom e +andro id +dor is +tsun ami +tin der +o ec +m wc +dor tmund +no thin +l iti +so u +believe in +at u +kno cks +mag ni +ss sss +ro hit +ine ws +ang i +m andy +ke ttle +intermedi ate +av ant +cur l +endor sed +ori o +ur t +consider ation +wi res +shel ters +b ino +vik ram +imple mented +ly dia +bu k +paro dy +c news +under graduate +canu cks +sam i +polit ically +ro tten +gh z +tex tiles +over load +moder ni +recre ational +fli r +bat on +typo graphy +ov ation +intrigu ing +pilgri mage +al ge +ad ays +tcm party +sp elled +cur ls +boo ze +ste m +ann es +ir ls +spon ge +sho pper +sig nation +bra ss +mi stress +le ah +beg inner +lau derdale +augu st +pre school +ta ping +tai pei +execu tives +b d +rhe tor +esc or +immun o +deeplear ning +stat ues +it us +manu script +ly ric +cor vette +mol ly +la ge +de p +cn bc +le st +je ssi +fi fe +griff ith +oppo sing +ran g +dr ills +respec tful +p ity +d ell +har ding +play boy +blo ke +shut out +k ili +o sp +se attle +bc poli +mis es +journ als +team ing +es ther +fre ddy +Ķ ï¸ı +metr ics +no tre +gar ry +for ty +navi gate +perio ds +bened ic +j id +da w +ance stors +restor ing +con g +aller gy +tit anium +c ence +lean ing +ab bas +v ast +uc f +roof ing +e man +seve rely +vo gue +ve au +in bound +d z +tane ously +stret ching +man chester +dr yer +dav is +kan th +the game +it ted +re tain +el les +conge stion +frat ernity +ol lie +lo ki +fre ely +cho o +pon y +sc ep +tab ly +bal t +rock n +di me +lo gging +ðŁį · +ad u +ha voc +water ford +char is +swee tie +run ning +ner d +erdo gan +z ara +weigh ing +fif ty +pre cise +low ell +kurdi stan +r yo +or th +syn th +lin ers +phenomen on +art illery +il legally +constru ct +nostal gic +gar th +al ta +shel ton +a sean +w ander +dur ban +di versi +bon o +cl on +le man +sh un +obstac les +appet ite +fe eder +respir atory +di xie +formu la +an to +so ber +extin ct +au c +ing les +legitim ate +; ; +min nie +ipsw ich +dram atically +ðŁijı ðŁı¼ +ingh am +milit ary +mon et +us navy +for k +dun no +play er +q otd +st oo +ex or +ethiop ian +film fest +pe red +c ate +sau di +in ner +sin cere +tion ality +ale e +de eds +cooper ative +ir onic +cro cod +br ary +post season +cam per +can ary +e in +exten sions +nb d +sher wood +spo kane +hu mp +jit su +ê ¹ +dar yl +p si +stab bed +offer ings +expe cts +cav al +body building +fr aming +f ca +ye arly +bom bed +sk il +resear ching +jud iciary +gree ted +tu dor +mil o +innov ate +ðŁĺ Ľ +r hs +ru by +contribu tor +fam er +soci ally +m lin +fi ery +ut ter +beau t +it os +de voted +rain bow +bar ney +pe ren +ar jun +r na +gab by +ut i +hann ity +pick le +ser v +qu akes +pp e +fe m +wh itec +j n +victor ies +ðŁ§ ¡ +gol fer +congratul ates +resul ting +mechan ic +ur ve +cen tered +kie v +an s +in cub +< < +c mo +bestfan army +dap h +en ham +on cology +ku sh +t xt +ori ented +fashion able +c sr +sa hara +r ack +pd p +han son +ภĩ +ti ers +ra r +pan am +in sky +sa hi +testam ent +asth ma +in her +fisher ies +or der +ho we +gall on +ep is +suz anne +drow ning +paneli sts +ðŁĺ ² +ë ¦ +al ach +commemor ative +at tribu +ðŁij » +mo o +visi onal +week sary +gu st +ak in +poin te +ee e +di spar +ni pp +dent al +st all +pi an +bor e +ul ster +tic k +ir r +tae hyung +micro phone +bermu da +ga ard +el er +plumb ing +hu gely +âļ« ï¸ı +race way +cam bridge +mar cel +burn ley +to ast +holly wood +fa sting +me red +hib ition +ca pped +benef icial +ow ning +cont amin +arab ian +to on +cap ac +hul u +sm ir +nutri ents +se in +graph s +con ditional +ðŁij 
ħ +or ac +play in +nor the +tor nad +mar ian +ju mbo +lex i +incredible india +road to +uk one +confu sing +sp h +shan k +pi ed +mq m +positi vely +sher ry +path ways +consi ders +tof u +argu ments +resil ient +che tt +with dra +ter o +ated ly +sw ana +he b +fli ght +har ley +decre ase +kind le +book shop +³ ï¸ı +marty rs +sm ur +mc cl +concer to +sti me +rejo ice +app lau +cle ment +mer kel +jai me +im mortal +isle of +mar co +youtu ber +stal king +me too +st ack +sp ouse +u st +lu v +âļ¾ ï¸ı +eque strian +ev ing +fl in +nick name +the big +as ar +st acks +wal ker +bor a +kidnapp ed +hur ling +humb old +rec alls +co pper +ann is +se o +mer ger +mu ir +ad dy +ðŁĴª ðŁĴª +be x +cr acy +con an +congratul ation +mid st +âĻ ¬ +for bi +op tic +cr ate +crocod ile +mad agas +secur ing +ast on +o gue +savi or +salis bury +love it +fuji film +cast les +as st +ar rows +sp acious +tr s +poly vore +progre ssion +m ri +nel son +bi m +indic ator +o da +pe pe +re signation +gu t +sne aker +log ically +az y +are lla +te aring +jo shi +ssion ism +q pr +mari ah +p x +ble ed +mi an +med ley +we iss +ker ry +gat ory +at al +madi son +av enger +nab y +pl and +gi les +fresh water +d ington +ta j +demonstr ates +n tv +bul bs +sunday morning +pe ake +souven ir +wa h +ton nes +m kt +complex ity +con den +ross i +b ing +y ds +su k +n go +mid land +ol y +life is +ri pple +mo reno +dd ers +tu s +á ĥ +bou l +x a +hol dings +wn y +shadowhun ters +ke i +asp ire +m ous +ow en +so ak +skir ts +moun taine +stor ming +ch rome +ri ots +sar ato +amaz e +less ness +nav ar +crit eria +ra fa +indul ge +ay er +por to +nam o +........ ........ +yi elds +val le +j h +mac ron +sa ins +dur ant +tra ilers +wo t +confeder ate +sh rin +id ol +form ally +ten e +motor cycles +than g +no de +bang er +dal y +p ats +enroll ment +au ctions +at al +ar bor +lo gos +de arest +trans action +dom ingo +fle a +ser mon +de ck +sin cere +questi oning +juli o +was p +pre tz +armen ian +k ham +inflam mation +picture sque +acci dental +film makers +ðŁĺ ļ +ðŁĴ į +ca sey +so b +yee zy +good will +parag ra +ss ly +fe ather +dy ed +assassin ation +na de +b cs +app lies +femin ine +fe u +ext ent +depu ties +l ack +psy chic +go i +kill ings +pse u +ðŁ¤ ª +un c +mar l +tan e +mck enna +sur fer +influ ences +free way +hack ney +mal aria +el and +te au +rema stered +Ø ± +raz or +gg y +cor ro +lak sh +fla ir +honest y +hoor ay +de pp +am c +wedne sdays +q a +ed its +- $ +se villa +dou bled +human ities +c cot +som os +r ine +af a +si oux +re construction +wel ding +th reads +am ish +encoura gement +po der +bo ck +bal m +p tions +stand up +accompli shments +guar ding +convic tion +ac ion +napo leon +depic ting +att ack +su i +wear able +âĸª ï¸ı +pot ter +esc ort +vis e +to ts +bo on +event profs +angu lar +womenshi storymonth +bar row +sch i +ac comp +ti k +l end +kensing ton +wol fe +st acked +cra shing +exhi bit +wing ed +sab rina +ma sa +k ms +alway s +et t +pla sma +counsel ing +pick les +nfl draft +mr s +inev itable +coura geous +staf ford +writers life +ho s +e j +gh yun +trade mark +adri an +influen cer +coron ation +ra ging +explo red +usa f +excep tion +eu x +tan ker +sw ami +pac ket +ðŁij¨ âĢį +f en +she en +a ero +j l +re gal +nw t +au ster +meh ta +char ge +a ste +b ate +inf eld +racec ourse +collap sed +fle ece +z il +al lie +alternati ves +geor ges +ðŁĵ į +quir ky +fc b +nat geo +philanthro py +bra i +every day +ðŁIJ ° +ach ers +ja an +fin es +q i +fisher man +distin ct +gri mes +nation alist +comm ence +ro wn +âĢ ³ +z ing +f ter +hr w +baro que +bl 
ender +kitt y +hoo ks +c ited +w anda +consen sus +reinde er +an and +supp ly +me ds +v n +ol ph +rat chet +shel don +secur ities +ë°© íĥ +cro m +mosqu ito +j eric +im mac +dimen sions +â ¤ +di ssi +sponge bob +dami en +steven son +jo anne +del ish +yi kes +than x +surve ys +postpon ed +alco holic +al ised +ðŁĻı ðŁı» +do ch +sen tim +mered ith +com pares +b ago +happy days +mo ss +ãħ ĭ +ne c +gn ment +frustr ated +comb in +ri v +ec lec +col lo +compli ment +actor slife +ct to +nic ar +op hon +apar the +man t +ja de +trol ley +optimi zation +eye on +eco logical +qui st +ep he +ॠĩ +cin co +appo ints +old school +c pr +behavi oral +min aj +:- ( +tag ging +ev al +jo aqu +ðŁĺ « +ha k +de me +jama ican +so s +hy att +hand book +libr arian +hanni bal +pump ing +ch om +f man +ga i +hu ll +respon ders +green ville +n us +vau gh +ðŁİī ðŁİī +ta xi +gold berg +man tra +te ase +forbi dden +metho dist +ati vity +* *** +ec t +mc gr +Ħ ëĭ +se b +amid st +disapp ear +thy ro +phili ps +er ina +v icious +stream er +million aire +ma p +str ick +hack athon +gh a +ed ic +mi ka +pe ck +ill i +anto ine +ar ca +op tic +ma ure +ðŁĩ¦ ðŁĩº +cla shes +man ly +âĺ ģ +al var +and res +me i +el m +ww ww +al tered +l te +ê¹ Ģ +mo jo +for rest +thal ai +non t +spee ches +acknow ledge +ign ite +x factor +ðŁ¥ Ĥ +mead ow +disru pt +debu ted +scrim mage +pharmaceu tical +fi dd +found ations +philosop her +et al +publi shers +bo ys +c ke +ru gged +opti mism +re be +phil harmon +nar cis +ral lies +lu is +go blue +fol ded +un acceptable +optim al +li sa +pol aro ++ . +en za +âĿ £ï¸ı +mon opoly +grace ful +dair y +du a +diffic ulty +judge ment +o si +mer sey +flu x +new found +ter ns +dimen sional +in vic +al ba +am it +abudha bi +alger ia +autom obile +the ad +lo tion +acceler ator +vac ant +iti on +lu f +al ic +pl l +bla zing +ba z +sen e +ðŁij ¼ +villa ins +direc tory +eis en +to ck +broch ure +ri pp +hb d +zayn malik +nic he +lo lol +certific ates +mor se +fac up +x ham +un wanted +im ports +carne gie +fan sign +mo u +r alph +destroy er +sw ing +trek king +cili ation +pit bull +g aps +ho well +defin itive +mc le +f ps +et z +bol ly +lyn n +gan o +at ure +fur suit +co il +na v +but ts +tro jans +eu re +en ko +sch umer +horri fic +install ment +br b +subur bs +a bel +vi r +de sh +cun ningham +ðŁIJ » +span n +sch we +ke mp +tr u +ste alth +qu es +le w +deli ghts +ko ch +hu mili +cr iti +il t +sp ells +mi ley +car ic +ðŁį ´ +lc fc +substitu te +oun g +? !! +af fir +predic table +class of +er r +cy press +chand ra +age ing +__ __ +ther land +don caster +el in +yo shi +sail ors +har ris +jo anna +niger ians +h ers +pla gue +pro cra +k no +can ton +busine s +un h +pra kash +c in +bow en +co ating +m als +be gging +smith son +ponti ac +sp ies +dam ian +pl ine +und ant +al ta +one ss +shame less +da q +bb m +wal es +stam pede +ser um +Ù Ĩ +cataly st +x n +ab sc +free zer +ch un +ari os +mc cre +fore head +he ars +damas cus +tac oma +ardu ino +encoun ters +stan ton +lg b +ab as +" .. 
+ke te +drac ula +ele m +g ne +zepp elin +la brador +pul p +op tional +or n +russi ans +san itation +hil ary +etsym ntt +pen alties +au st +ig ans +olympi an +medic aid +vers ace +va pe +re stra +pe ep +sexi est +st alls +di le +the a +punjab i +pupp y +tuesday motivation +ðŁĵ ļ +the flash +roc ket +mo dest +chihu ahu +on na +k sa +hur dles +ca ve +fail ures +sp lit +bo ho +gur l +disappo int +ho ward +nug get +fran z +stal ert +kaz akh +for getting +sch ri +ag ate +am at +eve rett +du et +veter inary +juli an +ch ills +bra ve +ghost busters +lan do +gre ets +profit able +d é +ti r +ze e +om en +pd x +gray son +har i +fix es +stab bing +swim mer +symb ols +compli ments +po se +func tioning +th nx +gi r +corpor ations +bar low +lo e +off season +distin ctive +marvel ous +nik on +enri que +ky u +ja ws +amo to +lom bar +travel blogger +fa h +ouri sm +tri stan +so e +ce ase +ðŁı ħ +z ac +mck enzie +taxpay ers +swim suit +bl o +les ley +kan sas +w ks +ki el +provo king +my les +str ing +kangar oo +galac tic +fif th +s ke +we ir +ll is +mat ory +ðŁĩ ¿ +un ci +re productive +roo ting +ti des +gad get +.... ...... +alex ander +bow ler +scre w +apo log +eri ka +wal ters +shet ty +lan e +ban ter +as ant +me so +v ain +" "" +us i +fer din +accomp lish +man sfield +bom bar +collabor ating +cla p +it ure +s da +smo ky +na k +im person +car la +com ra +bur gl +lo co +ti es +in hi +trac ey +se is +diss er +rr rr +dra y +prote ct +cor ona +hun ger +ck en +c eli +trou bled +predat ors +fic tional +shav ed +riche st +metab oli +ful ham +gro oming +mono chrome +wa sting +as co +ast e +ti sta +remedi es +ung soo +south end +perman ently +bu mble +procra stin +ident ical +practic ally +ma scul +su ke +assu red +val erie +devi ant +grizz lies +thi er +pur a +ne pal +not ts +bil ateral +spo il +car mel +cine matic +ph l +ni fty +ma o +hypo cri +la ser +pan try +mathemat ical +el isa +coordin ation +bel mont +a it +radi ant +bo iler +man g +f ag +cr c +h ams +br in +â¬ĩ ï¸ı +famil ia +âĿ £ +sab er +ru pert +gg an +rit z +mic h +sal ford +le vi +gra l +ðŁĴ ¤ +n ino +ce d +business man +ul tr +sim ply +compre ssion +pa ins +hal t +ë°©íĥ Ħ +landsc aping +n f +croo ked +er d +itt in +ddle ston +sur passed +ino a +da g +bl en +exten ding +at ing +al gae +ball er +u mar +snoo ker +col lu +flo wn +thu b +ridic ulously +ki sh +op le +di re +as ser +ari sto +sc iss +h ating +trou ble +syl via +suc cul +plo ts +sincere ly +al er +laure ate +br ack +att n +rif les +me to +collec tible +cu omo +conte stant +consist ency +ant z +rang es +abig ail +de b +mini ster +grow ers +an oo +hoo ver +dream er +nu cle +resear ch +mi y +sha hid +ma v +d honi +cin i +do j +hin dus +part ying +dal i +alon so +inform al +clark son +it ton +ki an +cit yo +mor i +la sted +as pen +libr ary +susp ici +qu at +den ial +fol der +ch ori +swee ping +eni x +ðŁį Ĥ +Ø Ń +nas car +handmade hour +mou l +heat wave +em er +exam ine +ib n +gr ind +po v +tion ist +m bo +she ila +integr ate +om es +take away +cer v +con nie +tic ket +ce led +bi en +visu ally +madagas car +sor ry +gu i +park run +tra its +la be +pois oning +à¥ Ģ +vi able +bohemi an +denti stry +bad os +spr outs +mask ed +te ddy +ðŁĺ · +sa f +sa as +ji ang +ti ght +spe aker +withdra wal +bc n +as signed +class rooms +fle ming +ðŁĴ « +super girl +tot als +table top +e books +horizon tal +cra z +flu sh +j ard +c dc +er son +ãħ ł +green wood +ni h +co x +ad a +lit re +go ing +v icky +cur ved +lou ie +gra ins +hy e +lon ge +reme dy +tra inee +san jay +super stars +ma ser +man u +s age +wh l +ðŁĺĤ 
ðŁĺŃ +ðŁijį ðŁı» +m sd +en z +rab hu +j oo +gh u +ac er +e po +resurrec tion +justice for +bl ended +mo da +avalan che +france sco +re spective +g s +ye ast +wel ch +devo tion +ge tin +athe ism +am ic +carol yn +lo c +ld nont +ave c +us da +le gged +bra very +b lower +cow boy +he h +sti ble +buff al +chann el +run chat +âĺķ ï¸ı +ide ology +best seller +y oo +pe anu +bon ne +fel ic +edi son +fr actu +naren dra +pp ets +seym our +ri viera +he ctor +necess arily +bi anca +soci eties +the best +w g +sent ences +win k +vacc ines +pal ooza +jam ming +as f +mp us +agre ements +ec k +ba c +hon ore +com pul +wild cat +im posed +yo ga +hud son +can celed +l ich +fu zzy +es que +ch uk +w vu +se k +fli pping +r hon +wi shed +wh a +cap ability +len ovo +ìĨĮëħ Ħëĭ +vi vo +tv d +nor a +sil k +pas adena +yo semite +valu ation +clo cks +u ber +mr c +dar kest +au bre +ss o +bell y +wrest lers +kill in +lou der +buck ley +ge el +ad on +un s +appe aling +ðŁij ¯ +semit ism +list ens +fit z +ãĥ³ ãĥ +ny lon +ar ty +seem ingly +hal a +su ited +et y +she ds +mu ffins +ap ric +um ents +u ta +jam mu +chelse afc +star z +yo ko +roo t +clean sing +di ar +pione ering +ihear tradio +dig iti +fin dyour +can o +ðŁĴ İ +z ol +spac ecraft +six ers +moi sturi +b ile +ti sts +hor ton +rang ing +colum bi +mete oro +senti ment +ep l +foo th +text book +drain age +r ly +sc ue +imran khan +ðŁĴ ¸ +margar ita +ed dy +predic ts +gamer gate +advis e +growth hacking +love you +ug and +v f +beng hazi +s later +ne wor +ch el +independence day +p np +cul len +hoo dies +num bered +brit t +t sa +kl tu +s ages +mom o +onep lus +col l +gu ts +w ta +mesm eri +enh ancing +chiro prac +j is +teen agers +m one +constell ation +sweep stakes +e ze +slovak ia +la ye +pear ce +wa ver +po gba +k ron +sur geons +mar x +ti d +gg a +desc end +p ours +upri sing +wal la +sab bath +bachel ore +mack in +k am +peter borough +hor a +ðŁĮŁ ðŁĮŁ +think big +r j +hy drau +sp al +univers it +ðŁı ī +mail online +league of +ten ants +w ally +lan ce +heav ens +dd r +bol ts +am ir +i phone +ci gar +en du +re i +el abor +r inging +john son +characteri stics +sal oon +algori thms +tal kin +m tn +di ve +region als +ff ice +hat i +deviant art +so tto +shir o +l ama +k we +f aded +por ting +tu mmy +est ates +buen os +ðŁ¦ ģ +beli ever +pen etr +dar n +sp ite +can opy +fashi oni +t illa +pet als +eli jah +bra wl +marty r +ë°©íĥĦ ìĨĮëħĦëĭ +mid town +eric h +d apper +sm town +me gam +ww w +le le +on s +cat fish +fir th +fossil friday +ball park +th aw +pot ent +illi e +cre ep +car p +so ap +gun dam +infe c +yy yyy +ठ¨ +z ag +rit t +calcu lator +bo ca +ok o +to ad +threat en +refin ed +olym pic +accompli shment +bacter ial +a ji +tat um +feli z +she ed +j at +th ic +jam al +ðĿ ĺ +lin a +ðŁIJ ¯ +jo king +yot po +pin ch +ak ron +her b +motiv ation +li a +ho stage +cre ek +gam ble +russ ell +patt i +fo tos +c pc +bro ken +back the +cla ys +u mm +stock ton +mat ernal +ü r +la kel +cent ury +be k +infe cted +ภ¡ +smack down +man ned +ta hoe +sm es +bas a +su la +augu sta +. 
* +rohing ya +gre ed +counsel or +silhou ette +gra vit +cla use +' - +bo bc +occa sions +now adays +dic tat +be ard +n ally +brigh test +kab ul +inc india +dhan ush +archae ological +che ape +mizz ou +d hi +ov ski +bax ter +asse mble +à ¢ +gi gi +ac am +wis ely +haz ard +north ampton +âľĪ ï¸ı +me th +bla sting +re unite +mu lus +ali zes +t read +mil a +ed ward +ko va +pe sto +ðŁij ¶ +vit z +hydrau lic +refurbi shed +mo tel +isab ella +hom me +sever ance +uph ol +mis erable +f ari +lat ter +ef er +crack ers +es l +ac io +yy j +in an +ec b +z ind +pan as +tru cking +re ed +sh aker +burge ss +em pire +ag nes +n ington +art works +fr s +ti le +bi ome +eu n +ch ong +americ ana +god father +go blin +i shi +! ). +temp ted +gen omics +mand ate +ck y +ðŁĴĻ ðŁĴĽ +som ali +br andy +in ven +spoke sperson +pc b +yu an +h g +fa z +starwar s +ro wan +blue grass +don g +d day +trin idad +er ton +ban ning +re tention +cu red +tober fest +re set +we is +deta ched +behindthe scenes +immun ity +ph a +bra y +ðŁij ½ +ran cho +ram say +est onia +nd tv +] . +cab aret +tar o +d v +show cases +plu m +ðŁij ¸ +son oma +pre pa +memor ab +e stu +drive way +u les +magn us +x r +nn n +much as +en ge +stre amed +fore stry +audio book +tro y +reck less +kil om +ru ler +ra k +proce ssion +i ons +po ole +noc tur +wh s +farm house +per a +par me +hypocri sy +s ics +v ant +cas k +holi stic +au st +Ð ¿ +in do +ðŁij© âĢį +di so +disp atch +ol sen +make it +en nis +cent re +ar range +ðŁĮ ¼ +sal ted +ea siest +f ate +reg atta +mo zz +ac an +sin i +g ically +ch ops +chick en +work in +ha gg +invol ve +wee ds +book day +wake up +ky r +michel in +fu ss +re juven +vac ancies +incar cer +m st +sc ents +sovere ign +kick er +à § +bo d +âĢĶ > +sa h +mob il +shrop shire +oph one +dress er +mis suni +hep burn +i mo +foli age +diagno stic +as san +cycl ing +guil t +c sa +puertor ico +win elover +wake field +do ggy +k he +pa pp +co g +al lot +cu ck +poe tic +mi o +re vit +mag ician +ç ¥ +ant enna +west wood +mber g +lux e +oat meal +Ø ¬ +te at +ffe e +sear ches +l ly +plu to +el on +let tering +inno cence +fa i +ann on +telang ana +ma it +neu ral +can ni +ar oma +a stor +fe x +co cac +mon etary +f ent +un sure +' @ +indi rec +teh ran +isol ation +li bs +make up +merce des +ff y +he tero +de o +sco m +cur sed +veteran sday +franken stein +shre ws +de co +ge ese +lefto ver +ha did +vari able +acade mics +carol in +under going +vari ation +na h +ssi er +gamer sunite +pur suing +emer ged +ll ers +control ling +ro aring +mete or +vol t +daw gs +be aver +is life +bathro oms +aci onal +pre vent +lake district +in als +y ani +gra bbing +sac ks +le z +sw ay +k ool +time s +klo pp +la de +con cord +resul ted +revi ve +recon ciliation +ol and +az z +gir o +mand arin +de en +nutriti onal +is coming +van i +aw www +der ived +love your +stop the +shou ting +nov ak +ðŁĻĮ ðŁı¾ +lo af +displa ying +sunday with +ma guire +ch eri +ðŁı Ł +re match +qu ic +Ú © +y in +ðŁĺ ¹ +ili ve +z ip +our ke +down loads +sw at +missi ss +care rs +t ment +proper ty +hahahaha haha +gi bbs +sur rey +ar ise +tic ism +sti a +ir ling +fro g +co se +bas sist +fore ig +lea u +pil lows +hol la +eli e +disclo sure +peanu ts +inte ch +ww c +plun ge +trium ph +cor i +sli ppers +ðŁĻı ðŁĻı +neutr ality +ma re +hair y +gang ster +hu mming +cust ard +mer lin +ale a +s by +dam p +mo han +ver bal +j st +gu tted +b jor +un finished +ðŁĩ¯ðŁĩ µ +un happy +âļ« ï¸ı +by pass +at su +fis cher +sa v +afric ans +re use +mid way +demo lished +ger rard +her cules +Ä Ł +medic ines +cl icking +sur round +jo 
ong +wav ing +tri bes +wet lands +offici el +argu ing +l le +do va +su zy +club house +ne gro +ob tain +ga o +gl ance +assi st +ch os +ãĤ ¢ +âĺ ķ +adri d +occur s +st ans +par don +livel i +emplo yed +re visit +ff xiv +bb le +ne aring +min er +ðŁĺ ¹ +giov anni +up to +mar vell +mar se +to wels +cb n +engine ered +y elling +spart an +si ans +ðŁĻĮ ðŁı¼ +se v +coyo te +sta di +t cm +app en +shenan igans +open access +so aked +ma squ +le vine +stro kes +l k +aparthe id +hipho p +char don +may may +ha asan +stri pped +fr o +scri ption +f ton +h f +pri sons +marsh al +ķ ãĤ +an cho +com promise +classi fication +buzz feed +bblo ggers +deser ving +) / +s way +ob o +camp ers +poder nfamily +p oured +bri e +squir rels +se ize +: # +le k +ti mb +st acy +nas daq +repe atedly +br at +mi ghty +competit or +mah one +de si +o ke +bm w +shi e +f cb +cheape st +minim alist +par amount +n ate +har as +insan ity +lat eral +ment ality +mo zam +ta pped +yad av +u sp +b way +the od +bil t +ra ids +em press +adap ted +pat ron +nut shell +ag ra +be aded +sundaywith marsha +vi king +proce ed +main tained +thinkbig sundaywithmarsha +sn es +mus ica +to wer +ch ab +bo k +sm t +insul t +harve sting +windo w +ru ther +be ige +dec al +indic ate +ma iling +ri ft +po le +ander son +ch oral +sp ride +l ili +ev elyn +imrankhan pti +.... " +ke red +un dp +water falls +se ars +le mans +world series +ri el +ani e +app ar +score rs +lam p +a than +phys icians +qu inoa +refu sing +vu itton +unle ash +s la +pat i +shou ts +inten tions +fo amed +europe an +neighbor hoods +me er +man son +du h +br at +con es +bow l +kazakh stan +ठ¿ +in appropriate +del hi +ketch up +ful ton +s ys +consul t +gar field +to go +f ml +f led +b ds +facilit ate +ree bok +selfi e +elev ate +activ ate +bi ble +ca wx +b ys +cam ille +sy ou +sk ool +her t +w bc +ple dges +recor der +po sh +ac re +so aking +mat il +v sco +shoot ings +pla r +e con +ðŁĻĮ ðŁı» +rashi d +u bi +ðŁ¤ ¤ +sw inging +wi pe +rap tor +m su +music video +dur ham +at tic +apar ty +fe tus +activ ation +aa z +motiv ate +ðŁĴķ ðŁĴķðŁĴķ +j al +ठ® +ag on +sche er +stal ker +fo ster +az zo +tele gram +vi gor +s laugh +screen shots +entrepre neu +kri stin +inten tion +ch illi +fr action +don a +ge a +tc u +s ite +la k +em il +d nt +bor o +wil kinson +re cu +ato day +t anya +bl anco +cd n +brilli antly +g cc +ac c +evacu ated +ther ine +den ny +cait lin +she pard +pou ch +hand held +sou theastern +ha a +à ´ +re solutions +led ger +sr in +r ar +shat tered +chim ney +im with +mete or +hand led +ra ke +town send +en han +shi py +duc t +tw x +inflam matory +war hammer +theat rical +gro s +sk ar +sco tty +ni el +tit o +tin i +conne ction +_ . 
+goldeng lobes +sha q +ðŁı ³ï¸ı +hall way +fron ts +effec tiveness +gla ston +d hs +ex pi +to h +c pl +sc s +re o +ha g +resemb lance +hor an +abu sive +qu er +virtu e +cho lester +a q +shan e +m ce +carri ers +di stress +re wind + ¡ +voo doo +int act +ann o +ðŁĺ ¤ +pi led +adi a +ãĥ ³ +en ow +di gs +light ly +goo fy +turb ine +governor s +con te +re open +pa h +i ve +cra fting +swee ps +jo di +an de +zu cker +kaw aii +o ko +v ai +out line +kri sti +ts n +insp o +qu int +fil thy +lyn ne +listen ers +depar ting +or d +t weed +, & +ale k +sel fish +nor ther +recogni zes +i ps +be s +a ed +w ills +pe at +surround ings +mon uments +ais le +be cker +la v +quant ity +v ah +helicop ters +tu cked +alv arez +sha pe +o bey +ad diti +road side +m ite +bl ers +ep age +j au +ignor ant +b ins +lu lu +x o +c fo +ee eee +apprentice ship +shef fiel +to i +ho k +faken ews +deplo y +aid an +husk ers +ãĢ İ +west brook +mi ster +confi gur +car r +fic a +proceed ings +ha w +ste ak +mur derer +pay day +a jo +p vc +don ates +bi af +nom nom +be it +k ali +x rp +ahmed abad +se mic +che y +x tra +an twer +head lining +squ ares +roun ded +flu ore +bol d +disa sters +am oo +gener ic +cran es +brief ly +gi g +auster ity +anticip ation +for ti +treas urer +cann y +ce cil +dete cted +check list +ภ§ +pam ela +bar bados +an field +hear ty +tx lege +peren ni +arro g +ing ram +âĹ ı +ty ne +spo on +r ation +am ba +m be +cam el +h hs +york shire +reflec tive +fre aks +to k +ju do +partic les +du bs +ban jo +accred itation +prover bs +over dose +inte gral +gu ang +mc s +super car +af b +al vin +ail s +x tre +st aging +tw ent +rabb its +mar o +inste m +dol l +cr ay +sant ana +ble ach +mini ons +che ap +man t +di vers +catal onia +lo is +mat ri +cou gar +kay ak +e gre +p so +a ia +å ® +char lton +tr acked +sc ari +pe tt +f wd +x in +gra vel +br ic +bigg boss +ar den +hu gging +pal ms +st v +li mb +the movie +handic ap +ri me +z ai +stu b +indi a +lithu ania +rhy th +p ita +maced onia +high ered +brid get +schwar z +ske let +hi kes +ant arctic +c ps +mash up +Ð ° +n ell +chand ra +he ir +an us +sher idan +mi mi +muse u +bec ca +an ir +bar rie +dioce se +compar able +ðŁı³ï¸ı âĢį +yuk on +me p +hor mon +mer ic +al f +con quered +christ church +ðŁĴĻ ðŁĴĻ +hazard ous +poo h +cont ing +retro spective +par ame +na ir +con sor +ho tra +astoni shing +cater pillar +u man +ti sm +t vs +serv ic +croy don +mor ales +c g +cu m +te ur +scan ada +s all +magno lia +el ise +th our +à® ¿ +ag omez +phel ps +ë°©íĥĦìĨĮëħĦëĭ ¨ +wh os +weav ing +si sd +pro poses +cro ws +pre sale +econom ies +bernar do +sha hid +air show +mc cann +hor ticul +nr l +du el +mongo lia +tou lou +requi rement +struc tured +ed i +o lives +he a +cu ter +Ð º +enthusi ast +harri et +domin ion +sub mer +ðŁį ĥ +sa ab +nes burg +mo ff +def ended +bur t +rewar ded +gold man +op tics +khali d +house holds +buc kets +ce cil +che ss +substan tial +ef l +oper ation +evalu ate +st n +rece ssion +l ll +tom as +tru ths +ak bar +s words +p act +embarra ss +ha o +ay urve +scrip ture +ny cc +op t +di ameter +sc ented +organi zers +re lat +ha e +dream ers +de se +ðŁĮ » +restric ted +n ale +r hp +dol an +mun ster +ha ired +consult ants +jo ints +hu mil +d ill +relent less +t é +af il +ut ilities +japan ese +condem n +pet ite +colli de +q f +peach es +cou rier +l ore +âĺİ ï¸ı +reli ability +ch uk +ðŁĻ ĥ +stu res +ge ther +ho stel +bi er +- _- +â ĩ +e ze +ta ilo +di ent +blu ff +chu ffed +pil ip +mon arch +e em +bu chan +b ick +op au +ku ps +ภ¢ +pist ons +sp ins +m and +ce st +bur ne +v ile +cher 
ries +bec kett +need les +pan ch +ë Ĥ +haha h +trou bles +insi sts +do you +g mc +mor tar +deleg ate +in n +g anda +sin atra +ठ¤ +spee ding +pu pil +pre mises +ali gnment +pi kach +as us +j alan +Ø µ +lime stone +fol kl +parme san +ce il +mo y +shawn mendes +ac up +hu st +ot es +med ina +ma di +gta v +censor ship +ar g +swe eney +sy kes +col o +foot steps +cann ed +adv ance +gta online +healthy living +ðŁį ¾ +a ig +p ality +oc s +he brew +im minent +berk shire +jeremi ah +out going +bak er +entr ata +ma ids +gro ves +bo c +a del +m fw +con science +arm ys +nut ella +conte stalert +novel ist +la h +ban ker +marque z +ðŁı ¡ +to ff +out age +gr p +ðŁĺŃðŁĺŃ ðŁĺŃðŁĺŃ +musc le +du dley +nvi dia +mi di +m uni +ess ays +dat ac +car ter +ภ£ +t ans +i ves +public ations +al er +ok wx +il u +cu tt +har p +out law +luther an +br ill +bo lic +do well +green land +be sties +path i +pay ton +gue st +har den +ðŁ¤ © +ann ed +evacu ation +po ised +mc der +b han +o i +envel ope +ci d +ca vi +ta pas +book review +grey hound +âĻ ª +fe ud +lun gs +for te +rai der +ff er +oni x +dep end +yn wa +rel ating +de vs +ðŁĴ IJ +acqui res +d ha +j yo +priv ati +can ine +k b +cra b +sar din +imag ining +k j +em por +down hill +ne z +ta eyeon +nick imin +gb p +à µ +w ap +sec co +ma shed +ðŁĴ¥ ðŁĴ¥ +augu stine +diss ol +dic tator +â ĵ +vi per +ed fringe +vau x +hard work +book let +no x +chi ff +ðŁĴ ¨ +observ ations +xbox one +u sher +ke er +lu p +dal las +cal gary +ma dra +di ous +k bs +wood ward +hero ine +lu mber +sea world +o ws +mc ke +maver ick +gu la +cross roads +fan g +s ade +nik ol +chee tah +me c +pp g +er ick +ðŁİ µ +tox ic +bj j +viol a +sp ire +ch ino +tra vis +institu tional +ha as +low ry +w ac +ea e +hu mid +mp ton +ru ck +je w +c ine +zim mer +se f +bhar at +fre es +aam ir +ðŁĴ ħ +z inc +wan e +multi player +royal wedding +e el +preci pit +qu ery +kimber ly +isa bel +ful fill +ig an +vau l +pan e +sc y +dig it +gun n +u tah +dog day +fi on +xia omi +da c +el ast +cha vez +ro blo +g ine +ten th +ab h +ke to +hur dle +na dia +memorab ilia +ha bs +qu an +h w +hv ac +pix ar +ec cle +kram er +accu ses +ðŁĴļ ðŁĴļ +per se +mean time +wa hl +atle tico +âĢ¢âĢ¢ âĢ¢âĢ¢ +ott oman +no vo +k us +conne cted +tru sts +d mv +spen cer +rahu lg +do ve +sto kes +bolog na +enthusi asts +à ª +rockstar games +ted cruz +du ras +s acked +late x +immer sive +cer t +lu cin +princi pals +fa res +sa ils +far n +am ent +saf fron +quent in +check point +fer ris +ex cur +ðŁijī ðŁı¼ +bai ley +se h +ter re +mad am +s band +wan derers +cumber batch +yy c +digit ally +blackandwhite photography +roll in +moroc can +ðŁĮ ħ +din ner +d well +to om +m ye +ez ra +cp fc +war hol +me er +jon ah +no aa +s gate +so on +secu lar +g ating +ti o +dri ver +si ssy +assan ge +ta th +ed mund +bobc ats +ra ji +po stage +stu ds +m gm +kat o +edin burgh +meet the +shir t +fa a +mens fashion +sp reads +wi m +car ts +phoe be +j ars +bot swana +Ù Ĥ +ed war +sk ar +ri ve +gu sty +c tv +ferdin and +su therland +nickimin aj +k v +si us +bee ch +re z +desi res +on ial +camp o +quar ry +lor raine +gil more +ig gy +µ ï¸ı +ho pping +avi z +ðŁĮ º +uni sex +dedic ate +att itudes +ste er +jun kie +rail way +y b +whi sper +key an +k us +ju g +di x +a ins +sum mon +ov ich +sy ed +her ald +ma ison +me ded +wild flower +main land +ri sky +ru kh +over looked +ki c +destro ys +nam an +ki p +z ano +champion sleague +ban dit +quin cy +smi le +cal vin +open ings +ta pp +ol ulu +spec tro +accred ited +ap k +pra ised +bar nett +pol len +premi ered +selen agomez +tou red +screen ings 
+uu u +mis o +en se +adam lambert +guel ph +har yana +hu tto +le ar +l tc +po ached +brex it +æ Ŀ +tt c +pa vement +mon gers +ro e +ad ers +ling ton +particip ant +ca red +ga il +y ates +lan tic +dash board +jo o +feli pe +ssi onist +bu m +s end +a eri +thu gs +luci fer +a he +dete ctor +fil ly +gas oline +ham per +hump day +the ta +the band +fore casts +o hhh +lo bb +hol l +cp u +az u +ad ar +hai ley +bu b +car t +quo ted +an archy +pan cre +twit art +al den +st ash +the less +or ni +belie bers +mor mon +partic le +avi ation +⬠Ĩ +webcam toy +sad dened +cru is +ham let +n ct +roll ins +marque e +saw yer +reli ance +a ura +di ec +soo thing +sig nings +ak is +à ³ +at kins +aer op +ðŁĮ ¿ +y ab +sh ari +con nol +du bbed +manufac ture +convin cing +feelthe bern +ra u +pu lit +on ec +gem stone +ur ging +bag u +ga h +aci ds +fi anc +zodi ac +sn oop +her rera +initi ated +ven ge +profess ors +pro di +stron ger +e mission +bb a +hal le +ta pp +haw an +wh im +compe ted +myr tle +ir port +cold play +ach e +ske p +m son +ss ic +calli graphy +swim mers +me y +pp c +thri ft +po c +re places +commu ter +âģ¦ âģ¦@ +go ers +lo gue +para dig +bas kets +sensiti vity +joh an +atl antis +& & +suit case +anxi ous +l h +str i +gal loway +stre ad +war den +gr ounded +ffici ency +li feat +reli c +disgu ise +island ers +f cofficial +classical music +b mc +en field +bi que +oak ley +bat man +sla ying +ner ves +mul tit +calci um +projec tor +scott sdale +ant ino +gri ps +kim mel +des mond +prote stors +hi atus +metaboli sm +conclu ded +press er +ti pping +sli de +e to +hun ting +aus open +ri k +pp ery +innov ators +pitch ers +ag ger +fun gi +z ad +proli fic +rockn roll +bl ames +ct ar +stam ford +q ad +mozz arella +insan ely +den ver +ph ouse +nom ad +ï ¿ +s ris +pro du +hen ley +pag an +am trak +ru bi +in cl +tu tor +sco tia +wo es +sing apo +fun nel +turn bull +know ledge +gri mm +real madrid +we are +missi les +con sol +emo jis +sne ak +smi ths +ru iz +br ou +i el +ha ver +ðŁĮ ļ +kin gof +basil ica +circul ation +prin ters +ta pping +ri dley +dra gged +ha j +writ er +fundament als +personal ities +me tre +stereo types +bur le +best of +n ffc +ha th +mini stries +a ali +trac ing +pav ed +ł ï¸ı +g ic +insp ire +tu g +ha re +repe ated +ex pon +lol li +rho de +pre cin +install ations +instag ram +az ar +i es +sole ly +du kes +mission ary +van guard +fursuit friday +on d +pol ari +ma st +har an +jos é +jack ed +ec oun +al ities +ne ph +ra vel +moder ated +sco w +s fb +uru guay +as o +ni g +au du +p ints +lat ina +ben z +m itting +char ted +mat ology +cit ro +biop ic +ðŁij Ń +djo kovic +fox y +agu il +so to +an ada +sin king +sc rap +hair s +bethan y +fact friday +ðŁIJ IJ +unlea shed +) ( +contra dic +ram on +coast line +y ong +sn sd +li gan +p ome +mit age +ge tt +wat i +ri sk +so aring +bru sh +f pl +av an +å Ĩ +lar son +sh ear +mul til +blu r +multi media +chun ky +par i +n ani +weir d +cholester ol +char les +dream ed +tan ning +puzz les +fr am +hand ball +ch ag +beli ze +al u +bang s +Ñ Ħ +detec tives +mc g +ish q +bo thered +saf c +mp ing +ten eri +g ays +sail or +an gi +mul ticul +gue ssed +ros é +high ways +bro om +chatt anoo +- ' +see ker +on ed +at f +lu c +> < +bar i +per cep +jewel ry +as ph +sor row +sl ing +mam moth +jac kie +ë § +wilt shire +sa o +can cell +im paired +tor ial +bre ed +guy en +jud ice +tit le +pro spective +applic ants +ðŁį Ĭ +epis cop +e id +b yo +stock ings +ðŁĴĥ ðŁĴĥ +ll p +sna g +keep it +l ough +ol son +matur ity +!! !" 
+cop ter +i sha +bl i +wil mington +tr youts +th ai +ðŁ¥ ³ +pe bble +kra ft +f p + º +ssi vely +li vin +contest ants +tex tures +jo an +h dr +film festival +prov ence +wi do +op end +c si +sto wn +cro ati +ad just +host ile +analy sts +il an +cu ppa +bru m +newfound land +good win +me tt +mall orca +plu gs +bu k +bb hutto +wrest le +sa ire +sho pped +for za +le head +vi vo +ba st +ro xy +reg is +hard working +hon olulu +desp air +young sters +ni g +impro mp +roll tide +de emed +tre ason +ru shed +for ged +ff f +pikach u +bri ggs +do it +ac cent +la us +gla ze +compet ent +a ho +photo g +mid field +le go +har vard +min orities +re illy +slic ed +once upon +initi ally +financi ally +landscape photography +har dro +qu o +mm ers +par kinson +smu gg +read iness +bru tally +glou cester +mp ed +bbhutto zardari +mur der +ye d +dat aviz +sr t +dow ning +bi ans +m ü +fle ck +fli pped +s ly +brilli ance +ri m +k um +bubb a +ko i +knit ted +sor g +ma is +ðŁĮ ² +ti ss +su stain +sen su +ak han +zi est +exam ines +chardon nay +user name +short list +re bs +on o +dar ing +hard wood +che que +righte ous +light ening +dir k +shra dd +du ra +down stairs +sh al +ami gos +ru ff +s law +ri es +red nation +man us +ðŁĩ§ ðŁĩ· +distin ction +u bun +dur an +mi gra +thi ans +la ver +domest ic +k x +jaz zy +justi fy +belong ing +insul ation +color stv +drun ken +chann eling +qu and +xi ii +enligh ten +kan o +fati ma +teen choice +terri fied +p ba +as ley +met museum +dun e +pack er +ki o +ðŁĴľ ðŁĴľ +bo iler +fas cism +ar mored +back grounds +in mates +embarra ssed +defin es +th d +we go +silic one +lo on +el ding +bor rowed +he mp +ak sh +kaw asaki +br y +de af +kill er +dispo sal +ðŁĩ ° +glaston bury +un covered +o xide +po ff +d ant +k j +ku ro +dri zzle +peop les +fe e +pro pri +dd lovato +pi ggy +ot is +aller gies +u bis +pengu in +ser a +vi z +prosp erous +ici des +tornad oes +sene gal +web cast +sto red +enchan ted +bb cone +bay area +entrepreneu rial +rednation rising +experim enting +ang an +lot to +they re +por e +er p +seren e +east wood +bro kers +bar ge +stal lion +timber lake +tailo red +dy stop +b ate +lat ors +di xit +bran son +dynam o +ky lie +shame ful +bt wn +spring time +mix ture +s ounded +lu ton +dad es +mal a +op ra +en ic +rahulg andhi +se wer +~~ ~~ +ky u +nor theastern +ca er +bc u +nir vana +kitch ens +ous y +al m +river dale +hid den +fl int +sp d +pat rons +katy perry +au gh +exhib itions +sm c +shu ts +at ore +da in +some thing +ber th +bo g +por ter +gen to +con cussion +ang lic +ro we +gr illing +scar lett +master ing +mor nin +comm ented +si me +si zing +christ y +ce os +st m +at ry +tari ffs +vac ation +pre judice +p su +paren tal +far age +can a +cap com +koso vo +you re +men stru +stal in +grape fruit +br an +che sa +dav en +exc el +!! 
) +๠Į +distribu tor +ce a +bride sma +millenni al +wa in +ob serving +mis ery +plan etary +expo sing +bra ised +comp ton +don gha +q l +spring steen +th ul +syl ve +cab o +pal ad +niel sen +gaz ing +ba ja +r oud +orchi ds +johan nesburg +se man +d ji +oper ative +affe ction +eclec tic +at c +mut ant +aw x +nic e +mel bourne +indu lg +tu lip +dias pora +wel p +big gie +mississ auga +retri ever +or an +tam my +c ta +hipp o +seas oned +ger mans +eng v +marvell ous +im f +rela ys +mon tan +maur iti +me ister +as surance +reig ning +su fficient +han e +no thing +pos se +nav y +in love +brigh ton +en qu +ch ung +sweat y +es c +cal ed +man s +nicar agua +sl ices +mo cha +washington post +bb n +dam ned +grow ing +en burg +lo an +me s +wh oops +believ ers +spi el +vo daf +l at +s led +cricke ter +brown e +golf ers +bar ra +wat chers +lu igi +sw amy +mom s +pit ched +san tor +cr s +si re +sc amp +bo de +ste war +jon ny +ent ity +pac qui +mind ful +min india +bear ded +temp t +scorpi on +eat on +authori zed +ar to +s vp +op athy +cch ini +house music +disney world +âĢĶ @ +pro pose +di y +expen se +ten g +pupp ets +sm el +d aca +per ry +fin n +boo sting +lefto vers +cou gs +satell ites +man y +az e +g ong +fi e +metho do +fer ries +ðŁ¤Ķ ðŁ¤Ķ +explore rs +load er +attrac ted +il ton +godd amn +pi azza +doc tr +sav ing +paragra ph +visu alization +may ors +work flow +ack les +ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ +ठ¸ +twer k +clu t +lo ver +te ases +si an +o te +deter ior +accor d +l fw +swar ovski +nat al +tra ps +k ina +analy ze +laye red +bever ages +un it +ran som +pe shaw +dest ined +astro logy +si pping +miley cyrus +cam ino +marshmal low +bli ss +out back +fa q +int oler +humil ity +po ppin +hallo ween +mon tene +op hy +nu n +tattoo ed +a as +ðŁĮ ³ +dale y +qual ity +du sa +fisher men +swi f +ter rac +st au +le in +trol ling +ship ment +garden er +march madness +head band +gr t +bur nett +w and +!!!! !!!!! 
+[... continuation of the tokenizer merges file: several thousand additional byte-level BPE merge entries, one space-separated "<left> <right>" pair per line in merge-priority order, e.g. "gh e", "war ner", "motivation monday", and byte-encoded emoji pairs such as "ðŁĺĤ ðŁĺŃ" ...]
+v ex +britanni a +in habit +appo int +piyu sh +le ash +sci ento +fla sk +sen na +> : +at roc +sand erson +id lib +dhan ush +ðŁĺ Ļ +en thr +hit ch +de dly +al ley +dor k +mon do +cudd ly +mis sin +ye sss +night ing +j pn +w ary +ump ire +ma z +ê ³ +bab s +ĭ ãģ +stan ford +posse ssed +exce eded +ðŁĶ ¶ +wall art +tra p +j il +hi bis +sp ying +scri be +khali l +trans lator +lu mb +di zed +ch c +super vision +shut ter +ja g +_ * +yester days +ms f +hi hi +gonz aga +gille spie +vive k +ec static +this morning +ch us +ed es +ston ed +be es +ðŁĩ¹ ðŁĩ +tur in +ho ver +at rics +ster n +sam heughan +auti sm +mi ya +eye witness +writ ings +travel tips +chut ney +px rtg +keny ans +my stic +k rit +/ $ +red head +world ly +am us +op la +le ve +gab bana +se en +o clock +gang a +keen an +sc ent +ol dies +go green +corner stone +comp ly +con cours +ðŁİ¶ ðŁİ¶ +ha an +con fis +aw son +cle op +î Ģ +su zu +sau té +al gar +subscri ber +este emed +ãĤ¤ ãĥ +worth while +mel rose +flo ck +bri ghtly +viol inist +p ere +sli pping +and co +si gh +ha van +cu lo +m sa +fibro sis +matil da +ra fting +aw ard +ë ª +mm mm +ge aux +ste iner +sin n +help ers +beet les +ai mee +tai wan +pistachi o +mac beth +m zan +descend ants +on sale +in r +il m +grou se +sa ig +mo w +bi gre +adjust ments +tu la +mathe w +transl ates +mu h +bol lah +ðŁĴĽ ðŁĴĻ +amo res +ab outs +bomb shell +bla ster +x avi +s ns +k roger +ga ther +erad ic +daf t +chem o +ben ches +ðŁĩ© ðŁĩ +ut v +our a +n ko +gator ade +biaf ra +ok state +im danielpadilla +dom ains +open ingday +kid do +do i +ric e +day care +mac millan +ba thurst +cheer leading +ðŁ¦ ģ +cash back +k won +hob bies +exem pl +ries ling +âļ ª +ag les +ny s +every thing +nav is +ad di +magne sium +faceli ft +ark ham +grand es +extre mist +don at +vit ality +pump kin +be tta +sl td +arti san +li by +pe aked +ah hhhh +mary am +assi m +un sc +ment e +al aya +low ers +ar as +gri ev +le ip +gr ati +cri ses +spr ints +exe cute +w to +ms d +mag ical +re viewer +spark les +juke box +ðŁĺĤ âĿ¤ï¸ı +pay back +licen ses +dun kin +bel t +lake wood +h ateful +bud gets +rev amped +ph erson +ky iv +went worth +ro sen +cru ise +gi ggle +def star +assassin scre +ym outh +win kle +w fc +band wagon +b kk +w iring +kear ney +south side +pe tit +! 
ðŁĺį +nor dic +mir za +mu gabe +v l +scon es +k tv +sand al +du c +m alls +ðŁĴŀ ðŁĴŀ +it c +al ay +im pair +un rest +flo ss +c é +ab ou +var ying +muse o +ser ver +di ya +hibis cus +ero y +mer ritt +fin dom +f pp +un usually +go tt +conting ent +ali aa +ball on +jo l +hi ked +zy me +ay r +ag n +ga z +perio dic +spar ty +practi sing +lin ton +tal is +cy pri +womanin biz +radio disney +ðŁĮ ¼ +jump ers +endo cr +ðŁļ¨ ðŁļ¨ +and on +shar apo +mi er +ma sonic +fac tories +vi en +bb ers +ìĽ IJ +hol d +ke bab +be ak +approach ed +ac milan +mun ro +ko sher +excell ency +negoti ation +walt disneyworld +cr ouch +te asing +suppre ssion +en ya +b ce +transformation tuesday +cal lie +vis was +p gat +ic ted +end ings +esc u +recru ited +it fc +collabor ations +g ino +snu ck +ausch witz +i fc +x ii +ke sha +ger vais +clo ak +x l +sa ad +prob ation +pre cau +mac in +anasta si +le k +e azy +daysof code +mariah carey +yo g +stit ched +boy friends +sh ar +ph ile +ag u +twin kle +phi shing +week ender +ic ton +gurmee tramrahim +al ton +l eness +all an +pen ultimate +kry stal +go u +lan de +dis mant +ab using +nor se +pat erson +ed mun +ap an +xi umin +sk el +cat walk +re act +wal led +t angle +br yn +ve to +super moon +cas ablanc +appreci ates +ski d +bo th +catal ina +ele ague +cyber monday +cau tious +ðŁ¤ ĵ +nov o +hamp ton +ha ye +jose f +var an +lo bos +roano ke +orph ans +tt in +squ ads +ishqba aaz +black panther +e tu +k sh +cru mble +cess na +reli eved +scul ly +pollin ators +explore canada +ki es +kam loops +kir an +pri mal +sett lements +hot spot +brain storming +ce dric +bi ennial +sh ant +âĻ¡âĻ¡ âĻ¡ +do on +hear n +walk way +fe m +ve al +deport ation +tox ins +elimin ating +descen ding +by the +bla sphe +ha sta +comple ment +as cent +ri ga +provo st +âĸ ª +wee ping +anti semitism +employe e +unearth ed +pin o +natali e +bla d +ang ola +lock heed +in ian +ag r +ni ster +im pala +m ke +fan atic +âĺħ âĺħ +ðŁij ¸ +lu ch +simpli fied +gall ery +econom ic +cy borg +con i +sel ma +in ception +ko ala +dv ds +cre sted +m mor +visi ble +n sd +ðŁĻĮ ðŁı½ +w under +refriger ator +re opening +e era +carou sel +as p +balli stic +victor y +mo tive +tre y +sharapo va +si i +mon ter +int end +west chester +sp e +cy mb +vi dal +ll ama +uni v +fin er +crafts manship +jazz fest +b ch +ag gio +n cc +lamb da +tranqu ility +cis co +ba den +so bbing +of i +go ta +ru mored +war med +ore an +ac ton +mar ci +gh ani +âľ ĵ +as sorted +pembro ke +pen elope +da f +at ty +aim o +pretz el +carni val +than os +ko chi +mer sal +ham radio +ar twit +cas c +guer rilla +kush ner +k app +al ise +todd lers +steward ship +o tti +ter ri +tem pe +rest less +vit o +zay ed +rsp b +pi on +hi ppo +haw thorne +in as +am ily +nut cracker +lo p +d ali +tro pic +ðŁ¤ ł +ul o +jare dle +py rene +pale o +usa ir +m ould +it ated +gene tically +biom ass +ðŁĩ³ðŁĩ ± +do dd +practic ed +monarch s +un manned +m buhari +am al +photo gra +ko ol +bren don +ju ices +cu re +world bank +poin ters +ðŁĴ Ŀ +tur f +le ds +bor ussia +bapti sm +warwick shire +moun ts +gay o +be gg +co pied +asi ans +k g +moder nist +gi d +front man +concentr ated +y t +sc avenger +iron ically +adi c +ps n +ðŁ¥ ī +cultur ally +yu v +mac arthur +fertili zer +be withyou +ri gor +min ors +z oning +âĸ ł +ri r +adole scent +vin ny +ren g +sand stone +gu et +we sth +ple dged +lac ed +sp ide +v ai +ty coon +seiz ure +du p +appalach ian +ro k +cathol ics +sey chel +posse ss +la ger +jo di +cham p +stra s +d ina +cent uri +cal der +blur ay +ðŁĩ¨ðŁĩ ³ +mo do +an nette +youtu bers +chap s +ang ling 
+label ing +a qui +pk wy +ly le +bi sexual +lit ur +dug out +li bby +grey sanatomy +sub stances +august us +rall ying +fi del +ing ue +äº º +hallmark channel +tooth brush +m á +adi rond +ag gi +ðŁĵį : +cru sade +tax ation +k z +i ver +dou bling +room ie +wa b +en rolled +az on +a ju +grand children +as df +ðŁ¥ º +mat ic +ough ton +utili ze +ðŁĴ £ +pon der +rais in +dys function +co bain +butter nut +e man +su red +dri an +and friends +with the +on omy +heine ken +bri dal +leader ship +pyram ids +deutsch land +jo cel +bo wel +y qr +horse power +be acon +ing eni +gra dient +fer mented +mo om +thing y +pot assi +wrist band +bor d +bo died +ðŁĺŃ ðŁĺį +ma pp +ka u +cyber punk +ph ish +loo king +co ates +ap ur +am ie +uk labour +at in +g la +adop table +shel by +v illi +ri ya +m ingly +cli mber +bumble bee +ðŁĺ ¸ +c sd +âĿ ¥ +hospit alized +c ki +hat er +ch r +re tina +it a +fan base +beat rice +gwy ne +go ss +fo s +favor ited +swachhb harat +mal ade +mon mouth +" [ +si van +sh hh +command ing +sains burys +wee d +g man +ss w +rep tile +iv y +tro pics +roll ers +over cast +ex position +masquer ade +man crush +wa ist +spr inter +sle et +le vin +j pg +_ ( +o pel +explo it +ap a +po we +wrec king +jong in +or b +er ick +bo sco +pra ising +ber tr +to wing +in security +ku t +resto cked +rr p +prescri bed +trafal gar +per t +g ases +app rais +g har +music als +âĸ¬ âĸ¬ +mc fad +ag ony +conditi on +equi p +shi k +atra vel +ðŁĩ¿ ðŁĩ¦ +ke h +abduc tion +pe oria +wil kins +g ms +as d +ev i +ðŁĴĹ ðŁĴĹðŁĴĹ +u z +mo c +halle lujah +guad alu +lou vre +dra wing +go ve +ph ant +fri e +web dev +program mer +z able +games com +clari fy +li th +kin ky +âĿ £ +labour doorstep +son ata +ju ris +mai den +vi adu +buch arest +conditi oned +capit alist +u de +ps b +sp ca +lul la +footh ills +kay o +bon d +wom b +roun der +ce sar +bur sts +ap ra +sw oon +sab rin +fra grant +cle arer +ku brick +cli max +jour no +ag le +ðŁı½ âĢįâĻĢï¸ı +poo ch +hal e +sol it +sal mon +organis ms +bron son +art en +hodg son +alo ve +vent ure +bb i +ae a +ðŁIJ ¢ +ld n +d nr +o zone +el las +man ny +azz ur +un beat +tru ffles +th ong +ma ñ +las ers +ley e +gettys burg +back packs +or is +ma ison +craw ling +la bra +cl ing +dra gging +ste al +dou bt +de van +ck ers +agent sof +photo bomb +elon musk +abo y +dist ances +story line +sp i +nor than +europe ans +wh ale +ser pent +ðŁļ ² +fi or +tr it +ox o +awar ding +class mate +su fc +smar test +rich es +pr k +big foot +ar mb +bi polar +dw elling +om ars +k wan +gri me +m eng +freder ick +navar ro +sorry notsorry +jaredle to +pa ve +sl ack +barn sley +att ar +evic tion +accumul ation +o ir +cat chy +wel ter +vik as +has see +nik ita +mo yes +mathe ws +shi v +gat wick +pro filing +compan ions +mar rake +an tics +ðŁĻĮðŁĻĮ ðŁĻĮ +se se +bo i +bart lett +poison ous +ab uses +ym m +kam pala +guggen heim +imv kohli +dol om +bre e +thro ttle +gare th +fitz patrick +un ya +par ad +mar got +j nr +we a +potassi um +p nc +disgu ised +cra sh +ren ergy +ill ic +coup led +ni els +ci ones +æĹ ¥ +im ent +despic able +d ye +what cha +conne ctions +paralym pics +gaunt let +wait rose +suici dal +star ship +vap or +st ou +law maker +coo led +si mo +then o +offro ad +ja den +bas que +vick y +lu kaku +centr o +tri sh +strate gist +medic ations +hor st +b fc +gra il +sharp ly +ad itya +tom b +kau fman +tri pad +sam ba +pastor al +brit ney +sag an +hill side +mas ons +sar a +z one +x u +to tes +rob bie +app en +mon tag +der o +short film +charis matic +tat ors +ki ba +and ri +al arming +split ting +ic ar +th ug +scari est 
+sylve ster +an an +u trecht +a difference +me ade +bu ster +air strikes +cu ffs +account ants +ðŁĺ¡ ðŁĺ¡ +new t +bo tt +issu ing +cl ancy +wwen etwork +kyu hyun +rese mble +pajam as +sin k +kin ney +sul ph +or k +li es +la gh +or ton +ra hul +d sc +we will +re am +collo qui +shar ia +hec tic +sar casm +land er +tm z +endor f +ro z +ham mered +fri s +w adi +pope francis +he it +flash light +un born +op es +hol iness +ðŁIJ ¦ +nach t +im sa +gr acing +bj p +ver ts +c sc +home owner +a que +bigo try +anni e +bag h +âĿ¤ï¸ı ðŁĺį +car i +thom p +dispo sable +cardio logy +pat ented +hh hhhh +ld r +stephen son +cro res +fan ning +cli mat +ðŁijį ðŁijįðŁijį +ðŁijį ðŁı¼ +aer on +piccad illy +bank rupt +sil via +emplo y +don ny +commen ting +screen writer +io ta +ce an +anc ers +tu an +street wear +ठ¯ +sk ine +esp a +asi f +os ce +she ppard +more cam +bott le +der s +orac le +google play +aver aged +edmon ton +steph an +sister hood +cru sted +stag gering +methodo logy +congress woman +c abo +tri ggers +mil ky +gli de +tooth paste +room mates +nu ff +gu am +sprink les +alternati ve +wat fordfc +uof t +hal ey +cont acted +bun dy +pro stitu +gh ar +pre ston +on site +hil ar +g ts +c att +hamp stead +? ?! +ðŁĩ§ ðŁĩ +bbc qt +aless andro +resi st +ma idan +t ko +shad ing +pin up +gal lo +sin u +at ec +fun k +ac lu +stri des +rhy me +wet land +bbc springwatch +t ins +wild card +st our +flamen co +pau la +onto logy +gang sta +am ade +ãĤ « +t bs +skelet al +run ner +jard in +harri er +hun ted +z hen +believein film +de mean +au diti +re start +chon dri +âĿ¤ï¸ı ðŁĴĻ +mcla ren +ga b +sh um +au sa +lewi sham +y pg +k jv +fur nished +dor o +bon ded +mor ty +lat itude +_ ) +lo va +water ways +vin ai +shor th +drun k +c ay +ay ana +kap lan +capp uccino +spr o +life boat +has bro +spol ice +tor on +do ing +dam n +sh ree +foun tains +ent ation +mar u +boar der +to pless +j ada +chan ning +ul ls +en closure +gib son +fractu red +brit ton +à ¶ +t ous +por th +dra f +tra iling +mar gate +eli fe +down ward +lin n +gla des +girl power +ak rish +u ki +ron da +ts c +appreci ationday +vis ing +lo om +ðŁį ³ +mex ican +ar gos +y ya +jad ine +south port +d end +si sta +rede em +men g +bra xton +antioxid ant +s key +mp g +fin ding +vibr ation +ce u +kh art +di mini +cl ine +shel ly +hin es +ī ï¸ı +to pical +no ver +ma xx +prim itive +illustr ate +b ounds +tren ton +join tly +breed ers +u chi +wakeup america +b ada +ðŁĹ £ï¸ı +gu acam +sp heres +pere gr +youth ful +lo lo +bir min +t ly +jeremy corbyn +defe cts +co sm +a rent +v aa +bag els +medi ac +cori ander +ic ago +g haz +ab bas +re model +struc turing +pu m +out law +ad ani +r bc +gul ls +n li +confu se +ðŁijĩ ðŁı¼ +vil a +mcnam ara +correc tions +mug hal +ser i +re gain +ss b +lea ve +haha hah +gran de +di stressed +re chargeable +ho a +hou sed +sti l +attribu ted +opath ic +di ps +pri t +head phone +conclu de +pil o +he t +ut sa +nit in +je m +sni ppet +tutor ing +op er +sun k +en sla +cha u +ac orn +quinte ss +ran kin +affili ated +our lives +cl int +se ater +isa ac +ba shing +sme ar +nur se +doo dling +" ; +sa ku +atroc ities +im am +g fs +viol ating +comm end +brad shaw +er ville +b illed +b be +thul hu +i phones +moo se +di os +re w +me thane +strang ely +whis ky +ti ghtly +spiel berg +radi us +notic ing +wi f +ig nati +i fa +ap is +w ali +ha itian +bu shes +y z +v l +ex ited +asse l +tru ec +dom en +ash er +in king +newyear seve +hend ricks +bat i +ìĿ´ ì +rich ter +mon santo +con line +agre at +ðŁ¤ ¯ +master pieces +ar n +rough s +cle ve +se v +fashi ons +to ya +sh ail 
+cop eland +aqu ari +dec als +are you +y aya +a str +fon t +ml m +ar ca +pp or +pol lock +xper ia +conserv ation +chain saw +ag gie +?! ?!? +si le +sh on +ìĹ IJ +note books +marque tte +de us +bb led +spic er +mc cabe +nor wich +modi fication +boo sted +stru m +sales man +bang le +nis san +hez bollah +brea sts +a af +anth us +sk er +ow ed +her os +gi fs +fo sters +eat ers +du es +_ / +lymph oma +sf am +me gal +afri di +ag ic +p amp +jeal ousy +ðŁijĮ ðŁı¼ +calcul ate +napp ing +g ale +ðŁ¦ Ħ +lub bock +assu med +ren ting +íĥ ľ +subur b +ãĤ · +tech nic +u cla +in front +gar net +ster oids +stri ving +ho war +mo ver +le ton +bull do +is in +ci ao +sn z +fore front +d ams +mid wife +ma wards +cla pton +we in +subsi dies +spr oud +rother ham +phan tom +ar ach +spi el +rac ket +sel amat +no on +l bc +enti ally +ðŁĴ ¸ +sil ve +m oud +kine tic +y asi +ðŁİ © +o ol +mi ku +i za +fer a +flo ren +barber shop +groo t +z est +ne ars +stan is +z and +police man +juris dic +form ations +appar atus +sp d +arti fact +to sc +motiv ating +womanc rush +re dro +diagno stics +ra za +out fitters +el xn +dod gy +ry n +sh d +ortho don +ol de +jay anti +bal ances +quic kest +can ton +friday reads +! * +na a +a ak +ðŁĶ · +behavi ors +rasp berries +ä » +polit ical +cam il +å ľ +di k +ast ounding +lie be +novel ty +tur moil +sul ly +spring break +hon ouring +cc g +ðŁı Ĵ +my little +ky c +pro ms +ðŁķ Ĭ +à ¨ +bi ge +av ril +ðŁĩµðŁĩ ° +mari on +as ants +sur ya +oc tag +luf than +ac ron +fayette ville +ti que +love s +en ca +de kalb +ta ver +de vote +aux iliary +joh annes +tread mill +ay an +qu r +donald son +cher yl +" .... +s ven +kir sty +gun ners +ra dish +o ahu +v sky +i ble +con course +b ps +elo qu +ash ford +te bow +roblo x +ma da +dri ving +th day +spro ject +m ms +band ed +. !! +libr arians +flan nel +intoler ance +her al +ç µ +neme sis +list a +tar ak +cry pt +star plus +vish nu +sc ale +cr is +% ), +j illian +regg ae +pegas us +ol in +ip ment +man ic +l fc +godd ard +ite am +parl our +anch ors +lee minho +talla hassee +ant it +d ho +kid ney +y ash +batt led +az ad +gar is +faul kner +sni ff +papar azzi +ed m +phy llis +con tested +aa ay +se ca +k ton +vel ve +rain ier +for um +tam pab +ho sp +trac tors +ox fordshire +no tion +guang zhou +ðŁĺ ¯ +ref ill +wednesday motivation +sli der +mukher jee +pr att +fon taine +alph on +af ar +ts i +pest icides +fi ends +mo cking +bra w +tran sat +do ses +co res +hom ophobia +docu menting +zlat an +con doms +s é +sun set +kun st +ton ga +ภª +v ation +sp ray +chow der +ra ps +palla dium +nor wood +music history +hoo ker +si si +osp rey +ph ys +conce ded +bob cat +ar mad +ze it +Ù Ħ +ðŁĺģ ðŁĺģ +mer idi +ðŁĩ· ðŁĩº +corn wall +! ), +touch downs +ze it +chal et +mm m +al che +gor illa +fo ss +ati ku +lumin ous +ivan ka +be ek +sta res +sw iss +âĿ¤âĿ¤ âĿ¤âĿ¤ +scru bs +me ath +gusta v +jo gging +confe tti +as os +ers fc +breit bart +applic able +autho red +ya ho +h in +displac ement +j v +ðŁĮ¹ ðŁĮ¹ +ot c +non profits +diec ast +gu sto +inte stin +c ages +me en +lu kas +moon ey +ðŁĺ · +very day +tor ah +is sion +wa c +lever aging +ish able +cu se +le wood +may an +turn table +ju ice +tru sty +tu p +eti quette +supervis ors +stu n +gu zman +confe ren +ric o +fe ast +back ward +pol aris +mic he +jo g +h ing +field house +vel ing +sho cker +esc ence +ठ¾ +vi be +anasta sia +mar ched +kill ing +Ķ ë +fe tt +exop lan +... 
( +snow day +lo h +ir ani +la khs +del a +po caly +boom ers +dictat orship +ac er +tur keys +quarter final +muskete ers +ðŁĴĽ ðŁĴļ +sf x +museum week +sc ala +ri sis +( ðŁĵ· +ãĢ Ĥ +z ies +bo eh +hu es +lu sci +dol a +impeach trump +roo d +don caster +tor re +hero es +fo yer +tar i +blur red +ke w +frank ly +dro id +ap al +Ð ¼ +y af +bre t +par agu +cac ao +ðŁĻĮ ðŁı¾ +ru e +head aches +shaw ty +char ley +pal er +go wns +correc tional +ðŁĺ© ðŁĺ© +breaking bad +ol ing +da p +endeav our +cit adel +tra d +incumb ent +medit ate +foo ted +ðŁĴ µ +shab bat +dayof the +wil lem +gal way +to red +marri age +f illion +sleeve less +aud itor +jin young +invin cible +kad una +a and +volcan oes +mon eti +indie gogo +buccane ers +ðŁijī ðŁı½ +ãĢ Ĥ +lay ton +cuck oo +hu mber +buzz er +Ï ī +to re +stra ins +sto m +pa ine +s we +du ff +z ou +si mi +li pp +ur n +se agu +ðŁĶ ® +sun dae +hi c +ðŁĺ ¨ +bull pen +u per +flyo ver +al dridge +glo bes +ali es +ken zie +ge es +y cle +sp lin +mag enta +j ha +bal u +gh orn +ti pper +wick er +taste of +con clave +ch ale +inv asi +cat er +dio xide +me gab +win n +at p +transform ative +nest led +hi g +bri dging +lil ies +chee red +bad dest +sc rolls +real is +dipl o +ðŁĶ « +conce ssion +prefe rences +explo des +er gon +introduc tory +ine au +ch af +som es +land rover +spir ation +sex y +sco recard +illustr ates +soul mate +wi en +inter disciplinary +fore casting +ent ities +glu ed +en lar +cur t +percep tions +boot leg +mi re +asho k +v az +hor ne +cal le +ac ulture +ther oy +night time +oc al +character design +ar mist +ðŁĺı ðŁĺı +yah oo +ac eae +to se +even to +sou t +nay anth +wh om +v are +ri gging +gen us +hi ve +com mands +sti e +day a +ethan ol +en f +hi fi +flu ence +cle mson +re invent +thermom eter +humor ous +emer ging +aci ón +ðŁĺĺ ðŁĺį +s ity +haw ke +accompan ying +t ility +ðŁĺ ª +re cess +protag onist +l ery +dun dal +int l +britt any +q bs +off the +marri ages +how to +viol ated +adel aide +wit t +lanc er +pak v +hu me +st ade +bra gging +ou tright +ad c +super st +real time +cu res +garden ers +ero ck +dale jr +ver o +bar tol +mo ti +mc fly +v pn +st ink +over rated +guer ra +e tis +ath ome +twd family +th ab +tn x +rafa el +family travel +x ley +sat anic +equ ations +ru dy +wal dorf +stan i +tu be +meas les +zimmer man +obli gations +i ously +bow ser +trans former +sho ppe +shak en +gh ouse +to d +ke tball +share holder +mar ca +kp mg +ak an +given chy +coast al +au th +roller coaster +mar ches +coordin ate +cine ma +apprentic es +par lor +mit o +men on +consider able +bar re +glo ss +enh ances +jaz eera +fal mouth +thra sh +stat en +k zn +eng el +samanth ap +flo ppy +sal om +ðŁıĨ ðŁıĨ +w ack +deliber ate +osc ill +herit ag +du sted +orni thology +pad dle +fer ns +bar un +cl ans +anticip ate +a ay +mat ically +é ĩ +tu mble +post man +unic ef +tro tter +op d +leaf let +ge ist +cease fire +scre ws +cre ation +wal nuts +longh orns +under statement +ab b +proxim ity +na x +un ity +turn pike +orda ined +dub step +chak ra +me ch +love her +look alike +donne in +vir on +Ù Ī +bang ers +vari ants +out dated +in ta +cri sto +sp elt +food and +f on +stefan i +margin al +hu tton +ti ara +tel ford +qu en +fair grounds +que tta +mikha il +heal er +v ball +ty re +under grad +gl end +hom ers +scri bed +main tains +po che +mis sal +mar ko +u as +á n +sh p +con vey +pad re +sab a +pu glia +madhu ri +pa xton +chap lain +n ago +ca si +... !!! 
+fli rt +sal eh +k are +di re +stam ped +extre me +ðŁĺĥ ðŁĺĥ +ho ppy +guadalu pe +advant aged +eu char +p low +un n +mac qu +port land +cla sh +pe s +lou bout +y p +keep ing +arca dia +fran kie +fi u +de th +encyclo pedia +si ze +inve sts +ðŁį © +geo logical +fran ç +con front +ðŁĺ ¥ +d ys +af m +tex an +graph ene +repost app +ac f +ur sula +gaz a +dd led +fu m +wsb tv +m be +fron tiers +chrono graph +ke s +inter faith +tab oo +spar ta +won do +flori st +em braces +ca w +no el +arch ers +ðŁIJ · +roman o +ban an +sh akers +melo dies +geo thermal +se phora +ìļ ° +оР´ +pro c +hand shake +pan de +popul ated +slow down +hor tons +registr ations +un deni +lan ts +pas sover +thak ur +li ef +adhe sive +pe tal +micro scopy +memph is +confir ming +air drop +mesm er +perce ived +ming le +lifel ine +gh j +worcester shire +pas sions +ach er +el lar +ah o +firen ze +bar ang +letter man +hat field +lu cha +je ter +e shop +william s +horo scope +pre de +east bourne +dur ga +di version +al trin +seis mic +premi osm +nar co +ti r +ori g +or m +land fall +ci ous +lin do +max ine +x ico +tra y +os wald +c ba +ric otta +n cr +mar au +ภ² +gladi ator +ch ery +lun g +u me +po psic +lon ging +can als +ta ya +decentr alized +sho pp +pres sures +mahar aj +eti had +wal greens +succe ssion +sign aling +li g +staf fer +north korea +def ying +as ma +de g +peri meter +oak ville +m sk +balti more +rece ip +de ple +ðŁĺŃ ðŁĺĤ +jambo ree +> .< +rsp b +puni sher +consider ably +in tothe +pari sian +acceler ated +polye ster +low es +fr ying +sauté ed +mou ths +seychel les +ra x +go dis +dak ota +house wives +the me +mat inee +black bird +ye sung +pre fers +pelle gr +in ated +trun ks +stronger together +re pet +re pairing +ped als +toler ant +her r +dun ne +indic ation +decat ur +b tv +exhibit ors +ik on +friday motivation +bra gg +live tweet +al ves +womens art +foreig ners +wal lets +min dy +lan ey +bb in +tv miaw +lif ter +tar get +tam e +dr ou +astro photography +mp c +g pu +nord strom +fric tion +run off +lov able +sp nfamily +ext ingui +bloo dy +sch el +arti stry +sw ish +scar ce +ph ils +max im +pos sum +com promised +sty li +sc fc +is sa +birmin gham +sket ched +angel ica +ordin ance +je ts +conqu er +ðŁĺ IJ +online shopping +s ori +reason ably +nue stro +ar turo +ch l +benef ici +spho to +wel t +ni kk +ðŁ¤ ŀ +dan ao +for mid +as se +af irst +âľ Ĥ +gil lette +as sor +an onym +sel ca +fe mi +bear able +y and +ar mory +cre pe +celtic fc +bra vo +in expensive +de lec +ge cko +new market +snow flakes +kab ir +con tra +can ning +mor pho +gar wal +ðŁĴĥ ðŁı» +fight ing +mu tation +woo dy +ju gg +gr aces +premiosm tvmiaw +kenne dy +gu p +sa e +op ha +off spring +fini sher +bet ts +span ning +mar j +h one +sh ing +contin ents +samanthap rabhu +un related +l acy +explo sions +benjam in +sophi e +no ting +micro soft +as sen +a hoy +i ker +ho fer +mo e +ah madi +yan n +an ak +ma hi +be u +aha h +creep er +baahu bali +am at +pri ory +haw keye +deloit te +sko da +print making +assemb ling +mirac ulous +no ch +sw o +leg a +oper ates +border lands +eli e +stron gh +rep tiles +pir ate +un fold + ¯ +qual comm +un predictable +ot r +rose wood +direc tional +counsel ors +corn ell +liber ated +j ad +ir regular +bulgar ian +high ness +vodaf one +sw ild +mini mize +gra zie +๠ĩ +r stats +stre ep +ome tric +humb le +lu mp +l ille +b ü +home depot +tripad visor +ki wan +a via +er z +ex ico +du f +blu men +mi zing +ar ma +in im +con stan +sor a +ju al +au n +tw ell +tren ches +her a +r k +po plar +recipe oftheday +ll an +bhu ban +short ages +ing 
don +bridge water +ðŁIJ ĺ +fortn ite +cam den +un cture +pro w +colon ies +t ks +n go +b hm +live pd +spl ace +sli ke +happye aster +ter rence +revol ver +j ed +yy yy +office of +m ts +exist ential +r ourke +explore bc +sse d +pri est +vix en +si ding +k pa +a har +ju ic +ob struc +foren sics +uk mfg +cancell ation +we ary +ab q +ele c +pri zed +deb ts +me zz +salv atore +m dc +gre tte +c gc +th on +snow storm +ts ch +cook ery +å ¹ +wa xing +n acional +mur s +ra ve +cap es +ger main +dri pping +sub mitting +ome lette +iter ation +aj es +shim mer +fu eling +ðŁĩ§ ðŁĩª +li po +bo bble +un follow +islam ist +hi ber +cat s +agentsof shield +sen si +____ _ +ster ia +inst al +ausp icious +har row +over land +femini sts +inst ant +char iot +blind ness +sp ed +sc arec +nu it +mini atures +ho seok +glo ck +fifa worldcup +e te +dis m +we iner +ex foli +ear ts +à¸ Ķ +my art +man il +iss ant +form a +in cu +buffal ob +in tim +mc cul +anj ali +po po +un doub +hil a +fun gal +thank ful +fu tur +en dish +ren ds +th ar +she ff +ring o +nichol ls +io wa +po tom +cl ams +ãģ Ħ +acon f +stadi ums +di mp +di k +residen ces +do v +caric ature +seagu ll +kl m +confe ss +sla pped +cele b +turb ines +pp v +nur ture +el ab +.... .# +tu ff +de press +al far +amii bo +di spon +e wing +que er +friend s +for re +âĺ ¼ +sw t +aqu arius +head liner +cur d +fi gs +o tters +love fl +kare em +go vegan +fri yay +consol ation +at ri +ì§ Ħ +âĺĿ ï¸ı +poly ne +gu ed +o ya +la us +intestin al +cam illa +scal p +pi r +leed s +horri fying +bore tum +dand elion +fer rer +ell ic +as x +so ren +re loaded +ale ague +navig ator +ine tte +add ams +al chemist +ak shay +dystop ian +awe c +n aya +al isa +ai led +ag or +avi ator +ali zer +smo bile +findyour park +cop ying +to ddy +sh ti +mon ger +cal houn +nap kin +break up +y atra +se thu +ric hi +eras mus +fer ry +am ore +prac tise +bo bo +power point +oo se +li ffe +chin a +sh ka +fad navis +du ane +war on +fal se +ðŁļ Ĥ +wa shes +disc ip +==== ==== +g k +ab b +stub born +medi eval +p ci +ðŁį ª +maril yn +h yo +man di +cr i +prede cess +continu ation +om usic +s lat +wh al +mall ory +bon n +shen zhen +ca i +âĺ ĥ +sa fest +for wards +dra wers +bla sted +sle e +mor phe +mb ta +dumb ass +ÑĦоÑĤ о +alhamdulil lah +ec lub +al beit +heal ey +ayurve da +adverti sed +cro cs +itt les +bry son +be i +nj pw +honore e +fu sed +ðŁĶ ĺ +mul tin +n aga +de parts +ko p +kin o +jhar khand +ed na +ax le +mil ton +supremac ist +marrake ch +domin ic +tran script +] [# +: ). +wo c +sur rounds +o gil +leaf lets +co well +whe w +tru de +proli fer +succe s +sports man +con dom +po che +k up +imprison ment +{ } +scram bled +å Ľ +ka ine +cell phone +metam or +con i +remn ants +ee z +down pour +afterno on +exerc ising +ber ser +architec ture +wick low +m ns +is p +bo c +n iss +mn wild +stu mble +r si +lu ffy +sil en +dd ad +bul lies +haw ker +bb cc +scu ba +e pp +que ts +for aging +pal let +ha di +cinemato grapher +cat chers +to aster +k hi +lite coin +kid lit +amher st +maur icio +ip ad +mar malade +fe y +don nelly +g to +est as +cere bral +ant grasso +zz led +vir gil +swa pped +ðŁĺħ ðŁĺħ +no dapl +greate st +nhl bruins +fra ser +b mo +ane w +. 
âĿ¤ï¸ı +se gregation +remark ably +mccor mick +lo gger +er as +contrac ting +âłĢ âłĢ +yor ks +uku lele +touch screen +de cked +ben n +south wark +ra vin +nu mis +ðŁ¤ Ļ +ru t +gre co +eth ic +red neck +ar r +t cs +ih ri +ðŁĩ« ðŁĩ· +l k +inher ited +zy k +viadu ct +marty red +hi gu +ss n +be in +street style +fer gie +bank of +æĹ ¥ +stake holder +exempl ary +cre ss +ess a +ero tica +intre pid +gom es +bra un +bethan y +bang tan +pulmon ary +m illing +doctor ate +trump russia +ठ° +s ani +bl att +pla u +depri ved +t le +ful ly +bour n +st ak +lufthan sa +kio sk +far oo +def y +bad an +ðŁĺĺ âĿ¤ï¸ı +rit z +tri sha +ran ds +middle sex +arab s +pro j +sport scenter +repe ats +iv f +bleed blue +as sure +o bs +territ orial +ele n +bever ley +ann ah +âĿ¤ï¸ıâĿ¤ï¸ı âĿ¤ï¸ıâĿ¤ï¸ı +z l +for good +science fiction +gla u +son ya +pri th +st weets +mix ers +mari o +ant elope +writing community +went z +den ham +be di +sf o +harley davidson +look book +immuno therapy +or phe +es ville +ed ged +tas k +sb ball +corro sion +kilom eters +co sting +play back +ke ke +di visi +u ter +re location +yel led +pen g +up beat +ser ve +âļ ł +hal en +stir ring +reh man +en v +schu macher +frag ment +alkal ine +sb k +resil i +share point +rol lover +tra sh +counter part +âĻ « +ob itu +à ½ +ãĤ ¹ +mul berry +ðŁİ Ĩ +auton omy +spra ying +nat l +love you +fran ki +nu k +esc ar +can teen +ali baba +de plor +mole cule +pu d +fort night +blon die +sp hin +portra yal +ta che +bu te +consi sting +freep alestine +c sp +im mort +d ns +ðŁĴ¥ ðŁĴ¥ +tour de +coo king +archi val +ga thers +bit t +b anc +pre mature +snow ball +poetry day +lou dly +fug itive +ed ay +em ra +ðŁĩ¸ ðŁĩª +sci en +node js +jur gen +je ong +band ana +un is +fox sports +v andy +pro visions +wee p +tu k +i ko +h oun +zig gy +z r +fil let +bat a +tin k +con e +we want +k ilo +hor ace +sl t +sc t +stay tuned +victor ia +umb ria +att acker +ingham shire +fright ening +no ir +fr at +con tempt +lia ison +ho i +br ink +tr ill +ni agar +kick ass +dun das +not my +rho de +bu mble +no xi +fa g +spec tators +mancrush monday +jin ping +distr act +dais y +wal den +portra it +ar thistory +vol tron +ev el +is c +ac m +r ite +na o +de ported +swe ats +ru fus +lo bo +labor day +gam o +ihri thik +bl it +abdomin al +ãħ¤ãħ¤ ãħ¤ãħ¤ +i it +e q +bu sy +allu arjun +un disclosed +de ton +pro create +ki l +ðŁİĤ ðŁİĤ +mitch ell +ki i +inherit ance +al p +jo burg +pat rolling +compul sory +un signed +ni am +l ga +eshop suk +tr illi +ma w +appreci ating +rock ab +mañ ana +an tal +mal vern +roy o +grand prix +sut ton +go ftheday +dig i +ãħĭãħĭ ãħĭãħĭ +t les +varan asi +erec ted +discip les +cont act +ðŁĺ µ +li d +⬠ĩ +scen tre +radi ator +ing tips +trans itions +thursday motivation +chem ical +separ ati +sal is +mi m +geo graphical +book fest +/ . 
+âľ ĭ +v ae +cur rie +ag garwal +acceler ation +the ses +lg m +u mass +pro portions +nat a +ani ans +ku ch +be acons +ap r +@ # +ðŁĴª ðŁı¾ +nu ke +sher aton +ki o +ma kati +polit ico +mor ale +ì Ļ +econom ically +gg ly +ss en +pa stries +intern ships +vic ente +fanta ken +aveng ers +accu se +slee pover +indic ated +the dream +ster one +ren ders +fro st +ou i +gre gg +d ore +⾨ ⾨⾨ +pu gs +sat y +nu mb +hems worth +tam i +la ssic +schi ff +igle sias +ag awa +] " +re shi +game stop +divor ced +theat er +clau di +un conventional +prophe ts +ac in +twel f +tow ering +t ml +sc lerosis +k wan +ge ts +distur b +na ira +ener g +pir acy +pru itt +noti fied +hen na +bra m +ground water +bl s +opti mis +$ ) +luci e +biz hour +fang irling +gr ills +or l +ver se +c ina +law less +artistson twitter +tele vised +marshmal lows +radio head +bar r +m fc +bre vi +mmor pg +g aya +âĸ « +sub titles +j t +disney land +to bago +nh m +groo ve +fi awec +" / +ba o +scra bble +om ni +ff l +um c +si mba +ali er +ter rell +plu me +mi di +dig nit +co c +bru t +ad ata +alche my +d sm +ðŁĺĨ ðŁĺĨ +win try +spa res +cu er +conclu sions +to ys +od or +fl ann +gar vey +scrip tions +inspec tions +cat ap +ang lo +st louis +heim er +at ay +tr ich +en yc +chil ds +vent il +mont p +guiller mo +circu lare +z ell +mode led +craf tsman +al ina +stimul ation +cashe w +ju das +best of +to ire +susp ends +scol lege +real ising +by tes +bloo ds +as si +ðŁĴ ¿ +o hs +ðŁį ĭ +scallo p +ठµ +gi fting +camo gie +wil kes +o zzy +ðŁ¤ ¤ +ver onic +sav oy +deme tri +baby girl +ðŁĺį ðŁĺŃ +so x +cly de +induc tee +count down +self care +ठľ +vi ka +tor re +phd chat +pe ars +aw h +suff rage +le sn +admir ation +mp p +shark week +schul z +santor ini +clo ver +( * +stras bourg +ex iting +so yu +finger print +che a +ãĢ ľ +vin dic +song writers +so a +prou der +nam a += )) +simple st +delici ously +gil les +u q +mn wx +ep p +sh un +ken nel +fall on +ðŁIJ £ +sin d +tra gically +out es +modern ism +co ke +gy n +spi on +âĺ¹ ï¸ı +le am +compress or +apolog ise +twent yon +fan atics +âĻ » +sco tsman +sa wa +ko u +as er +ภļ +welter weight +phen om +twick enham +stri a +p out +ka z +gi am +cd p +ho y +emplo y +red mond +ภĦภ+sm ere +trance family +proto cols +pie ce +lu iz +iter acy +carl s +united states +har med +phd life +ch aw +foot prints +l é +cho ker +z ana +sli pper +eric sson +insul ting +articho ke +advis ing +acquis itions +op or +mut ations +re ar +ॠģ +pod cast +wi ther +kun g +íĺ ¸ +win slow +di apers +ðŁĵ¸ @ +ec ker +col lar +hu ey +gi ro +mono gram +kas ich +si veness +malay si +arom atic +gre s +gali leo +u ji +rob b +dr m +none theless +as a +: > +lo a +l np +at work +ag t +laksh mi +pipel ines +id al +stre l +re all +chain z +stone wall +san sk +ðŁı ´ +pied mont +hoste ss +ci u +t é +analy ses +wil helm +scott y +rw by +mosqu it +use mb +qu ins +ðŁij İ +tu cker +s conf +speci fications +psychi atry +broo kes +s ils +ol af +de to +co di +cli p +fil th +womancrush wednesday +go to +ang erous +be ale +w tc +paneli st +ne x +lar sen +emili o +tab leau +h itters +conce ived +americ ani +or tega +mar di +Ñ ĥ +pain tball +thir sty +new yorker +etis ation +go ss +we aker +u gh +tro ll +har ga +du al +ght ning +at ine +ðŁĺİ ðŁĺİðŁĺİ +cook out +pyrene es +po ss +authent ication +sports wear +yun ho +kir o +archi pel +shen ko +ren der +nov ation +divin ity +ðŁij £ +su fi +humb ling +ge opol +devote es +wait ress +tr ough +py ro +i ba +bl ing +gra f +epilo ts +bt r +of tball +bas king +domin os +so om +r ath +sher yl +qu el +astronom ical +wel d +track list 
+sig nee +slee pless +com man +ch ron +summ on +pure michigan +cri spr +sli p +la gi +ra q +um u +thal ap +char med +scru mp +quad copter +ski p +peter sen +mun i +ðŁĮ ¾ +mon aghan +tra ys +ick ed +canad aday +te gr +ï¿ ½ +hot ness +heavy metal +ab ar +gop debate +az ul +spider man +sun flowers +ľ ë +web comics +bar d +Ð ² +nichol as +slu sh +ram an +mark ham +ffici al +ff ler +íĬ ¸ +ple ss +anush ka +to to +sk aters +pro wrestling +compet es +ay ala +myster y +thr ills +mp g +independ ently +y ul +imper ative +formid able +tire less +st acking +ton gues +mal tese +pot ts +mat ti +char ting +chill out +super nova +ome o +sky sports +nu tty +ðŁĹĵ ï¸ı +ro han +insp ired +concier ge +ser ra +ma kk +gal at +chi pp +ye v +ì £ +reim bur +op ul +kimber ley +i eee +bre men +ch itec +or in +nak u +bon kers +foo ty +emer gence +ðŁĨ ĺ +sti p +serge i +zo ey +ai me +wou ld +dy es +destin y +vinai grette +dri er +circulare conomy +an archi +ss r +sch el +cin er +gro om +determin ing +gar min +cal ais +incarcer ation +bu kit +no i +chelms ford +mckin ley +chi pped +belong ed +tu mors +str oud +mi i +influen za +wwen xt +tun dra +tele communications +cat sofinstagram +t ages +beat ty +o du +ml kday +oo per +dang le +ak ley +cru mb +anti gua +ti mbers +rou hani +ðŁĴª ðŁĴªðŁĴª +ha fi +... !! +w cs +coo p +sn c +lit res +ãĢ Ĭ +ha z +co z +k ant +green field +cur ti +y ale +flye agles +what soever +wor thing +rou lette +flyeagles fly +un da +a inted +stand ing +lusci ous +h pc +effic acy +ash land +me ghan +ky wx +n pr +bath tub +ac os +h ani +mar cor +man tis +da isi +bo ba +ab bie +mu til +vi al +spy der +po z +g ti +el fie +nigh tw +metro id +anton i +mad die +dh ry +dar lings +ten ds +taek wondo +atlan ta +me ow +chlo e +ãĥ İ +ym es +siber ia +k con +gu es +mar iner +fac il +azz le +[ ... +han nover +bav aria +vir go +te uk +u sps +) # +wall a +sam pson +need less +ver bally +hay ley +bow led +pi us +lam pard +ham string +vol vo +road safety +cho king +sor bet +a hem +healthy food +brai ded +horticul ture +cr ative +che ek +ad do +the force +ko ko +schiz oph +j ie +w ada +twentyon epilots +h bcu +pro ton +pau ls +lou isa +lat am +kyr gy +com pac +sd k +sap i +?? ? 
+liber alism +ep silon +ai den +w usa +spra yed +baske tball +kim ono +blue wave +ali as +ë§ Ī +mug shot +ce c +do gre +ad ora +ðŁĵ· @ +kra kow +intrigu ed +exhau sting +astron omer +ven ison +lady bug +ci v +bra e +us m +bri be +acup uncture +pembro ke +ke ating +chi e +y ad +t si +sm i +see ding +gate shead +lis boa +gy p +canv ass +ðŁĶ´ âļªï¸ı +op i +ni r +soci etal +ly te +ati es +c sm +ar tery +al in +aka poor +abstr acts +â̦ â̦ +teen wolf +ne we +travel gram +sentim ental +per ched +han del +ho ek +f ay +coordin ating +anim ate +man ian +effor t +jer ky +f ck +adri enne +ma bly +tra ding +my el +spi ro +sol a +stor ing +over drive +monday morning +dream team +pul se +bon di +ber nie +pgat our +tri poli +son am +plat t +âļ ¡ +ag roup +îIJ Ĵ +inv ading +v cu +k ell +ñ os +un dead +pod casting +mercede sam +mana fort +cor tex +que so +impecc able +pal mer +wil doz +sport sc +guacam ole +dispen ser +cate gori +stun ts +per il +invit ations +dune din +xi e +achi eves +saf er +pre ds +ph an +knuck les +k ak +igno res +lovemy job +aru ba +ound ation +datac enter +co vert +gr ing +cou ple +ا ر +vol i +mc cle +arti sans +lu do +kal am +arom a +under taker +hu la +wiz kid +gu mb +god frey +bakers field +ker n +engine er +car ve +pal in +guaran tees +pe bbles +b ays +zi eg +fin k +â¬ĩï¸ı â¬ĩï¸ı +down pours +ro chelle +rasp berry +ðŁĺ ® +gra phies +stom p +caf es +ari zed +utt ar +cal vary +dri e +crusad er +bus an +tux edo +si u +seam us +cul tured +blan chard +town house +ge red +butter milk +flu ctu +roger federer +hel i +ðŁ¦ ĥ +u ous +ram esh +mu ppets +email marketing +ye ss +br ice +ri zio +pel o +donnein arte +u rable +inve stin +bump ing +raji v +sav a +thro wer +fore x +o hhhh +th rust +pull man +r fid +sep sis +le ed +fri ght +roun ding +ne b +ph ins +ai sha +utili zing +squ ats +gold smith +j ic +bo ks +vau s +i po +exclu sion +tari ff +po kes +min al +land s +en force +washington dc +or char +g x +mar ys +ey our +aussi e +bak ers +un popular +latin os +lar ge +pu tnam +bol o +wa de +pel o +di zz +ob struction +fla ppy +weare the +depend ence +pajam a +e te +y ann +e wan +disc la +a ay +kar ina +e ic +an trim +w soc +neg atively +kai do +fotogra fia +dh ru +colo ssal +mcle od +k wang +mani pu +ex hilar +us atoday +summer slam +co les +tapro om +unbeat able +de ma +tic ks +k ling +fil s +campaig ners +ภķ +brew ster +audu bon +qu ay +ch s +ki gali +d ler +strength ens +som al +sign ingday +gol ds +pig ment +orche stral +g q +lin kin +ðŁı ĩ +ta w +algar ve +ho v +ear le +gold fish +am ig +ex er +ben in +dru id +ðŁIJ ¸ +she m +quat tro +mer cen +men te +incorpor ating +bon anza +state fair +en de +concep tions +e es +âĻ¥ï¸ı âĻ¥ï¸ı +d son +fire arm +orb ital +we h +multi p +fo b +requi em +p light +thou se +sa id +oc re +remem brance +n old +chi pping +be v +er t +ca thy +sy m +ri ggs +m ley +dialo gues +sl ender +how l +gau teng +wd w +to bi +smo kes +im plo +b pm +ad n +mom basa +cap sul +bloom field +artic ul +cle o +goog led +flu ffy +l ard +en zyme +ve sti +ibra hi +fl ame +e mea +out ages +dispro por +ble ak +an sel +ick er +st louis +stock market +good friday +sau lt +stal led +pro m +ep som +b é +the se +sau ces +me w +lit fest +pre d +re u +kar ak +si enna +ell in +bio technology +ï¸ıâĥ£ - +tac tic +sa in +por k +mon za +ka j +lu sh +compart ment +chang ing +shraddha kapoor +fo al +ar tem +cu ando +can ola +ori ente +me sse +d ited +br c +box er +bbc two +s st +ment day +em ing +de wey +kof i +âŀĸâŀĸ âŀĸâŀĸ +reali zation +smo l +tw ood +san je +flag staff +ber wick +cor set +can 
ary +whistle blower +et ched +com posing +squee zed +bow er +auto desk +ne h +mathi eu +ba ja +Å Ĥ +hy dra +da im +am eri +insi sted +mer lot +gar ros +heart news +gaine sville +cut ler +bo de +ðŁĺī ðŁĺī +lew es +scoun try +g sa +us u +cc m +god awgs +phara oh +cra e +mor ley +hyp noti +f ades +neur ons +fu zz +ing co +high landers +star k +vig ne +pac kets +amar illo +reu ben +insul ts +bas ic +vec tor +n me +ac ruz +tro s +transm itter +ðŁĺ ŀ +interpre t +ðŁĺ ² +pre quel +mc gowan +dis semin +ðŁĴĺ ðŁĴĺ +mascul inity +indie gamedev +ali ve +te t +pe tal +ema iled +ar med +ko o +he er +ba ird +super junior +metro polis +delav in +decl ines +stit utes +Û ģ +p tbo +g lan +cho res +e aling +chri ssy +ste mc +vi an +assassin ated +pron ounce +illeg als +discover y +cav ill +fri fotos +f al +so i +sabot age +t int +p dc +ðŁİīðŁİ Ī +ãĤ Ĭãģ +ji o +endeav or +in sig +commit tees +she arer +me tz +mar rying +h dd +g by +fre t +tri sh +pu l +scrip ted +sa ki +l w +ke ye +shim i +nan aimo +ca h +à « +tem pered +ici an +du gg +dish washer +air field +s rugby +gr inch +y st +r ms +mahat ma +lan kan +disc ar +dige stion +no des +l ls +om ic +gu tter +tis garh +feder ico +election day +bo he +master card +fire ball +âľ Ķï¸ı +oy ster +p ong +do k +en route +m vc +beat the +ali stair +shu b +sh aming +cherno byl +ghi bli +the s +pin ion +d bs +sal ts +ic tion +epi ph +nc pol +in convenience +whit ley +inspec ting +wood ley +wi ener +skil let +no les +m ca +h ina +a sha +willing ness +well ness +tam ed +show time +dis advantaged +ber nat +us n +mission aries +coun selling +arrog ant +quant itative +leg alization +ho dge +energye fficiency +cameron dallas +pos sessions +p bb +harris burg +v g +hindu ism +happy thanksgiving +fi b +re acting +tweeta picture +pol iti +mu ppet +hur rah +pac e +coast guard +guar ded +as am +par ry +fore very +x q +oom f +ke anu +j ind +ri st +customer service +sac red +ðŁĺ º +ton er +occur rence +mat u +val dez +red d +is ak +power rangers +pe asant +raj ini +abra ham +e mil +car do +tr il +hair styles +obsole te +sam pler +direc tive +delavin kisses +ver ton +glo s +sp ay +paler mo +com ets +man ziel +chicag of +ski pped +pic torial +h ant +b mi +a ol +re opens +pad dling +devo s +fra ud +bas eline +que ues +sp ired +sn are +eu ve +descri ptions +daisi es +ca ching +gall eria +tri mmed +stin o +recy cla +ic ular +bir ken +raw lings +fli x +chic as +b gt +lik eli +argy ll +thel ove +ga ston +bl anca +ha k +f one +sailor moon +h aci +ima c +fl yn +de can +bel les +ap ic +zo g +taun ton +con stance +lasag na +ker nel +in ka +har bor +collec tively +calcul ated +av ille +shil pa +pur du +gi mm +fun er +a est +pembroke shire +nighting ale +n unes +hyper tension +hu bert +sli ders +infer tility +comm ended +transat lantic +metr ical +!! 
@ +Å Ł +ss g +bac ca +inver ted +fun factfriday +it ans +albu m +acqu ainted +ri er +whel an +sar ab +mu e +snoo ze +pi ff +agre eing +sp itting +jer maine +n ye +âľı ï¸ı +am bush +ze ph +con greg +univers ity +s app +wann abe +pat rice +ib d +do glo +fri dges +sun d +king ston +ar gon +kam en +hardro ck +ds ley +do lores +ì ° +ota ku +pi ping +be having +âŃIJï¸ıâŃIJï¸ı âŃIJï¸ı +blue bird +an sari +teapo t +fire work +cro p +log ans +ty ped +thick ness +ig ers +c fp +dys functional +contra sting +et ty +aston martin +tx st +dra grace +at tributes +marath on +manu scripts +john stone +ðŁĺ± ðŁĺ± +bo er +ay u +aru gula +poo rest +con du +assu mption +anag h +no h +delav in +sit ter +g ö +mor ow +kick start +com i +gl acial +ghe ad +ba in +ker shaw +en dof +fre ud +om at +i af +hu g +sign up +each other +defin ite +tu bing +shak ira +ðŁijı ðŁı½ +uu uu +sw in +sham bles +ol as +sk ell +brit ain +kn w +clu tter +om y +j ens +hang ed +city scape +scra ps +un locking +dead liest +er no +breast cancer +a it +inspec t +fu ri +ðŁĴ Į +ku d +ju le +or ah +mi ds +m dt +bur gring +r attle +pu sa +stal k +cle ans +iss ance +z ek +worth it +nam eis +musko ka +council man +urban art +bar rac +un solved +tu l +g ita +white board +soy beans +em ent +cont i +saturday motivation +conveni ently +doc king +t ado +âı © +sp ino +puppy love +po f +fabric ated +robb ers +adop ts +ti fied +kk r +indulg ence +notic eable +macqu arie +chap el +sensu al +ki ko +melan oma +lore tta +li ance +ab en +sp lus +ga al +ac ele +lib dems +compar isons +ðŁĮ µ +rhy thms +mer y +en capsul +nap ier +ðŁijĮ ðŁijĮðŁijĮ +ðŁij IJ +plat z +fre sno +re formed +ran bir +el it +the best +bhu shan +vin nie +impro vised +s ittin +re created +e ba +ec ker +ac rob +pon te +cor d +gi ddy +eur usd +fe ver +intu ition +gar i +dum mies +bud weiser +amend ments +te tra +sch nit +ay as +mar ys +ci st +k ani +ker mit +ðŁĺ±ðŁĺ± ðŁĺ± +tin ker +strol ling +di visional +niger i +omin ous +menstru al +kar ab +k hy +bw fc +pan handle +l illi +well er +stra pped +son the +transfer ring +ethe real +sne aks +ru dol +gab les +jac king +cin code +for tune +canadi ens +con for +ab normal +frank lin +tit a +mu la +persi st +cu ties +ki el +ðŁĩ± ðŁĩ +her mann +aw k +fi asco +ko to +we ta +hi ker +budd y +preven tive +mcgra w +game boy +forsy th +top shop +si ob +sad h +in tram +follow art +so aps +dragon ball +ou x +morri son +๠ĥ +lu bric +adul thood +morri sons +âļ łï¸ı +her mo +ta ka +stall one +mis use +team gb +ra gha +con fined +at y +hom ophobic +nw o +sky news +ho ya +ac rosse +wi iu +pur ée +jed dah +ðŁ¤ § +advis ers +ph ine +an is +scrump tious +ë° ķ +c ke +vin y +ter m +s dc +o do +home school +vas c +leop ards +debor ah +illic it +cur ran +as roma +nau ght +mar ig +brand i +em p +ðŁĺį ðŁijĮ +î Į +su spend +lu z +initi ation +sch aft +jensen ackles +craw ler +post doc +des ks +trail blazer +den omin +tri x +no ise +po et +± ï¸ı +s mug +vol atile +proof s +pharmac ist +sardin ia +mash able +kim chi +co ed +schal ke +doo dled +c sw +sh ur +ro x +do k +chris brown +mathemat ician +ab ound +ang elic +rock ford +d ole +yor kers +ms n +g man +xavi er +bor rowing +mark ings +longh orn +k ja +diver ted +mm it +euph oria +ay yy +te a +pa h +ck i +un cut +li ven +ky ung +fan art +mer ing +red ding +amo vie +gri di +c thulhu +schol arly +ju dah +th bewithyou +eu calyp +ðŁIJ ķ +hert fordshire +cour troom +by u +auc tioned +ple ase +mar cia +ê° ĵ +succe eded +el as +arvin d +t lot +saig on +re tt +ra kesh +fd ny +as en +se bring +gladi ators +you know +v lad +gol a +par 
ap +ÑĢ Ð¸ +sab cnews +one team +oh l +sun e +ri j +cd c +star gate +run down +plat o +ph c +chat ter +ra viol +mn f +mand ala +li et +ภķ +mari a +hun gover +consoli dation +fer rell +tradition al +ilove art +gal ap +ðŁı Į +que zon +espa ña +ðŁĩ¨ðŁĩ Ń +ho bby +steam boat +mali gn +guil lau +pro hi +its me +íĥ Ģ +in scription +al z +mari an +k ade +mm on +adju sting +ne sts +intern ally +ci r +vik ram +mal ala +k ph +fel icia +the real +cap tivity +at is +marcor ubio +kale ido +che v +mano j +le more +gent ri +vi ps +tro pe +" âĢĶ +pair ings +mal nutrition +fr ay +desig nation +brun omars +az e +tor rential +pan zer +ga il +under the +the ological +schizoph re +dazz le +freder ic +mo par +ad illa +so ggy +ra un +medi ocre +colo rec +i fe +p inst +blu ef + ² +world water +gir oud +clar inet +ad olf +tar antino +receip ts +assu mp +ðŁij Ł +coffe es +âľĬ ðŁı¾ +du plex +s of +r x +lin o +timber wolves +pan dit +mo tm +e ga +ay ama +ach s +outsi der +ll en +co er +til ly +cheese burger +ma ds +ple dis +emp ty +national parks +az iz +p mi +jun kies +f ener +sq n +è s +gener ation +cleop atra +bhuban es +mosqu es +ty free +popp ins +tw c +or well +n age +ka whi +hol low +dal ai +¨¨ ¨¨ +ou ro +m health +gi on +az o +vis as +reneg ade +re ic +w sop +ðŁĴļ ðŁĴĽ +e chel +tox icity +mü n +bun k +stimul ating +asth our +\ ' +ep h +ende mic +cn bc +shrin king +peabo dy +michel angelo +can yon +wal e +su mi +si ders +inu it +? . +profession alism +dr acing +plat oon +p ons +out bound +maple leafs +de sol +cen cy +a than +ver ma +ru bbing +ok an +ðŁij ł +mull ins +authent ic +Å į +alman ac +ga ia +bb q +on imo +ke h +ty a +tou ts +y av +re posit +, . +wi ght +se eyou +cal lof +done sia +bar gaining +gr anth +sd su +amphi theater +p su +re watching +wine tasting +peak district +dete cting +thur man +phe e +èª ķ +u mich +re r +sculp ted +go le +name sake +ðŁĶ ģ +serv icing +bau gh +pu gh +pen cil +dar th +munch kin +at orium +ten ers +sun y +rolling stones +mag ing +star rer +i dris +fe instein +ag ron +âĺºï¸ı âĺºï¸ı +supervis ed +chamele on +aggre gate +succe ssive +mo gul +inst yle +pol dark +custom e +ohio state +ha ya +ci des +broker age +angel ou +fifa wwc +de forestation +al ton +pam ph +hu gged +ho bo +change able +ku ber +bur roughs +demon etisation +cape cod +vers atility +or ice +le ila +womenin science +tu a +he dges +embarrass ment +ali fe +so ars +ni ghter +hy mn +gi pp +chas u +tech s +ni all +k illa +hi ka +cam els +valu e + ¢ +sc oops +mah moud +clu sive +adri ana +pac o +oz il +un as +transl ations +whispe rer +s bi +bu xton +bio tics +indi ffe +ken ney +k lar +et ching +barra best +inst ability +se ine +vo tel +blo gged +whis key +my space +t ant +lan dia +give back +illu s +aw ak +ac ab +f bloggers +cloud computing +blat ant +syri ans +band ra +sty n +an em +ke ted +kar thik +barun sob +pin ot +gu bernat +gay e +arti ste +i fied +conven tions +hu an +geni uses +eeee ee +fol ly +somer ville +pride month +ðŁĩºðŁĩ¸ ðŁĩºðŁĩ¸ +chemo therapy +paul s +bak ar +ìĦ¸ë¸ IJ +taiwan ese +fol lo +c ss +re ign +nn nn +fla un +catastro phe +iti es +frag ments +extre mists +ym oun +car men +eze kiel +conne cting +se h +man ta +remodel ing +we ymouth +at oms +ce m +ne well +lu mi +the open +mo c +mili band +g land +z shq +mag gie +mani acs +m sp +ad y +cre ams +le anne +e sta +py g +af finity +pray er +dun bar +ligh troom +ac adi +wyn onna +roman tic +state dept +sick le +wh os +lam o +et our +fin ity +shru b +shar pen +pun dit +ed on +af ore +mar s +jeff ery +ter ps +medal list +kath arine +accu sing +ta z 
+roy d +from home +confron tation +alle gh +ðŁijī ðŁijī +refresh er +ran veer +never land +jo jo +lu crative +en am +ca ver +pa edi +man jaro +flu ids +the ssal +oppre ssed +mu ss +joh anna +Ø ® +cn g +buil dthe +sett les +s ith +fu ego +cl amp +ar ag +pay er +ted x +mand y +inter stellar +fr c +ch and +b cc +mo lo +len til +johan sson +grims by +nature lovers +ðŁļ¨ ðŁļ¨ðŁļ¨ +shin de +x in +international dayof +transiti onal +sat a +cad dy +wo d +if u +ha ys +holl yo +j ang +ir c +co im +grad able +" " +ðŁį ´ +ঠ¾ +a el +n yo +west lake +time out +sof i +phenom ena +cultiv ation +ag no +un armed +so t +con j +gen o +royal navy +nutriti on +fair mont +ti relessly +sn g +re ty +mic a +lu cent +slo ane +droo l +riz al +od ell +critici zed +. '" +la ze +deser ted +co der +pra s +l illian +itiner ary +dav y +an ap +whi pping +hobo ken +kare ena +çľ Ł +vi us +ter n +nan tucket +mis understood +bu laga +st ant +chin ook +z am +reli es +d ss +ed mond +sket chy +m ell +fe x +rec tor +dist ill +day dream +wine maker +ri pley +billion aires +hel ene +ati f +cul prit +bertr and +wou ldnt +ma pped +v ak +gla dly +parliam ent +kidlit art +ware ness +goli ath +âĨ ĵ +view point +tat ted +fu ls +dor sey +ang lers +li ds +ki ya +bow les +be h +b ite +compati bility +ance stral +pro x +beha ved +gubernat orial +ch field +sab an +z h +teen y +shibu ya +holli day +pan cy +âĿĦï¸ı âĿĦï¸ı +seun gri +? , +ðŁĩ¦ ðŁĩ· +im itation +impac tful +any i +gene vie +añ os +bate man +gli der +af ar +ra sheed +effor tless +sh war +dach sh +er un +at os +kin i +ch d +kha ki +k lin +felici dades +bel o +as l +to ppers +fin ley +stac ey +rigor ous +kar ting +le ppard +car michael +be ret +c se +ak hi +mer ingue +ab an +ha ke +ger i +er jee +re sto +comm anders +pr it +fl or +ad ven +ex termin +remain der +å IJ +es g +martin o +lulla by +| @ +mi gn +in store +big bang +cor di +cau ley +ante bellum +dg ate +cro ck +span dex +scaf folding +ore os +ê°ĵ ìĦ¸ë¸IJ +pom ona +ma uro +uni versi +re mi +af ootball +t ant +sm alls +ne h +worl do +tropic al +mor ph +jav elin +gla r +arqu itec +reminis cent +tu bs +spide y +make u +syl la +progressi ves +blo t +shor ten +keep in +ch ak +ang st +super food +decad ent +ston y +neuro logical +ar boretum +ann ak +fe ma +per cu +dis respectful +small biz +lo x +co om +c sc +bs bi +pre valence +him ss +esp an +mo ga +fr ampton +sky map +mas se +levi athan +( ). +noctur nal +car ameli +ang or +amne sia +outsi ders +she alth +rhin o +ant ag +ag io +ðŁĴ° ðŁĴ° +take me +kab addi +c si +m sh +coch rane +thessal oni +sil a +ha us +du sting +obe se +mack lemore +mani sh +len in +m dc +gro wn +shef field +s rs +ke le +car son +ch um +dah lia +can tore +opp o +how ling +cyber crime +sur realism +sc ran +fa iz +thre n +rac ists +r out +pk not +se mana +sin i +mc cull +ma chi +alfon so +y b +sar dar +kend rick +den g +reci pro +on f +doom sday +bri bery +custom iz +art is +c pi +ðŁĻĪ ðŁĻĪ +sla va +let te +en s +âĿ¤ï¸ı ðŁĺĺ +cra yon +ad an +tr c +migr ate +simp son +row ers +king sley +farmers market +shee han +ne phe +bor non +car ton +mic key +all ure +u lu +sli pknot +heb do +gui do +dog celebration +online marketing +acceler ating +) .. 
+origin ated +macar oni +ed tech +out field +mit z +disc us +adverti ser +man or +ha shi +descri p +cap ita +ful bright +recep tor +con n +con ey +spion age +r attle +pre st +u li +blog post +acker ay +) â̦ +red velvet +mat th +inspir ing +b sd +ker ri +po con +mil lar +re pur +accent ure +ä ¹ +ram bo +ragnar ok +dele ting +british museum +pat ory +leip zig +flori an +sci fi +in ers +br ate +yo y +melis sa +ab er +ma sa +po te +mosquit oes +transpl ant +r pa +; )) +bast ille +yl an +joye ux +melo dic +cap tions +atri st +roch dale +gott i +pew die +cuties aturday +who is +aqu aculture +tiv a +sp el +he ss +ha ji +fred die +co per +brand o +v k +photo book +* , +my dayin +micha ela +brune i +sr ini +in te +Ä ± +de ol +d fc +separ ately +bun d +ve sts +to c +me ck +rein forced +constra ints +car roll +sq ft +re ver +cam per +bird man +in action +gener ators +triumph ant +pe sts +o vo +gy pt +al amo +sc aled +suresh pp +sd n +is mo +gi os +) @ +justic eleague +restaur ant +gab i +den gue +next gen +exemp li +ap ex +inspir ational +down side +kid z +u pl +et na +alvar o +fel dman +bar net +m ha +es ch +bloo ded +>>>> >>>> +kan i +ho fficial +casablanc a +bir ds +ty ga +sw amp +o day +new castle +nb ap +ci sion +cho ols +af lo +ne p +mon ton +ak b +super model +down time +th os +sc wx +snoo py +ag greg +yo ke +nor cal +we tt +prolon ged +me tast +beat er +f ta +t lap +disgu sted +y h +voice over +itch y +ip c +ðŁİ ¾ +phe asant +stra its +ram pant +j g +fer til +assu res +fortun es +sal inas +liz ards +kett le +i bs +cyn thi +he g +mc cr +soccer oos +happen ings +cor den +ðŁĺĤ ðŁijĮ +t ches +egre t +wolver ines +congratul ated +ho gg +bott ling +wr i +fer ri +bo sch +af ire +og den +s jo +j dm +sv t +con tex +tol lywood +min k +me se +super sonic +op oulos +å ¸ +âĶ ģ +knuck le +gu ise +gam i +chu cky +z inger +radi al +compla ined +bo da +fe tal +discipl ines +cor ro +ðŁĩ®ðŁĩ ¹ +op ted +filtr ation +ad nan +em cee +mi stre +insom ni +fer gus +tra jec +on don +med tech +tanger ine +madra s +gru e +cab s +z hu +sureshpp rabhu +insul ated +day swild +pp m +band ai +v day +s ff +squ id +lo thing +not dead +expre ssive +cu ll +ala stair +x u +up front +fish ers +en es +um d +dis missal +sti er +sel s +lu st +re active +prote ster +eyel ashes +al im +goo de +gre eng +da ir +com pen +anush ka +proto typing +ma pu +bear ings +ðŁIJ Ł +for me +bsbi botany +timo thy +out skirts +am bed +are tha +wend ell +stre aks +ni m +k pk +sne e +fit ter +quo ta +p ate +win ning +ðŁį Ń +sho pping +ma inst +cul ver +ste vie +mcfad den +counter parts +gren fell +fol som +dor set +tech crunch +⬠ħï¸ı +tip tuesday +us l +tre x +geor gie +ranveer official +lic ks +se wn +k f +' â̦ +jap s +p ate +orth op +fe sta +stra s +mon tal +hammer smith +fore most +wido ws +mad re +ite z +mito chondri +lig ans +z ona +cari bou +m ss +andre i +weather channel +gh c +: ... 
+ta ft +awe ather +al isation +bru tal +bliss ful +nik ola +mal icious +q m +mpg vip +bro die +bl itz +applau d +dri bb +v ague +dog go +transl ating +interpre ted +hat ched +ge tyour +benefici aries +spar ring +caes ars +aw illiams +la hat +bro ke +ti mp +virtu es +rel ying +pie tro +k tn +ici sts +pab lo +lou i +a ag +pn pp +cha st +pul ses +fini sh +usair force +type writer +thomp son +dog s +ut to +ãģ į +sand al +new ly +do ge +z w +wan kers +ne gr +mu cha +determin es +black fish +sk unk +mu ps +instru ment +phy to +daysto go +skin ned +hai der +con ten +ðŁIJ¾ ðŁIJ¾ +we iler +undoub tedly +chair ing +wall is +sh ard +zind abad +adul t +absor ption +pre sto +deplo ying +drum mond +battle front +seag ulls +how dy +juda ism +des de +part ition +âľ Ŀ +no logy +national bestfriend +lesn ar +film fare +co asts +christen sen +ac an +mb u +co pped +ru bble +sw c +fun nier +far ther +where as +nano technology +with stand +pil low +bow ers +to pe +it ly +con fit +ma kar +comfor ts +bo sh +cli pper +bal la +sti k +mil b +safe guard +musi que +eas port +ya z +pad ded +bad er +fore ign +chop in +archi ve +o ka +tran sporting +tml talk +aj it +consequ ence +sc roo +ff o +collabor ated +pug chat +ye mi +jav ed +au burn +o of +ma w +sau cer +miti gate +i les +evangeli st +ter ie +re cl +indic tment +cat a +bright ness +may the +whim sical +un lv +key word +cu min +med way +west world +tra w +im posing +form ity +coul ter +ab z +ny pd +grass i +kel sey +qld pol +clock work +f dr +di anne +âĺ ij +ad h +p ann +bra vely +ae ge +un lawful +ver di +pocaly pse +phar o +kar la +reson ance +ma stiff +la dak +bu u +ma iled +hi i +craw ley +tor rent +mach ado +liby an +effort lessly +fal sely +q vist +ke ef +craf thour +cheri shed +val kyrie +s ari +kal amaz +be he +ðŁĮ Ļ +th im +ro ddy +col trane +but chers +ach im +wk end +awk ward +cab rera +:) ))) +fran c +decl an +con dos +a ja +pandor amusic +char ter +ph ill +mon trose +hatch back +handic app +gre aves +eucalyp tus +ut most +t son +bur ton +mid wives +in cur +ðŁĺį # +moo d +compre ssed +tom a +must ang +mo g +as ana +te stic +sho tel +in sol +cor sair +nh q +ben ny +sm ma +kap ur +in con +jon as +ener gies +don al +as ad +se z +n pa +archi ved +stimul ate +do p +hy d +gri eving +ãĥ Ī +ron a +why te +tree house +ss ell +sand ro +ko bo +ther most +se clu +hi ya +ge ez +mam as +prisc illa +flav oured +fas s +w old +maker space +cospla y +p tv +happy valentinesday +sequo ia +love craft +gu an +d tm +ci i +yoko hama +pos thum +re q +ðŁĶµ âļªï¸ı +galat asar +dol by +hamp tons +disturb ance +stone henge +ok c +disrup ting +month sary +jun gle +head lights +du stin +micro sof +happy mothersday +ko ko +gra zi +te sto +na idu +mal ay +ari al +ru mb +ab oo +har man +tra pe +spo ils +je ho +go dly +lock screen +z un +pi ous +ma gento +l enders +prob able +corpor al +m our +aw al +su a +call me +ton ne +go vin +devast ation +x j +gear box +war lock +per me +it ate +gaza underattack +du val +paras ite +clement e +le th +i va +fro zen +tho les +to bin +cair n +s ill +luc kiest +conver ts +st ale +pan cra +euro pale +wis dom +sch ur +ì ¶ +verti go +bi j +u bc +nu re +righte ousness +mt c +factor y +ver st +revers ed +hur i +hee chul +fab er +ar r +ul ous +ven om +ph at +green ery +bra dy +à ¦ +: (( +never giveup +di sha +mo ta +health care +dun ham +dex po +den zel +bb ins +f ics +wh am +mc g +eli an +wat a +str alia +tel lu +pe sky +spin off +ar moured +re acted +do fficial +te du +sag ar +mor ally +paralle led +fi os +dow ner +dau gh +re do +world cup +tari q +bar ne 
+glaci ers +oc cult +barbar ian +her mosa +!! !) +y ur +inter nation +p ss +sit u +p int +american air +sw am +dopp ler +ðŁĴĻ ðŁĴľ +cincode mayo +le van +hell enic +mc ne +ju di +yu h +st x +qu are +ðŁĺĤ . +sti g +g els +mot ley +hard work +euro zone +e ad +ç¥ Ń +seab ir +ci us +la id +alpac a +presu mably +pewdie pie +boo ted +am ari +tam ine +sol ace +bar row +acade mies +x ian +om ination +dun geons +b ma +de ity +ai k +stab il +hir a +affection ate +ving ne +new port +ãħĭ ãħĭ +thir ds +re tains +aroma therapy +ski er +ni ma +do pe +cr inge +con domin +to or +anim ator +sar aj +seas cape +minim alism +lake shore +calla way +berg man +à¤ Ĺ +whisp ering +stupi d +ri ghtful +requ is +ir n +se va +ut pol +tuber culo +squ ish +de but +govern mental +christ ine +all man +weap on +s ito +bur i +lo lita +leaf y +fu ch +tin ted +mck en +a hahaha +ðŁĩµðŁĩ ¹ +repe al +ne gan +ðŁķ Ĭ +tail gating +game insight +ðŁıŁ ï¸ı +yaku za +z t +ti ring +pro posing +bow lers +tra itors +ak shi +cler gy +cit o +up sets +tu scal +symph onic +sil ently +shu ff +black well +ðŁĺĤ ) +ko be +rober to +ri dg +dc u +mer ino +ft p +east side +. ~ +nb l +mn leg +ts for +frau dul +ca pping +in my +gymna st +ston es +ss in +twe aks +shag gy +oak land +dem sin +sang ria +mm va +hen nessy +down ton +ri ghtly +in it +aga ve +ob last +northe ast +friend ship +dal a +tro phy +ðŁij ½ +mag in +margar itas +ê · +ww fc +fa sh +di ke +cu d +char t +ðŁij ® +refuge es +jop lin +n cs +imp y +firm ware +pas cu +flam in +health tech +bell letstalk +w aka +ol ls +la go +co wan +bombar dier +sh ome +ðŁĻ ħ +mc master +na ve +well s +u ta +tell ers +mis fits +kap il +face off +af firm +a pro +whit epaper +super yacht +speci mens +al located +... , +- __ +ka w +dachsh und +djo ker +s work +qui ere +or um +ðŁIJ ł +som m +c mt +ingh our +skin ny +lgb ti +gi ggles +break away +resear ched +par ity +my al +ms l +re tained +si vity +make inindia +sol ves +defam ation +wal tham +sri racha +road way +concep tu +al in +iw ant +å Ī +del ft +tender loin +ga ins +faul ts +sw ire +st ellen +pol lo +dy ne +bornon thisday +asdf ghj +sq l +sali m +advis es +vo ip +ìĹij ìĨ +un touched +she il +ontari o +uph ill +so bre +de shi +nov ella +du tton +craw fish +ا٠Ĩ +ma a +tw ine +kal in +ðŁĩµðŁĩ Ń +ye ss +brook s +hoo siers +ton ka +umbrel las +ay ers +ate am +acqu iring +su ction +ä n +wi es +tari ans +soci o +mat tb +shepher ds +o so +charity tuesday +s logans +ninj as +al bat +by te +bash ir +trampol ine +mydayin la +i ja +bas el +ror y +gol die +fi rec +un noticed +pecu liar +sch a +ker son +mour ns +liquid ity +qu ipment +hi bs +ar s +aeron au +slide show +sla bs +delici ousness +sk itchen +hta fc +full erton +cre ighton +aer ob +procrastin ation +az ores +white hall +uss occer +medi ation +djoker nole +and me +um en +noxi ous +jo ss +ili fe +anni vers +sudan ese +et res +under mine +whole foods +diso be +kor i +ade le +eli z +can ti +al on +gymna sium +sarko die +meteoro logist +yl de +ste en +stamp collecting +nas al +lo tt +fran ks +ex ol +ack i +good year +animal rights +y les +vio lets +mm es +s thel +ra pping +tu scan +wai ver +tur ner +eat local +northe asthour +anim ations +tom morow +t sh +ff ame +bra e +pe tron +glam our +br yn +d cs +bal es +ðŁĶ ¶ +bro v +bre v +b ons +physi que +car ne +x e +elix ir +vol ved +l oma +ìľ ł +æ ĺ +van u +ri gs +bal ance +va res +bon ita +sprink le +perfec to +di on +le ak +calcu tta +o ba +d ma +c mon +tun er +pneu monia +bo gus +apolo ge +cl ough +bor ne +)) )) +revi ved +o varian +ner f +c legg +fan fest +cho u 
+reali zes +mc n +li gu +leg alize +just saying +for ster +bo sni +k hi +in dom +hei del +en cryp +si ss +ed di +mar bles +brisban e +y ing +pre paid +wal sall +cooper ate +orche str +mar isa +ho wie +che wy +bren ner +andro meda +e gan +sto cki +cav endish +ag an +ban o +de ir +go g +bl k +re thinking +ch ig +rhe u +sni p +p eng +semin ole +m swx +an nex +lyn da +lewisham ilton +cu mul +tb l +dolph in +agu ero +........ .... +pre lude +at our +gr anger +too ting +ro tun +dis ar +home items +da res +**** **** +ðŁij Ĩ +compre h +jin x +as well +iri e +circul ating +ðŁIJ ¥ +over board +cultiv ate +rhe tt +oriente ering +ca k +bal kans +s itt +jas min +britney spears +ro tor +se aling +g bc +oc ci +f as +eman cip +com er +war time +tic kle +son ny +pac es +log g +at rix +sr p +g win +do bbs +uz be +the wanted +dru sh +ex tru +m icky +honore es +dar win +re dux +mm j +ram i +jalape ño +io c +do ver +ju ju +whit ney +s eng +en ly +au ch +archipel ago +vigil ant +man gal +wil dest +parano id +hal i +bb ly +sanc tioned +real ms +con co +u ddin +c sk +play time +libr a +sav ag +oc tane +rec tan +re turn +par rish +mor rha +cc p +c mu +sa iled +se vent +ro sie +pil ing +he w +boar ded +seg ments +neph ro +( . +cr ats +bak es +ðŁį ¸ +back tothe +sibl ing +kirk land +ke o +gu wa +bre ads +ðŁĺľ ðŁĺľ +t q +haras sed +ga u +wil bur +j isoo +ep er +li sam +tri ppin +sh ino +ru kh +beast mode +cho a +inst aweather +rich land +gar i +fe z +cowboy snation +fur suit +k run +a en +sycam ore +se gun +ent ennial +di h +o ax +demsin philly +ðŁĻ Ģ +sn hl +pen nies +pass words +ma kin +ty e +d eng +kni gh +jeep life +hel pline +a for +zz zz +ste amy +pic ker +iter ate +happen ingnow +ki b +bloom berg +martyr dom +bul ly +assor tment +a hora +zo e +no i +illu stri +agar wal +p sc +electr onica +recruit er +gar diner +rad ha +naf ta +dot net +pi ero +geor g +bel s +ðŁĺĤ ðŁĺį +tuberculo sis +run nin +mor is +haul ing +ev oc +bre thren +sha ir +frame works +a stu +ri gid +ku ma +kre me +jin nah +insu rers +ny u +f ere +nol lywood +good vibes +- ... +toi le +sk ril +instaweather pro +cze ch +pa vel +one piece +nike plus +fi let +cav ity +ðŁı½ âĢįâĻĤï¸ı +ðŁİ £ +dra stic +dail ys +siam ese +re bu +oste o +lar k +f re +sh elling +p é +glad ys +ðŁıĢ ðŁıĢ +gusta ve +submer ged +grand stand +att u +won t +f pv +b ley +jon i +ang ames +weigh ted +al ou +ठ¶ +les bians +f j +anni es +am l +dor ia +dav in +be ta +can c +madewith unity +ha j +bad lands +mu l +blu ec +pa wn +cov ington +neuro logy +htt weets +dysle xia +thel ove +ne at +fork lift +autom ate +une ven +monte ss +he in +ha g +rel ics +competiti veness +can elo +mar tens +bullet proof +sk ittles +g ya +pri mo +americ afirst +woo o +abor tions +?? !! 
+ma che +ld ers +rl ly +preli ms +direc t +cour se +swa in +super cell +ec centric +sting ray +ple ts +wil cox +west in +okan agan +kir an +car bo +bomb ings +ra rest +bo h +gaw d +di gg +mo ana +enti rety +en closed +dodge ball +par ton +milky way +at r +thorough bred +re ally +qant as +epiph any +ine e +aero smith +spi eth +ar thro +ell ini +du bu +bra ving +âļ½ âļ½ +re structuring +illumin ate +equ ili +mp i +ash ton +pony tail +ma scots +flat tering +cru m +ast a +à® ° +stranger things +bar nab +ر ÙĬ +make shift +got cha +will am +cho irs +kilom etres +gho sh +eu than +dol ly +un ning +the ar +cre we +w sw +j ace +dis miss +ke an +ho ta +kh at +~ > +thir u +ren dez +hart man +tee ssi +cas ca +z ah +hydr ange +fo d +aw p +mzan si +thick er +nago ya +ne va +sti que +cast el +dam ian +there by +ji ang +ale k +music islife +ra q +calla han +gou ache +somal iland +sean hannity +ra heem +lo se +elo ve +whar ton +rectan gular +illustr ating +har ne +auti sma +scra pped +ell and +decre e +nag pur +ki pp +so re +n md +ma as +gun a +gart ner +bel li +then ight +je on +gendere quality +gi ver +a el +gar ments +ne u +mardi gras +mar sden +ro wer +pollu ted +camer aman +vin od +be asley +cro c +ji u +hollyo aks +anesthe sia +al les +ste ward +lati mes +ðŁĩºðŁĩ¸ðŁĩºðŁĩ¸ ðŁĩºðŁĩ¸ +tic ian +gor ia +come dic +ðŁ¤Ķ ð٤ĶðŁ¤Ķ +nai ve +sli ons +ł Ī +bur glar +ðŁĺŃðŁĺŃ ðŁĺŃðŁĺŃðŁĺŃ +york shi +se ñ +fan boy +lau rel +inci dence +potom ac +rober ta +presi den +pr yor +os bourne +w ku +te me +pal ae +ðŁ¥ º +re boun +itu de +red dish +k hand +coloni alism +north carolina +ðĿ Ĵ +manne quin +lady bird +ta sty +knowledge able +g shore +ðŁĮ Į +à® © +qu aker +salz burg +med alists +chy na +bridesma id +ma ori +ro p +outra ged +in adequate +truck ers +al ana +ìĿ ¼ +ri x +oooo oooo +command ments +lam beth +aa j +eco friendly +bla z +morecam be +boun cy +rou x +rai ded +mi zed +sh c +gaw x +labor atories +ru bs +rest room +consult ations +ca jun +virgin i +so ir +rev ue +ple in +wag er +ç ¹ +we do +growing up +! ðŁĺĬ +face ted +sin ners +ho vering +ti ene +seas oning +an ja +leg go +il is +fla x +dev o +ash ram +mati sse +ker i +go wer +bo tox +mar shes +unh cr +ts m +opti mus +dun i +stu ffs +so k +order ly +n bad +islam ophobia +raviol i +fab er +cre ds +won ka +in fusion +over weight +daily news +assi mil +acol lege +medalli on +kili manjaro +sti ff +tham es +sun ken +th ard +my dubai +hilari ously +han nel +plu mber +fair view +separ ating +rasc al +qui en +necess ities +confeder ation +ll ll +: ] +weak nesses +bron co +ra ffles +el ot +ãĤ¸ ãĥ +advent calendar +ðŁİ ¹ +stra vel +tun ic +k su +im peach +e spionage +! 
- +di ment +cur rant +bio de +commu ting +by ron +ðŁĴĵ ðŁĴĵ +shad ed +tr uro +cray ons +ar ne +h sc +fre aked +dram ati +fle ek +u cd +marl borough +^ - +cross ings +mal o +black ops +bin ance +cho ked +chen ey +pl o +ge stures +val edic +ryan air +rem ington +v cs +mc kee +ec z +be gs +nail art +mayor of +happy fathersday +war t +pet itions +n ingly +clean energy +bro x +sl alom +exist ent +ab ay +ug liest +tom p +stom a +sel by +goal scorer +ben ji +overwhel mingly +lan s +semiconduc tor +south korea +re scheduled +sk yl +en listed +dow ski +si del +rosen berg +nas ser +white head +pri us +har are +en n +ry der +í Ĥ +mon g +clas ico +transpor ter +po tty +is me +** *** +vic e +sk it +ode ssa +l mp +her n +raci ally +pin oy +paragu ay +obitu ary +go es +bu cha +side walks +angu lar +un constitutional +transiti oning +i bu +gu ys +un packing +oooo oo +black girl +ber gs + ¯ +wordof theday +trump train +thunder bolt +m si +fasci sts +ठ¬ +t sk +collap ses +raje sh +loveis love +migr ating +set back +ðŁĺĬ âĿ¤ï¸ı +t els +safety first +nar rated +jae joong +un answered +lique ur +en nes +dal go +bill ings +salt water +mer maids +lon gs +clap ham +we arec +pic collage +n ach +h ace +pois oned +lo th +ag na +adel rey +guar dia +poli shing +peace keeping +d all +p isa +la pland +process ors +de andre +so bs +p once +dra ins +c be +ðŁİ¥ : +spla sh +meat ball +fon tana +worcester shirehour +ne v +bri sk +b int +ac r +po x +cay enne +skril lex +j fc +hahahaha hahaha +gla s +en gul +tempor al +oni zed +con cre +com pose +vibr ations +plant ers +fer t +criticalrole fanart +t bli +sch allenge +huck abee +munici pal +iam bic +radi os +ne vis +dura bility +mc cla +horse back +inst itutes +ful fill +atta ch +ate ur +ak an +resi sting +illumin ation +hand le +hair care +om ent +macle od +ka iser +g no +bear down +ly f +gl omer +distor tion +z m +san k +roo sters +is now +as ports +ag en +wo ken +st george +ro mper +my le +econom ists +ru to +t will +health and +d ito +ws l +tair p +pra kash +mic heal +h ts +w rights +kat su +fioren tina +defen seman +d itch +var sity +texan scheer +ba ham +sc anned +we il +seduc tive +ðŁijį ðŁı½ +fu e +er win +dav ison +ter ran +moo ds +wool f +re source +@ . +cu sh +ðŁį ° +regre ssion +cur led +la zer +jo anne +ab bott +mo z +down ers +mm mmmm +valent ina +k hair +dream t +cro ok +che k +ste aming +nephe ws +cl eric +as ober +indefin itely +w ye +us news +joy ce +flu shing +wynonna earp +ron do +kis s +hot dog +bar ns +sax ophon +far ley +gas p +decre asing +al way +pe x +l sd +shi ft +p outine +ra zz +rescu ing +ni ko +ho ch +cc l +u aap +n ts +m car +il wx +conqu ering +ket tering +stur dy +delay ing +sto k +vani shed +cath ar +bin gham +in v +ic hiro +he mo +budge ting +[... ] +be ss +sebasti an +slow ed +ðĿ ij +musli m +stun s +acton climate +ve a +se ton +rose tta +oun t +hard in +flu id +ca w +ðŁ¥ Ĥ +yach t +un l +sp hy +provoc ative +or ic +is back +__ _ +nicol as +gy an +loo se +fl in +reb ate +: :: +! 
"@ +com icon +she ff +down stream +chic hester +beach life +mom life +diabe te +ar ra +van e +ok u +ye o +man go +try out +app ell +he irs +arjun a +dd u +na veen +movi c +soci alists +s back +criteri on +soyu z +k her +da z +yol anda +wine oclock +re ina +one w +leon ard +en dez +u bs +support local +facilit ated +carameli zed +b pa +vuel ta +my tho +m ami +spe are +nbap layoffs +fe vre +nick jonas +im print +c so +craig slist +la salle +gi deon +ha doop +dis regard +w ud +tu c +ma gee +acou stics +ta a +qui e +pol a +cr t +dw yer +dis sec +capit ol +men tion +kn oll +he igh +fin ders +plac ements +l se +indi ra +gur i +madhuri dixit +kingdom s +iambic pent +geor gina +je ky +conflic ting +bay an +aga tha +uph old +dr on +vic ar +ex pat +periph eral +pe ssi +fa f +ance stor +? .. +wid get +pun c +comm enced +beav s +air waves +ad dis +po a +de sses +co den +vu e +ru pee +kar in +spo ck +m sy +ภ° +pr ick +fill more +ti fication +thing sto +sar de +em ile +pere ira +n ad +bright ening +arre sting +wo king +usc g +sp ill +raspberry pi +hu go +ite c +is ma +cuff links +optimi zed +oc c +mi wx +en ka +el ited +afford able +sa kh +coron ado +ho h +at ul +ai oli +jim cantore +accoun ted +vin ay +her mit +groo ves +ran ch +r illa +we tter +ou tof +veter in +ni kov +ki an +fair banks +ram apho +n iti +k ko +ru sty +ne stle +tv xq +shahe er +âĿ¤âĿ¤ âĿ¤âĿ¤ +penn ant +gem stones +dem debate +ðŁIJ Ĭ +auton ews +support indiefilm +mach o +ve x +new sat +ne ti +conce ssions +can died +yof the +mac au +den ds +cricke ters +san iti +mari ano +gh at +ar toftheday +¡ ľ +e gos +gen oa +chat bots +bri er +al labout +mon ty +spi ed +r tr +comfor t +sni ppets +real time +gra in +exam ined +en lightening +tt u +god bless +release the +sing ular +ki ans +ha ka +sor ren +defe ct +mar g +equ ities +d orian +su ka +per l +aishwar ya +pul lover +preci sion +fair way +ne ve +rive ting +vill anova +en com +ak o +passion ately +europale ague +siem pre +x vi +enligh tened +c fr +âĺħâĺħ âĺħâĺħ +wast eland +is f +new comers +emergen cy +amphi theatre +- . 
+text books +figur ative +tre mb +pe sc +ab hin +ab bot +ac acia +har ds +por sche +kau ai +el isa +car rick +abo u +elli er +be ch +neu tron +galap agos +ru ben +in nis +how to +nun s +sab ine +i ac +clin ched +no tori +fi ves +cairn gor +per i +gr c +ðŁĴ¯ ðŁĴ¯ +mal m +twelf th +di ff +rout ines +marty n +lin den +synthesi zer +nu mber +game cube +fal kirk +byz antine +queu ing +gr ill +scal able +char red +rou ting +her bali +gri zz +ðŁĺŃðŁĺŃ ðŁĺŃ +tol l +termin als +l pc +ab d +war mups +remo vable +¯ \ +vi go +pap aya +ne ve +lov ingly +jo kers +ib les +sse tt +poten ti +pel e +gi gi +sadi q +leg acy +son o +ru pees +retar ded +ele e +par r +fi ance +ey re +say ers +pend ants +mak nae +al bans +adap ting +p ff +pu berty +ji u +ing rad +hypocr ite +diplom ats +phys ical +rob by +bon sai +ãģ · +f att +catal unya +âľ ĸï¸ı +ro ma +more land +so e +conver sions +stl blues +shol m +gra ssy +pra do +on u +assaul ting +> _ +sett es +dis graceful +aph ra +âļ½ï¸ı âļ½ï¸ı +ठª +kil n +goal tender +s ru +philanthro pist +b als +th n +stu den +sando val +dogre scue +eli ons +asse ssed +lar go +hec tares +sh rm +sa if +cle avage +no ches +n ene +fat alities +cur ing +clean ser +al es +p vp +south bank +pizz eria +marsh als +kni fe +an dover +tbli ghtning +sr sly +ou te +digi mon +timesof india +prome the +le bo +f su +wit z +rever e +man as +mam ba +ch ica +gu an +exhibit or +csr racing +d ere +xx xxx +gu sta +story time +ston ey +organ ics +and u +se am +min ogue +anushka sharma +ab a +ðŁİĻ ï¸ı +ugand an +chro matic +as sn +document aries +sh t +ru paul +loy d +k ats +e us +ite ch +me dusa +pan ty +kel logg +et to +talla de +sha a +do st +p ms +mari ana +je ster +croo ks +ðŁĶ ¬ +min danao +ind hoven +ðŁ¤ ª +le xi +tv n +jan is +co te +ãģ Ĩ +ser rano +iw m +ðŁIJ ¬ +k ke +distribu tors +cap u +counterfe it +camp site +ag gie +ðŁĺ ¼ +chhat tisgarh +~ @ +state u +san di +prevent able +cl s +can ne +mm c +i ver +sa haran +pal is +night out +do s +ap ia +absc bn +manag erial +aro se +mo wx +aro sa +ðŁĮ ³ +under dog +remo ver +astronom ers +lent ils +su scep +smoo ther +pend leton +fau cet +e mory +dal mati +af cb +tic us +exem pt +en rol +d heim +ðŁIJ º +restric tion +star fish +sto w +snor kel +thunder birds +she ad +homo sexual +dy n +as li +andre tti +dou che +dom o +tar mac +slu mber +pr onto +first dayof +mini ature +mari achi +argu s +recomm ending +mobi les +in ce +illustri ous +or c +adver ts +gr its +wea sel +pag oda +over pass +gre ys +maxi mus +arma gh +wood land +sun ni +ðŁĴ ī +ë Ŀ +ti one +soci o +ho s +ðŁ¤Ĺ ðŁ¤Ĺ +wind sor +subsequ ent +munch ies +id h +exclu ding +e mi +cu th +z ai +week days +law suits +barn ard +Ø ª +pe tting +net es +mul ligan +pharmac ists +ra quel +e ton +cran ston +gil ded +cle ary +ce ph +ra a +pam per +lombar di +as in +sher ry +pro d +for te +ari anism +buffalob ills +æľ ¬ +ðŁĶ¥ # +uu u +just ices +car ina +nat in +mas low +dro oling +cog nac +cam ber +el ong +r dr +in en +convic tions +am use +tro ck +harm less +visit ation +gen omic +bl and +beno it +chim p +tuscal oosa +gre asy +x po +gil t +se q +per mitted +christma seve +book s +mu e +old school +human right +be ati +ðŁĶ Ŀ +sh at +sculp ting +h wan +fern andes +sci utto +fu entes +endeav ors +maid stone +un paralleled +shou ted +queen of +mer c +band ic +ve da +sel angor +pi le +ja han +intimid ating +disapp ears +cl ich +za ha +w urst +hi v +fod ils +cor dless +aaaa aa +hy dra +bel inda +e els +bu f +su staining +rugby league +no c +brig itte +( ðŁĵ¸: +tromb one +soo the +smo g +ad p +stab le +ing ley +diagno 
se +ms g +we ss +tic keting +one e +nsw pol +e up +auto psy +adity anath +sun down +river front +si ya +p is +hier archy +dur ango +di jk +ren shaw +he aps +epide mi +david bowie +interne tof +dd i +nation ality +mb ar +air y +win der +w alia +elli ott +c x +bav arian +pl att +an tw +wi wx +sof ter +ne ha +h eller +th and +dani ela +bo ast +degra dation +ðŁĴ¦ ðŁĴ¦ +transform ing +man e +av ut +ðŁĺĪ ðŁĺĪ +vo ter +the e +t ate +pu ff +in door +sop roud +boy ce +boris johnson +wait in +immun ology +ðŁıĨðŁıĨ ðŁıĨ +âĿ Į +street food +liz asober +cavali er +c elia +need le +motor ing +g ato +, ) +ra de +harve st +t ms +jar pad +on ey +air men +v re +impair ment +abhi shek +snoo p +l ant +fam ously +bl ou +s ze +g ander +un touch +tu f +dee jay +col lateral +b ind +ðŁļ © +pin ning +ic n +' ; +the economist +ul tram +worldwater day +ti poff +the i +feed ers +campa ign +sc umb +day weekend +yo m +pe dic +h ough +ps v +pl in +on de +boston marathon +az zy +* _* +con ley +thi ago +hoo o +gal erie +luci d +je tt +gl itz +final fantasy +achiev ers +y ung +peregr ine +op hi +dam es +biom ar +âĺĢï¸ı âĺĢï¸ı +sk c +l ics +fl ank +ar rahman +ho of +uphol stery +t ats +wo z + ¿ +snor ing +ra er +l ju +ap d +pl ating +kan u +im ation +fragr ances +m ra +mor ay +mo tt +im muni +hearti es +bho pal +tim ers +g ata +color way +car nation +win get +si ghs +s ville +optimi st +chate au +olympi ans +ci o +singer songwriter +ny o +fi bers +bur ch +ag ro +mil ne +ig bo +cr amer +ation als +dan ube +pad ma +nor mani +en forced +bre ck +boeh ner +ar den +sur rendered +pros thetic +om a +ha iled +calcul ations +w fa +bi b +fcb live +fon da +west coast +que sts +friend ly +to wie +fit ch +bal ot +star dom +scrat ching +ho sa +thi ka +o ven +stro ke +out post +pharmaceu ticals +hi kari +mu y +af d +fallon tonight +squ at +or u +dra ined +chocol at +ë¯ ¼ +wor ths +ri b +mu j +that s +residen te +it el +boo st +mi gos +mul led +la a +etsy shop +don keys +me k +p tc +flin ders +e hs +ro hit +mu ir +g ad +compos itions +åĨ Ļ +combu stion +i kh +yemen i +wav ed +gar ci +ak os +oo ds +fu sion +se que +s lan +pl ur +kic chasu +shenan do +s ams +worl den +horo witz +with me +mic robes +k ki +ðŁĴĶ ðŁĴĶ +w su +patch work +fre er +y aki +the art +symboli sm +mil er +bt n +ma bu +side kick +motiv ates +sag itt +natur als +serv iced +ps ori +pa ola +qu ig +i badan +gi ggs +ë ³ +sciento logy +si oux +salam at +d res +cad bury +d hawan +ci ón +_ ' +swa pping +maris ka +james bond +explo sives +ay les +af er +s agu +cen sor +tom a +jeff erson +ring ed +par tist +ir responsible +aguil ar +vac ay +equ itable +altrin cham +ac ur +man ish +ger min +schoo led +pu tter +ed ad +nav al +toast y +sol areclipse +dish u +coy ne +ac co +mu ck +mar an +el os +len der +cro ix +worth less +ha ber +gun men +ðŁį ĵ +zen ith +t enders +hur st +hol tz +itali ans +car low +u cd +characteri stic +bun g +av l +u th +sa sia +rs l +red man +neighbor ing +green peace +sti ps +follow party +y gk +en os +omni bus +na issance +chri ssy +secu re +call back +ji hoon +memor y +block er +l anta +daf fodils +bil t +ffer ty +fau st +ie c +nipp les +so g +m nd +jagu ar +bol dly +ab poli +pro position +gun sense +evan sville +cu tters +we go +dou n +do x +stal lions +ka j +shi ppers +j awa +vol o +le ven +pap rika +kov ich +jor di +induc tees +app alling +dial ysis +allevi ate +âĢĶ âĢĶ +pie ter +mid wi +q tr +juli ette +inter mission +haw ks +act ment +one ill +k lin +vam ps +fam ous +cou ld +autom obi +da an +west end +elli p +nh c +mel anch +web series +ton gue +snat ched 
+smy th +tan gible +sl i +e asing +bar stool +over lay +afford ability +ting ed +ter as +ay ush +wanna one +rh ine +dan a +sh ana +kend al +fer tile +w ir +repl eni +lar vae +is ro +con vos +ab brevi +u cc +hun gry +bur rows +ag er +nav i +mat in +du per +cer n +ma don +ķ ï¸ı +é ģ +tu ps +hy att +sh ep +friday night +wis er +hei di +hat ton +p gh +foun tain +wrist bands +ahmadi yya +aeri al +subscri bed +so los +m ace +sla yed +for fe +dul ce +christ mass +arun jaitley +viol ate +ob stru +ni eces +w vu +idy l +fa ze +pre serves +infr inge +premi ers +inter vals +agen cy +( © +stand alone +di mes +bo er +param eters +ge tit +ðŁĺĺðŁĺĺ ðŁĺĺðŁĺĺ +tu lane +for given +scol l +mb ps +smash bros +rob bi +prima vera +ali st +ghost ly +ay at +ye ats +impre ssionist +ear phones +caul field +wai kiki +sal ute +sc ou +mu ay +louis vuitton +bak hta +ado g +inven tions +hur d +forec lo +stream line +thalai var +ch snews +will ard +t sn +euro parl +cru sher +my sore +gro wer +ra ping +pat ti +g den +sm w +muf ti +kid man +ab r +soun ders +skep tical +ðŁĶ İ +sun dar +i me +fer g +feather weight +ar lington +pas qu +ag azine +wearab le +nati c +mccl ure +inter mitt +hor de +six ties +car te +bha v +ze al +experi ential +ador ned +som mer +eno te +hypo thesis +stin ky +pro to +dead lines +vo gel +mus ings +monc ton +gu ter +f le +aci on +voice of +ta sha +inhabit ants +type face +s ba +bts x +ðŁĶ Ĵ +wor x +u hc +jo ko +cell ars +gor o +continu um +... & +weather cee +ha p +sr k +ris ers +lonely planet +un named +co eur +ðŁį Į +the world +ili ke +fa sten +ami go +ri ba +ramapho sa +staf fers +had ley +? ?" +fi ore +sal ut +hu ff +bez os +Ñ ĭ +ra der +kam ala +in line +fill ers +um atic +all in +shat ter +re in +o ku +ch ases +fla gged +baby metal +water stones +ts b +cut out +op hel +aam a +rockab illy +sto lic +jet blue +ich ick +down ton +uzbe kistan +pat na +la q +gr ange +) _/ +subsi di +sc p +newsc ast +it sa +twee tyour +e mor +archae ologists +uni fication +por ta +q x +protec tors +pro hib +charis ma +car tag +ren fre +scul pt +guwa hati +de ma +boo p +unf pa +dex ter +lay la +alleg es +sou ps +never again +l ys +cal c +bar oness +visu alize +ger ber +absor bed +i ers +a han +fon tein +detec tors +verst appen +sv c +formul ated +ac dc +li x +in competent +bh k +lour des +water house +snow ed +appreci ative +sig ma +lizasober ano +pen ned +pay check +tall inn +fanc afe +par isi +av alley +vi g +ru fc +hard ship +so cute +po ise +ì ¹ +roth schild +k ly +???? ???? +l hp +il ay +f hs +am ad +ide als +brad bury +bal boa +nic ot +kid nap +wol ve +tas manian +op t +matthi as +ãĥ³ ãĤ +super markets +mylittle pony +me lee +li ster +gr oun +fe dora +kind ness +en en +bra hms +¯\ _( +ros well +mar lene +ic u +re formation +or ail +he brides +dispar ities +terrac otta +swal lows +re id +influ encing +flu or +den e +tum our +blon des +thunder bird +sh eva +moga dishu +ka b +cre eps +i ving +ene ed +anno y +âĶ Ģ +intri gue +enqu iry +ar aj +tur al +kuber netes +end lessly +divi dends +tor a +ti sh +commemor ates +un ra +tri b +pon ty +ne m +diss ent +brew ingco +ðŁĺ ½ +nor mali +bi of +( ... 
+chil len +ì£ ¼ +mell on +av is +mccor mack +ing ra +enrich ed +custome rexperience +testo sterone +snu g +sett i +ger onimo +inqui rer +bre aches +very thing +bloom ing +mu ra +dispo s +bi de +de va +shade sof +in trin +sh ev +s ven +nayanth ara +gan esha +c ws +ber ta +label led +use um +nick named +ma han +car uso +ap ur +ðŁij Ĩ +w q +orphan age +discar ded +mag nu +lu e +je on +bridge port +pac ing +mercur y +( ðŁĵ¸ +marx ist +amphi bious +transplant ation +stit ching +then burg +gradu al +ãĤ Į +ro ft +ma ils +ine c +guy ana +dopp elg +ver o +re write +head less +harb augh +gate way +car sforsale +sw i +st is +mach t +un de +sura baya +stap leton +nur turing +mil ner +ya o +lma oooo +ko sh +arsen al +k ame +er ry +ar royo +dis misses +ru bbed +rc b +lew d +dil u +and or +vi de +ur in +inter sec +ha ar +al b +year swith +app leton +é al +ul livan +suc cu +monter rey +d mx +artem is +ron nie +farm land +s football +gro tto +anth i +ãĢ ģ +à® Ł +vid ya +jimmy fallon +ൠį +t zer +gravit ational +w thr +u hhh +e hr +tin ker +ti juana +scran ton +ram charan +bar clay +re van +m si +ka p +wr s +we thenorth +tor al +sat u +gro m +fac ep +erick son +z yn +se dge +oo dle +spur sofficial +ds p +sic ilian +soli hull +recei vers +ladak h +hend rick +ther i +presi ding +mc guinness +litt ers +gun nar +gh oul +wi b +n tv +kar o +fro ck +b lau +ampli fy +all is +ul lah +memo irs +kh loe +intercep tions +pet day +lo oney +con fin +ch ay +piyush goyal +frequ encies +ut z +event ual +warm ly +obli vion +an ka +ta it +âĿ¤ï¸ı . +director ial +ru lers +prince s +mu ck +stur ridge +deu ce +abri dged +bagu ette +un cles +pen du +min ding +forre ster +av ila +wall er +wall street +ment or +hin o +high way +crom well +fanart friday +mb i +co yle +a hi +tro ve +spie gel +pay tm +mcin tosh +jan sen +nit i +nash ville +len o +leicester shire +le gos +dic t +ðŁĵ ½ +sp ad +beverly hills +sy rah +separ ates +z ain +un fit +dra gs +tan ia +over flowing +hri thik +haw thorn +z ani +mac far +fi de +to tem +pe ds +fundament ally +cal ico +sin ner +j ä +hil de +ds d +ten ay +ta hit +mil f +lie b +inform ing +up lift +ra el +mortg ages +lec t +ii ii +guillau me +compos ites +old smobile +l end +gar th +com mish +bapti zed +scorpi ons +ru cker +bringback our +alli ance +thalap athy +tal i +sp ans +eri dge +wither spoon +lin da +sky lar +kor n +hom s +Ä į +sil enced +caf fe +ar ty +dist inguish +to wed +pun g +jessic a +ear nest +beau fort +t ama +study abroad +si khs +new bie +nav ratri +mar ble +loun ging +lit ter +dal it +so sa +iz es +gra de +com promising +tr iton +de tta +v j +chau ffe +spec tral +powe red +montess ori +artic ulate +hal ton +al co +ye y +mn twins +acoun ty +ðŁijı ðŁı¾ +âī Ī +mad men +kal a +gru m +chi k +ati s +su me +akh tar +job search +high lighter +bo ath +âĦ ¹ +tar zan +lam bo +âĽĦ ï¸ı +ox fam +dump ster +pretz els +mac os +incl ined +fac tual +adverti sers +shu i +pu ree +ml pfi +anti dote +cap o +pa str +merc ado +but ton +ar min +ag g +lol la +horri bly +er rands +christop he +time snow +monday motiv +li ss +scand als +mc i +dispropor tion +âĺ İ +sur pass +samar itan +so tho +pu rest +fl att +trivi atuesday +delec table +leop old +hermi one +chou dhary +en rich +¡ ¡ +subsi diary +ine qualities +bachel or +auto immune +la kota +i hop +ad jec +the simpsons +sh es +se k +gret chen +up stream +hin akhan +coper nic +x tina +lu g +tough ness +e ad +cli pped +bi us +sl v +fah ren +dee pak +ca u +x an +im mature +dig ni +bo bs +shred ding +but tery +accommod ations +de ven +chun ks +super league +sky bet 
+kil dare +je et +ë į +ce k +wrec ks +pro pane +oh l +tb d +quo i +trum pp +mi mo +reluct ant +ver ne +o ic +ma gh +ar nau +se ver +li dge +stair way +kicchasu deep +ðŁĶ º +mach ining +aama admi +ot i +c da +al it +pan y +inst alls +ac ct +e shop +di em +hard well +fulfill ment +sc afe +qu ack +extrac ts +swee tened +fi ghton +f di +d inger +wal tham +us ur +refe rees +seok jin +gran n +af rin +th n +sch af +par cels +bet is +amar ine +nom an +kh tar +mor itz +cou pling +bar ons +ðŁIJ ¸ +à ¸ +sl p +sad ler +x ander +tri ad +mc millan +kh z +divi ding +ìĹijìĨ Į +dar yl +zed d +le ys +pla ques +flu ori +tipper ary +on nell +di dier +lang ford +im c +the sun +bir dies +ar cha +ye ssss +t di +dar ia +cand ace +al tam +pal aces +ch it +sant am +event ful +book of +ad b +mon stax +cre ole +co el +âĸ ½ +we aren +sten nis +she ath +ati sm +gron ingen +mlpfi m +le pre +wrong ly +rsp ca +rendez vous +acknowle dging +pel vic +solic itor +sla ys +nue stra +lo d +is lander +fer oci +fashion show +ra ss +dge on +adole scents +sma shes +negli gence +grate ful +ved ere +sw oop +ing l +apol ice +vand alism +gan n +jo ao +di supdates +zimbab we +under age +radi ance +w of +bour geo +pla s +cr ani +gh ue +wrec kem +warran ts +re form +jim mie +at wood +ys l +neil himself +l bj +i man +tan to +nois se +ver bs +equip o +al together +mam ent +l ice +dou glass +tier ney +pri med +j hal +furn itu +braz ili +v ill +past els +n ison +u ff +paral ysis +jay e +im po +ðŁij ģ +strate gically +pakistan is +was sup +super bike +thank u +tru elove +sha ikh +israel is +vi p +to g +li en +la ker +grey hounds +cul ars +bian chi +balot elli +ar ran +loo s +str ates +he bron +ar vo +sunder land +the al +tomb stone +sand man +c pac +thanks giving +love him +lat ino +an in +aka if +ĭ ãĤ +tor quay +di est +alli anz +ðŁĺ ķ +golf club +cl lr +wal cott +sch nau +promp ted +nomin ating +len nox +val et +mon ro +may ward +e ph +ðŁĶ Ķ +inter oper +r da +re flex +arm chair +ê° ķ +stri pper +por ti +ph arm +ham za +ni reland +ne ue +h pv +port foli +sun burn +fris bee +be al +bapti ste +x h +ty m +pr ati +o vers +haz rat +deser t +der ry +us ky +em mett +ach arya +)_/ ¯ +shu d +may a +ham ill +ra im +nr c +fitt ings +cur vy +ðŁı ĩ +ster ling +à¥ Ģ +wal kin +short cuts +mil ly +ast ur +alpha be +pl i +pe z +miss you +rad ford +ml g +ta eyang +notjust lakes +du mps +seren dip +le ur +ra ving +e ster +de priv +absc bn +ðŁijĩ ðŁı» +scar city +o cr +mean ings +cap t +da hl +fer mentation +bri oche +to win +out lander +massi mo +en cro +ðŁ¥ ³ +buil t +po tam +kir i +tm w +monit ored +k ites +peoples vote +gray son +íģ ¬ +afri ka +a dies +i vote +gy ne +g annon +di x +c mc +ou ral +fox andfriends +bel i +ig ne +gl an +katrin akaif +co politics +qual itative +p si +lu cci +disc oura +âĺ ® +kel li +gau tam +carac as +reale st +pu la +in us +hill top +make aw +atten borough +tw y +r arity +peck ham +ma hon +corn elius +clin icians +ton line +tb i +paradi se +ka si +inev it +fresh ness +colling wood +lun atic +defen se +cop d +in fra +wain wright +sains bury +alab am +te ma +lac o +chec ker +releg ated +tren t +stal ks +huff post +bhubanes war +ast ral +share your +prim rose +hi me +cat an +end ment +en dow +cle mens +mal oney +hil ary +game time +den ise +collabor ators +b wo +radic als +gue tta +ici on +au a +snap matic +sat chel +excav ation +base man +s ão +gn ation +fel d +surve y +shah zad +ma st +anirud hofficial +tru cker +ot ago +geo graph +ethe l +âļ¡ï¸ı âļ¡ï¸ı +s ver +mu tt +internetof things +ancho red +wh ouse +bang la +bal main +ç¹ 
ĭãģ +break fa +á Ģ +twi ster +te tris +ca v +stag s +g z +au b +stor med +hel ens +yar mouth +st asy +gustav o +co sc +vin son +up p +sc ricket +assump tions +app e +nu h +u er +pre mise +n aga +e amon +coron ary +na f +north side +el mer +ro tar +out lining +el f +re surg +kat elyn +in can +hyster ia +ce e +am bani +pro lly +Į ãĤĬãģ +ax es +san jose +rem brandt +mag pie +even ly +scor sese +qu aint +f g +b buk +indian football +weare all +spd wy +pis ces +ec g +âĺħâĺħâĺħâĺħ âĺħ +pre orders +: | +ni pple +sal azar +ju me +jail break +min n +bas sett +ze tta +jef free +ad jun +tic on +san diego +drink local +chol era +solic itors +o bo +com post +ni an +wr a +tre ach +ic ic +profession al +del ve +leg ate +histor ia +cro issant +con noisse +nam o +palli ative +chem trails +i ority +global warming +comic art +behavi oural +re sted +li as +cli mates +Ł ãģĦ +rut land +nou rish +menopau se +hot ties +demen ti +ve spa +mel ville +anal ogue +tz man +str ung +im perfect +gl are +cir cling +ros berg +rec o +oc ity +lo ire +em be +do ssier +ne el +nan do +me a +gal vani +fin esse +ag p +berke ley +asi m +âĺº âĺº +quil ted +ish ere +un matched +po tion +for z +at re +selfi es +juli ana +ðŁļ ¶ +âĸ º +mel ton +âłĢâłĢâłĢâłĢ âłĢâłĢâłĢâłĢ +spin rilla +pur cell +ed p +at leti +tony awards +ra ja +pro gno +mol ten +stu ff +p ally +nobel prize +âĻ» ï¸ı +spiritu al +spe ake +sa sha +bri um +tru ss +critici ze +assassinscre ed +yor uba +u lo +fire man +workin progress +ef cc +fla res +ro bot +hi kers +cl l +shado wing +pat sy +leh man +c ns +å ± +guad al +à± į +ra pe +r honda +paralle ls +son ja +langu age +land ings +z ola +cr amps +bur ning +apprais al +jol la +ham m +kas a +gul ly +f go +uly sses +ri be +ðŁĴ Ħ +ib u +eti enne +bri ar +fin ely +comb ating +y ql +go tham +we chat +to paz +primar ies +l se +iz z +hel e +dispon ible +cy stic +bel ichick +th rush +kansas city +ge om +soli di +red bubble +by stand +cambridge shire +par fait +ast le +ow o +ind ore +stom ping +sm elly +ðŁ¤ ĸ +locom o +adm itting +hol me +clock wise +min sk +mc co +for get +ev p +cam ra +ab ella +yo tes +universit yof +mé xico +silver ado +ric ket +crom bie +pu j +eradic ate +deli ght +y go +glam ping +vic a +du ggan +coun ters +cf d +sc our +react js +pu ram +paras ites +in ki +vill en +stel la +li mbo +ang as +k cr +ðŁĴļðŁĴļ ðŁĴļ +vap ori +mum ford +oli gar +à ¼ +al oo +boo ties +ad r +k elli +dru mmers +av ici +nature uk +ron al +in trac +un splash +le che +g oma +el ine +envir o +bi onic +bu eno +mi k +av in +star ling +em powers +cake day +boy cot +ðŁĴļ ðŁĴļ +ðŁĮ¸ ðŁĮ¸ +v ach +m ci +fractu res +ger i +sk ing +exclu ded +lu ce +ja ve +ig gy +evi den +aki stan +a wn +mor als +luci fer +ha ban +tumb ling +sunday motivation +mo sley +captain america +sch icago +the one +mo td +d ts +ðŁIJ ¼ +rep ell +ii i +locu st +geo spatial +mer sey +immer se +desc end +ber nade +j s +boat sales +win der +cran k +sing leton +candid acy +ben a +ðŁı» âĢį +high lander +ol t +k prs +healthy lifestyle +four teen +end the +ith aca +circul ated +r ans +pre valent +ha vas +splend or +roo ster +kalamaz oo +jewell ers +enne dy +rou sey +es y +cann ons +ornam ental +// // +ren don +win ne +mol ding +eid mubarak +coun tess +simon a +ha wa +fo es +du ster +sb u +por tray +mar ries +goo dday +cho co +achi ever +ðŁĺ¹ ðŁĺ¹ +pre neur +tr amp +tom i +n bat +garden chat +farra khan +ever glades +ab ru +sou sa +se ce +homes wee +terre strial +bar it +sri devi +ol u +mel inda +f rick +can dies +ðŁĺŃ ðŁĴķ +qu reshi +family fun +exor cist +cardin al +ny t +dies el +cu 
mulus +capric orn +si ology +lor na +dou gie +an die +super sport +c fl +п ÑĢи +say ang +pe ek +ภĬ +lo be +j em +ing lis +gg led +c sn +amne sty +chu ps +ba es +sau er +ðŁı IJ +mongo lian +en et +back street +dr illed +acce ssing +ce o +b se +ai ken +pur r +wor sen +whe res +war k +testi fying +bu ri +bla st +aw g +ðŁĵ ĭ +re defining +hear ing +u ci +c mp +bon i +tail oring +ta ji +noc chi +em t +stephen king +ne et +compla ins +campaig ner +luci ano +twili ght +ti esto +pas sports +flo yd +cathe dr +na ked +caregi ver +b coz +ade cides +ku ri +ly k +br aries +dren ched +disc lose +ðŁĴª ðŁı½ +le blanc +je tty +gar ty +chip mun +b su +rhyth mic +ic z +fri d +anne x +ame x +solo ist +lanc ers +arro whead +speci fication +simul ated +na is +inver te +bo wing +wor ship +f z +abo ss +sha q +ì¶ ķ +challeng ers +an arch +aamaadmi party +ãħĭãħĭ ãħĭ +suffol k +so corro +sn ell +cla dding +absor bing +shaw a +particip ates +ðŁį Ķ +book stores +bak u +seap ort +ko jima +gab y +pack ard +electr ician +let it +mo wing +fa wad +young jae +hot mail +men ing +u rie +intim acy +con ti +: ") +lifeis good +in ciner +i dri +craz iness +jour nos +fran chi +bott len +al da +ff es +k x +south we +air a +clay ton +sco ti +f j +bri ga +ð٤ĺ ðŁı» +demonstr ators +y z +stor k +na q +casc ades +travel chat +plat a +pad ma +fran ci +at tain +bat girl +lom bard +hoo s +d dos +neon atal +discla imer +r ss +r ant +di sen +tex aste +so cal +frac tal +cam ry +stri fe +sn acking +mu h +sant ander +mor ons +gra f +par ades +hu ston +dru pal +mi ento +kir stel +hy de +vom it +forti fied +sphin x +da v +bir yani +win nings +s baseball +mer ged +lovel ondon +ling ering +dream big +car leton +liveli hood +djan go +astri d +gri ds +down e +bru ised +s ne +scarec row +hel ium +f nc +bi ggs +an ter +restor ative +em pires +ab del +life style +kiwan is +colloqui um +me en +pr ick +anti que +ze b +mi mic +edmon ds +ðŁijĬ ðŁijĬ +q ing +pp el +mc gill +interpre ting +âŀ ķ +rash ad +do ka +narr ator +electro magnetic +ash by +sau ra +iran deal +âģ īï¸ı +krish nan +in di +ff en +bre a +os man +multin ational +chi ppe +recruit ers +aus biz +p ounding +re gen +cur sor +refu sal +mac s +in ak +ax ial +wa ifu +up cycled +hindu stan +cas sini +carly le +scrat ches +re ef +man atee +eat ery +ðŁĵ ¢ +un condition +sen pai +on ther +comic book +pro sciutto +de mar +mi se +ma ge +fre ec +aye sha +al der +android games +ley ton +ho ck +door way +chicagof ire +aali yah +sw elling +bi x +. 
ðŁĺĤ +evan kirstel +torpe do +kon stant +genevie ve +ma ia +ha user +do torg +hide ous +fi k +sp raw +e ek +z appa +wan dered +' ' +ra jan +bam bi +( $) +wid ening +tool box +sa ir +illumin ating +pra ys +out patient +i w +day o +lo b +sw fl +sha des +gu ms +coo kin +ko di +gri ffin +traum ati +ste a +slaugh tered +god bless +air time +pseu do +b sa +hau led +ar if +à¸Ńภĩ +le l +wc po +mil iti +char ters +worl da +ru k +k gs +digital india +is able +idyl lic +esp ino +marie tta +e bo +team canada +ab our +wil ton +rock stars +fav ored +phys ic +wrink le +tb r +d print +ball arat +ad al +z ey +ðŁĺį ðŁĶ¥ +tom lin +mt r +pal sy +fener bah +tight en +phil ia +ir oning +ry u +b ant +enqu ire +ca ir +abur ger +tru n +green berg +chau han +ir ina +sh ani +trend setter +pre tt +zaf ar +alo ve +v ici +pan ic +no o +lu stre +disrup ted +bal lis +son sof +mon si +inst ac +ake st +ëĭ ¤ +kw ame +horror movies +distric t +sau cy +mb an +ar mies +with drawn +med ics +loft us +er oom +be kind +ar ns +all on +un ison +davi ds +cr at +nicot ine +so or +sm x +on co +cospla ying +zombi es +har ms +e ger +ro sy +moon shine +fe in +ce tt +du brov +reg ents +ben itez +ðŁijıðŁı¼ ðŁijıðŁı¼ +ste c +m alia +prioriti ze +ic eland +ft se +v amo +lam ont +homo sexuality +bre es +regu i +cb p +te j +sky sports +deter gent +sha sta +de rel +conserv ancy +colori zed +accol ades +vis o +show your +nan ow +bice ps +us ability +bi m +dailys ketch +pearl jam +stran gest +mega deth +broad casts +bar ren +ar ton +chri ss +confi gu +lu res +is the +e ul +railway ana +global health +gi anni +u aap +s lum +consci ously +ab re +n up +bud get +v ada +e sch +real ness +er ased +th unt +be z +armist ice +ðŁij ¹ +sh run +o led +driver less +ðŁ¤· ðŁı»âĢįâĻĢï¸ı +won dr +sk an +sal aam +mother land +h wang +gen o +gang nam +tw right +endor sing +en ic +ador ation +pau sed +patric ks +do cked +plat te +ff xv +ethnic ity +auto show +side show +after life +re located +orphan ed +food network +dare to +and ra +sla ps +v live +swim s +re imagined +mist le +re vise +real ity +bhar ti +ðŁĴĻ ðŁĴĽ +late st +prou dest +gra sses +lan yard +fresh est +carcin oma +anom aly +zieg ler +sum ner +ly rix +gor g +is d +av el +swild life +me squ +john cena +euro league +sab er +master ful +yar ra +cogn ition +jacob son +abo lic +sir loin +shuk la +moj ito +su pere +st weet +me z +e sa +rudol f +gur a +where you +tt m +win s +trust worthy +ny k +bra den +table top +good food +es on +be k +lingui stic +gra ys +ch ath +h cs +mon i +de ans +cu ssions +ch ell +slo ws +he mi +d app +shar pie +boo sters +a os +str ack +se dona +mu eller +hard wick +or nate +thor a +sal ud +o twol +ch um +mi ho +for age +thel ittle +tear ful +ones elf +min dy +sm g +gmb h +emer ald +ðŁĶ´ âļªï¸ı +tu tti +recep tions +re vising +i brox +tope ka +sal ami +expan se +i books +dob son +cli o +at s +ðŁļ Į +mo ha +is ance +shu tters +moo t +jan ine +marvel comics +jor dani +pos er +kenne th +hy ung +de ja +ase ball +speci ality +eu ston +classic car +had ith +ðŁIJ ī +chas ing +iz o +gros ven +ag lia +thisdayin history +t row +om ile +hu ar +by n +sal ine +div ine +demon ic +ty ran +han dover +revit alization +pa ella +cryp tic +se dg +m end +dun kirk +bre d +wal d +sport scar +a ard +whe aton +da ener +k lan +br t +bakhta war +spi res +schu bert +ro ti +poli sh +o se +ag ame +wonder con +prote stant +bo sa +ðŁĺ Ł +d ü +joy ride +ger trude +âĿ Ŀ +gil a +v h +tw a +tra v +swal lowed +star ve +la in +ent ren +rei ki +su kh +cra ic +az u +web page +kee fe +hypo the +hir sch +hel le +camp ground 
+w amy +tra vi +sha hi +san deep +ru i +han uman +dw p +reposit ory +no or +no ff +un real +p ell +black history +har vick +ma scar +pay ee +pa sha +gastron omy +d ÃŃ +ai g +rosen thal +open day +embelli shed +t tip +sun bathing +go pack +end ome +ï¸ı # +invali d +final four +st fu +squish y +ra sta +mo sch +jam esc +die trich +sel a +mel b +el vi +t dp +sun i +sli t +j ha +bi za +spi ked +l li +l illard +vam pi +syno psis +az har +kendrick lamar +ĮãĤĬãģ ŁãģĦ +heart less +country file +air play +arrog ance +pre e +virtu oso +ãħłãħł ãħłãħł +raj u +le bu +for ward +tu g +dro s +mondaymotiv aton +concep cion +thel o +pad i +looo ol +ÑĢ Ð¾Ð´ +it ss +eth ical +end uro +__ : +expend iture +mon ste +mas king +terri ers +ib is +e mber +cu mple +punctu ation +pi per +ir vin +ade e +yy yyyy +flash backs +cel sius +don nie +bo gota +ben evol +the script +shil pa +pro se +fin dia +ze ke +ne ko +do ves +blues lyrix +fro sh +sowe to +mp lo +al ai +sab i +raq qa +wf tv +stro ller +ian somerhalder +ðŁĶ ª +an on +mo seley +! ?!? +sta king +mol y +car tri +c sg +ast or +transc end +ma er +de ux +cow girl +sas k +pun ter +ma ken +o ates +love tt +grow ler +sag in +v n +ssi ble +officeof rg +y mc +sab ar +faul ty +ap ha +ak on +ðŁij « +snow don +ae w +raise the +ðĿ ĵ +grue some +clement ine +sp ing +lat a +worlden viron +mi mic +can aria +bakhtawar bz +ao a +fal a +ãĤ Ń +avi va +you uuu +thi gh +la dders +gu mbo +tz ky +fu zz +plastic pollution +est ate +strength ened +k ant +dr in +cal vert +transform ational +frigh tened +mac lean +elited angerous +ear thy +t son +to da +j nu +.. , +mic hal +i ban +je ong +is real +sim coe +exclu sives +blue bells +ben e +te u +pil sner +pens ke +athe ists +m pu +cartag ena +ðŁĴĹ ðŁĴĹ +million aires +kk kk +it ar +subscri ptions +remo te +ma fi +hin ton +w cc +ho k +ds b +ab leton +sevent y +pun ks +e indhoven +sh one +mcfar lane +lim popo +empha si +à ¼ +sin fo +pe tre +man grove +ch ino +ber tie +play lists +push awards +p af +deb bie +c do +r ino +ðŁı¾ âĢįâĻĤï¸ı +fol ke +bon nar +th ine +sl an +hal ter +evi e +aw some +vul tures +spar ky +seiz ures +âľ Ķ +ram one +ine ffe +al n +pro ctor +ast ra +the voice +gro te +sci on +dead line +am aya +tain ted +patter ned +exce eding +cross fit +kay lee +drop box +ru shes +tack led +mo by +retro gamer +n cbd +benef itting +shay kh +guild hall +gen try +dream cast +dread ed +bun dled +th aw +revol ving +n pt +kylie jenner +imagin ative +ron i +over came +family time +ds burg +car naval +relation ship +recogni zable +cor oner +ho le +fan fic +emir ates +bur ritos +analy se +thin ner +ne es +galli poli +bl r +cat woman +-- >> +au lt +ada ily +nau ghty +ili o +solit aire +mtv br +jocel yn +arun ach +rep ent +south gate +hy acin +essenti al +fent on +and um +it or +go pal +sl inger +po sei +aw il +wi elding +ra ila +eli as +a sto +à ¤ +tend ency +str ata +ker t +< - +im acele +da es +sti mulus +han ley +fit nes +ec stasy +lim ous +ha iling +ðŁ¤ Ń +chis wick +tar ies +sla v +pul i +moderni zation +black mail +b ingham +h fx ++ + +ðŁĩ®ðŁĩ ³ +ni v +we a +profess or +k off +bol ster +su ave +sequ ences +pepper oni +not te +dre n +ãģ¨ ç¹ĭãģ +hs v +o ga +ap tly +z ad +excel si +rin ka +mol dova +min n +ma bel +conferen cing +bas ing +of er +ob si +hamill himself +care less +brief ed +inhe rent +par ish +dub nation +town sville +sar awak +gee ky +doncaster isgreat +was abi +gu p +phen o +dra inthe +carrie underwood +ble eds +bbc world +ane w +alta f +dul wich +ani ston +w ti +sumat ra +gra fton +bl n +me ster +bode ga +re go +es q +an jo 
+sump tuous +mai sie +ï¿ ½ +wil t +jak ob +el vis +se pul +mu ster +air pollution +president e +happy monday +exten sively +fl ondon +t ls +play ing +pe ed +din ho +var dy +pi ka +n iro +au cus +ðŁį ¦ +nu ll +el ondon +juvent us +imag ines +dis ab +lit o +d ura +work places +promo te +mc caf +wood work +waw x +à® ª +tt ino +shar i +sem per +better together +ðŁijĬ ðŁı» +ze bra +pon dering +en chil +ho m +cosm ic +tan z +mo cked +ec cc +ath ed +abo lish +prop eller +paris agreement +assemb lies +indu stry +fraudul ent +pe sa +chang min +ax x +ðŁĴ µ +irr ational +cu sa +ramad han +octa via +on elove +jac ki +bar ak +taxi der +seri ous +nathan fillion +mc en +ch k +po part +grav ity +copp ola +reading fc +illu sions +j ig +ww x +re sh +ex porting +buzz ard +âĻ ¤ +p cm +lan apar +ko s +arom as +antal ya +ww dc +ven a +phil a +ball in +ðŁij Ħ +quin ta +ma o +f ery +eigh ty +sentim ents +safe guarding +r wa +pu ffs +luc ille +de cath +sl u +nu gent +de ter +braz il +ze iss +super bowl +subsi dy +alter n +hi dalgo +enz ymes +ä ½ +tag ne +hair dresser +adri en +walk out +oppo ses +can tina +bed side +af an +ðŁĶ Ĺ +prophe tic +dan es +un successful +super charged +pk k +exem ption +hart le +secu lar +cli pping +br s +united way +c net +pat chy +ha gan +e en +âļ ľ +var a +sym pathi +never trump +affir mation +om f +ny cfc +ma ja +sur ro +keer th +up scale +sandal wood +mon archy +kno bs +å ĭ +po tholes +hunger games +ter races +na sir +coun sell +welcome to +wa q +se aman +m ita +stun ningly +on theroad +in ability +) !! +bon go +ant v +sp ut +worldenviron mentday +resu sc +y td +fi m +eun hyuk +sa chin +rose anne +cler mont +ape c +am ina +v ening +n antes +al most +sin us +ex as +ty l +ti en +ple ad +lanc s +bur naby +re k +jo om +observ ers +disco graphy +cl g +âĻ ¦ +sn ack +r ti +o ily +crystal li +bru te +web development +topp ings +la f +an is +ad der +reli ving +car lin +battle of +we g +syri an +pon t +n dc +lagh ate +yu ma +sp p +p iti +ro bbing +mart ing +rey kja +raj put +nc ds +kie wicz +âĢ¢ âĢ¢ +vam pire +substan tially +opio ids +nepal i +k line +ar oo +under stand +lit t +u it +thro mbo +sar ies +qu ot +b alling +t tr +s gh +philip p +br ant +ac l +m ello +whit taker +. 
; +defi ant +b gc +repl ying +mir ren +metamor pho +sch wab +bul ge +utili zed +pick ering +par don +d sa +à¸ Ī +doo ley +cumul ative +Ð » +ur gency +e mir ++ /- +¦ Ī +ot as +âı ³ +station ed +grape vine +ar ac +karan johar +f ancy +sau l +coo gs +lgbt q +ا٠ħ +jav i +u mmer +pl l +den is +dai pur +pu ffin +lewi sham +fand om +co pe +ves matter +s ve +hel pless +deo dor +ostr ich +kaz an +friday the +con dor +v x +sophom ores +rob les +cu tt +cli mbers +ë¦ ¬ +sle g +sn f +mac ys +hydr ating +grou pe +po yn +mou lin +hg tv +lmfa ooo +sulph ur +asdfghj kl +annab elle +hump back +bra ved +viswas am +multi purpose +hu midi +escor ted +barb ican +f ad +cor sa +ðŁ¤ « +pi ppa +here to +can y +ser gi +or cas +o vie +ed ou +s any +glob alization +man cini +food truck +f is +defi brill +sch re +sma fia +love wins +la ut +k aka +hol lande +game on +resurg ence +out side +olympi ad +int an +abstr action +rapi d +pal om +cal le +jas min +attack ers +swag g +mit ra +ky lo +à® ² +her mitage +gor do +e ira +so sfam +roll out +exc ite +sy nod +mer rill +c als +as sa +liveli hoods +ju ve +the black +gopack go +ant lers +alban ian +wool ly +qu iche +puri fication +are th +smar thome +ne k +all blacks +mex icans +is m +ger ms +comple xion +mar ck +u shi +ðŁIJ IJ +char l +ca stic +till erson +giuli ani +biode gradable +mal bec +bo is +ju bil +im es +r ame +gene tic +esp nu +ch ley +so ho +go pher +g sc +buu ren +cu be +bridesma ids +webin ars +to e +mani pur +viol ently +notic ias +ex changing +chi ev +replac eable +muay thai +bu ss +sp il +instal ment +div ya +cait lin +o lim +fil tering +whirl wind +sta red +prior it +pr am +pompe ii +mono logue +k ite +bu ka +â̦ .. +vac cine +bre ro +woz ni +sol ent +re ferr +my rt +gridi ron +galatasar ay +fro ze +clare mont +ðŁ¥ ĥ +victori as +ssel dorf +pa stures +net neutrality +ch or +ðŁij ģ +ಠ¿ +we ho +symp tom +jo sel +in ous +dragon con +power ball +p te +four thofjuly +ec la +ear buds +where abouts +salt life +depriv ation +ch ter +wi ggle +syste m +ps st +ch az +d any +ri mo +oax aca +lanapar rilla +barcel on +melanch oly +way back +ho tro +n si +l illy +kur o +ja han +intellec t +board game +ðŁı Ĭ +sneak peek +k prc +jail s +cand el +zan zi +mor timer +star ch +ra gs +p fa +long live +k art +gir ona +cro cker +christop h +precau tions +war ship +per m +paren t +van gogh +gif ford +allegh eny +ra yn +ut m +sten cil +rec alling +pen ney +z azzle +ìĥ Ŀ +hin ds +aren as +nu ev +law ler +gu in +do this +ðŁij ķ +ì¶ķ íķĺ +we g +ti b +ri din +complex es +turbul ent +pe sos +de marcus +vall arta +sam sun +kis ses +hein rich +deport es +wil ms +ur d +then ext +inki gayo +ho wi +fir sts +carri age +clean liness +mas war +is ch +ax el +si zzle +road house +fr ans +ent ourage +co bble +boo th +benedic t +tal on +fc u +year ofthe +ray on +raider nation +fo yle +ko val +pi anos +l pg +bur mese +man ure +geo caching +cosc ino +b np +fer ra +stro phy +mar ais +ce es +legen dof +kat niss +eno ch +av ed +you know +d prk +ðŁĺ¢ ðŁĺ¢ +sp un +pro st +sor rows +cent red +ke a +gal icia +? 
ðŁ¤Ķ +ÑĢод а +bou chard +ðŁĴĻ ðŁĴľ +yu i +seed lings +jon ah +reco vers +ny rd +board room +su ma +my japs +tun g +sha i +ir gc +eli o +wag ons +ka shi +polic emen +john nie +ale coscino +shop ify +dot ted +de tri +va w +to fficial +in your +chal mers +trac ed +no vi +by es +ari el +nipp on +la pel +gri ez +b gs +fool ing +d ita +vijay sethu +nm wx +as ot +kr anti +hel m +ve di +sic kest +mo chi +k abo +shru bs +he red +b sp +sq m +ham r +dul kar +anth a +nr f +avoid ance +at en +publi x +be arers +nas i +ha p +h ells +ðŁĸ ¥ +ภ· +thelast jedi +oh wx +ðŁį « +wa hoo +there se +rec aps +ss nhq +bird photography +v ay +pet ti +pau lo +bel vedere +( * +gr l +du vet +c pec +sa it +por sch +meas urable +avi ators +fre mantle +bre en +on om +me and +life saving +eu ref +en don +embar as +aira sia +el is +dun kin +star magic +s ill +porto bello +ki efer +ex e +mu ted +ãģ ¦ +we thepeople +logi a +liber al +theforce awakens +min ed +haun ts +freck les +care taker +s india +âķ IJ +dev lin +list on +direction er +oh n +fi garo +em manuel +du bois +cl ones +bru ise +ðŁİĪ ðŁİī +disin fe +der matology +as r +s watch +dis comfort +tam anna +pi day +mack en +k atic +delu sional +shaw nee +gu d +al bino +p ali +din gh +cucu mbers +coffe y +anticip ating +treas ured +web summit +shel tered +sav or +pedago gy +m gs +sh ma +s bu +den ali +cam pos +bubble gum +o ir +le aps +y ler +r one +sansk rit +min t +meat less +futuri st +du de +a vel +prote sted +squ ire +z aki +sz n +har court +cycl one +bour dain +gather ings +d ant +advent urer +parag on +alt man +dd ing +ban erjee +snorkel ing +mother well +mis sy +en der +glo ws +ki wis +chick pea +por o +e fron +app t +u y +speci fied +gab by +e strada +com bos +bour bon +vin i +var un +steph ani +key words +car vings +amit abh +wr ought +tw al +re els +clu bbing +ubi quit +cri t +ambed kar +æ Ļ +prun ing +vaccin ated +boe ing +s ks +lo ona +hypno sis +edel man +pho l +he w +colo sse +mckin sey +u on +to te +sacrific ing +ox i +n ang +e mu +пÑĢи ÑĢода +m th +kers wednesday +argu ed +timel apse +ris king +regul ating +ni gh +likeli hood +cu bic +au ction +rein for +pi stor +no ses +ye l +snu ggles +pe i +jean ette +ta ku +ri th +guy z +ภŀ +y te +ver ted +pay soff +jau regui +hoo ligans +procedu ral +mi b +har dy +el eng +chec kers +all ine +the met +prou dof +keerth yofficial +collabor ator +ni u +infl icted +adv ani +re twee +memor iam +f icial +ti ghter +sal em +re viewers +br ics +ben digo +am ell +tur kish +sush maswar +paul son +pal awan +mol lie +stitch er +s burgh +ir u +hay dn +en ers +aro a +u zzi +saraj evo +hel a +apol lo +nine ty +vac a +sp on +vent u +jel ena +hei fer +avo ids +sp ine +pri ze +mar ist +re creating +me de +woo den +find lay +ro fl +n di +compreh end +yu go +y ü +to work +u fos +son ar +pi ston +recor ding +tent ative +art forsale +pel lets +fre do +ÙĪ Ø± +mu ses +custom ization +pro found +is ner +ide ally +si am +plan kton +cm dr +man ger +fran ken +customiz able +ठ® +walk away +swi vel +vast ly +no ton +lex a +ex moor +z as +tan te +reduc tions +lol ly +hip sters +benef ited +ë ² +ww www +mascul ine +fi ji +dre y +ph ill +ane ous +nic ol +men dez +disapp ro +ch ner +through s +shen mue +east man +ðŁIJ İ +yu ck +under tale +re ys +go beavs +eng en +c na +mer r +bir k +ãģ¨ç¹ĭãģ ĮãĤĬãģŁãģĦ +âĥ£ @ +yn na +ste ed +offen der +at um +vani shing +presi denti +love them +g nocchi +fri ggin +per il +mad hya +ag ne +dee jay +mar nock +m tb +fold able +@ ___ +stand re +bron x +bow ski +fin ite +cro ckett +b sf +ge tit +seren awilliams +mir o +ignati 
us +sla y +rin se +fon due +sel dom +s more +gan i +dy ce +dmit ry +cru mb +late post +pri mark +oh ana +flor als +do a +remembrance day +d ds +azi one +toon ami +air port +æĿ ± +th ad +fi st +dine sh +dr who +ad words +admi rer +pro je +kyrgy z +à « +manife station +le wan +j ic +thi bau +le ased +van ity +nouri shed +never theless +aug mente +fu elled +che ad +wil shere +ru di +p z +my co +mor ro +herbali fe +hardro ck +de man +dre ality +sp ades +ce vic +bha i +bar on +ultimat efan +hou news +to bi +stru t +ke el +affili ation +the masters +sm al +hu e +este ban +con v +om nic +datab ases +co v +ter ti +st g +snoop dogg +metab ol +leth bridge +ðŁı» âĢįâĻĢï¸ı +year ling +residente vil +nws l +iy aki +griez mann +c ous +ðŁĵĿ : +tor ian +sam i +ðŁĶ¥ðŁĶ¥ ðŁĶ¥ðŁĶ¥ðŁĶ¥ +g are +alli ances +whit field +we ther +refin ing +coy i +kra ken +ðŁĺĺ âĿ¤ +singul arity +lil i +h ns +bol dand +waw rinka +misogy ny +lo vers +c q +b dg +ad ona +gar ter +women of +sc d +recogn ising +mun a +str ou +sign alling +lare do +hell boy +alek sand +un available +pedi atric +as in +mer ia +ri shi +futuri sm +w ye +polari zed +e we +pro pel +in forms +cre ase +~ " +arti ston +like for +heidel berg +er ra +life in +len ny +inter rupt +cohe rent +ca z +vick ers +le veled +f bs +cab ins +bu mmed +apost les +we h +ten don +souven irs +infu ri +pier ce +asse t +m las +go th +di ggin +ann as +yl or +th waite +sw el +pan era +mur derers +croo ked +bs go +ac u +a on +re an +one of +ko hl +bloo dh +pest icide +lost dog +fle xing +ëĤ ĺ +su pra +eter nally +ðŁļ Ļ +pa olo +ol an +mom o +is elle +captain marvel +s lou +mistak enly +akhi lesh +mer t +il inan +bu on +bal kan +mir ro +mill en +der ail +dam on +tit i +bi os +re don +pic ard +par te +ðŁ¤ Ł +Ø º +son ics +fir sth +dd c +veg ans +tur ban +ni gan +lot tie +lyn don +star buck +pink floyd +life styles +am ara +a she +r sc +val a +sm er +cw gc +cli ent +buen as +jag an +coo ps +ðŁijij ðŁijij +speci alizes +snag ged +g lar +ben net +wildlife wednesday +bow den +pi k +art in +empor ium +ar l +re ba +pas ser +disappo ints +additi ve +âľĬ ðŁı½ +bay er +missou la +ha skell +comm ences +ni x +ne man +explo ited +plastic surgery +cc d +aso cial +vo t +sie gel +fro ome +kap am +far a +e ha +pro bes +mw f +meet ing +p bb +ak ins +mistle toe +kingdom hearts +for kids +ec r +bal e +escor ts +adidas originals +k wa +k ts +hallo ffame +ðŁĺį . 
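The pairs above are byte-pair-encoding (BPE) merge rules from the tokenizer's merges list: each entry names two symbols the tokenizer may fuse, in priority order (earlier in the file = applied first). Below is a minimal sketch of how such a list is applied to one word, assuming the standard CLIP scheme in which the word-final symbol carries a `</w>` marker (not visible in this rendering); the helper and the tiny rank table are illustrative, not ComfyUI's actual API.

```python
# Minimal BPE merge sketch (illustrative; not ComfyUI's actual API).
# `ranks` maps a merge pair to its priority: lower rank = merged first.
# Word-final symbols carry "</w>" in the standard CLIP scheme (assumed).

def bpe(word: str, ranks: dict) -> list:
    symbols = list(word[:-1]) + [word[-1] + "</w>"]
    while len(symbols) > 1:
        pairs = [(symbols[i], symbols[i + 1]) for i in range(len(symbols) - 1)]
        ranked = [(ranks[p], i) for i, p in enumerate(pairs) if p in ranks]
        if not ranked:
            break                          # no merge rule applies any more
        _, i = min(ranked)                 # highest-priority applicable merge
        symbols[i:i + 2] = [symbols[i] + symbols[i + 1]]
    return symbols

# "aaa" is an actual entry in the vocab added later in this diff:
print(bpe("aaa", {("a", "a"): 0, ("aa", "a</w>"): 1}))  # -> ['aaa</w>']
```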
+wag s +pot ted +o wing +honey comb +he fty +uro logy +mer le +b pd +stri pping +re ich +k state +gu ay +yon ge +shak ti +g loom +bat t +son om +n ery +el ba +blan ks +hel le +triple ts +bom bay +ak arta +ab ia +transm itted +rol f +ja is +angular js +fi erc +m ss +trac e +ॠĩ +tom bs +old man +kom bucha +fo l +e health +cere als +are lli +in ari +ðŁĴ © +wo l +liber ties +fa wn +af firm +nun avut +hyster ical +k drama +art es +âĢ¢âĢ¢âĢ¢âĢ¢ âĢ¢âĢ¢âĢ¢âĢ¢ +valent in +man slaughter +gal es +eo in +energi zed +del s +with draws +st les +sar castic +ram esh +incredi bles +lock hart +ya wn +ultimatefan live +oooooooo oooooooo +mu en +guru dev +te er +pe eling +new snow +lingui stics +direc tv +ag end +uni lever +ru ger +han dedly +ero se +li mel +the c +royal ties +fini shers +nr g +m gt +fid get +com ps +bac on +aggre ssively +ab it +ch â +tar de +slu gger +q anda +gre ening +d ats +ensla ved +spec tor +o ye +fre ef +b hand +stop brexit +mis conceptions +cav a +ðŁĺįðŁĺįðŁĺįðŁĺį ðŁĺįðŁĺįðŁĺįðŁĺį +multit asking +hou sel +ferre ira +cen time +ank les +jo dh +hel ly +fro me +out tuesday +nar nia +bal aji +l bloggers +jyo ti +ðŁį ĩ +lan cia +cap ri +y ap +nat ash +down fall +." âĢĶ +à ® +ligam ent +coat ings +ai ded +hi ko +fall ing +encryp ted +yeg food +infringe ment +cu di +ce p +ðŁĺį ðŁĺĤ +tra d +super rugby +ed win +wh iche +vi meo +lay ne +in vigor +he he +dubrov nik +bie ber +u tr +sham an +op ers +ham ill +en ig +di f +ar um +scrap book +min h +diver gence +mckin non +life time +guter res +wil le +ple as +patt y +mic ron +k z +dom aine +ru sher +m ds +ches ney +screw driver +âģ© , +sle dge +hau er +chan a +stam ina +sprink ler +pl n +he ff +bol ton +om on +car rington +accor dion +jor ge +inter ception +in puts +gu ll +tran scription +vanu atu +it ical +eth os +tic h +spac ey +pee king +u mi +ha ger +psycho tic +illi an +illi a +bonnar oo +an ese +pu c +laghate parth +en hall +econom ical +dre dge +% - +u we +tu bular +scoun cil +pe asants +fl er +tumb ler +he p +ford ham +row ley +initi als +ev asion +er nation +plu gins +coch ran +c attle +acid ity +ðŁİĬ ðŁİī +re grann +jump man +ef ace +x ma +patri archy +esco bar +cristi an +tip ton +nu eva +hack ney +back seat +kill arney +aid an +sta dion +simul taneous +ida ho +a je +u th +figu re +clo s +bur k +volun tar +rec ite +macfar lane +cur few +bou do +w gn +sti x +sla p +scrat ched +philli p +jour ne +ex pelled +wa z +u ke +tati ana +ou e +ho pp +dimit ri +ðŁĵ £ +mato logist +electri fying +blu ffs +bill smafia +az cardinals +y aa +x mas +shar a +r ith +g ills +dre s +bar ton +authori zation +imperi alism +home of +to do +foot path +band width +visit spain +moh sin +erup ted +mi ki +insig nia +mike l +ss h +ger a +bank holiday +aw an +t weak +star craft +e al +construc tion +skelet ons +le ep +ine m +bar clay +ship wreck +monsi eur +yo h +ron t +form ative +ser o +le p +horse man +hoo sier +haz mat +cylin ders +cen ti +ðŁĴ¥ðŁĴ¥ ðŁĴ¥ +re em +na ire +mus ically +gras shopper +est onian +termin ology +ro main +blogger rt +tox in +stan ce +cultiv ated +an ast +ðŁIJ į +shi mano +go pher +ene i +recycla ble +gam ification +fight for +c q +avoc ados +ke ys +eli ke +gly cer +shak ur +mobili zation +gal ley +expla in +ex changed +pe th +obe dience +illa ge +en nis +ãĥ ŀ +wi v +walla bies +ma ar +ig ers +fin tech +fin alized +wo j +meaning less +in field +onna ise +e et +bron te +pass ages +ðŁij § +strick land +northern lights +lom ond +h tc +wr ay +shi fter +di alog +ðŁį į +>> >>>> +te atime +ste ch +sic huan +qu ill +fran ca +comple mentary +bar 
rington +marcu s +mal am +goo oo +for sa +elec tra +af s +âĹ Ĩ +tri fe +sn azzy +fo lia +and olan +after dark +wood son +stra de +litt lest +o gun +con wy +co wards +ðŁĺĤðŁĺĤðŁĺĤðŁĺĤ ðŁĺĤðŁĺĤðŁĺĤ +íĬ ¸ +se ul +mur phy +dun ks +kapil shar +jo achim +wom ack +equal ity +aver ages +a ine +ðŁ¦ Ī +tac ular +dis ability +u ked +mid century +bar thol +teas ers +tab ern +nj caa +sp out +op i +ku bball +bl om +so ar +popu lism +meth yl +ðŁijĬ ðŁı¼ +o spre +alo ils +ðŁĵ ĸ +ðŁĮ ļ +x er +sp illing +publ ica +car dam +adi sh +sa cha +p kg +bu da +lyric ist +i bc +gru mp +ho ver +hal ep +anti body +anem one +âĻ¥âĻ¥ âĻ¥âĻ¥ +m cl +litho graph +cc u +s fest +path ic +calli ster +otta wa +gun sn +rut ger +hali but +en vision +differenti ate +ðŁļĢ ðŁļĢ +pir an +lat el +uc n +trou bad +ra ine +fierc ely +learn english +lea se +wex mondays +em it +dray ton +bur rell +scuba diving +hol ler +dr u +clo cked +w ral +ap ro +trans lucent +w bo +patri arch +mo ja +lan nister +fish ery +ne derland +mil dly +mi rai +ma ko +ja p +ðŁĺ©ðŁĺ© ðŁĺ© +pro statec +p anna +ar ama +under taking +tomp kins +ne op +soli ds +sav oury +e ames +cut lery +wood bridge +steam er +ri zzo +wild cat +rat na +lamin ated +kin eni +jal ap +ai des +acknowle dges +?! ?!?! +! ðŁİī +w afc +mag gio +ha ves +dar je +of i +gr il +v asi +bru x +mo hd +fake speare +arn old +r mb +for be +wal leye +ro di +therapeu tics +strate gi +ob ste +mu dder +download able +dd ings +d ca +asi angames +campe on +appropri ation +th century +ram atta +dra ped +bul lion +mu c +one x +se greg +ophel ia +bod ily +âĿ¤ ðŁĺį +wi zar +te ased +ade my +to id +sur a +lazar us +sn ickers +ma se +lo h +bow ed +bibli o +x change +har lan +gho shal +flavor ful +bha gat +alle z +whiche ver +ten stein +disc er +organ iser +mt g +dream liner +t se +hok kaido +mo k +indulg ent +hick man +blin ded +al yn +aaa ah +sp ool +lough borough +inter pret +et v +aristo tle +optimi zing +avici i +madu rai +ju li +naw az +mat chups +ab ide +paint ing +w elling +vel i +octag on +in scribed +po king +plac er +life cycle +kili g +g sp +eli ves +cle ments +na sheed +me sut +incarcer ated +dist illed +wal ang +delic acy +del gado +che z +ch ita +ad ero +tu x +pati l +o do +abh cosmetics +tv c +p bc +in accurate +hardwork paysoff +ball er +quot ation +merchandi sing +ga stri +defen ses +dro gba +bex hill +ban kno +win ona +si eg +p gs +hahah ha +agu chi +su bram +mirac le +de sch +li bre +ba cher +ent ine +bbcra di +lou dest +r ps +pi erc +fr yer +storm trooper +rafael nadal +pas co +exhau stion +epic onetsy +rc tid +kel lie +ga ines +d bz +sm riti +s bridge +lim ited +cla w +technic al +bio graphical +ado red +ภ° +exclu de +ac adia +key boards +fur man +so ca +sur u +ni ps +sw aps +server less +run e +pu ffy +north ampton +nish ings +hen der +cartri dges +gun shot +ðŁĵ ¹ +fil ament +respon dents +pey ton +mountaine er +mer ging +life span +intimid ation +p afc +nl wx +expan sive +pur r +f ck +ca e +at ti +tele thon +so hn +mend el +lo pes +dor i +un broken +te red +tast ings +in active +disin tegr +t assel +share the +pi ano +is lay +air space +z awa +ricci ardo +ming ton +fresh er +cur ry +re vs +pharo ah +h mv +exhilar ating +wh oo +lin kin +kri spy +competen cy +ste wards +ne bu +kat su +ad mins +baz ar +as ar +giving back +s summit +song z +lin us +raj kumar +farm ington +fanta sia +ðŁĺ´ ðŁĺ´ +so bri +lis se +barry more +pri sm +blo b +sen ew +mono xide +exp ire +eigh teen +di pper +xi ao +kil t +hin ch +bbc sport +bam boo +p ter +ex al +ðŁ¦ ĭ +ham lin +expe ditions +star gazing +food security +wy 
lie +ul f +st ingly +on storm +lo eb +bro ome +bn ha +pancre atic +eli ve +!!!!!!!! !!! +ther apper +ortho pedic +avengers endgame +antit rust +ìļ ° +go te +om d +off side +gy llen +win eries +white water +ad l +lu pita +exce eds +consi sted +chew bacca +ash leigh +nhl jets +is san +sh ld +hay at +cran berries +ð٤ĺ ðŁı½ +rock the +spring training +fall out +dairy free +wa j +un decided +so wn +rc n +north wales +htt r +fu mble +d its +comp elled +popu list +min ted +blan chett +. '' +pro pulsion +m illa +au berg +her tz +h ta +u daipur +serendip ity +azte cs +als ace +ðŁIJ ij +lu n +sho es +char li +gar za +ðŁĴ Ł +pro biotics +fox tv +ol is +mi ff +loc alized +diffu ser +si gue +fun ko +rend ous +ðŁĴ ij +jeky ll diff --git a/src/comfyui/comfy/sd1_tokenizer/special_tokens_map.json b/src/comfyui/comfy/sd1_tokenizer/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..2c2130b544c0c5a72d5d00da071ba130a9800fb2 --- /dev/null +++ b/src/comfyui/comfy/sd1_tokenizer/special_tokens_map.json @@ -0,0 +1,24 @@ +{ + "bos_token": { + "content": "<|startoftext|>", + "lstrip": false, + "normalized": true, + "rstrip": false, + "single_word": false + }, + "eos_token": { + "content": "<|endoftext|>", + "lstrip": false, + "normalized": true, + "rstrip": false, + "single_word": false + }, + "pad_token": "<|endoftext|>", + "unk_token": { + "content": "<|endoftext|>", + "lstrip": false, + "normalized": true, + "rstrip": false, + "single_word": false + } +} diff --git a/src/comfyui/comfy/sd1_tokenizer/tokenizer_config.json b/src/comfyui/comfy/sd1_tokenizer/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5ba7bf706515bc60487ad0e1816b4929b82542d6 --- /dev/null +++ b/src/comfyui/comfy/sd1_tokenizer/tokenizer_config.json @@ -0,0 +1,34 @@ +{ + "add_prefix_space": false, + "bos_token": { + "__type": "AddedToken", + "content": "<|startoftext|>", + "lstrip": false, + "normalized": true, + "rstrip": false, + "single_word": false + }, + "do_lower_case": true, + "eos_token": { + "__type": "AddedToken", + "content": "<|endoftext|>", + "lstrip": false, + "normalized": true, + "rstrip": false, + "single_word": false + }, + "errors": "replace", + "model_max_length": 77, + "name_or_path": "openai/clip-vit-large-patch14", + "pad_token": "<|endoftext|>", + "special_tokens_map_file": "./special_tokens_map.json", + "tokenizer_class": "CLIPTokenizer", + "unk_token": { + "__type": "AddedToken", + "content": "<|endoftext|>", + "lstrip": false, + "normalized": true, + "rstrip": false, + "single_word": false + } +} diff --git a/src/comfyui/comfy/sd1_tokenizer/vocab.json b/src/comfyui/comfy/sd1_tokenizer/vocab.json new file mode 100644 index 0000000000000000000000000000000000000000..469be27c5c010538f845f518c4f5e8574c78f7c8 --- /dev/null +++ b/src/comfyui/comfy/sd1_tokenizer/vocab.json @@ -0,0 +1,49410 @@ +{ + "!": 0, + "!!": 1443, + "!!!": 11194, + "!!!!": 4003, + "!!!!!!!!": 11281, + "!!!!!!!!!!!!!!!!": 30146, + "!!!!!!!!!!!": 49339, + "!!!!!!!!!!": 35579, + "!!!!!!!!!": 28560, + "!!!!!!!!": 21622, + "!!!!!!!": 15203, + "!!!!!!": 9168, + "!!!!!": 5203, + "!!!!": 2360, + "!!!\"": 28048, + "!!!)": 42532, + "!!!": 995, + "!!\"": 20556, + "!!#": 34997, + "!!)": 28352, + "!!": 748, + "!!@": 40705, + "!\"": 2947, + "!\"@": 43819, + "!#": 9670, + "!'": 13222, + "!),": 37904, + "!).": 26225, + "!)": 4571, + "!*": 37737, + "!,": 29325, + "!-": 43499, + "!...": 22121, + "!..": 35475, + "!.": 22517, + "!:)": 31671, + "!:": 17545, + "!": 256, + "!?!": 29767, + "!?!?": 
47081, + "!?": 6004, + "!@": 15117, + "!]": 34466, + "!â̦": 35068, + "!âĿ¤ï¸ı": 32559, + "!ðŁİī": 49085, + "!ðŁĺĬ": 43434, + "!ðŁĺį": 36438, + "\"": 1, + "\"!": 10377, + "\"\"": 41530, + "\"\"\"": 25539, + "\"\"": 8575, + "\"#": 8345, + "\"'": 31065, + "\"(": 32741, + "\")": 13112, + "\",": 4332, + "\"-": 9375, + "\"....": 37785, + "\"...": 9049, + "\"..": 25403, + "\".": 2811, + "\"/": 39486, + "\":": 7811, + "\";": 37549, + "\"": 257, + "\"?": 11727, + "\"@": 1512, + "\"@_": 20236, + "\"[": 36930, + "\"â̦": 33993, + "\"âĢĶ": 41151, + "#": 2, + "##": 15483, + "#...": 31491, + "#:": 30144, + "#": 258, + "#@": 35062, + "#â̦": 12834, + "#âĢİ": 34262, + "$": 3, + "$$": 24233, + "$$$": 31859, + "$$": 14929, + "$)": 39460, + "$.": 34682, + "$": 259, + "%": 4, + "%!": 35070, + "%),": 37819, + "%)": 16063, + "%,": 14505, + "%-": 48784, + "%.": 12475, + "%;": 33379, + "%": 260, + "&": 5, + "&&": 27791, + "&": 261, + "'": 6, + "'!": 13781, + "'\"": 19479, + "'#": 15319, + "''": 46594, + "''": 8445, + "')": 19175, + "',": 5662, + "'-": 26152, + "'...": 20474, + "'.": 4645, + "':": 7182, + "';": 44517, + "'": 262, + "'?": 17242, + "'@": 26397, + "'d": 1896, + "'ll": 1342, + "'m": 880, + "'re": 982, + "'s": 568, + "'t": 713, + "'ve": 1200, + "'â̦": 42120, + "(": 7, + "(!)": 30253, + "(\"": 18741, + "(#": 6229, + "($)": 46597, + "($": 15186, + "(&": 15042, + "('": 18235, + "((": 22944, + "(((": 33287, + "((": 13796, + "().": 41737, + "()": 8475, + "(*": 48004, + "(*": 39575, + "(+": 12903, + "(-": 20228, + "(...": 45159, + "(.": 43055, + "(:": 8528, + "(;": 23983, + "(": 263, + "(?)": 22885, + "(@": 2181, + "(£": 33987, + "(©": 44886, + "(ðŁĵ·:": 34610, + "(ðŁĵ·": 37999, + "(ðŁĵ¸:": 44422, + "(ðŁĵ¸": 45204, + ")": 8, + ")!!": 47518, + ")!": 7805, + ")\"": 13046, + ")#": 39981, + ")'": 23613, + ")(": 27956, + "))": 13720, + "))))": 42911, + "))))": 34181, + ")))": 18305, + "))": 5167, + "),": 2361, + ")-": 19034, + ")...": 15274, + ")..": 41822, + ").": 1818, + ")/": 26616, + "):": 4143, + ");": 19686, + ")": 264, + ")?": 18765, + ")@": 41928, + ")_/": 45028, + ")_/¯": 45781, + ")â̦": 41844, + "*": 9, + "*)": 30956, + "**": 9825, + "****": 21326, + "********": 42974, + "*****": 43571, + "****": 25167, + "***": 7829, + "**": 4441, + "*,": 41895, + "*-*": 23568, + "*.": 31304, + "*": 265, + "*_*": 44535, + "+": 10, + "+)": 34810, + "++": 47298, + "+++": 35986, + "++": 19056, + "+,": 35885, + "+.": 25238, + "+/-": 47614, + "+": 266, + ",": 11, + ",\"": 3823, + ",#": 11215, + ",&": 26905, + ",'": 10599, + ",)": 44493, + ",,": 21340, + ",,,,": 33225, + ",,,": 14811, + ",,": 8844, + ",-": 29821, + ",...": 20365, + ",.": 41277, + ",": 267, + ",@": 13975, + ",â̦": 14601, + "-": 12, + "-\"": 18646, + "-#": 10151, + "-$": 24946, + "-'": 28010, + "-(": 33345, + "-)": 3535, + "-*": 21527, + "--": 2154, + "----": 5753, + "--------": 11772, + "----------------": 23122, + "----": 30164, + "---->": 35999, + "---": 11079, + "--->": 14518, + "--": 2432, + "-->": 6422, + "-->>": 47252, + "-.-": 32765, + "-...": 43147, + "-.": 44040, + "-": 268, + "->": 5081, + "-@": 10087, + "-_-": 27227, + "-__": 42718, + "-â̦": 30047, + ".": 13, + ".!!": 37805, + ".!": 14030, + ".\"": 18650, + ".\"-": 21234, + ".\"": 1081, + ".\"âĢĶ": 48703, + ".#": 5014, + ".'\"": 41558, + ".''": 49379, + ".'": 5938, + ".(": 22294, + ".)": 5376, + ".*": 26145, + ".,": 5276, + ".-": 12481, + "..": 608, + "..!!": 23707, + "..!": 17994, + "..\"": 15229, + "..#": 15735, + "..,": 47143, + "...": 3002, + "...!!!": 38351, + "...!!": 39915, + "...!": 16860, + 
"...\"": 5240, + "...#": 8195, + "...&": 44979, + "...'": 23167, + "...(": 37981, + "...)": 14040, + "...,": 42717, + "....": 2386, + "....\"": 26689, + "....#": 20346, + ".....": 34151, + ".....#": 38867, + "........": 8246, + "................": 24855, + "............": 42965, + "...........": 35008, + "..........": 25526, + ".........": 19881, + "........": 14720, + ".......": 9917, + "......": 5590, + ".....": 3104, + "....": 1390, + "....@": 29790, + "...:": 34570, + "...": 678, + "...?": 16388, + "...@": 12672, + "..": 852, + "..?": 23875, + "..@": 21124, + "./": 31975, + ".:": 15811, + ".;": 47596, + ".": 269, + ".<": 29442, + ".?": 29294, + ".@": 1230, + ".]": 33511, + ".~": 42651, + ".â̦": 18047, + ".âĿ¤ï¸ı": 39085, + ".âłĢ": 30097, + ".ðŁĺĤ": 46580, + "/": 14, + "/#": 13217, + "/$": 36266, + "/-": 19811, + "/.": 39382, + "//": 15348, + "////": 46271, + "///": 22734, + "//": 3502, + "/": 270, + "/@": 8216, + "0": 15, + "0": 271, + "1": 16, + "1": 272, + "2": 17, + "2": 273, + "3": 18, + "3": 274, + "4": 19, + "4": 275, + "5": 20, + "5": 276, + "6": 21, + "6": 277, + "7": 22, + "7": 278, + "8": 23, + "8": 279, + "9": 24, + "9": 280, + ":": 25, + ":\"": 29498, + ":\")": 46432, + ":\"": 12089, + ":#": 26625, + ":$": 33769, + ":'": 8017, + ":'(": 21250, + ":')": 10701, + ":'": 23851, + ":((": 42496, + ":(": 5965, + ":)": 11070, + ":))))": 42339, + ":)))": 21840, + ":))": 10164, + ":).": 39010, + ":)": 1408, + ":*": 12617, + ":-": 13021, + ":-(": 25137, + ":-)": 4223, + ":-": 10323, + ":...": 42140, + "://": 12441, + ":/": 13604, + "::": 33077, + ":::": 43818, + "::": 9788, + ":": 281, + ":>": 39677, + ":@": 14339, + ":]": 43486, + ":|": 45986, + ":â̦": 22365, + ";": 26, + ";))": 41873, + ";)": 3661, + ";-": 35657, + ";-)": 10475, + ";;": 34824, + ";;": 24492, + ";": 282, + "<": 27, + "<-": 47280, + "": 34308, + "<<": 24588, + "<": 283, + "<<": 16482, + "<<<": 35054, + "<|endoftext|>": 49407, + "<|startoftext|>": 49406, + "=": 28, + "=))": 39587, + "=)": 17840, + "=": 284, + "==": 11748, + "====": 21734, + "========": 38952, + "==>": 29688, + "=>": 9714, + ">": 29, + ">.<": 38507, + ">:": 36196, + ">": 285, + "><": 28015, + ">>": 8270, + ">>": 2988, + ">>>": 6395, + ">>>>": 18461, + ">>>>": 18435, + ">>>>>": 32972, + ">>>>>>": 48947, + ">>>>>>>>": 41947, + ">_": 44144, + "?": 30, + "?!": 9785, + "?!!": 25342, + "?!\"": 29315, + "?!": 2835, + "?!?!": 16349, + "?!?!?!": 49084, + "?!?!?": 37619, + "?!?": 11395, + "?\"": 3283, + "?#": 24018, + "?'": 13610, + "?)": 9626, + "?,": 41628, + "?...": 22641, + "?..": 43905, + "?.": 41251, + "?:": 21067, + "?": 286, + "??": 5195, + "??!!": 43219, + "??!": 37341, + "??\"": 44996, + "??": 2197, + "???": 40017, + "???": 3824, + "????": 15936, + "????": 10362, + "?????": 21370, + "??????": 34589, + "????????": 45091, + "?@": 29258, + "?ð٤Ķ": 47928, + "@": 31, + "@#": 39397, + "@.": 43730, + "@/": 28639, + "@": 287, + "@@": 30314, + "@_": 2692, + "@__": 17042, + "@___": 48308, + "A": 32, + "A": 288, + "B": 33, + "B": 289, + "C": 34, + "C": 290, + "D": 35, + "D": 291, + "E": 36, + "E": 292, + "F": 37, + "F": 293, + "G": 38, + "G": 294, + "H": 39, + "H": 295, + "I": 40, + "I": 296, + "J": 41, + "J": 297, + "K": 42, + "K": 298, + "L": 43, + "L": 299, + "M": 44, + "M": 300, + "N": 45, + "N": 301, + "O": 46, + "O": 302, + "P": 47, + "P": 303, + "Q": 48, + "Q": 304, + "R": 49, + "R": 305, + "S": 50, + "S": 306, + "T": 51, + "T": 307, + "U": 52, + "U": 308, + "V": 53, + "V": 309, + "W": 54, + "W": 310, + "X": 55, + "X": 311, + "Y": 56, + "Y": 312, + "Z": 57, 
+ "Z": 313, + "[": 58, + "[#": 11115, + "[...": 39975, + "[...]": 43790, + "[": 314, + "[@": 15148, + "[]": 22240, + "\\": 59, + "\\'": 41239, + "\\": 315, + "]": 60, + "]\"": 39434, + "],": 34067, + "].": 26262, + "]:": 21641, + "]": 316, + "][#": 39009, + "][": 29329, + "^": 61, + "^)": 30720, + "^-": 43516, + "^.": 31552, + "^.^": 35791, + "^": 317, + "^^": 34454, + "^^": 9064, + "^_": 14423, + "^_^": 15995, + "_": 62, + "_'": 44701, + "_(": 36951, + "_)": 37393, + "_*": 36237, + "_,": 31417, + "_-": 23193, + "_.": 26841, + "_/": 37647, + "_:": 13109, + "_": 318, + "__": 2355, + "__:": 47043, + "__": 3838, + "___": 43812, + "___": 13530, + "____": 4727, + "____": 25350, + "_____": 38803, + "________": 9549, + "________________": 20115, + "`": 63, + "`": 319, + "a": 64, + "a": 320, + "aa": 1821, + "aa": 3894, + "aaa": 14376, + "aaa": 9583, + "aaaa": 6727, + "aaaa": 19336, + "aaaaa": 31095, + "aaaaaa": 44413, + "aaaaaaaa": 23126, + "aaaah": 49151, + "aaah": 35856, + "aaay": 37846, + "aab": 34108, + "aac": 23251, + "aac": 11346, + "aad": 20464, + "aad": 35894, + "aaf": 37638, + "aaf": 31534, + "aag": 42174, + "aah": 28990, + "aaj": 28727, + "aaj": 43411, + "aak": 37739, + "aal": 22268, + "aal": 30208, + "aali": 27896, + "aaliyah": 46577, + "aam": 12943, + "aam": 22775, + "aama": 45018, + "aamaadmi": 45563, + "aamaadmiparty": 46406, + "aamir": 27456, + "aan": 20705, + "aan": 13426, + "aand": 38054, + "aap": 12023, + "aap": 12052, + "aapl": 34516, + "aar": 4695, + "aar": 13234, + "aard": 46932, + "aaron": 13948, + "aaron": 7709, + "aas": 28542, + "aas": 32205, + "aat": 34018, + "aat": 35004, + "aau": 35426, + "aay": 38281, + "aay": 40249, + "aaz": 26770, + "ab": 596, + "ab": 3937, + "aba": 44204, + "aba": 11102, + "abad": 33444, + "abad": 7155, + "aban": 41662, + "aband": 8595, + "abandon": 28805, + "abandoned": 11227, + "abar": 17860, + "abar": 39805, + "abas": 25402, + "abay": 43542, + "abb": 38954, + "abb": 38297, + "abba": 30870, + "abbas": 37494, + "abbas": 24412, + "abbey": 31927, + "abbey": 10132, + "abbie": 39949, + "abbo": 13536, + "abbot": 44046, + "abbott": 43737, + "abbott": 15649, + "abbrevi": 44843, + "abby": 30586, + "abby": 14694, + "abc": 13137, + "abc": 5334, + "abcnews": 31566, + "abd": 44093, + "abdel": 46511, + "abdomin": 35335, + "abdominal": 39328, + "abdu": 13361, + "abduc": 17884, + "abducted": 31520, + "abduction": 36984, + "abdul": 14227, + "abdul": 15593, + "abdullah": 21317, + "abe": 15856, + "abe": 12734, + "abee": 36037, + "abel": 31938, + "abel": 25318, + "abella": 46156, + "aben": 40865, + "aber": 7828, + "aber": 41867, + "aberdeen": 30539, + "aberdeen": 17236, + "abh": 27484, + "abh": 33649, + "abhcosmetics": 49189, + "abhi": 18113, + "abhin": 44045, + "abhishek": 44502, + "abi": 16867, + "abi": 14161, + "abia": 48604, + "abide": 49163, + "abig": 20863, + "abigail": 25686, + "abil": 21135, + "abilities": 8724, + "ability": 35146, + "ability": 3024, + "abit": 48668, + "ablanc": 33716, + "able": 10102, + "able": 863, + "abled": 10655, + "ableg": 24055, + "ables": 8486, + "ableton": 47169, + "ably": 6748, + "abnormal": 40934, + "abo": 2889, + "abo": 21861, + "aboard": 11661, + "abol": 31768, + "abolic": 46827, + "abolish": 47403, + "aboo": 42433, + "abor": 8416, + "aboriginal": 20422, + "abortion": 12336, + "abortions": 43218, + "aboss": 46401, + "abou": 36455, + "abou": 44053, + "abound": 41037, + "abour": 46637, + "about": 20204, + "about": 781, + "abouts": 36339, + "above": 35019, + "above": 4348, + "aboy": 37077, + "abpoli": 44779, + "abq": 38767, + "abr": 
44932, + "abra": 10694, + "abra": 35087, + "abraham": 40623, + "abraham": 15869, + "abram": 33255, + "abrams": 29852, + "abre": 22472, + "abre": 46756, + "abri": 28605, + "abridged": 45333, + "abroad": 11253, + "abru": 46295, + "abs": 18431, + "abs": 11109, + "absc": 25389, + "abscbn": 44260, + "abscbn": 45810, + "absen": 32453, + "absence": 19240, + "absent": 30363, + "absol": 4624, + "absolu": 7055, + "absolut": 4666, + "absolute": 7501, + "absolutely": 4703, + "absor": 14303, + "absorb": 35806, + "absorbed": 45059, + "absorbing": 46412, + "absorption": 42210, + "abstr": 7530, + "abstract": 23885, + "abstract": 10197, + "abstractart": 31170, + "abstraction": 47696, + "abstracts": 40065, + "absur": 21639, + "absurd": 29757, + "abt": 9850, + "abu": 9167, + "abu": 11787, + "abud": 20180, + "abudha": 21450, + "abudhabi": 25256, + "abuja": 23371, + "abun": 20544, + "abundance": 23236, + "abundant": 31611, + "abur": 23377, + "aburger": 46660, + "abuse": 7678, + "abused": 23855, + "abuses": 37132, + "abusing": 36558, + "abusive": 26858, + "abv": 34172, + "aby": 16342, + "aby": 31378, + "abyss": 33632, + "abz": 42292, + "ac": 546, + "ac": 2816, + "aca": 9213, + "acab": 41388, + "acacia": 44047, + "acad": 32537, + "acade": 2892, + "academia": 22662, + "academic": 31178, + "academic": 7935, + "academics": 26417, + "academies": 42569, + "academy": 29968, + "academy": 4041, + "acadi": 41455, + "acadia": 49236, + "acam": 26172, + "acan": 42227, + "acan": 26318, + "acap": 32357, + "acar": 22232, + "acare": 16961, + "acc": 26805, + "acc": 9318, + "acca": 30883, + "acce": 8564, + "acceler": 10161, + "accelerate": 23619, + "accelerated": 38513, + "accelerating": 41821, + "acceleration": 39387, + "accelerator": 25261, + "accent": 28110, + "accent": 18931, + "accents": 31738, + "accenture": 41853, + "accep": 4616, + "accept": 16447, + "accept": 9338, + "acceptable": 14209, + "acceptance": 17090, + "accepted": 9159, + "accepting": 12855, + "accepts": 22338, + "access": 7596, + "access": 3822, + "accessi": 10787, + "accessibility": 23407, + "accessible": 13977, + "accessing": 46339, + "accessories": 10220, + "accessory": 20417, + "acci": 4263, + "acci": 33943, + "accident": 6608, + "accidental": 24895, + "accidentally": 11061, + "accidents": 22072, + "acclaimed": 21172, + "acco": 44730, + "accol": 33858, + "accolades": 46731, + "accom": 23658, + "accommo": 34495, + "accommod": 14386, + "accommodate": 34708, + "accommodation": 18066, + "accommodations": 45536, + "accomp": 24985, + "accompan": 14746, + "accompanied": 20715, + "accompany": 34142, + "accompanying": 38179, + "accompli": 10205, + "accomplish": 25542, + "accomplished": 16462, + "accomplishment": 26100, + "accomplishments": 24965, + "accor": 4182, + "accord": 34293, + "accord": 28513, + "according": 4717, + "accordingly": 35535, + "accordion": 48760, + "accoun": 3081, + "account": 18424, + "account": 4684, + "accountability": 19377, + "accountable": 24216, + "accountant": 31026, + "accountants": 37222, + "accounted": 43951, + "accounting": 14805, + "accounts": 9974, + "accra": 31900, + "accred": 17451, + "accreditation": 27015, + "accredited": 27647, + "acct": 45569, + "accu": 5618, + "accumul": 19275, + "accumulation": 37112, + "accur": 6551, + "accuracy": 18423, + "accurate": 8858, + "accurately": 24206, + "accusations": 33615, + "accuse": 39414, + "accused": 9434, + "accuses": 27496, + "accusing": 41474, + "acdc": 45067, + "ace": 2675, + "ace": 804, + "acea": 35219, + "aceae": 38153, + "acele": 40868, + "aceous": 33610, + "acer": 37990, + "acer": 
25809, + "aces": 5725, + "acet": 28735, + "acf": 38389, + "ach": 972, + "ach": 987, + "acha": 22686, + "acharya": 45780, + "achat": 32706, + "ache": 27771, + "ache": 7214, + "ached": 17048, + "acher": 38442, + "acher": 17936, + "achers": 25051, + "aches": 14823, + "achi": 3264, + "achi": 9087, + "achiev": 8160, + "achieve": 14798, + "achieve": 8175, + "achieved": 12359, + "achievement": 8245, + "achievements": 16114, + "achiever": 46286, + "achievers": 44544, + "achieves": 40123, + "achieving": 16120, + "achilles": 33327, + "achim": 42335, + "aching": 12864, + "acho": 33130, + "achs": 41195, + "aci": 4359, + "aci": 34100, + "acia": 30163, + "acial": 32422, + "acid": 35474, + "acid": 10085, + "acidity": 48800, + "acids": 27751, + "acies": 20162, + "acin": 39442, + "acing": 9442, + "acio": 26202, + "acion": 44965, + "acion": 24968, + "acional": 26435, + "aciones": 35832, + "acious": 16020, + "acity": 7511, + "ación": 38175, + "ack": 877, + "ack": 725, + "acked": 5698, + "acker": 31201, + "acker": 7940, + "ackeray": 41843, + "acki": 42857, + "acking": 5515, + "ackles": 28503, + "acknow": 13563, + "acknowle": 18100, + "acknowledge": 25209, + "acknowledged": 35913, + "acknowledges": 49083, + "acknowledging": 45645, + "acks": 3858, + "acl": 47593, + "acl": 23073, + "acle": 6504, + "acles": 34164, + "aclu": 37354, + "acm": 39317, + "acmilan": 36500, + "acne": 24195, + "aco": 9463, + "aco": 8800, + "acol": 17431, + "acollege": 43468, + "acom": 17224, + "acom": 22342, + "acon": 11621, + "acon": 11571, + "aconf": 38851, + "acons": 31599, + "acor": 22076, + "acorn": 37537, + "acos": 39943, + "acosta": 31994, + "acou": 8794, + "acoun": 31295, + "acounty": 45449, + "acoustic": 10616, + "acoustics": 43873, + "acp": 19627, + "acqu": 7946, + "acquainted": 40713, + "acqui": 12194, + "acquire": 21576, + "acquired": 15932, + "acquires": 27376, + "acquiring": 42785, + "acquis": 14207, + "acquisition": 16543, + "acquisitions": 39649, + "acr": 43648, + "acre": 26749, + "acre": 9493, + "acres": 11630, + "acro": 21060, + "acrob": 40891, + "acron": 37770, + "across": 2500, + "acrosse": 40979, + "acruz": 40455, + "acry": 10440, + "acrylic": 12252, + "acs": 11782, + "act": 10305, + "act": 1393, + "acted": 10971, + "acti": 4786, + "acting": 6319, + "action": 12493, + "action": 1816, + "actions": 6271, + "activ": 3430, + "activate": 26737, + "activated": 22249, + "activation": 26769, + "active": 19009, + "active": 4046, + "actively": 18645, + "activi": 7230, + "activism": 20117, + "activist": 10850, + "activists": 12649, + "activities": 6514, + "activity": 6206, + "actment": 44807, + "acton": 36167, + "acton": 36697, + "actonclimate": 43797, + "actor": 12181, + "actor": 4035, + "actors": 9255, + "actorslife": 25117, + "actorvijay": 34033, + "actress": 5805, + "actresses": 33639, + "acts": 6816, + "actu": 2375, + "actual": 7488, + "actually": 2955, + "acu": 9204, + "acu": 48475, + "aculture": 38145, + "acup": 30869, + "acup": 27278, + "acupuncture": 40043, + "acur": 44719, + "acura": 30120, + "acus": 33710, + "acute": 19734, + "acy": 18717, + "acy": 2356, + "ad": 594, + "ad": 680, + "ada": 25785, + "ada": 1886, + "adaily": 47254, + "adal": 46646, + "adam": 6037, + "adam": 4944, + "adamlambert": 27659, + "adams": 7942, + "adan": 41802, + "adani": 37499, + "adap": 6341, + "adapt": 22666, + "adaptation": 16566, + "adapted": 26657, + "adapter": 21839, + "adapting": 44120, + "adaptive": 28672, + "adar": 27702, + "adar": 32681, + "adas": 23250, + "adata": 39500, + "aday": 31367, + "aday": 10280, + "adays": 24337, + "adb": 
45630, + "adc": 38201, + "add": 19408, + "add": 3536, + "addams": 38912, + "added": 4149, + "adder": 47557, + "addi": 36378, + "addic": 5709, + "addict": 14614, + "addicted": 16275, + "addiction": 11751, + "addictive": 29638, + "addicts": 29997, + "adding": 8676, + "addis": 43911, + "addison": 32369, + "additi": 26927, + "addition": 6698, + "additional": 10666, + "additions": 22575, + "additive": 48546, + "addo": 40001, + "address": 5834, + "addressed": 20817, + "addresses": 12702, + "addressing": 10594, + "adds": 9944, + "addy": 24746, + "ade": 2194, + "ade": 1928, + "adecides": 46374, + "aded": 9994, + "adee": 47054, + "adel": 4434, + "adel": 27308, + "adelaide": 38193, + "adelaide": 11611, + "adele": 42843, + "adele": 21220, + "adelrey": 43627, + "ademy": 49123, + "aden": 28669, + "aden": 28688, + "adena": 23648, + "adequ": 18232, + "adequate": 22281, + "ader": 21365, + "adero": 49185, + "aders": 27672, + "ades": 5793, + "adh": 42301, + "adhd": 32649, + "adhe": 21175, + "adhesive": 38429, + "adi": 2486, + "adi": 8779, + "adia": 26874, + "adic": 36780, + "adid": 8086, + "adidas": 22396, + "adidas": 9589, + "adidasoriginals": 48575, + "adies": 45834, + "adifference": 37217, + "adilla": 41167, + "ading": 15000, + "adio": 15060, + "adirond": 36843, + "adish": 49009, + "adity": 28596, + "aditya": 37186, + "adityanath": 44437, + "adjac": 32517, + "adjacent": 33836, + "adjec": 45512, + "adju": 16413, + "adjun": 45995, + "adjust": 13784, + "adjust": 28073, + "adjustable": 20476, + "adjusted": 30515, + "adjusting": 41132, + "adjustment": 36081, + "adjustments": 36331, + "adl": 49351, + "adler": 30222, + "adm": 9892, + "adm": 33604, + "admi": 11666, + "admin": 12528, + "admini": 6434, + "administr": 12174, + "administration": 9502, + "administrative": 22424, + "administrator": 22603, + "administrators": 36123, + "admins": 49297, + "admir": 17031, + "admiral": 21013, + "admiration": 39569, + "admire": 17791, + "admired": 36103, + "admirer": 48344, + "admiring": 29835, + "admission": 11315, + "admissions": 22463, + "admit": 13769, + "admits": 16332, + "admitted": 20427, + "admitting": 46148, + "adn": 40339, + "adnan": 42037, + "ado": 4775, + "ado": 2933, + "adobe": 29256, + "adobe": 16484, + "adog": 44913, + "adol": 33512, + "adole": 22704, + "adolescent": 36793, + "adolescents": 45656, + "adolf": 41179, + "adon": 25907, + "adona": 48419, + "adop": 4183, + "adopt": 16441, + "adopt": 11159, + "adoptable": 36905, + "adoptdont": 19674, + "adoptdontshop": 20089, + "adopted": 12538, + "adopting": 30158, + "adoption": 11544, + "adopts": 40853, + "ador": 4992, + "ador": 9162, + "adora": 40031, + "adorable": 6298, + "adoration": 46781, + "adore": 15502, + "adored": 49233, + "adores": 30290, + "adorned": 44953, + "ados": 20079, + "adox": 32188, + "adp": 44426, + "adr": 46189, + "adren": 24204, + "adrenaline": 35552, + "adri": 5935, + "adrian": 25012, + "adrian": 13163, + "adriana": 41363, + "adrid": 26562, + "adrien": 47469, + "adrienne": 40081, + "ads": 2485, + "adu": 16882, + "adu": 24446, + "adukone": 30511, + "adul": 7222, + "adult": 42209, + "adult": 7115, + "adulthood": 40964, + "adults": 9391, + "adv": 1647, + "adv": 21018, + "advan": 33411, + "advance": 27291, + "advance": 7022, + "advanced": 7465, + "advancement": 35437, + "advances": 15852, + "advancing": 21355, + "advani": 48189, + "advant": 7017, + "advantage": 8573, + "advantaged": 38361, + "advantages": 23506, + "adven": 41670, + "advent": 3071, + "advent": 15199, + "adventcalendar": 43492, + "adventur": 29627, + "adventure": 17251, + 
"adventure": 4377, + "adventurer": 48098, + "adventures": 7941, + "adventurous": 31179, + "adver": 4806, + "adverse": 30348, + "adversity": 32516, + "advert": 19080, + "adverti": 5682, + "advertise": 31473, + "advertised": 38987, + "advertisement": 18713, + "advertiser": 41829, + "advertisers": 45472, + "advertising": 8158, + "adverts": 44306, + "advice": 4973, + "advis": 4634, + "advise": 25962, + "advised": 23196, + "adviser": 20367, + "advisers": 40984, + "advises": 42761, + "advising": 39648, + "advisor": 12380, + "advisors": 23197, + "advisory": 10224, + "advoc": 6657, + "advocacy": 14443, + "advocate": 12044, + "advocates": 17757, + "adwords": 48343, + "ady": 41446, + "ady": 8781, + "ae": 5548, + "ae": 4542, + "aea": 37048, + "aed": 26912, + "aege": 42304, + "ael": 41533, + "ael": 43340, + "aen": 43085, + "aer": 10195, + "aeri": 27685, + "aerial": 44866, + "aerial": 12440, + "aero": 10196, + "aero": 25026, + "aerob": 42824, + "aeron": 37286, + "aeronau": 42816, + "aerop": 27735, + "aerosmith": 43253, + "aerospace": 20530, + "aes": 10617, + "aes": 35677, + "aest": 40694, + "aesthe": 21181, + "aesthetic": 16179, + "aesthetics": 29295, + "aew": 47108, + "af": 702, + "af": 4391, + "afa": 24953, + "afan": 47474, + "afar": 41637, + "afar": 37866, + "afb": 27022, + "afc": 29742, + "afc": 6571, + "afcb": 44276, + "afcon": 30019, + "afd": 44626, + "afe": 30487, + "afe": 13912, + "afer": 44707, + "aff": 8849, + "aff": 14864, + "affair": 13998, + "affairs": 9830, + "affe": 4556, + "affect": 11361, + "affected": 9715, + "affecting": 18448, + "affection": 33780, + "affection": 28381, + "affectionate": 42578, + "affects": 17285, + "affili": 12120, + "affiliate": 18652, + "affiliated": 37540, + "affiliation": 48377, + "affinity": 41451, + "affir": 25343, + "affirm": 42711, + "affirm": 48625, + "affirmation": 47495, + "affl": 34036, + "affleck": 35584, + "afford": 7951, + "afford": 13223, + "affordability": 44828, + "affordable": 43944, + "affordable": 8926, + "afg": 33994, + "afgh": 9029, + "afghan": 15919, + "afghanistan": 9836, + "afi": 24074, + "afi": 31958, + "afil": 27209, + "afire": 42010, + "afirst": 38601, + "afl": 15132, + "afl": 14356, + "aflo": 41959, + "afm": 38385, + "afootball": 41694, + "afor": 43102, + "afore": 41468, + "afp": 18311, + "afraid": 9474, + "afri": 13888, + "afric": 2136, + "africa": 3093, + "african": 17471, + "african": 4736, + "africans": 26534, + "afridi": 37651, + "afrika": 45833, + "afrin": 45586, + "afro": 16267, + "afro": 21795, + "afs": 48960, + "aft": 22693, + "after": 2278, + "after": 953, + "afterdark": 48966, + "afterlife": 46790, + "aftermath": 20958, + "afterno": 22330, + "afternoon": 39035, + "afternoon": 2716, + "afternoons": 31631, + "afterparty": 35305, + "afterwards": 23911, + "ag": 602, + "ag": 5241, + "aga": 1050, + "aga": 4654, + "again": 1495, + "against": 23838, + "against": 1601, + "agame": 46943, + "agan": 42946, + "agan": 9178, + "agar": 13199, + "agar": 17544, + "agarwal": 43117, + "agas": 20430, + "agate": 25454, + "agatha": 43896, + "agave": 42671, + "agawa": 39433, + "agazine": 44942, + "age": 4758, + "age": 805, + "aged": 3889, + "ageing": 25349, + "agen": 10101, + "agen": 43696, + "agencies": 13887, + "agency": 44885, + "agency": 6270, + "agend": 48653, + "agenda": 8728, + "agent": 21210, + "agent": 6576, + "agents": 10199, + "agentsof": 37074, + "agentsofshield": 38801, + "ager": 44847, + "ager": 10443, + "agers": 22123, + "ages": 2321, + "agg": 45482, + "aggarwal": 39386, + "agger": 27836, + "aggi": 36844, + "aggie": 44244, + "aggie": 
37618, + "aggies": 31047, + "aggio": 36685, + "aggrav": 35203, + "aggre": 10426, + "aggreg": 41968, + "aggregate": 41318, + "aggression": 28900, + "aggressive": 16295, + "aggressively": 48667, + "agh": 17917, + "agh": 14402, + "aghan": 31276, + "agi": 24036, + "agi": 17645, + "agic": 37652, + "agile": 16276, + "agility": 32161, + "aging": 4336, + "agio": 41746, + "agirl": 35469, + "agle": 37035, + "agle": 16702, + "agles": 36374, + "agles": 22679, + "aglia": 46912, + "agm": 19162, + "agn": 36474, + "agna": 43626, + "agne": 29374, + "agne": 48303, + "agnes": 26213, + "agno": 41540, + "ago": 6276, + "ago": 1468, + "agomez": 27127, + "agon": 26775, + "agon": 14901, + "agony": 36977, + "agor": 38920, + "agos": 32657, + "agov": 34227, + "agp": 46048, + "agr": 36639, + "agra": 26660, + "agra": 29830, + "agram": 2447, + "agre": 3180, + "agreat": 37594, + "agree": 5953, + "agreed": 12774, + "agreeing": 40720, + "agreement": 8286, + "agreements": 25865, + "agrees": 17854, + "agri": 20527, + "agri": 30326, + "agricul": 7234, + "agricultural": 15440, + "agriculture": 9720, + "agro": 33178, + "agro": 44589, + "agron": 41314, + "agroup": 40099, + "ags": 16926, + "agt": 39681, + "agu": 3922, + "agu": 36544, + "agua": 18482, + "aguchi": 49206, + "ague": 2095, + "aguero": 42964, + "agues": 7000, + "aguil": 27946, + "aguilar": 44715, + "ah": 1772, + "ah": 1288, + "aha": 12082, + "aha": 8429, + "ahah": 38661, + "ahaha": 32423, + "ahahaha": 42620, + "aham": 36036, + "ahan": 45061, + "ahan": 19255, + "ahar": 31038, + "ahar": 38760, + "ahe": 27688, + "ahead": 3158, + "ahem": 39995, + "ahh": 13152, + "ahhh": 14769, + "ahhhh": 21054, + "ahhhhh": 36392, + "ahi": 45349, + "ahi": 24154, + "ahl": 30433, + "ahmad": 32167, + "ahmad": 16902, + "ahmadi": 38656, + "ahmadiyya": 44865, + "ahmed": 19491, + "ahmed": 12081, + "ahmedabad": 26966, + "ahn": 33405, + "aho": 28114, + "aho": 38444, + "ahora": 43113, + "ahouse": 33197, + "ahoy": 38652, + "ahs": 16937, + "ahu": 11908, + "ahu": 16515, + "ai": 2014, + "ai": 2215, + "aia": 27046, + "aib": 34780, + "aic": 29454, + "aid": 13723, + "aid": 5182, + "aida": 33830, + "aidan": 48814, + "aidan": 26945, + "aide": 31558, + "aide": 9746, + "aided": 48707, + "aiden": 40020, + "aides": 49082, + "aids": 11759, + "aig": 27295, + "aig": 46989, + "aii": 22478, + "aik": 42575, + "aiken": 46342, + "ail": 1457, + "ail": 9154, + "ailed": 38919, + "ailing": 29999, + "ails": 27024, + "aim": 6787, + "aim": 11255, + "aime": 39872, + "aimed": 20247, + "aimee": 36318, + "aiming": 21768, + "aimo": 36706, + "aims": 13326, + "ain": 8326, + "ain": 2210, + "aine": 48983, + "aine": 17634, + "ains": 27621, + "aint": 29543, + "aint": 13099, + "ainted": 39933, + "aioli": 43949, + "air": 1281, + "air": 1922, + "aira": 35085, + "aira": 46444, + "airasia": 48020, + "airbnb": 23098, + "airborne": 22755, + "airbus": 15324, + "aircraft": 7706, + "airdrop": 38434, + "aire": 7682, + "aired": 21938, + "aires": 17034, + "airfield": 40525, + "airforce": 23511, + "airing": 20453, + "airline": 14847, + "airlines": 8929, + "airmen": 44499, + "airplane": 16451, + "airplanes": 33319, + "airplay": 47024, + "airpollution": 47362, + "airport": 48337, + "airport": 3259, + "airports": 21543, + "airs": 18539, + "airshow": 27139, + "airsoft": 30134, + "airspace": 49280, + "airstrikes": 37220, + "airtel": 34784, + "airtime": 46617, + "airwaves": 43910, + "airways": 14299, + "airy": 44453, + "ais": 7616, + "ais": 11393, + "aise": 30505, + "aish": 21946, + "aisha": 40211, + "aishwar": 29687, + "aishwarya": 44019, + "aisle": 26917, 
+ "ait": 25613, + "ait": 40814, + "aj": 3990, + "aj": 6342, + "aja": 42343, + "aja": 19633, + "ajax": 21933, + "ajay": 22494, + "ajay": 28726, + "ajaydevgn": 35515, + "aje": 48818, + "aje": 33315, + "ajes": 38791, + "aji": 26102, + "aji": 21153, + "ajit": 42261, + "ajith": 24118, + "ajo": 26958, + "aju": 36855, + "ak": 819, + "ak": 1196, + "aka": 19154, + "aka": 3412, + "akaif": 45736, + "akan": 43678, + "akan": 38244, + "akapoor": 40064, + "akarta": 48603, + "akb": 41962, + "akbar": 27180, + "ake": 10558, + "ake": 5776, + "aked": 6115, + "aker": 14245, + "aker": 3074, + "akers": 5788, + "akes": 4764, + "akest": 46679, + "akh": 14821, + "akh": 30660, + "akhan": 28158, + "akhi": 41660, + "akhilesh": 48495, + "akhtar": 45458, + "aki": 18173, + "aki": 6592, + "akin": 24630, + "akin": 13601, + "aking": 1809, + "akins": 48568, + "akira": 34001, + "akis": 27732, + "akistan": 46221, + "akley": 39908, + "ako": 44027, + "ako": 14541, + "akon": 47105, + "akos": 44659, + "akrish": 37434, + "akron": 26115, + "aks": 2953, + "aksh": 28226, + "akshay": 21483, + "akshay": 38914, + "akshaykumar": 23624, + "akshi": 42634, + "aku": 18151, + "aku": 20815, + "aky": 11977, + "al": 526, + "al": 566, + "ala": 12783, + "ala": 3449, + "alab": 6365, + "alabam": 45880, + "alabama": 8422, + "alach": 24622, + "alad": 23074, + "aladdin": 29951, + "alai": 47072, + "alain": 28999, + "alam": 16612, + "alam": 16012, + "alamo": 41922, + "alamo": 34632, + "alan": 9563, + "alan": 5773, + "alana": 43405, + "aland": 34304, + "aland": 6819, + "alar": 34333, + "alarm": 11321, + "alarming": 37209, + "alarms": 31236, + "alarts": 31422, + "alas": 7276, + "alas": 22412, + "alaska": 9562, + "alaskan": 33898, + "alastair": 42062, + "alay": 30289, + "alay": 36450, + "alaya": 36397, + "alb": 45248, + "alba": 25254, + "alban": 10882, + "albania": 29170, + "albanian": 47721, + "albans": 44119, + "albany": 17359, + "albat": 42797, + "albeit": 38984, + "alber": 6413, + "albert": 34174, + "albert": 9507, + "alberta": 11048, + "alberto": 22714, + "albi": 18512, + "albino": 48062, + "albion": 24071, + "albu": 2216, + "album": 40712, + "album": 2431, + "albums": 10705, + "albuquerque": 31079, + "alcat": 35361, + "alche": 37909, + "alchemist": 38913, + "alchemy": 39501, + "alco": 6848, + "alco": 45446, + "alcohol": 9426, + "alcoholic": 25098, + "ald": 4539, + "ald": 2928, + "alda": 46440, + "alde": 33114, + "alden": 17155, + "alden": 27710, + "aldenrichards": 20051, + "alder": 18220, + "alder": 46571, + "aldi": 23204, + "aldo": 9933, + "aldridge": 38084, + "alds": 14285, + "aldu": 6505, + "aldub": 10532, + "aldub": 15247, + "ale": 1440, + "ale": 1336, + "alea": 26518, + "aleague": 38909, + "alec": 29804, + "alec": 19954, + "alecoscino": 47948, + "aled": 4970, + "alee": 24515, + "alej": 23440, + "alejandro": 32950, + "alek": 26906, + "alek": 43310, + "aleksand": 48429, + "alem": 11825, + "aleppo": 19258, + "aler": 25674, + "aler": 27335, + "alert": 4662, + "alerts": 22144, + "ales": 44171, + "ales": 5962, + "aless": 21864, + "alessandro": 37344, + "alestine": 31945, + "alex": 2959, + "alex": 4134, + "alexa": 16273, + "alexand": 10696, + "alexander": 25527, + "alexander": 7563, + "alexandra": 19054, + "alexandre": 35711, + "alexandria": 21171, + "alexis": 35023, + "alexis": 14243, + "aley": 21635, + "alf": 27098, + "alfa": 23482, + "alfar": 38870, + "alfie": 28598, + "alfon": 31947, + "alfonso": 41784, + "alfre": 20982, + "alfred": 16553, + "alfredo": 32291, + "algae": 25654, + "algar": 36291, + "algarve": 40290, + "alge": 24336, + "algebra": 33694, 
+ "alger": 18568, + "algeria": 25257, + "algon": 33007, + "algori": 14912, + "algorithm": 23295, + "algorithms": 26039, + "alham": 23352, + "alhamdulil": 35129, + "alhamdulillah": 38982, + "ali": 835, + "ali": 3558, + "alia": 2492, + "aliaa": 36468, + "alian": 3464, + "alias": 40026, + "alibaba": 39231, + "alic": 25265, + "alice": 23759, + "alice": 9192, + "alici": 31630, + "alicia": 20914, + "alie": 8697, + "alien": 22846, + "alien": 9639, + "aliens": 14883, + "alier": 39493, + "alies": 38086, + "alife": 41347, + "alife": 21100, + "alig": 21272, + "alight": 36157, + "align": 31160, + "aligned": 29292, + "alignment": 27267, + "alik": 31141, + "alike": 12665, + "alim": 42075, + "alin": 42746, + "alin": 40063, + "alina": 39529, + "aline": 21799, + "aling": 5169, + "alion": 19049, + "alis": 21308, + "alis": 20114, + "alisa": 38918, + "alisation": 42143, + "alise": 36718, + "alised": 25099, + "alism": 5607, + "alison": 28653, + "alison": 16970, + "alist": 44900, + "alist": 3320, + "alistair": 40551, + "alistic": 22302, + "alists": 5653, + "alit": 45566, + "alities": 27925, + "ality": 1694, + "alive": 40467, + "alive": 4716, + "aliz": 30979, + "alization": 8026, + "alize": 10268, + "alized": 6141, + "alizer": 38922, + "alizes": 26181, + "alizing": 13023, + "alk": 30246, + "alk": 21577, + "alkal": 33450, + "alkaline": 39210, + "all": 813, + "all": 615, + "alla": 13884, + "alla": 14000, + "allabout": 43996, + "allah": 6378, + "allan": 36552, + "allan": 15404, + "allblacks": 47728, + "allday": 35862, + "alle": 4870, + "alle": 29478, + "alled": 7379, + "alleg": 7456, + "allegations": 16992, + "alleged": 12133, + "allegedly": 14177, + "alleges": 45051, + "allegh": 41479, + "allegheny": 47851, + "allegi": 28832, + "allegiance": 30955, + "allen": 16712, + "allen": 6386, + "allenge": 31387, + "aller": 10116, + "aller": 30630, + "allergic": 28809, + "allergies": 28247, + "allergy": 24408, + "allery": 32542, + "alles": 43354, + "allevi": 31682, + "alleviate": 44799, + "alley": 36205, + "alley": 10329, + "allez": 49137, + "alli": 4123, + "alli": 15268, + "alliance": 45404, + "alliance": 8945, + "alliances": 48403, + "allianz": 45740, + "allie": 25040, + "allied": 20045, + "allies": 17277, + "alligator": 28574, + "allin": 45007, + "allin": 22395, + "alline": 48182, + "alling": 2992, + "allis": 45309, + "allison": 34602, + "allison": 16578, + "allman": 42611, + "allo": 8107, + "allo": 18389, + "allocated": 42716, + "allocation": 35139, + "allon": 46693, + "allot": 26363, + "allotment": 33750, + "allow": 5645, + "allow": 6722, + "allowance": 35696, + "allowed": 7885, + "allowing": 12458, + "allows": 9966, + "alloy": 22467, + "alls": 1997, + "allstar": 31247, + "allstar": 22974, + "allstars": 31198, + "allthe": 29253, + "allu": 20157, + "alluarjun": 39333, + "allure": 41814, + "ally": 7461, + "ally": 769, + "alm": 28303, + "alma": 32933, + "alma": 18337, + "alman": 29394, + "almanac": 41268, + "almighty": 21898, + "almond": 15646, + "almonds": 30468, + "almost": 47534, + "almost": 2671, + "aln": 47203, + "alo": 3435, + "alo": 6183, + "aloe": 30728, + "alog": 15813, + "alogue": 9101, + "aloha": 23160, + "aloils": 49002, + "alom": 22236, + "alon": 14097, + "alon": 42846, + "alone": 4702, + "along": 8300, + "along": 2528, + "alongside": 8646, + "alonso": 25704, + "aloo": 46187, + "alore": 14323, + "alot": 16945, + "alou": 43180, + "aloud": 30028, + "alove": 46669, + "alove": 37045, + "alp": 32020, + "alp": 39342, + "alpac": 30128, + "alpaca": 42561, + "alph": 6720, + "alpha": 11807, + "alpha": 8624, + "alphabe": 
45796, + "alphabet": 22335, + "alphon": 37865, + "alpine": 17055, + "alps": 18191, + "already": 2426, + "alright": 10866, + "als": 23982, + "als": 938, + "alsace": 49388, + "also": 1446, + "alt": 9995, + "alt": 10006, + "alta": 24470, + "alta": 25378, + "altaf": 47342, + "altam": 45624, + "altar": 16385, + "alter": 4949, + "alter": 21393, + "altered": 25201, + "altern": 47463, + "alternate": 15926, + "alternati": 16699, + "alternative": 37327, + "alternative": 8248, + "alternatives": 25041, + "alth": 23463, + "alth": 5863, + "although": 9421, + "alti": 35531, + "alties": 17276, + "altitude": 23241, + "altman": 48100, + "alto": 35053, + "alto": 17518, + "altogether": 45689, + "alton": 41331, + "alton": 36550, + "altrin": 38458, + "altrincham": 44718, + "alty": 5546, + "alu": 4776, + "alu": 27991, + "alum": 5404, + "alum": 10553, + "alumin": 14563, + "alumini": 22908, + "aluminium": 23631, + "aluminum": 15251, + "alumna": 30313, + "alumni": 6646, + "alumnus": 23633, + "alums": 30155, + "alv": 20928, + "alvar": 25196, + "alvarez": 26924, + "alvaro": 41941, + "alves": 38547, + "alvin": 27023, + "alway": 14046, + "alway": 43764, + "always": 24997, + "always": 1466, + "alwx": 32768, + "aly": 6468, + "aly": 12910, + "alyn": 49150, + "alyss": 29490, + "alyssa": 18898, + "alz": 12936, + "alz": 41128, + "alzheim": 15212, + "alzheimer": 21151, + "alzheimers": 34592, + "am": 548, + "am": 687, + "ama": 18206, + "ama": 1696, + "amad": 45095, + "amade": 37366, + "amag": 32049, + "amal": 15315, + "amal": 36753, + "aman": 19890, + "aman": 10110, + "amand": 14560, + "amanda": 10036, + "amar": 6424, + "amar": 19607, + "amara": 48522, + "amari": 42565, + "amarillo": 40449, + "amarine": 45591, + "amarketing": 30788, + "amas": 22716, + "amas": 15667, + "amat": 38664, + "amat": 25455, + "amate": 12453, + "amateur": 14287, + "amaya": 47210, + "amaz": 1185, + "amaze": 24846, + "amazed": 18944, + "amazing": 15949, + "amazing": 1370, + "amazingly": 20368, + "amazon": 13630, + "amazon": 4140, + "amb": 9042, + "amb": 16853, + "amba": 27003, + "ambani": 45967, + "ambas": 5634, + "ambassad": 5758, + "ambassador": 6795, + "ambassadors": 16832, + "ambed": 42089, + "ambedkar": 48131, + "amber": 18292, + "amber": 9986, + "ambi": 11844, + "ambient": 23447, + "ambigu": 35702, + "ambition": 20673, + "ambitions": 34152, + "ambitious": 18666, + "ambro": 17585, + "ambrose": 24253, + "ambu": 34423, + "ambul": 13944, + "ambulance": 15555, + "ambush": 40725, + "amc": 24942, + "amc": 16921, + "amd": 20845, + "ame": 3995, + "ame": 780, + "amed": 5660, + "ameen": 24229, + "amel": 31988, + "amel": 10960, + "ameli": 21599, + "amelia": 21433, + "amell": 48198, + "amen": 18716, + "amen": 12335, + "amend": 12425, + "amendment": 15019, + "amendments": 40901, + "amenities": 30096, + "ament": 27528, + "amer": 17081, + "amer": 16147, + "ameri": 40422, + "americ": 1283, + "america": 2224, + "americafirst": 43216, + "american": 8746, + "american": 2151, + "americana": 26221, + "americanair": 42538, + "americani": 39726, + "americans": 6676, + "americas": 33343, + "americas": 18142, + "ames": 5469, + "ameter": 23393, + "amethy": 30291, + "amethyst": 31485, + "amex": 46390, + "amg": 21324, + "amher": 32311, + "amherst": 39065, + "ami": 6100, + "ami": 3065, + "amic": 25824, + "amic": 21383, + "amid": 18908, + "amid": 11953, + "amide": 30952, + "amidst": 25172, + "amie": 36901, + "amig": 40294, + "amiga": 35329, + "amigo": 44991, + "amigos": 28176, + "amii": 35462, + "amiibo": 38871, + "amily": 36732, + "amin": 14337, + "amin": 20235, + "amina": 
47531, + "amination": 30355, + "amine": 35823, + "aming": 3507, + "amino": 33464, + "amir": 26029, + "amir": 21973, + "amis": 29829, + "amish": 24958, + "amit": 15083, + "amit": 25255, + "amitabh": 48124, + "amitshah": 32374, + "aml": 43185, + "amma": 29786, + "amman": 29243, + "ammo": 33474, + "ammunition": 35060, + "amn": 24073, + "amne": 14596, + "amnesia": 41741, + "amnesty": 46330, + "amnesty": 21177, + "amo": 4833, + "amo": 11156, + "amodi": 9826, + "amon": 17492, + "amon": 24046, + "among": 12310, + "among": 4265, + "amongst": 12520, + "amoo": 26977, + "amor": 19977, + "amor": 15973, + "amore": 38937, + "amore": 22691, + "amores": 36338, + "amos": 18133, + "amoto": 25492, + "amount": 6403, + "amounts": 16747, + "amour": 29908, + "amovie": 41062, + "amp": 3521, + "amp": 6259, + "amped": 22640, + "amphi": 16379, + "amphibious": 45206, + "amphitheater": 41285, + "amphitheatre": 44039, + "ample": 34162, + "amples": 14536, + "ampli": 15647, + "amplifier": 31743, + "amplify": 45308, + "amps": 19252, + "ampton": 29410, + "ampton": 9347, + "amr": 30916, + "amreading": 16546, + "amrit": 33849, + "ams": 1396, + "amster": 9110, + "amsterdam": 9441, + "amtrak": 27855, + "amu": 11347, + "amu": 32336, + "amur": 35014, + "amura": 35487, + "amus": 36269, + "amuse": 21421, + "amuse": 44367, + "amused": 30212, + "amusement": 32570, + "amusic": 20266, + "amusing": 31789, + "amwriting": 9660, + "amy": 10547, + "amy": 5187, + "an": 514, + "an": 550, + "ana": 6588, + "ana": 1388, + "anab": 34742, + "anada": 27948, + "anag": 12115, + "anagh": 40774, + "anaheim": 23728, + "anak": 34814, + "anak": 38658, + "anal": 2785, + "analo": 34179, + "analog": 19963, + "analogue": 46031, + "analy": 4611, + "analyse": 47246, + "analyses": 39695, + "analysis": 5296, + "analyst": 14198, + "analysts": 28075, + "analytical": 34550, + "analytics": 8558, + "analyze": 28519, + "analyzing": 32107, + "anam": 29525, + "anan": 37215, + "anand": 25073, + "anand": 22083, + "anap": 41566, + "anarch": 46405, + "anarchi": 39879, + "anarchy": 27707, + "anas": 31382, + "anas": 12633, + "anast": 48902, + "anasta": 22915, + "anastasi": 36534, + "anastasia": 37975, + "anat": 10045, + "anath": 31277, + "anatom": 33759, + "anatomy": 15376, + "anc": 1124, + "anc": 17758, + "anca": 14583, + "ance": 7165, + "ance": 884, + "anced": 5071, + "ancer": 17415, + "ancers": 37296, + "ances": 3515, + "ancestor": 43904, + "ancestors": 24405, + "ancestral": 41615, + "ancestry": 30922, + "anch": 9489, + "anche": 34679, + "ancho": 26610, + "anchor": 20030, + "anchor": 13201, + "anchorage": 31950, + "anchored": 45926, + "anchors": 37830, + "anci": 4192, + "ancient": 31495, + "ancient": 5810, + "ancies": 21647, + "ancing": 7797, + "anco": 15459, + "ancy": 16282, + "ancy": 3633, + "and": 672, + "and": 537, + "anda": 2911, + "andalu": 31443, + "andco": 36302, + "ande": 26889, + "ande": 30354, + "ander": 3740, + "ander": 3935, + "anders": 10880, + "andersen": 32661, + "anderson": 26683, + "anderson": 6510, + "andes": 24052, + "andfriends": 36871, + "andhi": 21617, + "andhra": 32452, + "andi": 28870, + "andi": 14354, + "andie": 46318, + "andme": 42831, + "ando": 35950, + "ando": 5986, + "andolan": 48965, + "andon": 36488, + "andor": 45243, + "andover": 44177, + "andr": 22661, + "andra": 46795, + "andra": 21730, + "andre": 2657, + "andre": 9400, + "andrea": 10895, + "andreas": 20444, + "andrei": 42137, + "andres": 25197, + "andretti": 44291, + "andrew": 11717, + "andrew": 4847, + "andrews": 14506, + "andri": 37208, + "andro": 4417, + "andro": 17980, + "android": 
24284, + "android": 5191, + "androidgames": 46572, + "andromeda": 42942, + "andré": 35609, + "ands": 32257, + "andthe": 22111, + "andu": 44200, + "andum": 47266, + "andy": 9447, + "andy": 2888, + "ane": 5846, + "ane": 3051, + "anec": 33965, + "anem": 41395, + "anemone": 49019, + "aneous": 48273, + "anes": 15381, + "anese": 48778, + "anesthe": 30622, + "anesthesia": 43353, + "anew": 39084, + "anew": 47341, + "anews": 20919, + "aney": 22387, + "anfield": 26993, + "ang": 883, + "ang": 2704, + "anga": 11641, + "angames": 43178, + "angan": 28264, + "angas": 46180, + "ange": 2960, + "ange": 3039, + "angel": 5029, + "angel": 5130, + "angela": 12354, + "angeles": 7382, + "angeli": 15265, + "angelic": 41038, + "angelica": 38582, + "angelina": 28890, + "angelo": 14342, + "angelou": 41328, + "angels": 7809, + "anger": 32737, + "anger": 6788, + "angerous": 39716, + "angers": 29756, + "angh": 34030, + "angi": 28003, + "angi": 24301, + "angie": 18859, + "angle": 21749, + "angle": 6946, + "angled": 32322, + "angler": 22284, + "anglers": 41608, + "angles": 18627, + "anglesey": 31850, + "anglia": 32076, + "anglic": 28322, + "anglican": 33284, + "angling": 36824, + "anglo": 39515, + "anglo": 30408, + "ango": 19090, + "angola": 36636, + "angor": 41740, + "angp": 19992, + "angry": 33910, + "angry": 9054, + "angs": 18441, + "angst": 41714, + "angu": 11209, + "angular": 43584, + "angular": 24981, + "angularjs": 48608, + "angus": 19688, + "ani": 1326, + "ani": 3624, + "ania": 9866, + "anian": 9945, + "anians": 39393, + "anic": 23113, + "anie": 26697, + "anie": 7671, + "anil": 28589, + "anil": 34619, + "anim": 2190, + "animal": 10697, + "animal": 4668, + "animalrights": 42859, + "animals": 4995, + "animate": 40076, + "animated": 13360, + "animation": 10344, + "animations": 42870, + "animator": 42591, + "anime": 23314, + "anime": 6469, + "anin": 45735, + "aning": 30972, + "anir": 27089, + "anirud": 35278, + "anirudhofficial": 45917, + "anis": 40986, + "anis": 47556, + "anism": 20947, + "anist": 16729, + "anistan": 9727, + "aniston": 47344, + "anit": 23683, + "anita": 18544, + "anium": 14794, + "anj": 22443, + "anja": 43440, + "anjali": 38834, + "anjo": 47353, + "ank": 13339, + "ank": 10029, + "anka": 45324, + "ankara": 34309, + "ankle": 14777, + "ankles": 48688, + "ann": 850, + "ann": 5424, + "anna": 13821, + "anna": 2160, + "annab": 22336, + "annabelle": 47661, + "annah": 39166, + "annah": 14327, + "annak": 41720, + "annan": 32166, + "annapolis": 34491, + "annas": 48467, + "anne": 9139, + "anne": 4083, + "anned": 27352, + "anner": 12642, + "annes": 24343, + "annette": 36821, + "annex": 42958, + "annex": 46389, + "anni": 2438, + "anni": 13728, + "annie": 37270, + "annie": 12173, + "annies": 43184, + "annihil": 32734, + "annis": 24742, + "anniv": 31399, + "anniver": 29671, + "annivers": 42836, + "anniversaire": 30882, + "anniversary": 3048, + "anno": 9901, + "anno": 26871, + "annon": 26385, + "annot": 30411, + "announ": 1806, + "announce": 3682, + "announced": 4103, + "announcement": 6932, + "announcements": 23735, + "announcer": 33626, + "announces": 6500, + "announcing": 11593, + "annoy": 45138, + "annoyed": 29863, + "annoying": 15248, + "annu": 21698, + "annual": 2906, + "annually": 23703, + "anny": 34313, + "anny": 5291, + "ano": 5617, + "ano": 2658, + "anom": 21612, + "anomaly": 46811, + "anon": 47079, + "anon": 13667, + "anonym": 38605, + "anonymous": 15036, + "anoo": 25690, + "anor": 13243, + "anor": 16596, + "anos": 20132, + "another": 29274, + "another": 1380, + "anova": 24116, + "ans": 24586, + "ans": 
885, + "ansari": 40748, + "ansel": 40356, + "answ": 3369, + "answe": 14391, + "answer": 4518, + "answered": 14499, + "answering": 18280, + "answers": 8692, + "ant": 1103, + "ant": 773, + "anta": 3023, + "antag": 41745, + "antal": 39355, + "antalya": 47440, + "antan": 32899, + "antarc": 21338, + "antarctic": 27077, + "antarctica": 22587, + "ante": 19311, + "ante": 9769, + "antebellum": 41683, + "antelope": 39177, + "anten": 35517, + "antenna": 26370, + "anter": 46508, + "antes": 14927, + "antgrasso": 39074, + "anth": 3737, + "anth": 29741, + "antha": 47981, + "anthe": 34167, + "anthem": 12504, + "anthi": 45261, + "anthology": 21009, + "anthony": 17477, + "anthony": 6113, + "anthro": 10019, + "anthropo": 18538, + "anthropology": 32407, + "anthus": 37639, + "anti": 3120, + "anti": 3564, + "antibio": 18954, + "antibiotic": 34387, + "antibiotics": 29499, + "antibody": 49018, + "antic": 8260, + "anticip": 11435, + "anticipate": 38280, + "anticipated": 18605, + "anticipating": 48067, + "anticipation": 26983, + "antics": 37126, + "antidote": 45476, + "antifa": 35926, + "antigua": 39910, + "antine": 17641, + "antino": 27818, + "antioxid": 23010, + "antioxidant": 37452, + "antioxidants": 34208, + "antiqu": 21745, + "antique": 46517, + "antique": 9060, + "antiques": 17365, + "antis": 19748, + "antisemitism": 36630, + "antit": 37833, + "antitrust": 49343, + "antlers": 47720, + "antly": 5265, + "anto": 16826, + "anto": 24486, + "antoine": 25188, + "anton": 5497, + "anton": 19644, + "antoni": 39958, + "antonio": 30497, + "antonio": 7842, + "antony": 30707, + "antrim": 40252, + "ants": 1589, + "antv": 47520, + "antw": 44460, + "antwer": 26970, + "antwerp": 33797, + "antz": 25684, + "anu": 8537, + "anu": 17152, + "anup": 29617, + "anus": 27084, + "anush": 22765, + "anushka": 42080, + "anushka": 39822, + "anushkasharma": 44203, + "anwar": 34261, + "anxi": 9021, + "anxiety": 11103, + "anxious": 27793, + "any": 1307, + "any": 1504, + "anya": 11173, + "anybody": 10071, + "anyi": 41632, + "anymore": 7372, + "anyone": 2302, + "anything": 3582, + "anytime": 13924, + "anyway": 8931, + "anyways": 19778, + "anywhere": 8863, + "anz": 14445, + "anz": 19425, + "anza": 14669, + "anzac": 31977, + "ao": 7313, + "ao": 5703, + "aoa": 47119, + "aoc": 31918, + "aofficial": 30840, + "aoki": 33602, + "aol": 40643, + "aon": 30928, + "aon": 48476, + "aor": 32044, + "aos": 46860, + "ap": 688, + "ap": 2728, + "apa": 36954, + "apa": 13537, + "apac": 34320, + "apache": 23921, + "apal": 38017, + "apan": 36562, + "apar": 9161, + "apark": 32528, + "apart": 6474, + "apart": 7803, + "aparthe": 25121, + "apartheid": 26597, + "apartment": 8285, + "apartments": 15791, + "aparty": 26767, + "apat": 31755, + "apathy": 18145, + "apc": 20300, + "apd": 44563, + "ape": 6098, + "ape": 2609, + "apec": 47530, + "aper": 13681, + "aper": 5858, + "apers": 15846, + "apes": 9550, + "apeu": 19040, + "apex": 41935, + "apex": 23712, + "aph": 16341, + "aph": 29491, + "apha": 47104, + "apho": 21758, + "aphra": 44147, + "api": 23342, + "api": 14674, + "apia": 44259, + "apic": 40679, + "aping": 18456, + "apink": 35725, + "apis": 37575, + "apk": 27648, + "apo": 4089, + "apo": 19758, + "apocaly": 13932, + "apocalypse": 17571, + "apocalyptic": 35675, + "apol": 5023, + "apolice": 45663, + "apolis": 9598, + "apollo": 48213, + "apollo": 11554, + "apolo": 31094, + "apolog": 25530, + "apologe": 42908, + "apologi": 14977, + "apologies": 21959, + "apologise": 39608, + "apologize": 22879, + "apologizes": 35298, + "apology": 20768, + "apor": 21871, + "apore": 6679, + 
"apost": 20309, + "apostle": 33051, + "apostles": 48457, + "app": 882, + "app": 2231, + "appa": 4884, + "appa": 13110, + "appalach": 30523, + "appalachian": 36806, + "appalling": 44797, + "appar": 26698, + "apparatus": 37716, + "apparel": 13972, + "apparent": 23963, + "apparently": 5287, + "appe": 3748, + "appe": 45949, + "appeal": 9625, + "appealing": 25909, + "appeals": 22447, + "appear": 5544, + "appear": 9308, + "appearance": 7238, + "appearances": 17214, + "appeared": 11561, + "appearing": 18759, + "appears": 8743, + "appell": 43833, + "appen": 37201, + "appen": 26589, + "apper": 18780, + "appet": 21686, + "appeti": 24179, + "appetite": 24481, + "appetizer": 36065, + "applau": 24713, + "applaud": 42152, + "applause": 22650, + "apple": 8629, + "apple": 3055, + "applemusic": 21390, + "apples": 14032, + "appleton": 45250, + "appli": 15495, + "appliance": 33677, + "appliances": 22134, + "applic": 4235, + "applicable": 37927, + "applicants": 28035, + "application": 7241, + "applications": 7341, + "applied": 12636, + "applies": 24910, + "apply": 4356, + "applying": 17965, + "appo": 5433, + "appoint": 36190, + "appointed": 11087, + "appointment": 10890, + "appointments": 23439, + "appoints": 25132, + "apprais": 36972, + "appraisal": 46108, + "appreci": 3474, + "appreciate": 6263, + "appreciated": 9264, + "appreciates": 36573, + "appreciating": 39352, + "appreciation": 9212, + "appreciationday": 37438, + "appreciative": 45074, + "appren": 10582, + "apprentic": 15662, + "apprentice": 19122, + "apprentice": 17985, + "apprentices": 38252, + "apprenticeship": 26939, + "apprenticeships": 35425, + "appro": 2398, + "approach": 7781, + "approach": 6241, + "approached": 36499, + "approaches": 14962, + "approaching": 12164, + "appropri": 8446, + "appropriate": 10768, + "appropriately": 30383, + "appropriation": 49110, + "approval": 13549, + "approve": 19064, + "approved": 9412, + "approves": 18107, + "approx": 18266, + "approxim": 14201, + "approximately": 16128, + "apps": 7020, + "appstore": 31377, + "appt": 48112, + "appy": 34420, + "apr": 39396, + "apr": 11177, + "apra": 37027, + "apric": 25923, + "apricot": 30815, + "april": 23548, + "april": 2484, + "apro": 42712, + "apro": 49051, + "apron": 29502, + "aps": 8868, + "apse": 31843, + "apt": 17921, + "aptly": 47313, + "apu": 22166, + "apur": 36900, + "apur": 45193, + "aq": 14018, + "aq": 26862, + "aqu": 4458, + "aqua": 18613, + "aquaculture": 41885, + "aquaman": 35098, + "aquari": 37605, + "aquarium": 16814, + "aquarius": 38879, + "aquatic": 22658, + "aque": 35927, + "aque": 37268, + "aqui": 36826, + "aquino": 33796, + "ar": 516, + "ar": 625, + "ara": 24161, + "ara": 3340, + "arab": 5405, + "arab": 12028, + "arabia": 11746, + "arabian": 24663, + "arabic": 16709, + "arabs": 39155, + "arac": 47620, + "arach": 37689, + "arag": 41502, + "araj": 45142, + "arak": 23416, + "aram": 19223, + "aram": 21473, + "arama": 49066, + "aran": 20839, + "aran": 19641, + "aras": 36399, + "arat": 30856, + "arav": 35836, + "arbit": 20267, + "arbitr": 22702, + "arbitration": 34845, + "arbor": 33516, + "arbor": 24878, + "arboretum": 41719, + "arc": 4997, + "arc": 11592, + "arca": 25189, + "arca": 37612, + "arcade": 13331, + "arcadia": 38372, + "arch": 2458, + "arch": 8557, + "archa": 45619, + "archae": 10121, + "archaeological": 26163, + "archaeologists": 45035, + "archaeology": 14868, + "archan": 33359, + "archbishop": 23994, + "arche": 22474, + "archer": 21824, + "archers": 38407, + "archery": 23935, + "arches": 30771, + "archi": 4479, + "archie": 20557, + "archipel": 
39750, + "archipelago": 43025, + "architec": 3359, + "architect": 12192, + "architects": 13290, + "architectural": 15360, + "architecture": 39038, + "architecture": 4920, + "archival": 39249, + "archive": 42257, + "archive": 10548, + "archived": 42379, + "archives": 9411, + "archy": 15643, + "arctic": 29716, + "arctic": 9138, + "ard": 3793, + "ard": 746, + "arden": 44600, + "arden": 27057, + "ardi": 23932, + "ardi": 19837, + "ardo": 35735, + "ardo": 9394, + "ards": 1654, + "ardu": 20906, + "arduino": 25398, + "are": 1076, + "are": 631, + "area": 2445, + "areas": 5429, + "arec": 18136, + "areclipse": 36030, + "ared": 5369, + "arel": 12798, + "arella": 24784, + "arelli": 48619, + "aren": 4033, + "aren": 4318, + "arena": 5463, + "arenas": 47860, + "arent": 37487, + "arer": 14857, + "arers": 33159, + "ares": 12224, + "arest": 11708, + "aret": 22247, + "areth": 47725, + "aretha": 42090, + "areyou": 37607, + "arez": 13108, + "arg": 27285, + "argent": 7812, + "argentina": 9789, + "argentine": 32582, + "argon": 40737, + "argos": 37443, + "argu": 7440, + "arguably": 30899, + "argue": 19788, + "argued": 48153, + "argues": 30045, + "arguing": 26549, + "argument": 16224, + "arguments": 24693, + "argus": 44300, + "argy": 21066, + "argyle": 36179, + "argyll": 40667, + "ari": 1221, + "ari": 3681, + "aria": 8883, + "arial": 42431, + "arian": 29980, + "arian": 6953, + "ariana": 14892, + "arianag": 23025, + "arianagrande": 23321, + "arianism": 44351, + "arians": 19104, + "arias": 22567, + "arie": 18774, + "ariel": 47959, + "ariel": 21025, + "aries": 5213, + "arif": 46621, + "arily": 12993, + "arin": 29564, + "arin": 18612, + "arina": 29271, + "arine": 29586, + "aring": 2142, + "ario": 8862, + "arios": 25392, + "aris": 15227, + "arise": 26490, + "arist": 12110, + "aristo": 25666, + "aristotle": 49156, + "arities": 31069, + "arity": 16608, + "arium": 11809, + "arius": 21482, + "ariz": 6516, + "arized": 40167, + "arizon": 28936, + "arizona": 7106, + "arjun": 24565, + "arjun": 20477, + "arjuna": 43835, + "ark": 11921, + "ark": 12010, + "arkansas": 12227, + "arkham": 36381, + "arl": 48542, + "arlington": 44940, + "arlington": 17865, + "arly": 3637, + "arm": 5671, + "arm": 4793, + "arma": 15887, + "arma": 38716, + "armad": 37897, + "armada": 34938, + "armagh": 44313, + "armani": 31314, + "armb": 37096, + "armchair": 45757, + "armed": 40471, + "armed": 8202, + "armen": 13145, + "armenia": 22008, + "armenian": 24891, + "armies": 46686, + "armin": 45481, + "arming": 19766, + "armist": 38150, + "armistice": 46765, + "armor": 16167, + "armored": 28214, + "armory": 38610, + "armour": 18503, + "armoured": 42514, + "arms": 5706, + "armstrong": 15005, + "army": 13541, + "army": 3133, + "armys": 27311, + "arn": 9348, + "arn": 37597, + "arnau": 45556, + "arne": 43509, + "arney": 35962, + "arnold": 49096, + "arnold": 13609, + "arns": 46692, + "aro": 7514, + "aro": 11551, + "aroa": 48209, + "arom": 16831, + "aroma": 40143, + "aroma": 26390, + "aromas": 47439, + "aromatherapy": 42584, + "aromatic": 39669, + "aron": 30855, + "aron": 28926, + "aroo": 47581, + "arora": 31897, + "arosa": 44264, + "arose": 44262, + "around": 35615, + "around": 1630, + "arqu": 35654, + "arquitec": 41703, + "arr": 39106, + "arr": 42489, + "arra": 32918, + "arra": 43827, + "arrahman": 44554, + "arran": 45722, + "arrang": 16711, + "arrange": 15410, + "arrange": 26311, + "arranged": 22451, + "arrangement": 23822, + "arrangements": 23792, + "arranging": 35321, + "array": 17293, + "arre": 4374, + "arrell": 28846, + "arrest": 9320, + "arrested": 5845, + 
"arresting": 43930, + "arrests": 20683, + "arri": 2115, + "arrival": 9073, + "arrivals": 19583, + "arrive": 8851, + "arrived": 3514, + "arrives": 9905, + "arriving": 10884, + "arro": 15729, + "arrog": 26997, + "arrogance": 47025, + "arrogant": 40582, + "arrow": 30920, + "arrow": 11149, + "arrowhead": 46393, + "arrows": 24768, + "arroyo": 45237, + "ars": 42815, + "ars": 864, + "arse": 22665, + "arsen": 5330, + "arsenal": 45234, + "arsenal": 6084, + "arsene": 32117, + "arson": 29937, + "art": 1486, + "art": 794, + "arta": 12031, + "arte": 13482, + "arte": 12947, + "artem": 40387, + "artemis": 45256, + "arten": 37043, + "arter": 29449, + "artery": 40062, + "artes": 48629, + "artforsale": 48239, + "artgallery": 31982, + "arth": 7146, + "arth": 20265, + "arthistory": 39313, + "arthr": 20807, + "arthritis": 22916, + "arthro": 43255, + "arthur": 35660, + "arthur": 8550, + "arti": 1635, + "arti": 34601, + "artic": 3003, + "articho": 30937, + "artichoke": 39647, + "article": 3550, + "articles": 11939, + "articul": 40343, + "articulate": 45444, + "artif": 8950, + "artifact": 37718, + "artifacts": 30249, + "artificial": 19357, + "artificial": 12040, + "artificialintelligence": 20799, + "artillery": 24465, + "artin": 33168, + "artin": 48540, + "artis": 41794, + "artisan": 36389, + "artisan": 21535, + "artisans": 40140, + "artist": 14326, + "artist": 2456, + "artiste": 41402, + "artistic": 12421, + "artiston": 48443, + "artistry": 38570, + "artists": 4899, + "artistson": 32127, + "artistsontwitter": 39469, + "artlovers": 35617, + "arto": 28464, + "artof": 31751, + "artoftheday": 43990, + "arton": 46744, + "arts": 22040, + "arts": 3812, + "artsy": 31588, + "arturo": 38591, + "artwit": 36713, + "artwork": 4188, + "artworks": 26215, + "arty": 45417, + "arty": 25916, + "aru": 13757, + "aru": 23907, + "aruba": 40131, + "arugula": 40770, + "arum": 48732, + "arun": 16105, + "arun": 31877, + "arunach": 47260, + "arunjaitley": 44874, + "arus": 22644, + "arvin": 16971, + "arvind": 21209, + "arvind": 41079, + "arvindkejriwal": 22971, + "arvo": 45726, + "arwx": 29824, + "ary": 4617, + "ary": 856, + "arya": 23594, + "aryan": 34966, + "as": 587, + "as": 601, + "asa": 39676, + "asa": 11914, + "asad": 42376, + "asaki": 22455, + "asam": 40603, + "asan": 22379, + "asan": 17841, + "asana": 42363, + "asant": 25536, + "asants": 37766, + "asap": 24199, + "asap": 10822, + "asar": 24733, + "asar": 49299, + "asb": 31186, + "asbe": 32113, + "asbestos": 33765, + "asc": 22720, + "asc": 23305, + "ascen": 20767, + "ascension": 35499, + "ascent": 36625, + "asci": 12753, + "asco": 25578, + "asco": 17488, + "ascot": 23723, + "ascri": 15506, + "asd": 36988, + "asda": 29391, + "asdf": 36857, + "asdfghj": 42758, + "asdfghjkl": 47660, + "ase": 8083, + "ase": 894, + "asean": 24472, + "aseball": 46903, + "ased": 2134, + "asen": 41085, + "aser": 39615, + "aser": 7209, + "ases": 3762, + "asf": 25863, + "asg": 34813, + "ash": 2067, + "ash": 2612, + "asha": 40572, + "asha": 13472, + "ashamed": 20633, + "ashby": 46531, + "ashe": 48523, + "ashe": 31752, + "asher": 37585, + "ashes": 12587, + "asheville": 28897, + "ashford": 37796, + "ashi": 15563, + "ashi": 15934, + "ashish": 33145, + "ashland": 39938, + "ashleigh": 49356, + "ashley": 17825, + "ashley": 8957, + "asho": 20273, + "ashok": 38141, + "ashore": 31194, + "ashram": 43445, + "ashton": 43264, + "ashton": 12228, + "ashtra": 18118, + "asi": 3596, + "asi": 12562, + "asia": 5741, + "asian": 21737, + "asian": 7128, + "asiangames": 49108, + "asians": 36771, + "asics": 31097, + "aside": 13676, + 
"asif": 37302, + "asim": 46050, + "asin": 48432, + "asin": 44347, + "asing": 4194, + "asingly": 15803, + "asion": 31753, + "asis": 12398, + "ask": 11027, + "ask": 2765, + "asked": 3993, + "asking": 5914, + "asks": 7953, + "asl": 41650, + "asleep": 10749, + "asley": 28206, + "asli": 44290, + "asm": 13851, + "asma": 38497, + "asmsg": 19839, + "aso": 30343, + "aso": 27932, + "asober": 43749, + "asocial": 48557, + "ason": 1163, + "asone": 31249, + "asons": 4249, + "asos": 37924, + "asot": 47968, + "asp": 17814, + "asp": 36666, + "asparag": 20301, + "asparagus": 20604, + "aspe": 10894, + "aspect": 19681, + "aspects": 18203, + "aspen": 35695, + "aspen": 25712, + "asper": 32991, + "asph": 28019, + "asphalt": 30574, + "aspir": 12669, + "aspirations": 36127, + "aspire": 24836, + "aspiring": 21862, + "asports": 43695, + "asr": 48052, + "asroma": 41000, + "ass": 12664, + "ass": 5301, + "assa": 47715, + "assad": 18699, + "assam": 19930, + "assan": 26352, + "assange": 27565, + "assas": 9603, + "assassin": 14366, + "assassin": 20029, + "assassinated": 40488, + "assassination": 24907, + "assassins": 34918, + "assassinscre": 36428, + "assassinscreed": 46082, + "assau": 7908, + "assaul": 19596, + "assault": 9679, + "assaulted": 30785, + "assaulting": 44143, + "asse": 3166, + "asse": 38600, + "assel": 37582, + "assemb": 5531, + "assemble": 26169, + "assembled": 22627, + "assemblies": 47406, + "assembling": 38670, + "assembly": 34542, + "assembly": 7059, + "assen": 38651, + "asser": 25665, + "asses": 21596, + "assess": 9209, + "assess": 23211, + "assessed": 44160, + "assessing": 31364, + "assessment": 10590, + "assessments": 32753, + "asset": 48463, + "asset": 13039, + "assets": 13170, + "assi": 2907, + "assi": 39540, + "assie": 31624, + "assign": 14190, + "assigned": 25767, + "assignment": 17342, + "assignments": 34257, + "assim": 36394, + "assimil": 43467, + "assist": 26558, + "assist": 10286, + "assistance": 11685, + "assistant": 6799, + "assistants": 31054, + "assisted": 18095, + "assisting": 24243, + "assists": 12675, + "assn": 44208, + "asso": 17617, + "assoc": 18891, + "associ": 3566, + "associate": 11777, + "associated": 11164, + "associates": 17358, + "association": 5578, + "associations": 33209, + "assor": 38604, + "assorted": 36701, + "assortment": 43112, + "asst": 24767, + "assu": 8328, + "assume": 19294, + "assumed": 37661, + "assuming": 29422, + "assump": 41182, + "assumption": 40773, + "assumptions": 45948, + "assurance": 28408, + "assure": 39161, + "assured": 25591, + "assures": 41988, + "assy": 29940, + "assy": 12963, + "ast": 1761, + "ast": 1242, + "asta": 43269, + "aste": 25033, + "aste": 25579, + "aster": 11013, + "aster": 9526, + "asteroid": 32253, + "asters": 33139, + "asth": 16684, + "asthma": 24610, + "asthour": 41238, + "astic": 15876, + "asting": 29984, + "astle": 46141, + "asto": 47275, + "aston": 24760, + "aston": 13879, + "astoni": 21962, + "astonishing": 27110, + "astonmartin": 40760, + "astor": 26391, + "astor": 47086, + "astoria": 34798, + "astounding": 37748, + "astr": 37609, + "astra": 47205, + "astra": 36079, + "astral": 45889, + "astri": 31243, + "astrid": 46499, + "astro": 8563, + "astro": 15318, + "astrology": 28526, + "astron": 7982, + "astronaut": 18376, + "astronauts": 29733, + "astronom": 23264, + "astronomer": 40036, + "astronomers": 44268, + "astronomical": 39775, + "astronomy": 17472, + "astrophotography": 38559, + "astros": 17598, + "asts": 10452, + "astu": 43137, + "astur": 45795, + "asu": 13157, + "asu": 16001, + "asun": 36044, + "asure": 3813, + "asus": 27269, 
+ "aswell": 42978, + "asx": 38906, + "asy": 8524, + "asy": 2333, + "asylum": 15638, + "asym": 32539, + "at": 527, + "at": 536, + "ata": 4236, + "atable": 23909, + "atal": 24877, + "atal": 24797, + "atan": 33446, + "atar": 20128, + "atar": 7995, + "atari": 21549, + "atas": 30057, + "atay": 39518, + "atc": 28383, + "atch": 15938, + "atd": 33890, + "ate": 992, + "ate": 671, + "ateam": 42784, + "ateau": 16359, + "atec": 37352, + "atech": 31306, + "ated": 14589, + "ated": 943, + "atedly": 24698, + "atee": 32839, + "ateful": 5419, + "atelier": 29932, + "ately": 3862, + "atem": 17116, + "aten": 47984, + "atene": 30405, + "ateneo": 33904, + "ater": 18597, + "ater": 5877, + "ateral": 18819, + "aters": 22364, + "ates": 20370, + "ates": 1150, + "atest": 1705, + "ateur": 43677, + "atf": 28013, + "ath": 1374, + "ath": 1649, + "atha": 22530, + "atham": 23383, + "athan": 41260, + "athan": 26701, + "athe": 8963, + "athed": 47402, + "atheism": 25823, + "atheist": 22571, + "atheists": 47155, + "athen": 29112, + "athena": 30705, + "athens": 13524, + "ather": 6171, + "ather": 1817, + "athered": 34091, + "athers": 17266, + "athi": 28918, + "athing": 36069, + "athle": 3310, + "athlete": 7388, + "athletes": 7125, + "athletic": 33182, + "athletic": 9028, + "athletics": 7019, + "athlon": 14670, + "athome": 38217, + "athon": 4951, + "aths": 28835, + "athy": 34488, + "athy": 13183, + "ati": 591, + "ati": 6751, + "atia": 10908, + "atic": 20248, + "atic": 2647, + "atically": 13558, + "atics": 15666, + "atie": 30137, + "aties": 40060, + "atif": 41592, + "atiku": 37912, + "atile": 15474, + "atility": 23373, + "atime": 20158, + "atin": 36903, + "atin": 23047, + "atine": 39741, + "ating": 25653, + "ating": 1074, + "atio": 35401, + "ation": 2265, + "ation": 656, + "ational": 14205, + "ational": 3108, + "ationals": 44593, + "ationday": 20082, + "ations": 986, + "atis": 45456, + "atis": 41142, + "atism": 45638, + "ative": 18422, + "ative": 1648, + "atively": 11929, + "atives": 5629, + "ativity": 25166, + "atkins": 27734, + "atkinson": 28908, + "atl": 5411, + "atl": 10629, + "atla": 36043, + "atlan": 6818, + "atlanta": 39964, + "atlanta": 6839, + "atlantic": 28804, + "atlantic": 8189, + "atlantis": 27790, + "atlas": 15775, + "atle": 21170, + "atleast": 33231, + "atleti": 46067, + "atletico": 27501, + "atm": 14127, + "atmo": 8271, + "atmosphere": 10506, + "atmospheric": 24223, + "ato": 7987, + "ato": 4364, + "atoday": 26799, + "atom": 22418, + "atom": 24031, + "atomic": 18996, + "atoms": 41434, + "aton": 31525, + "aton": 10012, + "atop": 17455, + "ator": 10748, + "ator": 1962, + "atore": 28314, + "atorial": 32040, + "atories": 35678, + "atorium": 41306, + "ators": 3389, + "atory": 5920, + "atos": 41643, + "atour": 42967, + "atown": 24000, + "atp": 38105, + "atp": 19817, + "atr": 43247, + "atra": 20227, + "atra": 14401, + "atravel": 36981, + "atre": 46057, + "atri": 13882, + "atri": 38889, + "atric": 32238, + "atric": 13652, + "atrics": 36253, + "atrist": 41879, + "atrium": 29725, + "atrix": 43003, + "atro": 18724, + "atroc": 36197, + "atrocities": 37551, + "atry": 28334, + "ats": 46890, + "ats": 1032, + "atsu": 26531, + "att": 1017, + "att": 7103, + "atta": 7282, + "atta": 9146, + "attach": 43676, + "attach": 35653, + "attached": 11038, + "attachment": 28638, + "attack": 24971, + "attack": 3815, + "attacked": 12366, + "attacker": 39288, + "attackers": 47701, + "attacking": 16813, + "attacks": 7321, + "attain": 46459, + "attar": 37110, + "attemp": 4933, + "attempt": 7409, + "attempted": 17408, + "attempting": 18195, + 
"attempts": 15610, + "atten": 4084, + "atten": 32408, + "attenborough": 45860, + "attend": 9841, + "attend": 5802, + "attendance": 11928, + "attendant": 35424, + "attended": 8140, + "attendees": 14648, + "attending": 6696, + "attends": 22248, + "attention": 4936, + "atters": 30675, + "atthe": 21489, + "atti": 49265, + "atti": 16235, + "attic": 26766, + "attire": 21222, + "attitude": 10648, + "attitudes": 27611, + "attle": 14685, + "attle": 5030, + "attn": 25677, + "attor": 8856, + "attorney": 10372, + "attorneys": 29113, + "attrac": 7154, + "attract": 17010, + "attracted": 28493, + "attracting": 31909, + "attraction": 16807, + "attractions": 22307, + "attractive": 12231, + "attracts": 31024, + "attribu": 24624, + "attributed": 37520, + "attributes": 40763, + "attu": 43173, + "atty": 36705, + "atu": 15191, + "atu": 24295, + "atuesday": 34841, + "atul": 1744, + "atul": 43948, + "atum": 48295, + "atur": 14986, + "aturday": 29027, + "ature": 25305, + "ature": 4490, + "atures": 7358, + "atus": 14795, + "atv": 19598, + "atwood": 45680, + "atwork": 39680, + "atx": 34849, + "atx": 20136, + "aty": 40974, + "aty": 33107, + "atz": 30432, + "au": 627, + "au": 2566, + "aua": 45906, + "aub": 45938, + "auberg": 49382, + "aubre": 25899, + "aubrey": 34110, + "auburn": 42269, + "auburn": 14534, + "auc": 24489, + "auch": 43024, + "auck": 14588, + "auckland": 16072, + "auction": 48160, + "auction": 6462, + "auctioned": 41073, + "auctions": 24876, + "aucus": 47374, + "aud": 16107, + "aud": 19711, + "audi": 5091, + "audi": 10277, + "audible": 33227, + "audience": 6863, + "audiences": 22328, + "audio": 13792, + "audio": 5766, + "audiobook": 26282, + "audit": 12505, + "audit": 17625, + "auditi": 37377, + "audition": 18673, + "auditions": 21134, + "auditor": 38050, + "auditorium": 15063, + "audre": 16075, + "audrey": 18812, + "audu": 27934, + "audubon": 40275, + "auer": 33460, + "auf": 28924, + "aug": 15397, + "aug": 5720, + "auga": 22797, + "augh": 28310, + "augh": 14005, + "augmente": 48356, + "augmented": 32708, + "augu": 2610, + "august": 24353, + "august": 3171, + "augusta": 26144, + "augustine": 27397, + "augustus": 36835, + "auk": 19058, + "aul": 20695, + "aul": 34391, + "ault": 47253, + "ault": 10219, + "aun": 10608, + "aun": 38721, + "aunt": 12685, + "auntie": 23783, + "aunty": 29528, + "aur": 8156, + "aur": 17282, + "aura": 27728, + "aure": 36010, + "aureli": 35980, + "auror": 30067, + "aurora": 13500, + "aus": 10624, + "aus": 7630, + "ausa": 37384, + "ausbiz": 46543, + "ausch": 33926, + "auschwitz": 36523, + "ausopen": 27831, + "ausp": 35039, + "auspicious": 38806, + "auspol": 8241, + "aussi": 19762, + "aussie": 40230, + "aussie": 14424, + "aussies": 35727, + "aust": 26301, + "aust": 25418, + "austen": 29885, + "auster": 25030, + "austerity": 26982, + "austin": 12845, + "austin": 5125, + "austinmahone": 34678, + "austr": 2518, + "australi": 13798, + "australia": 3444, + "australian": 23630, + "australian": 6258, + "australians": 31488, + "austri": 8946, + "austria": 11960, + "austrian": 20638, + "ausv": 35206, + "ausvotes": 34661, + "aut": 12343, + "auth": 2381, + "auth": 38247, + "authent": 18158, + "authentic": 41266, + "authentic": 10369, + "authentication": 39746, + "authenticity": 35734, + "autho": 34552, + "author": 14447, + "author": 4358, + "authored": 37928, + "authori": 19207, + "authorities": 12729, + "authority": 10524, + "authorization": 48854, + "authorized": 28463, + "authors": 10765, + "auti": 8200, + "autism": 36256, + "autism": 11244, + "autisma": 43324, + "autistic": 29360, + "auto": 
3917, + "auto": 5668, + "autobiography": 31509, + "autodesk": 40415, + "autograph": 10657, + "autograph": 13722, + "autographed": 16309, + "autographs": 17376, + "autoimmune": 45509, + "autom": 4114, + "automate": 43203, + "automated": 19022, + "automatic": 12126, + "automatically": 20725, + "automation": 12328, + "automobi": 44813, + "automobile": 25258, + "automotive": 12607, + "auton": 13100, + "autonews": 43975, + "autonom": 17870, + "autonomous": 20722, + "autonomy": 39223, + "autopsy": 44436, + "autos": 31118, + "autoshow": 46788, + "auts": 21140, + "autu": 5445, + "autum": 31783, + "autumn": 28940, + "autumn": 6110, + "autumnal": 35481, + "aux": 18154, + "aux": 8909, + "auxiliary": 37778, + "av": 722, + "av": 8484, + "ava": 12385, + "avage": 31505, + "avail": 1651, + "avail": 16686, + "availability": 17551, + "available": 1685, + "aval": 18012, + "avalan": 23970, + "avalanche": 25815, + "avalley": 45082, + "avalon": 30436, + "avan": 27971, + "avan": 33351, + "avant": 24305, + "avar": 33423, + "avatar": 18219, + "ave": 10062, + "ave": 4860, + "avec": 25828, + "aved": 47918, + "avel": 46817, + "avel": 48088, + "aven": 5963, + "aven": 32971, + "aveng": 21935, + "avenger": 24799, + "avengers": 39413, + "avengers": 12016, + "avengersendgame": 49342, + "avent": 22700, + "avenue": 7042, + "aver": 8788, + "aver": 11403, + "average": 6254, + "averaged": 37310, + "averages": 48982, + "averaging": 35266, + "avery": 20313, + "aves": 14023, + "avfc": 21304, + "avg": 19452, + "avgeek": 11114, + "avi": 3324, + "avi": 11297, + "avia": 38710, + "avian": 24115, + "aviation": 27717, + "aviation": 7617, + "aviator": 38921, + "aviators": 48011, + "avici": 46192, + "avicii": 49158, + "avid": 19118, + "avier": 14598, + "avila": 45339, + "aville": 40689, + "avin": 46204, + "avis": 45163, + "avis": 19765, + "aviv": 22130, + "aviva": 47122, + "aviz": 27607, + "avl": 44749, + "avo": 4496, + "avo": 32400, + "avoc": 12291, + "avocado": 14135, + "avocados": 48911, + "avoi": 16797, + "avoid": 30448, + "avoid": 5983, + "avoidance": 47983, + "avoided": 32103, + "avoiding": 22086, + "avoids": 48220, + "avon": 22790, + "avon": 17348, + "avril": 37763, + "avs": 31896, + "avut": 44472, + "avy": 29973, + "aw": 808, + "aw": 5557, + "awa": 4820, + "awa": 6872, + "await": 20769, + "awaited": 20092, + "awaiting": 14872, + "awaits": 15635, + "awak": 9776, + "awak": 41387, + "awake": 14695, + "awaken": 35412, + "awakening": 17017, + "awakens": 23191, + "awal": 42447, + "awal": 35090, + "awan": 48869, + "awan": 20420, + "awar": 5745, + "award": 36310, + "award": 2047, + "awarded": 7368, + "awarding": 37089, + "awards": 34528, + "awards": 2320, + "aware": 4427, + "aware": 7196, + "awareness": 19217, + "awareness": 4823, + "awarenessmonth": 34278, + "awarenessweek": 35294, + "away": 21088, + "away": 1520, + "aways": 12782, + "awaz": 18586, + "awd": 34846, + "awe": 1693, + "awe": 14106, + "aweather": 42142, + "aweather": 28681, + "awec": 38916, + "aweed": 29724, + "awesom": 16727, + "awesome": 30390, + "awesome": 1848, + "awesomeness": 22430, + "awful": 13617, + "awg": 46350, + "awgs": 35275, + "awh": 39566, + "awhile": 19171, + "awi": 15167, + "awil": 47271, + "awilliams": 42163, + "awk": 8888, + "awk": 40943, + "awkward": 42337, + "awkward": 10304, + "awn": 46222, + "awp": 43300, + "aws": 19658, + "awsome": 47196, + "awson": 36286, + "aww": 11568, + "awww": 15634, + "awwww": 26460, + "awx": 28385, + "ax": 3165, + "ax": 9203, + "axe": 19861, + "axel": 47889, + "axel": 32131, + "axes": 45970, + "axi": 30672, + "axial": 46550, + 
"axis": 19614, + "axle": 39003, + "axx": 47411, + "ay": 658, + "ay": 551, + "aya": 5917, + "ayala": 39827, + "ayama": 41194, + "ayan": 37781, + "ayan": 16269, + "ayana": 37400, + "ayas": 40904, + "ayat": 44902, + "ayat": 35720, + "aye": 21661, + "aye": 12446, + "ayer": 24852, + "ayers": 42783, + "ayesha": 46570, + "ayi": 33025, + "ayles": 44706, + "ayne": 35669, + "ayo": 21929, + "ayo": 18708, + "ayr": 23002, + "ayr": 36473, + "ayrshire": 32687, + "ays": 785, + "ayu": 40769, + "ayurve": 27185, + "ayurveda": 38986, + "ayush": 44831, + "ayy": 32514, + "ayyy": 41052, + "az": 854, + "az": 5468, + "aza": 22883, + "azad": 37838, + "azalea": 34087, + "azam": 34727, + "azar": 27911, + "azcardinals": 48846, + "aze": 41157, + "aze": 28485, + "azer": 19169, + "azerbai": 20649, + "azerbaijan": 23888, + "azhar": 47019, + "azi": 23914, + "azi": 18452, + "azine": 29140, + "azione": 48335, + "aziz": 41205, + "aziz": 29630, + "azo": 41227, + "azon": 36854, + "azores": 42826, + "azte": 33270, + "aztec": 34749, + "aztecs": 49387, + "azu": 27701, + "azu": 46963, + "azul": 39807, + "azure": 18514, + "azwx": 30262, + "azy": 24783, + "azz": 9817, + "azz": 26453, + "azza": 22255, + "azzi": 18758, + "azzle": 39974, + "azzo": 26779, + "azzur": 37055, + "azzy": 44534, + "añ": 23716, + "años": 41634, + "b": 65, + "b": 321, + "ba": 932, + "ba": 1792, + "baa": 33004, + "baahu": 34145, + "baahubali": 38663, + "bab": 1202, + "bab": 19039, + "baba": 12631, + "babe": 31177, + "babe": 7716, + "babes": 14253, + "babies": 6635, + "babs": 36217, + "babu": 21623, + "baby": 7268, + "baby": 1794, + "babygirl": 39554, + "babylon": 31928, + "babymetal": 45013, + "babys": 22266, + "babysitting": 34186, + "bac": 2791, + "bac": 25867, + "bacca": 40708, + "bach": 11773, + "bach": 8758, + "bachchan": 17690, + "bachel": 11283, + "bachelor": 45508, + "bachelor": 16766, + "bachelore": 26009, + "bachelorette": 29093, + "bacher": 49211, + "back": 1663, + "back": 893, + "backbone": 35635, + "backdrop": 20802, + "backed": 12721, + "backer": 22183, + "backers": 32934, + "background": 5994, + "backgrounds": 28215, + "backing": 14935, + "backlash": 31519, + "backpack": 14894, + "backpacking": 29524, + "backpacks": 37063, + "backs": 7562, + "backseat": 48812, + "backstage": 9236, + "backstreet": 46337, + "backthe": 26127, + "backto": 18703, + "backtoschool": 28730, + "backtothe": 43059, + "backup": 14415, + "backward": 37964, + "backwards": 21283, + "backyard": 12608, + "bacon": 48666, + "bacon": 7104, + "bacter": 11814, + "bacteria": 16556, + "bacterial": 26101, + "bad": 2564, + "bad": 2103, + "bada": 37475, + "badan": 39149, + "badass": 11616, + "baddest": 38112, + "baden": 36690, + "bader": 42254, + "badge": 11301, + "badger": 32686, + "badger": 22363, + "badgers": 22521, + "badges": 20084, + "badlands": 43192, + "badly": 13684, + "badminton": 21412, + "badoo": 33192, + "bados": 25755, + "bae": 32834, + "bae": 6855, + "baek": 18557, + "baek": 32702, + "baekhyun": 21572, + "baes": 46332, + "baf": 13616, + "baff": 35693, + "bafta": 29199, + "bag": 3408, + "bag": 3365, + "bage": 9698, + "bagel": 28777, + "bagels": 37489, + "baggage": 31402, + "bagged": 34047, + "bagh": 21659, + "bagh": 37271, + "baghdad": 30763, + "bago": 25105, + "bags": 6136, + "bagu": 27749, + "baguette": 45334, + "bah": 8372, + "bah": 16685, + "baha": 29592, + "baham": 43718, + "bahamas": 21224, + "bahan": 28704, + "bahn": 33452, + "bahrain": 12503, + "bai": 6232, + "bai": 23339, + "bail": 22933, + "bail": 16986, + "bailey": 27535, + "bailey": 10180, + "bain": 40784, + "bain": 
21593, + "bair": 29059, + "baird": 40474, + "bait": 18010, + "baj": 20713, + "baja": 40418, + "baja": 28374, + "bajo": 32619, + "bak": 4059, + "bak": 23742, + "bakar": 41414, + "bake": 20736, + "bake": 11878, + "baked": 10364, + "baker": 27303, + "baker": 7743, + "bakers": 35293, + "bakers": 40231, + "bakersfield": 40149, + "bakery": 13377, + "bakes": 43057, + "bakhta": 44912, + "bakhtawar": 46937, + "bakhtawarbz": 47118, + "baking": 11467, + "baku": 46417, + "baku": 31852, + "bal": 1398, + "bal": 2282, + "bala": 20291, + "balaji": 48694, + "balance": 42894, + "balance": 6827, + "balanced": 15273, + "balances": 37733, + "balancing": 23541, + "balboa": 45098, + "balcony": 16169, + "bald": 11153, + "bald": 14875, + "baldhead": 29191, + "baldwin": 16242, + "bale": 48573, + "bale": 18873, + "bales": 42879, + "bali": 16432, + "bali": 10900, + "balkan": 48499, + "balkans": 42987, + "ball": 3807, + "ball": 1069, + "balla": 42246, + "ballad": 33472, + "ballarat": 46645, + "ballard": 31750, + "baller": 49194, + "baller": 25655, + "ballerina": 34962, + "ballers": 34173, + "ballet": 10703, + "balli": 29406, + "ballin": 47444, + "ballin": 33057, + "balling": 47588, + "ballis": 46675, + "ballistic": 36667, + "ballo": 8871, + "ballon": 36469, + "balloon": 13634, + "balloons": 18130, + "ballot": 14185, + "ballots": 35051, + "ballpark": 26080, + "ballroom": 15493, + "balls": 6927, + "bally": 17275, + "bally": 29451, + "balm": 24962, + "balmain": 45929, + "balo": 12395, + "baloch": 23173, + "balochistan": 21918, + "balot": 44615, + "balotelli": 45721, + "bals": 44154, + "balsam": 29121, + "balsamic": 32654, + "balt": 24441, + "balti": 8400, + "baltic": 23817, + "baltimore": 38502, + "baltimore": 9582, + "balu": 38093, + "bam": 6383, + "bam": 12686, + "bama": 20021, + "bambam": 34538, + "bambi": 46596, + "bamboo": 49322, + "bamboo": 16748, + "ban": 1159, + "ban": 2777, + "bana": 18428, + "banan": 38410, + "banana": 8922, + "bananas": 19121, + "banc": 39252, + "band": 4613, + "band": 1963, + "banda": 31865, + "bandai": 42054, + "bandana": 39265, + "bandcamp": 32229, + "banded": 37804, + "bandic": 44400, + "bandit": 27639, + "bandits": 33940, + "bandra": 41393, + "bands": 7858, + "bandung": 29512, + "bandwagon": 36432, + "bandwidth": 48859, + "bane": 9597, + "banerjee": 48102, + "banff": 29565, + "bang": 3524, + "bang": 6907, + "bangalore": 14697, + "banger": 24872, + "bangers": 38311, + "banging": 33033, + "bangkok": 12351, + "bangla": 10339, + "bangla": 45928, + "bangladesh": 11245, + "bangle": 37634, + "bangor": 31190, + "bangs": 27992, + "bangtan": 39131, + "bani": 19732, + "banjo": 27014, + "bank": 7061, + "bank": 2723, + "banker": 27316, + "bankers": 30599, + "bankholiday": 48868, + "banking": 9566, + "bankno": 49201, + "bankof": 39120, + "bankrup": 21904, + "bankrupt": 23077, + "bankrupt": 37288, + "bankruptcy": 23978, + "banks": 6367, + "banksy": 33350, + "bann": 5304, + "banned": 12012, + "banner": 9185, + "banners": 23145, + "banning": 26246, + "bannon": 29710, + "bano": 42947, + "banquet": 14254, + "bans": 15146, + "bant": 23301, + "bant": 46657, + "banter": 25535, + "bao": 39487, + "bao": 20408, + "bap": 7415, + "bap": 23754, + "bapti": 15477, + "baptism": 36765, + "baptist": 13274, + "baptiste": 45770, + "baptized": 45400, + "bar": 1040, + "bar": 2411, + "bara": 19345, + "barack": 18670, + "barack": 22481, + "barackobama": 18885, + "barak": 47419, + "barak": 16260, + "barang": 38446, + "barb": 24173, + "barb": 20913, + "barbados": 26992, + "barbar": 7906, + "barbara": 10937, + "barbarian": 42530, 
+ "barbe": 18372, + "barbecue": 23501, + "barber": 19517, + "barber": 12296, + "barbershop": 37707, + "barbican": 47668, + "barbie": 16923, + "barca": 22942, + "barcel": 6134, + "barcelon": 47820, + "barcelona": 6412, + "barclay": 48877, + "barclay": 45276, + "barclays": 29538, + "bard": 39812, + "bard": 17514, + "bare": 16023, + "bare": 14318, + "barefoot": 30327, + "barely": 12684, + "bargain": 15076, + "bargaining": 41282, + "bargains": 34126, + "barge": 28272, + "bari": 21428, + "bari": 28016, + "barista": 31078, + "barit": 46300, + "bark": 32333, + "bark": 16560, + "barker": 20618, + "barking": 32676, + "barkley": 30266, + "barley": 22607, + "barlow": 25483, + "barn": 10490, + "barn": 10942, + "barnab": 43272, + "barnard": 44332, + "barne": 42527, + "barnes": 13102, + "barnet": 41943, + "barnett": 27650, + "barney": 24563, + "barns": 43759, + "barnsley": 37109, + "barnsley": 32153, + "baro": 17422, + "baro": 30817, + "baron": 48371, + "baron": 19349, + "baroness": 45056, + "barons": 45596, + "baroque": 25065, + "barr": 39473, + "barr": 22492, + "barra": 28442, + "barra": 33542, + "barrabest": 41376, + "barrac": 40835, + "barracks": 35822, + "barre": 13840, + "barre": 38257, + "barred": 33261, + "barrel": 11703, + "barrels": 22059, + "barren": 46743, + "barrett": 18701, + "barri": 8660, + "barric": 29189, + "barrie": 27090, + "barrier": 15706, + "barriers": 16321, + "barrington": 48954, + "barron": 34881, + "barrow": 42568, + "barrow": 24983, + "barry": 18028, + "barry": 8461, + "barrymore": 49310, + "bars": 8616, + "barstool": 44826, + "bart": 14838, + "bart": 12870, + "bartender": 33498, + "barthol": 48989, + "bartlett": 37130, + "bartol": 38209, + "barton": 48853, + "barton": 20345, + "baru": 16356, + "barun": 38278, + "barunsob": 41398, + "barça": 32788, + "bas": 1244, + "bas": 11420, + "basa": 26142, + "base": 2776, + "base": 4579, + "baseball": 23479, + "baseball": 3470, + "based": 35196, + "based": 2812, + "basel": 42803, + "basel": 20903, + "baseline": 40648, + "baseman": 45910, + "basement": 14792, + "bases": 20496, + "bash": 20462, + "bash": 10972, + "bashing": 37545, + "bashir": 42799, + "basic": 40452, + "basic": 7696, + "basically": 9125, + "basics": 15825, + "basil": 19225, + "basil": 14936, + "basilica": 27879, + "basin": 16117, + "basing": 47321, + "basis": 12278, + "baske": 3713, + "basket": 10338, + "basketball": 40023, + "basketball": 3835, + "baskets": 27787, + "basking": 39769, + "basque": 37175, + "bass": 22831, + "bass": 5992, + "bassett": 45992, + "bassist": 26496, + "bast": 28092, + "basti": 8559, + "bastille": 41874, + "bat": 2121, + "bat": 6575, + "bata": 39277, + "batb": 33962, + "batch": 9413, + "bate": 25034, + "bate": 28277, + "bateman": 41635, + "bates": 21727, + "batgirl": 46460, + "bath": 6064, + "bath": 5713, + "bathing": 20144, + "bathro": 21201, + "bathroom": 8470, + "bathrooms": 26434, + "baths": 19442, + "bathtub": 39942, + "bathurst": 36365, + "bati": 23362, + "bati": 37589, + "batman": 27811, + "batman": 7223, + "baton": 24331, + "bats": 14984, + "batsman": 35432, + "batt": 2407, + "batt": 48595, + "battalion": 20820, + "batter": 12654, + "batter": 31855, + "battered": 34375, + "batteries": 16666, + "battersea": 35839, + "battery": 7870, + "batting": 17401, + "battle": 7344, + "battle": 3528, + "battled": 37837, + "battlefield": 16055, + "battlefront": 42214, + "battleof": 47560, + "battles": 14213, + "battleship": 35165, + "battling": 17268, + "bau": 6055, + "bau": 34840, + "bauer": 22903, + "baugh": 41301, + "baum": 19840, + "bautista": 31881, 
+ "bav": 21075, + "bavaria": 39977, + "bavarian": 44458, + "baw": 19808, + "bax": 21216, + "baxter": 26168, + "bay": 3631, + "bay": 2174, + "baya": 31573, + "bayan": 43895, + "bayarea": 28260, + "bayer": 48548, + "bayer": 29183, + "bayern": 14666, + "baylor": 21721, + "bayou": 33955, + "bays": 40156, + "baz": 10430, + "baz": 25268, + "bazaar": 20070, + "bazar": 49298, + "bb": 1174, + "bb": 3529, + "bba": 27762, + "bball": 15664, + "bbb": 33535, + "bbc": 5123, + "bbc": 5188, + "bbcc": 39052, + "bbce": 33818, + "bbcnews": 29370, + "bbcone": 28259, + "bbcqt": 37343, + "bbcr": 35802, + "bbcra": 17115, + "bbcradi": 49213, + "bbcradio": 22876, + "bbcsport": 49321, + "bbcspringwatch": 37358, + "bbctwo": 40395, + "bbcworld": 47340, + "bbe": 37559, + "bbed": 9077, + "bber": 7933, + "bbers": 36494, + "bbhutto": 28085, + "bbhuttozardari": 28135, + "bbi": 37047, + "bbin": 38553, + "bbing": 9787, + "bbins": 42504, + "bbl": 21961, + "bble": 26570, + "bble": 5924, + "bbled": 37626, + "bbles": 18093, + "bblo": 21231, + "bbloggers": 26614, + "bbly": 43031, + "bbm": 25382, + "bbmas": 22145, + "bbn": 28427, + "bbnaija": 20984, + "bbo": 21892, + "bbq": 41270, + "bbq": 6726, + "bbs": 10002, + "bbuk": 45978, + "bby": 11166, + "bby": 3810, + "bc": 3116, + "bc": 2162, + "bcc": 41509, + "bcci": 36138, + "bce": 36510, + "bcfc": 34359, + "bch": 36684, + "bcn": 25766, + "bcoz": 46373, + "bcpoli": 24389, + "bcs": 24909, + "bcu": 28299, + "bd": 24358, + "bd": 11165, + "bday": 33022, + "bday": 5781, + "bdg": 48418, + "bds": 26732, + "be": 571, + "be": 655, + "bea": 21886, + "bea": 20925, + "beach": 6068, + "beach": 2117, + "beaches": 12183, + "beachlife": 43824, + "beacon": 36883, + "beacon": 18858, + "beacons": 39395, + "bead": 31621, + "bead": 23557, + "beaded": 26661, + "beads": 14099, + "beagle": 30044, + "beak": 36498, + "beal": 45769, + "beale": 39717, + "beam": 35339, + "beam": 13663, + "beams": 23993, + "bean": 16471, + "bean": 5328, + "beanie": 21534, + "beans": 8302, + "bear": 6375, + "bear": 4298, + "bearable": 38608, + "bearcats": 33242, + "beard": 26157, + "beard": 9052, + "bearded": 28459, + "beardown": 43687, + "beards": 33020, + "bearer": 30686, + "bearers": 47986, + "bearing": 18370, + "bearings": 42083, + "bearish": 34829, + "bears": 6182, + "beasley": 43349, + "beast": 20847, + "beast": 6957, + "beastmode": 43076, + "beasts": 21771, + "beat": 3774, + "beat": 3018, + "beaten": 10864, + "beater": 41974, + "beati": 44386, + "beating": 10078, + "beatles": 11961, + "beatport": 31421, + "beatrice": 36922, + "beats": 6289, + "beatthe": 40550, + "beatty": 39903, + "beatz": 33363, + "beau": 1016, + "beau": 14298, + "beaufort": 45423, + "beaumont": 32857, + "beaut": 24559, + "beauti": 1154, + "beauties": 14874, + "beautiful": 13662, + "beautiful": 1215, + "beautifully": 10627, + "beauty": 12881, + "beauty": 2488, + "beav": 23260, + "beaver": 26432, + "beaver": 22874, + "beavers": 34513, + "beavs": 43909, + "bebe": 23331, + "bec": 6899, + "bec": 10773, + "became": 5464, + "because": 32714, + "because": 1631, + "becca": 27088, + "bech": 44055, + "beck": 8256, + "beck": 10396, + "becker": 26918, + "beckett": 27249, + "beckham": 18764, + "becky": 32406, + "becky": 18921, + "become": 2989, + "becomes": 6766, + "becoming": 6208, + "bed": 4152, + "bed": 2722, + "bedding": 31761, + "bedford": 20779, + "bedi": 39181, + "bedro": 18415, + "bedroom": 8411, + "bedrooms": 23996, + "beds": 13914, + "bedside": 47473, + "bedtime": 22115, + "bee": 6097, + "bee": 5028, + "beech": 32733, + "beech": 27596, + "beef": 21703, + 
"beef": 6529, + "beek": 37915, + "been": 33986, + "been": 1025, + "beep": 33432, + "beer": 8885, + "beer": 2544, + "beers": 10907, + "bees": 36249, + "bees": 9100, + "beet": 12582, + "beet": 28621, + "beethoven": 23656, + "beetle": 16534, + "beetles": 36317, + "beetro": 29251, + "beetroot": 31638, + "beets": 36087, + "before": 20898, + "before": 1348, + "beg": 2219, + "beg": 22401, + "began": 8636, + "begg": 36769, + "begging": 25371, + "begin": 19197, + "begin": 4947, + "beginner": 24351, + "beginners": 21930, + "beginning": 5791, + "beginnings": 22581, + "begins": 4635, + "begs": 43531, + "begun": 10514, + "beh": 21971, + "beh": 41612, + "beha": 5737, + "behalf": 11470, + "behave": 28825, + "behaved": 41617, + "behavi": 6149, + "behaving": 40745, + "behavior": 10461, + "behavioral": 25135, + "behaviors": 37741, + "behaviour": 14655, + "behavioural": 46019, + "behe": 42329, + "behin": 2335, + "behind": 2403, + "behindthe": 21104, + "behindthescenes": 26253, + "behold": 15929, + "bei": 38991, + "bei": 23227, + "beige": 26677, + "beij": 11547, + "beijing": 11796, + "bein": 39117, + "bein": 24168, + "being": 13481, + "being": 1265, + "beings": 17998, + "beingsalmankhan": 19637, + "beir": 20176, + "beirut": 22352, + "beit": 26963, + "bek": 46846, + "bek": 26135, + "bekind": 46691, + "bel": 1308, + "bel": 3543, + "bela": 30555, + "belarus": 30849, + "belated": 20256, + "belfast": 35100, + "belfast": 10015, + "belgi": 7001, + "belgian": 15008, + "belgium": 10239, + "belgrade": 30502, + "beli": 1859, + "beli": 45842, + "belichick": 46132, + "belie": 20854, + "beliebers": 27714, + "belief": 14802, + "beliefs": 20575, + "believ": 4972, + "believe": 15819, + "believe": 2649, + "believed": 13380, + "believein": 24294, + "believeinfilm": 37375, + "believer": 26057, + "believers": 28434, + "believes": 12017, + "believing": 19551, + "belinda": 44415, + "belize": 27990, + "bell": 5417, + "bell": 3718, + "bella": 18282, + "bella": 10418, + "bellamy": 34461, + "bellator": 31985, + "belle": 13587, + "belle": 11496, + "belles": 40678, + "bellevue": 32715, + "belli": 43335, + "bellletstalk": 42695, + "bello": 21954, + "bells": 12811, + "bellum": 35493, + "belly": 25901, + "belly": 10404, + "belmont": 25612, + "belo": 8379, + "belo": 41649, + "belong": 16453, + "belong": 13596, + "belonged": 39893, + "belonging": 28193, + "belongs": 14395, + "beloved": 9363, + "below": 3788, + "bels": 43127, + "belt": 36416, + "belt": 7373, + "belts": 21888, + "belvedere": 48003, + "ben": 1465, + "ben": 3518, + "bena": 46249, + "bench": 17770, + "bench": 8771, + "benches": 36349, + "benchmark": 31775, + "bend": 22100, + "bend": 13332, + "bender": 22551, + "bendigo": 48197, + "bending": 33897, + "bene": 12091, + "bene": 47151, + "beneath": 16850, + "bened": 13216, + "benedic": 24402, + "benedict": 47896, + "benedict": 18027, + "benef": 3260, + "benefici": 38593, + "beneficial": 24660, + "beneficiaries": 42160, + "benefit": 6399, + "benefited": 48266, + "benefiting": 29474, + "benefits": 5465, + "benefitting": 47222, + "benevol": 47060, + "benfica": 33873, + "beng": 6962, + "bengal": 17404, + "bengal": 16374, + "bengali": 33774, + "bengals": 23737, + "bengaluru": 21707, + "benghazi": 25967, + "benin": 40296, + "benitez": 46711, + "benjam": 10550, + "benjamin": 38647, + "benjamin": 12131, + "benji": 43548, + "benn": 39097, + "bennet": 48536, + "bennett": 12186, + "benny": 42369, + "benny": 20595, + "beno": 35268, + "benoit": 44373, + "benson": 19578, + "bent": 9809, + "bent": 18369, + "bentley": 16859, + "benton": 30812, + 
"benz": 27937, + "benz": 13470, + "ber": 867, + "ber": 1516, + "bera": 32802, + "bere": 17458, + "bered": 9193, + "beren": 33654, + "beret": 41658, + "berg": 12022, + "berg": 3294, + "bergen": 22918, + "berger": 35933, + "berger": 13873, + "bergh": 35120, + "bergman": 42597, + "bergs": 43592, + "berk": 15633, + "berke": 14639, + "berkeley": 46049, + "berkeley": 16667, + "berkshire": 27300, + "berlin": 23532, + "berlin": 5891, + "berman": 21514, + "bermu": 21032, + "bermuda": 24644, + "bern": 9195, + "bern": 18382, + "bernade": 46242, + "bernar": 11962, + "bernard": 14579, + "bernardino": 35328, + "bernardo": 27137, + "bernardo": 28696, + "bernardokath": 29081, + "bernat": 40578, + "berni": 18798, + "bernie": 40093, + "bernie": 10503, + "berniesanders": 23745, + "bernstein": 33936, + "berra": 15089, + "berries": 8319, + "berry": 15334, + "berry": 3488, + "bers": 6408, + "berser": 39037, + "bert": 17340, + "bert": 2358, + "berta": 45187, + "berth": 28317, + "bertie": 47182, + "berto": 34073, + "bertr": 36962, + "bertrand": 41594, + "berts": 30205, + "berty": 35973, + "berwick": 40407, + "bery": 11411, + "bes": 26911, + "bes": 3635, + "beside": 13519, + "besides": 17596, + "bespoke": 15612, + "bess": 43791, + "best": 3419, + "best": 949, + "bestbuy": 29749, + "bestest": 31199, + "bestfan": 23880, + "bestfanarmy": 24590, + "bestfriend": 29832, + "bestfriend": 11856, + "bestfriends": 23555, + "besti": 35210, + "bestie": 17188, + "besties": 27346, + "besto": 28615, + "bestof": 27892, + "bestof": 39533, + "bestseller": 25841, + "bestselling": 28632, + "bet": 1051, + "bet": 4430, + "beta": 43188, + "beta": 9505, + "betes": 10255, + "beth": 9993, + "beth": 4892, + "bethan": 18781, + "bethany": 39130, + "bethany": 27952, + "bethe": 12624, + "bethel": 33410, + "bethesda": 32527, + "bethle": 30760, + "bethlehem": 31827, + "betis": 45590, + "beto": 33721, + "betra": 18436, + "betrayal": 33171, + "betrayed": 35692, + "bets": 17107, + "betsy": 28946, + "bett": 17715, + "bett": 20489, + "betta": 36387, + "bette": 35855, + "better": 10320, + "better": 1539, + "bettertogether": 47392, + "betting": 14319, + "betts": 38637, + "betty": 36175, + "betty": 14350, + "between": 1957, + "beu": 38660, + "bev": 40324, + "bev": 30968, + "bever": 9924, + "beverage": 18694, + "beverages": 28521, + "beverley": 39165, + "beverly": 30906, + "beverly": 16728, + "beverlyhills": 45363, + "beware": 14532, + "bewithyou": 36787, + "bex": 18676, + "bex": 24748, + "bexhill": 49200, + "bey": 3234, + "bey": 6767, + "beyon": 11447, + "beyonce": 16632, + "beyoncé": 19219, + "beyond": 22246, + "beyond": 4432, + "bez": 28592, + "bez": 46764, + "bezos": 45000, + "bf": 19858, + "bf": 7990, + "bfc": 37183, + "bff": 11984, + "bffs": 31462, + "bfi": 34244, + "bg": 16674, + "bg": 11295, + "bgc": 47598, + "bgs": 47963, + "bgt": 40665, + "bh": 9930, + "bh": 13603, + "bha": 6144, + "bha": 33068, + "bhafc": 30779, + "bhagat": 49136, + "bhai": 48370, + "bhai": 20508, + "bhak": 34501, + "bham": 31874, + "bham": 23491, + "bhan": 27356, + "bhand": 48679, + "bhar": 9108, + "bharat": 27454, + "bharat": 17430, + "bharti": 46803, + "bhat": 23784, + "bhatt": 36143, + "bhav": 44950, + "bhi": 28943, + "bhi": 21955, + "bhk": 45070, + "bhm": 38741, + "bho": 19721, + "bhopal": 44573, + "bhp": 29776, + "bhs": 29195, + "bhu": 9172, + "bhuban": 38729, + "bhubanes": 41213, + "bhubaneswar": 45888, + "bhushan": 40884, + "bhutan": 32391, + "bhutto": 30153, + "bi": 717, + "bi": 3035, + "bia": 3841, + "biaf": 26961, + "biafra": 36355, + "bian": 19531, + "bian": 9027, + 
"bianca": 25854, + "bianchi": 45720, + "bians": 28141, + "bias": 11268, + "biased": 22178, + "bib": 44607, + "bib": 21022, + "bibi": 31182, + "bibl": 20912, + "bible": 26738, + "bible": 7583, + "bibli": 23465, + "biblical": 22841, + "biblio": 49131, + "bic": 5960, + "bic": 10675, + "bice": 35589, + "biceps": 46735, + "bick": 27238, + "bicy": 9247, + "bicycle": 11652, + "bicycles": 31326, + "bid": 21035, + "bid": 5553, + "bidding": 23237, + "bide": 45178, + "biden": 19451, + "bids": 16148, + "bie": 5561, + "bie": 4173, + "bieber": 48725, + "bieber": 7535, + "bien": 19176, + "bien": 25742, + "biennale": 33776, + "biennial": 36609, + "bier": 27226, + "bier": 23508, + "bies": 7867, + "big": 1915, + "big": 1205, + "bigbaldhead": 30325, + "bigbang": 41680, + "bigbang": 23734, + "bigdata": 9440, + "bige": 37762, + "bigfoot": 37095, + "bigg": 15312, + "bigg": 35399, + "biggboss": 27056, + "bigger": 6806, + "biggest": 19483, + "biggest": 3505, + "biggie": 28392, + "biggs": 46507, + "bigh": 18106, + "bighit": 35508, + "bigo": 14278, + "bigolive": 20735, + "bigotry": 37269, + "bigre": 36330, + "bih": 33471, + "bihar": 22849, + "bij": 42478, + "bik": 30306, + "bike": 11686, + "bike": 3701, + "biker": 36100, + "biker": 23449, + "bikers": 29468, + "bikes": 9227, + "bikin": 12638, + "biking": 19157, + "bikini": 14531, + "bil": 3092, + "bil": 20506, + "bilateral": 25599, + "bilbao": 34802, + "bild": 35512, + "bile": 25943, + "bilingual": 29623, + "bilities": 13582, + "bility": 4694, + "bill": 4444, + "bill": 2886, + "billboard": 10856, + "billboards": 34741, + "billed": 37558, + "billi": 7693, + "billie": 23990, + "billing": 31797, + "billings": 43615, + "billion": 14520, + "billion": 5729, + "billionaire": 19475, + "billionaires": 41590, + "billions": 20742, + "bills": 9810, + "billsmafia": 48845, + "billy": 15626, + "billy": 6814, + "bilt": 44770, + "bilt": 26654, + "bim": 46737, + "bim": 24775, + "bin": 4849, + "bin": 5346, + "binance": 43520, + "binary": 23497, + "bind": 44513, + "binder": 30541, + "binding": 21287, + "bine": 34848, + "bing": 24818, + "bing": 5665, + "binge": 22600, + "bingham": 43785, + "bingham": 47296, + "bingo": 18418, + "bino": 29172, + "bino": 24313, + "bins": 26934, + "bint": 43647, + "bio": 2830, + "bio": 5162, + "biode": 43502, + "biodegradable": 47740, + "biodiversity": 17428, + "biof": 45158, + "biographical": 49232, + "biography": 15423, + "biological": 18821, + "biologist": 35149, + "biology": 9796, + "biom": 13010, + "biomar": 44549, + "biomass": 36746, + "biome": 26218, + "biomed": 29280, + "biomedical": 33117, + "bionic": 46201, + "biop": 15009, + "biopic": 27942, + "bios": 48505, + "biotech": 22514, + "biotechnology": 40375, + "biotic": 33773, + "biotics": 41371, + "bious": 31845, + "bipartisan": 32266, + "bipolar": 37097, + "bique": 27809, + "bir": 921, + "bir": 16284, + "birch": 31569, + "birch": 22907, + "bird": 6908, + "bird": 3329, + "birdie": 29612, + "birdies": 45618, + "birding": 15851, + "birdman": 41915, + "birdphotography": 47999, + "birds": 41951, + "birds": 4337, + "birdwatching": 33497, + "birk": 48289, + "birken": 40661, + "birmin": 37482, + "birmingham": 38580, + "birmingham": 7720, + "birth": 1128, + "birth": 5397, + "birthday": 7381, + "birthday": 1166, + "birthdays": 17954, + "birthplace": 31429, + "biryani": 46489, + "bis": 5064, + "bis": 14461, + "biscu": 11532, + "biscuit": 18731, + "biscuits": 18248, + "bisexual": 36829, + "bish": 33690, + "bish": 31461, + "bishop": 20625, + "bishop": 8024, + "bishops": 31579, + "bison": 19741, + "bistro": 
21770, + "bit": 3010, + "bit": 2010, + "bitcoin": 30848, + "bitcoin": 6366, + "bite": 41613, + "biting": 23016, + "bits": 7747, + "bitt": 39251, + "bius": 45525, + "bix": 46579, + "biz": 8212, + "biz": 5431, + "biza": 47013, + "bizar": 14886, + "bizarre": 16965, + "bizhour": 39462, + "bizitalk": 34929, + "bj": 4592, + "bj": 18229, + "bjj": 27437, + "bjor": 26525, + "bjp": 37264, + "bjp": 6178, + "bk": 15099, + "bk": 14083, + "bkk": 36433, + "bl": 833, + "bl": 9467, + "bla": 2205, + "bla": 19630, + "blac": 21008, + "black": 2025, + "black": 1449, + "blackand": 12809, + "blackandwhite": 23688, + "blackandwhite": 19506, + "blackandwhitephotography": 27544, + "blackberry": 16470, + "blackbird": 38526, + "blackburn": 23789, + "blackfish": 42193, + "blackfriday": 16445, + "blackgirl": 43591, + "blackhawks": 19203, + "blackhistory": 46982, + "blackhistorymonth": 20135, + "blacklist": 30295, + "blacklivesmatter": 23467, + "blackmail": 47295, + "blackops": 43519, + "blackout": 21733, + "blackpanther": 36592, + "blackpink": 20339, + "blackpool": 21031, + "blacks": 16351, + "blackwell": 42642, + "blad": 36635, + "bladder": 33593, + "blade": 10264, + "blades": 16893, + "blah": 29212, + "blaine": 32457, + "blair": 31824, + "blair": 14749, + "blake": 20229, + "blake": 9579, + "blame": 10695, + "blamed": 32906, + "blames": 27841, + "blaming": 29287, + "blan": 4609, + "blanc": 30936, + "blanc": 13301, + "blanca": 40670, + "blanchard": 40177, + "blanche": 34875, + "blanchett": 49378, + "blanco": 26801, + "bland": 44372, + "bland": 30799, + "blank": 15134, + "blanket": 12878, + "blankets": 24042, + "blanks": 48599, + "blasio": 35553, + "blasphe": 36622, + "blast": 46349, + "blast": 5964, + "blasted": 38976, + "blaster": 36341, + "blasting": 26178, + "blasts": 23067, + "blat": 22048, + "blatant": 41391, + "blatt": 39138, + "blau": 45307, + "blaz": 43413, + "blaze": 15497, + "blazer": 17606, + "blazers": 16984, + "blazing": 25267, + "bldg": 22981, + "ble": 1447, + "ble": 1059, + "bleach": 27034, + "bleak": 40355, + "bled": 12006, + "bleed": 23027, + "bleed": 24791, + "bleedblue": 39160, + "bleeding": 20311, + "bleeds": 47339, + "blen": 25651, + "blend": 10780, + "blended": 25813, + "blender": 25066, + "blending": 34307, + "blends": 28572, + "bler": 31305, + "bler": 11979, + "blers": 26930, + "bles": 5763, + "bless": 9640, + "bless": 5387, + "blessed": 4411, + "blessing": 10729, + "blessings": 11185, + "bleu": 30114, + "blew": 18176, + "bley": 43176, + "bli": 1450, + "bli": 28051, + "blin": 9678, + "blin": 5406, + "blind": 17248, + "blind": 8351, + "blinded": 49149, + "blindness": 38812, + "blinds": 32449, + "bling": 39764, + "bling": 7097, + "blink": 18976, + "bliss": 28531, + "bliss": 12893, + "blissful": 42145, + "blit": 39327, + "blitz": 42151, + "blitz": 17548, + "blizz": 13075, + "blizzard": 16111, + "blk": 42950, + "blk": 22872, + "blm": 30957, + "bln": 47348, + "blo": 1204, + "blo": 25505, + "blob": 49312, + "bloc": 30961, + "block": 4638, + "block": 4593, + "blockade": 33489, + "blockbuster": 19939, + "blockchain": 6653, + "blocked": 9106, + "blocker": 44767, + "blocking": 12652, + "blocks": 10113, + "blog": 16376, + "blog": 2589, + "blogg": 33282, + "blogged": 41380, + "blogger": 21352, + "blogger": 7806, + "bloggerrt": 48898, + "bloggers": 11627, + "blogging": 18090, + "blogpost": 41842, + "blogs": 16682, + "bloke": 24384, + "blom": 48996, + "blon": 7958, + "blond": 32426, + "blonde": 10711, + "blondes": 45130, + "blondie": 39236, + "bloo": 2373, + "blood": 9231, + "blood": 3590, + "blooded": 41946, 
+ "bloodh": 48480, + "bloods": 39539, + "bloody": 38568, + "bloody": 9468, + "bloom": 7311, + "bloom": 10257, + "bloomberg": 43109, + "bloomberg": 21238, + "bloomfield": 40342, + "blooming": 45175, + "blooming": 19266, + "bloomington": 34731, + "blooms": 21439, + "bloss": 10017, + "blossom": 14472, + "blossoms": 21916, + "blot": 41710, + "blou": 44506, + "blouse": 23525, + "blow": 15230, + "blow": 10211, + "blower": 25832, + "blowing": 12087, + "blown": 11848, + "blowout": 34857, + "blows": 21063, + "blr": 47250, + "bls": 39458, + "blu": 1263, + "blu": 10273, + "blue": 3829, + "blue": 1746, + "bluebells": 47150, + "blueberries": 29551, + "blueberry": 18251, + "bluebird": 40747, + "bluec": 43194, + "bluef": 41174, + "bluegrass": 26241, + "bluejays": 18684, + "blueprint": 30594, + "blues": 17566, + "blues": 5159, + "blueslyrix": 47068, + "bluet": 13469, + "bluetooth": 14052, + "bluewave": 40025, + "bluff": 27232, + "bluffs": 48844, + "blum": 34818, + "blumen": 38714, + "blun": 34472, + "blunt": 19305, + "blur": 12102, + "blur": 27976, + "bluray": 36818, + "blurred": 38013, + "blurry": 21977, + "blush": 22889, + "blvd": 12578, + "bly": 20930, + "bly": 4426, + "bm": 4773, + "bm": 15916, + "bma": 42573, + "bmc": 27807, + "bmi": 40642, + "bmo": 39083, + "bms": 34074, + "bmw": 26637, + "bmw": 7869, + "bmx": 22535, + "bn": 10496, + "bn": 7992, + "bnb": 20010, + "bnha": 49336, + "bnp": 47910, + "bnw": 35903, + "bo": 647, + "bo": 2525, + "boa": 14732, + "boar": 7837, + "boar": 35473, + "board": 10419, + "board": 1972, + "boarded": 43052, + "boarder": 37414, + "boardgame": 47829, + "boardgames": 32646, + "boarding": 10086, + "boardroom": 47937, + "boards": 7963, + "boardwalk": 29043, + "boast": 44467, + "boasts": 30309, + "boat": 12426, + "boat": 4440, + "boath": 45461, + "boating": 21951, + "boats": 10080, + "boatsales": 46244, + "bob": 8444, + "bob": 4423, + "boba": 39948, + "bobb": 16891, + "bobble": 38796, + "bobblehead": 33451, + "bobby": 17847, + "bobby": 7816, + "bobc": 26153, + "bobcat": 37896, + "bobcats": 27568, + "bobo": 38939, + "bobs": 45533, + "boc": 27307, + "boc": 39042, + "boca": 26094, + "bock": 24961, + "bod": 17904, + "bod": 26340, + "boda": 42030, + "bode": 28452, + "bode": 40429, + "bodega": 47350, + "bodied": 36892, + "bodies": 9799, + "bodily": 49119, + "body": 7132, + "body": 1774, + "bodybuilding": 24538, + "bodyguard": 35565, + "boe": 23476, + "boe": 21773, + "boeh": 38002, + "boehner": 44599, + "boeing": 48135, + "boeing": 11857, + "boer": 44889, + "boer": 40768, + "bog": 23426, + "bog": 28318, + "bogo": 35769, + "bogota": 47059, + "bogus": 42907, + "boh": 43238, + "bohe": 40541, + "bohemi": 21552, + "bohemian": 25753, + "boho": 25444, + "boi": 37129, + "boi": 12673, + "boil": 31332, + "boiled": 23886, + "boiler": 28212, + "boiler": 25615, + "boiling": 32019, + "bois": 47742, + "bois": 21640, + "boise": 23304, + "bok": 26671, + "bok": 15289, + "boko": 30929, + "boks": 40216, + "bol": 2860, + "bol": 8413, + "bola": 12840, + "bold": 26975, + "bold": 8911, + "boldand": 48413, + "boldly": 44778, + "boli": 12722, + "bolic": 27343, + "bolivia": 28628, + "bollah": 36336, + "bolly": 25302, + "bollywood": 32448, + "bollywood": 9604, + "bolo": 40236, + "bolog": 22818, + "bologna": 27513, + "bolster": 47304, + "bolt": 13131, + "bolton": 48757, + "bolton": 16598, + "bolts": 26028, + "bom": 3012, + "bom": 19469, + "bomb": 18091, + "bomb": 6331, + "bombar": 25544, + "bombardier": 42700, + "bombay": 48602, + "bombay": 23890, + "bombed": 24542, + "bomber": 15436, + "bombers": 21786, + 
"bombing": 14475, + "bombings": 43236, + "bombs": 14410, + "bombshell": 36340, + "bon": 1871, + "bon": 4216, + "bona": 33342, + "bonanza": 40304, + "bond": 37022, + "bond": 6826, + "bonded": 37390, + "bondi": 40092, + "bonding": 19609, + "bonds": 15786, + "bone": 22502, + "bone": 6195, + "bones": 9476, + "bonfire": 23151, + "bongo": 47519, + "boni": 32269, + "boni": 46356, + "bonita": 42896, + "bonjour": 33176, + "bonkers": 39865, + "bonn": 38969, + "bonnar": 47191, + "bonnaroo": 48777, + "bonne": 25844, + "bonnet": 30636, + "bonnie": 18555, + "bono": 24476, + "bons": 42883, + "bonsai": 44129, + "bonus": 8164, + "bonuses": 35144, + "boo": 824, + "boo": 7317, + "boogie": 22639, + "book": 2828, + "book": 1116, + "bookboost": 31257, + "bookclub": 34438, + "bookday": 26327, + "booked": 12584, + "booker": 21302, + "bookfest": 39381, + "booking": 10145, + "bookings": 18345, + "booklet": 27405, + "bookmark": 33596, + "bookof": 45629, + "bookreview": 27362, + "books": 44382, + "books": 2161, + "bookshelf": 34821, + "bookshop": 24705, + "bookstore": 17999, + "bookstores": 46416, + "bookworm": 20743, + "boom": 9609, + "boom": 7121, + "boomer": 33819, + "boomer": 31766, + "boomers": 37988, + "booming": 33487, + "boon": 24979, + "boon": 35821, + "boone": 23453, + "boop": 45047, + "boost": 44639, + "boost": 6260, + "boosted": 37631, + "booster": 20877, + "boosters": 46859, + "boosting": 28480, + "boosts": 29247, + "boot": 10843, + "boot": 8087, + "bootcamp": 22051, + "booted": 42564, + "booth": 47895, + "booth": 3971, + "booths": 32653, + "booties": 46188, + "bootleg": 38139, + "boots": 7319, + "booze": 24341, + "bop": 19720, + "bor": 1141, + "bor": 15093, + "bora": 24736, + "bord": 36891, + "bordeaux": 22009, + "border": 16304, + "border": 6177, + "borderlands": 38676, + "borders": 13900, + "bore": 14084, + "bore": 24638, + "bored": 8933, + "boredom": 31460, + "boretum": 38902, + "borg": 14770, + "borgh": 17180, + "boring": 12519, + "boris": 31212, + "boris": 15704, + "borisjohnson": 44481, + "born": 17695, + "born": 2683, + "borne": 42910, + "borne": 9328, + "borneo": 33332, + "bornon": 41811, + "bornonthisday": 42757, + "boro": 26796, + "boro": 7974, + "borough": 22761, + "borough": 6203, + "borrow": 22293, + "borrowed": 28224, + "borrowing": 41045, + "borussia": 36764, + "bos": 14885, + "bos": 9644, + "bosa": 46946, + "bosch": 42009, + "bosch": 19466, + "bosco": 36960, + "bose": 23142, + "bosh": 42244, + "bosni": 42924, + "bosnia": 31396, + "boss": 17935, + "boss": 4206, + "bosses": 23906, + "boston": 11540, + "boston": 4399, + "bostonmarathon": 44533, + "bot": 4136, + "bot": 6947, + "botan": 12554, + "botanic": 32560, + "botanical": 21026, + "botany": 22612, + "botd": 34451, + "both": 36575, + "both": 2212, + "bother": 21125, + "bothered": 27997, + "botox": 43449, + "bots": 13721, + "botswana": 27584, + "bott": 3520, + "bott": 37225, + "bottle": 37306, + "bottle": 5392, + "bottled": 29331, + "bottlen": 46439, + "bottles": 9754, + "bottling": 42006, + "bottom": 32314, + "bottom": 5931, + "bottoms": 31524, + "bou": 3728, + "bou": 23165, + "bouchard": 47930, + "boudo": 48827, + "bought": 4142, + "boul": 24830, + "boulder": 18260, + "boule": 17652, + "boulevard": 19504, + "boun": 5993, + "bounce": 14316, + "bouncing": 32060, + "bouncy": 43415, + "bound": 15140, + "bound": 4567, + "boundaries": 18690, + "boundary": 21344, + "bounds": 37469, + "bounty": 21142, + "bouquet": 20961, + "bour": 2934, + "bour": 35486, + "bourbon": 48118, + "bourbon": 14652, + "bourdain": 48095, + "bourg": 20690, + "bourgeo": 
45672, + "bourn": 39143, + "bourne": 13789, + "bourne": 5192, + "bournemouth": 20911, + "bout": 19982, + "bout": 8123, + "bouti": 10926, + "boutique": 12179, + "bow": 2297, + "bow": 4040, + "bowden": 48538, + "bowed": 49130, + "bowel": 36880, + "bowen": 25368, + "bower": 40414, + "bowers": 42238, + "bowie": 13036, + "bowing": 46398, + "bowl": 26719, + "bowl": 3814, + "bowled": 39987, + "bowler": 25528, + "bowlers": 42632, + "bowles": 41611, + "bowling": 10390, + "bowls": 17787, + "bowman": 22052, + "bows": 17000, + "bowser": 38234, + "bowski": 48311, + "box": 2774, + "box": 2063, + "boxed": 24190, + "boxer": 40394, + "boxer": 15363, + "boxers": 31019, + "boxes": 8350, + "boxing": 33669, + "boxing": 5554, + "boy": 2927, + "boy": 1876, + "boyband": 31568, + "boyce": 44480, + "boycot": 46208, + "boycott": 31615, + "boycott": 19559, + "boyd": 18295, + "boyfriend": 7328, + "boyfriends": 36541, + "boyle": 22802, + "boys": 25223, + "boys": 2034, + "boyz": 16152, + "bp": 23410, + "bp": 11558, + "bpa": 43855, + "bpd": 48587, + "bpl": 28901, + "bpm": 40338, + "bps": 37794, + "br": 711, + "br": 7532, + "bra": 1195, + "bra": 5860, + "brac": 6663, + "brace": 8376, + "brace": 9183, + "bracelet": 8969, + "bracelets": 20027, + "braces": 19249, + "brack": 25676, + "bracket": 14780, + "brackets": 36183, + "brad": 4848, + "brad": 9405, + "bradbury": 45097, + "braden": 46842, + "bradford": 15062, + "bradley": 31905, + "bradley": 10952, + "brador": 24062, + "bradshaw": 37556, + "brady": 42494, + "brady": 11117, + "brae": 42874, + "brae": 40040, + "brag": 30110, + "bragg": 38545, + "bragging": 38199, + "brah": 20276, + "brahms": 45114, + "brai": 25048, + "braid": 31067, + "braided": 39997, + "braids": 34221, + "brain": 9454, + "brain": 4812, + "brains": 17129, + "brainstorming": 36607, + "braised": 28363, + "brake": 14937, + "brakes": 23456, + "bral": 31309, + "bram": 14815, + "bram": 39456, + "brampton": 35124, + "bran": 3684, + "bran": 28348, + "brance": 36072, + "brance": 15413, + "branch": 7998, + "branches": 15843, + "brand": 3910, + "brand": 2896, + "branded": 18097, + "brandi": 41003, + "branding": 10841, + "brando": 41892, + "brandon": 20423, + "brandon": 9166, + "brands": 8681, + "brandt": 22552, + "brandy": 26232, + "brane": 32340, + "branson": 28280, + "brant": 28951, + "brant": 47592, + "braries": 46377, + "brary": 24520, + "bras": 22611, + "brasil": 18991, + "brass": 24348, + "brass": 11655, + "brat": 26717, + "brat": 26631, + "brate": 41864, + "braun": 39129, + "braun": 29309, + "brave": 25461, + "brave": 7769, + "braved": 47663, + "bravely": 42303, + "bravery": 25831, + "braves": 14422, + "braving": 43258, + "bravo": 38613, + "bravo": 13006, + "braw": 37871, + "brawl": 26066, + "braxton": 37451, + "bray": 26256, + "bray": 22993, + "braz": 4625, + "brazil": 47459, + "brazil": 6305, + "brazili": 45697, + "brazilian": 12111, + "brb": 25316, + "brc": 40393, + "bre": 887, + "bre": 7782, + "brea": 7318, + "brea": 46538, + "breach": 21363, + "breaches": 45173, + "bread": 18886, + "bread": 5066, + "breads": 43064, + "break": 2206, + "break": 2568, + "breakable": 30691, + "breakaway": 42732, + "breakdown": 14519, + "breaker": 14814, + "breakers": 22270, + "breakfa": 45931, + "breakfast": 30210, + "breakfast": 3290, + "breaking": 14698, + "breaking": 2755, + "breakingbad": 38032, + "breakingnews": 23837, + "breakout": 16752, + "breaks": 7263, + "breakthrough": 18802, + "breakup": 38931, + "breast": 12930, + "breast": 9475, + "breastcancer": 40813, + "breastcancer": 30065, + "breastfeeding": 29033, + 
"breasts": 37637, + "breath": 9508, + "breath": 9576, + "breathe": 11364, + "breathing": 14959, + "breathtaking": 14709, + "brecht": 34622, + "breck": 44598, + "bred": 46929, + "bred": 16008, + "bree": 7892, + "bree": 37138, + "breed": 28030, + "breed": 13791, + "breeders": 37472, + "breeding": 16544, + "breeds": 29021, + "breen": 48013, + "brees": 46721, + "breeze": 13125, + "breezy": 21451, + "breit": 23864, + "breitbart": 37926, + "brek": 35494, + "bremen": 39861, + "bren": 5209, + "brenda": 23786, + "brendan": 35134, + "brendan": 15414, + "brendon": 36756, + "brennan": 22372, + "brenner": 42941, + "brent": 31439, + "brent": 16355, + "brentwood": 33108, + "brero": 47781, + "bres": 32561, + "bret": 38020, + "bret": 32548, + "brethren": 43134, + "breton": 32290, + "brett": 22591, + "brett": 12394, + "brev": 42882, + "brevi": 39475, + "brew": 5048, + "brew": 7253, + "brewco": 33582, + "brewed": 23238, + "brewer": 20756, + "breweries": 35277, + "brewers": 17618, + "brewery": 8850, + "brewing": 8275, + "brewingco": 45155, + "brews": 21663, + "brewster": 40274, + "brex": 22726, + "brexit": 27666, + "brexit": 5801, + "brgy": 35983, + "bri": 1036, + "bri": 18636, + "bria": 35890, + "brian": 9824, + "brian": 4989, + "brianna": 32308, + "briar": 46119, + "bribe": 40042, + "bribery": 41792, + "bric": 27055, + "brice": 40190, + "brick": 13937, + "brick": 9518, + "bricks": 21029, + "brics": 48196, + "brid": 16995, + "bridal": 36875, + "bridal": 14284, + "bride": 18342, + "bride": 8964, + "brides": 18067, + "bridesma": 28356, + "bridesmaid": 43399, + "bridesmaids": 47754, + "bridg": 20623, + "bridge": 8647, + "bridge": 2465, + "bridgeport": 45201, + "bridges": 11811, + "bridget": 27073, + "bridgewater": 38732, + "bridging": 38109, + "brie": 26622, + "brief": 9435, + "brief": 8954, + "briefed": 47326, + "briefing": 12991, + "briefly": 26980, + "briefs": 29557, + "brien": 13504, + "brier": 43995, + "brig": 11081, + "briga": 46448, + "brigade": 16032, + "briggs": 28108, + "brigh": 6710, + "bright": 10383, + "bright": 4852, + "brighten": 18208, + "brightening": 43929, + "brighter": 18507, + "brightest": 26159, + "brightly": 36298, + "brightness": 42280, + "brighton": 28416, + "brighton": 9470, + "brigitte": 44421, + "brill": 27342, + "brill": 28601, + "brilli": 3821, + "brilliance": 28146, + "brilliant": 4106, + "brilliantly": 26803, + "brin": 25620, + "bring": 11596, + "bring": 2430, + "bringback": 28969, + "bringbackour": 45403, + "bringing": 4777, + "brings": 5138, + "brink": 39296, + "brink": 28796, + "brioche": 45818, + "bris": 9385, + "bris": 15783, + "brisban": 30431, + "brisbane": 42932, + "brisbane": 12407, + "brisk": 43646, + "brisket": 31920, + "bristol": 18159, + "bristol": 8010, + "brit": 2318, + "brit": 20066, + "britain": 40802, + "britain": 6272, + "britanni": 31373, + "britannia": 36188, + "brite": 33827, + "briti": 8155, + "british": 8651, + "british": 3504, + "britishmuseum": 41858, + "britney": 37192, + "britney": 21853, + "britneyspears": 42990, + "brits": 21832, + "britt": 10811, + "britt": 25976, + "brittany": 38187, + "brittany": 18818, + "britton": 37422, + "brium": 46079, + "brixton": 30056, + "bro": 927, + "bro": 4410, + "broad": 3491, + "broad": 12623, + "broadband": 21050, + "broadcast": 8967, + "broadcaster": 29005, + "broadcasting": 14403, + "broadcasts": 46742, + "broader": 36029, + "broadway": 34599, + "broadway": 9092, + "broc": 15587, + "broccoli": 19094, + "broch": 21419, + "brochure": 25275, + "brock": 14841, + "brock": 16745, + "brodie": 42150, + "brody": 29608, + 
"broke": 42165, + "broke": 6509, + "broken": 26126, + "broken": 5107, + "broker": 34032, + "broker": 20449, + "brokerage": 41327, + "brokers": 28271, + "brom": 18972, + "brom": 33296, + "bromance": 35353, + "bromley": 35715, + "bron": 4011, + "bron": 10243, + "bronco": 43488, + "bronco": 34370, + "broncos": 12516, + "bronson": 37042, + "bronte": 48936, + "bronx": 48310, + "bronx": 17183, + "brony": 21084, + "bronze": 8459, + "broo": 5204, + "brooch": 21207, + "brook": 4782, + "brook": 7322, + "brooke": 28576, + "brooke": 12549, + "brookes": 39707, + "brooklyn": 23253, + "brooklyn": 6983, + "brooks": 42779, + "brooks": 9991, + "broom": 32046, + "broom": 28008, + "broome": 49335, + "bros": 7776, + "broth": 29994, + "brotha": 33974, + "brother": 12697, + "brother": 3157, + "brotherhood": 19059, + "brothers": 4548, + "brou": 27874, + "brough": 21033, + "brought": 4222, + "brov": 42881, + "brow": 6547, + "brow": 15895, + "broward": 34719, + "brown": 6315, + "brown": 2866, + "browne": 28440, + "brownie": 23045, + "brownies": 22312, + "browning": 32241, + "browns": 14051, + "brows": 14998, + "browse": 19060, + "browser": 19768, + "browsing": 29318, + "brox": 43539, + "brs": 47485, + "brt": 46936, + "bru": 1698, + "bru": 31028, + "bruce": 21223, + "bruce": 7085, + "bruh": 17575, + "bruins": 14736, + "bruise": 48048, + "bruised": 46502, + "brum": 23862, + "brum": 28078, + "brun": 6870, + "brunch": 9113, + "brune": 29057, + "brunei": 41898, + "brunette": 35528, + "bruno": 14568, + "brunomars": 41156, + "brunswick": 24012, + "brush": 27969, + "brush": 8594, + "brushed": 30298, + "brushes": 21550, + "brushing": 35072, + "brussels": 11020, + "brut": 39499, + "brutal": 42144, + "brutal": 14556, + "brutality": 31348, + "brutally": 28132, + "brute": 47552, + "brux": 49093, + "bry": 6587, + "bry": 28228, + "bryan": 16134, + "bryan": 10412, + "bryant": 12256, + "bryce": 19895, + "bryn": 36569, + "bryn": 42877, + "bryson": 38990, + "bs": 11783, + "bs": 1329, + "bsa": 46619, + "bsb": 23070, + "bsbi": 41728, + "bsbibotany": 42086, + "bsc": 32031, + "bsd": 41848, + "bse": 46341, + "bsf": 48314, + "bsgo": 48474, + "bsp": 47977, + "bst": 19698, + "bsu": 46385, + "bt": 3317, + "bt": 4205, + "btc": 10315, + "btcc": 30759, + "btn": 44681, + "bto": 35516, + "btob": 29379, + "btr": 39767, + "bts": 15154, + "bts": 4007, + "btsarmy": 30302, + "btsbbmas": 35297, + "btsx": 44971, + "btv": 38541, + "btw": 9520, + "btwn": 28284, + "bu": 609, + "bu": 5831, + "bub": 27704, + "bub": 33158, + "bubb": 9739, + "bubba": 28149, + "bubble": 28687, + "bubble": 10799, + "bubblegum": 48078, + "bubbles": 17648, + "bubbly": 31034, + "buc": 8207, + "buccane": 32830, + "buccaneers": 38058, + "buch": 22623, + "bucha": 43582, + "buchan": 27237, + "buchanan": 28975, + "bucharest": 37013, + "buck": 6061, + "buck": 11433, + "bucket": 22596, + "bucket": 10498, + "bucketlist": 30778, + "buckets": 27168, + "buckeye": 34549, + "buckeyes": 30741, + "buckingham": 28736, + "buckle": 21948, + "buckley": 25905, + "bucks": 6103, + "bucky": 35916, + "bucs": 20011, + "bud": 2942, + "bud": 10737, + "buda": 18520, + "buda": 49012, + "budapest": 19202, + "budd": 7296, + "buddha": 13981, + "buddhism": 23744, + "buddhist": 18697, + "buddies": 14543, + "budding": 31992, + "buddy": 40948, + "buddy": 6557, + "budge": 32005, + "budget": 46758, + "budget": 5639, + "budgeting": 43789, + "budgets": 36419, + "buds": 14665, + "budweiser": 40900, + "buen": 15640, + "buena": 30876, + "buenas": 48529, + "bueno": 46202, + "buenos": 26055, + "buf": 44417, + "buff": 5456, + 
"buff": 21416, + "buffal": 25836, + "buffalo": 31231, + "buffalo": 8054, + "buffalob": 38831, + "buffalobills": 44352, + "buffe": 13724, + "buffer": 33050, + "buffet": 17829, + "buffett": 34081, + "buffs": 28906, + "buffy": 33356, + "bug": 14453, + "bug": 8162, + "bugatti": 35451, + "buggy": 28963, + "bugs": 13850, + "buh": 31406, + "buhari": 14661, + "buick": 22000, + "buil": 1354, + "build": 22739, + "build": 3289, + "builder": 14474, + "builders": 17694, + "building": 21206, + "building": 2307, + "buildings": 8866, + "builds": 16449, + "buildthe": 41497, + "built": 45824, + "built": 3874, + "buk": 28084, + "buk": 24317, + "buka": 47778, + "bukit": 39888, + "bul": 2572, + "bul": 10200, + "bula": 18726, + "bulaga": 41575, + "bular": 32187, + "bulb": 22373, + "bulbs": 24808, + "bulgar": 15424, + "bulgaria": 20295, + "bulgarian": 38693, + "bulge": 47603, + "bulk": 19643, + "bull": 4537, + "bull": 6029, + "bulldo": 37675, + "bulldog": 34828, + "bulldog": 15611, + "bulldogs": 13916, + "bullet": 14340, + "bullet": 12465, + "bulletin": 19638, + "bulletproof": 43212, + "bullets": 22117, + "bullied": 34689, + "bullies": 39050, + "bullion": 49114, + "bullish": 22142, + "bullock": 33198, + "bullpen": 38081, + "bulls": 10313, + "bully": 43111, + "bully": 20190, + "bullying": 13548, + "bum": 27683, + "bum": 14226, + "bumble": 25585, + "bumble": 39303, + "bumblebee": 36911, + "bummed": 48456, + "bump": 9783, + "bump": 15877, + "bumped": 22495, + "bumper": 17881, + "bumping": 40196, + "bumps": 21115, + "bun": 2591, + "bun": 13665, + "bunch": 7796, + "bund": 41905, + "bunde": 18841, + "bundesliga": 21582, + "bundle": 11793, + "bundled": 47228, + "bundles": 29834, + "bundy": 37332, + "bung": 44748, + "bungal": 29549, + "bungalow": 33696, + "bunk": 41236, + "bunker": 23615, + "bunnies": 28998, + "bunny": 34198, + "bunny": 9258, + "buns": 22235, + "bunting": 30695, + "buon": 31350, + "buon": 48498, + "bur": 1039, + "bur": 17362, + "burbank": 34862, + "burberry": 30412, + "burch": 44588, + "burden": 18687, + "bure": 11902, + "bureau": 32098, + "bureau": 15400, + "burg": 19505, + "burg": 3499, + "burge": 20522, + "burger": 22356, + "burger": 6548, + "burgers": 13007, + "burgess": 26211, + "burgh": 18141, + "burgh": 4965, + "burgl": 25554, + "burglar": 43365, + "burglary": 32573, + "burgring": 40823, + "burgundy": 23650, + "buri": 46348, + "buri": 42614, + "burial": 22012, + "buried": 14233, + "burk": 48822, + "burke": 15340, + "burle": 27891, + "burlesque": 33732, + "burlington": 23370, + "burma": 30305, + "burmese": 47906, + "burn": 7934, + "burn": 4285, + "burnaby": 47541, + "burne": 27246, + "burned": 15022, + "burner": 23243, + "burnett": 28558, + "burnham": 36111, + "burning": 46107, + "burning": 8405, + "burnley": 24653, + "burnout": 36078, + "burns": 10234, + "burnt": 15185, + "burr": 30879, + "burrell": 49045, + "burrito": 23473, + "burritos": 47245, + "burroughs": 41337, + "burrows": 44846, + "burst": 13005, + "bursting": 32566, + "bursts": 37026, + "burt": 27162, + "burton": 42354, + "burton": 12704, + "burundi": 33595, + "bury": 12276, + "bury": 3899, + "burys": 32362, + "bus": 1319, + "bus": 2840, + "busan": 40172, + "busc": 35000, + "busch": 20475, + "buses": 12879, + "bush": 11191, + "bush": 6867, + "bushes": 37578, + "busiest": 32764, + "busine": 4598, + "busines": 25364, + "business": 8346, + "business": 1716, + "businesses": 7287, + "businessman": 25635, + "buss": 47764, + "bust": 31299, + "bust": 9959, + "busted": 18643, + "buster": 37219, + "buster": 12094, + "busters": 16362, + "busting": 
29622, + "busy": 39332, + "busy": 4354, + "but": 2201, + "but": 767, + "butch": 35102, + "butcher": 18732, + "butchers": 42334, + "bute": 39240, + "butes": 14630, + "butler": 35867, + "butler": 10702, + "butt": 12500, + "butt": 31523, + "butte": 31678, + "butter": 5427, + "butter": 6952, + "butterflies": 16232, + "butterfly": 9738, + "buttermilk": 40180, + "butternut": 36867, + "buttery": 45535, + "button": 45480, + "button": 8007, + "buttons": 16188, + "butts": 25309, + "buu": 42313, + "buuren": 47752, + "buxton": 41370, + "buy": 11632, + "buy": 2131, + "buyer": 14682, + "buyers": 14663, + "buying": 6566, + "buys": 15560, + "buzz": 7866, + "buzz": 8706, + "buzzard": 47434, + "buzzer": 38064, + "buzzfeed": 26613, + "buzzing": 18511, + "bv": 18958, + "bv": 35861, + "bvb": 22454, + "bw": 17672, + "bw": 15120, + "bway": 26652, + "bwfc": 40918, + "bwo": 45902, + "bx": 33633, + "by": 1713, + "by": 638, + "bye": 20076, + "bye": 4460, + "byes": 47958, + "byl": 34994, + "byn": 46917, + "byn": 11890, + "byo": 28039, + "bypass": 26530, + "byr": 15534, + "byrd": 30369, + "byrne": 19676, + "byron": 43504, + "byron": 19775, + "bys": 26740, + "bystand": 46138, + "byte": 42798, + "bytes": 39538, + "bythe": 36621, + "byu": 41072, + "byu": 23770, + "byz": 35406, + "byzantine": 44081, + "bz": 13631, + "bé": 40365, + "bü": 38706, + "c": 66, + "c": 322, + "ca": 772, + "ca": 1684, + "caa": 19316, + "cab": 3033, + "cab": 11912, + "cabaret": 26263, + "cabbage": 18407, + "cabe": 32731, + "cabello": 34371, + "caber": 29062, + "cabernet": 33730, + "cabin": 14178, + "cabine": 23354, + "cabinet": 9937, + "cabinets": 33083, + "cabins": 48455, + "cable": 7925, + "cables": 22408, + "cabo": 37318, + "cabo": 28370, + "cabrera": 42338, + "cabs": 42048, + "cac": 8298, + "cac": 23872, + "cacao": 38022, + "cache": 28993, + "caching": 40655, + "cactus": 19794, + "cad": 6297, + "cad": 20166, + "caday": 34187, + "cadbury": 44698, + "caddy": 41521, + "cade": 10497, + "cade": 17306, + "cadet": 22764, + "cadets": 19160, + "cadillac": 18156, + "cae": 49264, + "caer": 28298, + "caes": 15740, + "caesar": 21642, + "caesars": 42162, + "caf": 3471, + "caf": 20867, + "cafc": 30748, + "cafe": 15201, + "cafe": 4979, + "cafes": 40166, + "cafeteria": 32817, + "caffe": 18258, + "caffe": 45416, + "caffeine": 22487, + "café": 15304, + "cag": 15714, + "cage": 11838, + "cages": 37939, + "cah": 40519, + "cahill": 33185, + "cai": 38971, + "cai": 36116, + "cain": 13747, + "caine": 16799, + "cair": 15804, + "cair": 46659, + "cairn": 31264, + "cairn": 42467, + "cairngor": 44067, + "cairns": 32941, + "cairo": 19615, + "cait": 14116, + "caitlin": 47768, + "caitlin": 26809, + "caitlyn": 35763, + "cajun": 43425, + "cajun": 33044, + "cak": 42986, + "cake": 15295, + "cake": 2972, + "cakeday": 46207, + "cakes": 5950, + "cal": 1198, + "cal": 6372, + "cala": 32133, + "calab": 31795, + "calais": 39886, + "calam": 28841, + "calc": 45055, + "calci": 22824, + "calcium": 27815, + "calcu": 15328, + "calcul": 15734, + "calculate": 37656, + "calculated": 40688, + "calculations": 44605, + "calculator": 26093, + "calculus": 35104, + "calcutta": 42901, + "calder": 29372, + "calder": 36817, + "caldwell": 30484, + "cale": 32674, + "caleb": 19619, + "caled": 28421, + "calend": 6057, + "calendar": 7122, + "calendars": 17229, + "calf": 17508, + "calgary": 27415, + "calgary": 10797, + "calhoun": 38929, + "cali": 2857, + "cali": 16337, + "caliber": 32820, + "calibr": 32597, + "calico": 45379, + "calif": 30839, + "califor": 3526, + "californi": 21303, + "california": 3729, + 
"call": 7950, + "call": 1620, + "calla": 20658, + "callahan": 43313, + "callaway": 42596, + "callback": 44764, + "calle": 47699, + "calle": 38144, + "called": 2726, + "caller": 30666, + "calli": 16338, + "callie": 36512, + "calligraphy": 27775, + "calling": 4597, + "callister": 49026, + "callme": 42449, + "callof": 41280, + "calls": 4572, + "callum": 23224, + "calm": 34990, + "calm": 7011, + "calming": 30690, + "calorie": 32679, + "calories": 18029, + "cals": 47714, + "calum": 16405, + "calvary": 40169, + "calvert": 47134, + "calves": 31857, + "calvin": 27642, + "calvin": 17345, + "caly": 10244, + "calyp": 29851, + "cam": 1004, + "cam": 5982, + "camar": 31991, + "camber": 44362, + "cambo": 14662, + "cambodia": 17347, + "cambridge": 24651, + "cambridge": 9334, + "cambridgeshire": 46139, + "camden": 38735, + "camden": 17984, + "came": 1986, + "camel": 27005, + "camel": 21914, + "camels": 41357, + "cameo": 19492, + "camer": 4961, + "camera": 3934, + "cameraman": 43347, + "cameras": 12172, + "camero": 20320, + "cameron": 19634, + "cameron": 8057, + "camerondallas": 40587, + "cameroon": 24061, + "camil": 37745, + "camila": 19919, + "camilla": 38897, + "camille": 26741, + "camino": 28529, + "camo": 28702, + "camo": 19716, + "camogie": 39547, + "camou": 23588, + "camoufla": 23667, + "camouflage": 29049, + "camp": 2854, + "camp": 2877, + "campa": 2793, + "campaig": 9448, + "campaign": 44524, + "campaign": 3193, + "campaigner": 46364, + "campaigners": 40272, + "campaigning": 19594, + "campaigns": 15669, + "campan": 31765, + "campbell": 29094, + "campbell": 8806, + "campe": 16672, + "campeon": 49109, + "campeones": 30105, + "camper": 41914, + "camper": 24522, + "campers": 26619, + "campfire": 32530, + "campground": 46969, + "camping": 9982, + "campo": 27600, + "campos": 48077, + "camps": 12806, + "campsite": 44243, + "campu": 19687, + "campus": 4560, + "campuses": 31895, + "camra": 46155, + "camry": 46472, + "cams": 32590, + "can": 950, + "can": 753, + "cana": 28341, + "canad": 13193, + "canada": 2698, + "canadaday": 39800, + "canadi": 4329, + "canadian": 22160, + "canadian": 5255, + "canadians": 18989, + "canadiens": 40932, + "canal": 28585, + "canal": 9535, + "canals": 38483, + "canaria": 47117, + "canary": 40409, + "canary": 24523, + "canberra": 16719, + "canc": 43189, + "cancel": 12026, + "cancel": 21546, + "canceled": 25874, + "cancell": 28027, + "cancellation": 38765, + "cancelled": 13270, + "cancels": 34089, + "cancer": 12690, + "cancer": 3148, + "cancers": 33201, + "cancun": 34721, + "cand": 4986, + "candace": 45623, + "candel": 47834, + "candi": 6034, + "candice": 30024, + "candid": 7884, + "candid": 19206, + "candidacy": 46248, + "candidate": 6475, + "candidates": 8619, + "candied": 43982, + "candies": 46305, + "candle": 18995, + "candle": 12674, + "candlelight": 34724, + "candles": 15472, + "candy": 20741, + "candy": 6417, + "cane": 23644, + "cane": 14716, + "canelo": 43210, + "canes": 21902, + "cani": 35592, + "canine": 27380, + "cann": 4139, + "cann": 23709, + "cannab": 7577, + "cannabis": 31837, + "cannabis": 8861, + "canne": 44252, + "canned": 27290, + "cannes": 13773, + "canni": 26389, + "canning": 38621, + "cannon": 28771, + "cannon": 15661, + "cannons": 46269, + "cannot": 4785, + "canny": 26986, + "cano": 31668, + "cano": 25937, + "canoe": 23503, + "canola": 40389, + "canon": 17749, + "canon": 9310, + "canopy": 26061, + "cans": 13707, + "cant": 13395, + "cant": 5784, + "canteen": 39230, + "canter": 19301, + "canterbury": 22271, + "canti": 42845, + "cantina": 47472, + "canton": 
37735, + "canton": 25363, + "cantore": 41769, + "cantwait": 33760, + "canu": 20171, + "canucks": 24321, + "canv": 30714, + "canvas": 22441, + "canvas": 7483, + "canvass": 40054, + "canvassing": 33783, + "cany": 47674, + "canyon": 41246, + "canyon": 9755, + "cao": 29207, + "cap": 1289, + "cap": 3938, + "capabilities": 19512, + "capability": 25885, + "capable": 14742, + "capac": 24665, + "capacity": 8970, + "capcom": 28342, + "cape": 10288, + "cape": 6631, + "capecod": 41339, + "capes": 38785, + "capetown": 20059, + "capit": 6889, + "capita": 41833, + "capital": 11198, + "capital": 5439, + "capitalism": 20068, + "capitalist": 37015, + "capitals": 29579, + "capitol": 43880, + "capitol": 11375, + "capo": 45477, + "capp": 16718, + "capped": 24659, + "capping": 42656, + "cappuccino": 37402, + "capri": 48699, + "capri": 30982, + "capric": 28667, + "capricorn": 46314, + "caps": 23185, + "capsu": 15608, + "capsul": 40341, + "capsule": 20627, + "capsules": 32870, + "capt": 45815, + "capt": 17369, + "captain": 14958, + "captain": 4621, + "captainamerica": 46229, + "captainmarvel": 48492, + "captains": 18706, + "caption": 11327, + "captions": 41878, + "captiv": 19776, + "captivating": 30580, + "captive": 29038, + "captivity": 41141, + "capture": 8818, + "captured": 8020, + "captures": 15305, + "capturing": 19548, + "capu": 44241, + "car": 811, + "car": 1615, + "cara": 20016, + "carab": 32251, + "carac": 30029, + "caracas": 45854, + "caramel": 14788, + "carameli": 41739, + "caramelized": 43854, + "carat": 32981, + "carav": 13814, + "caravan": 18566, + "carb": 21379, + "carbo": 43235, + "carbon": 14038, + "carbon": 7549, + "carbs": 29313, + "carcin": 31587, + "carcinoma": 46810, + "card": 10793, + "card": 2601, + "cardam": 49008, + "cardboard": 19845, + "cardi": 6211, + "cardi": 29677, + "cardiac": 21256, + "cardiff": 22488, + "cardiff": 9781, + "cardigan": 30501, + "cardin": 8457, + "cardinal": 46310, + "cardinal": 16472, + "cardinals": 12837, + "cardio": 15003, + "cardio": 23455, + "cardiology": 37276, + "cardiovascular": 29291, + "cardo": 40625, + "cards": 4094, + "care": 2050, + "care": 1776, + "cared": 27675, + "career": 20609, + "career": 3061, + "careers": 10090, + "careful": 11999, + "carefully": 15789, + "caregi": 22042, + "caregiver": 46372, + "caregivers": 35909, + "careless": 47325, + "carers": 26484, + "cares": 10968, + "caretaker": 48037, + "carey": 14895, + "cargo": 12490, + "cari": 18497, + "cari": 37273, + "carib": 9757, + "caribbean": 10368, + "caribou": 42135, + "caric": 25337, + "caricature": 38857, + "carina": 44357, + "caring": 13083, + "carl": 8273, + "carl": 9482, + "carla": 25552, + "carleton": 46496, + "carlin": 47559, + "carlisle": 23276, + "carlo": 17861, + "carlo": 15266, + "carlos": 9538, + "carlow": 44745, + "carls": 39635, + "carlson": 24114, + "carlton": 18934, + "carly": 23166, + "carly": 22689, + "carlyle": 46555, + "carmel": 30757, + "carmel": 25601, + "carmen": 41427, + "carmen": 18834, + "carmichael": 41657, + "carn": 21597, + "carnage": 31385, + "carnation": 44577, + "carnaval": 47238, + "carne": 17053, + "carne": 42885, + "carnegie": 25287, + "carney": 34194, + "carni": 8438, + "carnival": 36708, + "carnival": 10577, + "caro": 30317, + "caro": 29344, + "carol": 4242, + "carol": 11489, + "carole": 31955, + "carolin": 26418, + "carolina": 7027, + "caroline": 31064, + "caroline": 12641, + "carols": 33269, + "carolyn": 25825, + "carou": 32224, + "carousel": 36665, + "carp": 26085, + "carpen": 15584, + "carpenter": 18475, + "carpet": 6922, + "carpets": 34612, + "carr": 
26951, + "carr": 17136, + "carra": 32332, + "carre": 31114, + "carrera": 32952, + "carri": 4739, + "carriage": 47885, + "carriage": 21087, + "carrick": 44052, + "carrie": 30334, + "carrie": 15848, + "carried": 12960, + "carrier": 12308, + "carriers": 26865, + "carries": 17982, + "carrieunderwood": 47338, + "carrington": 48759, + "carroll": 41911, + "carroll": 14893, + "carrot": 15435, + "carrots": 19299, + "carry": 31863, + "carry": 6998, + "carrying": 9920, + "cars": 3346, + "carsforsale": 45222, + "carson": 41766, + "carson": 13171, + "cart": 27705, + "cart": 13065, + "cartag": 45042, + "cartagena": 47157, + "carte": 44949, + "cartel": 30529, + "carter": 27330, + "carter": 7260, + "cartier": 32951, + "carto": 5487, + "carton": 41812, + "cartoon": 33082, + "cartoon": 7651, + "cartoonist": 30793, + "cartoons": 17673, + "cartri": 47084, + "cartridge": 29432, + "cartridges": 49249, + "carts": 27581, + "cartunesapp": 32888, + "caruso": 45192, + "carve": 40152, + "carved": 15127, + "carver": 28850, + "carving": 19428, + "carvings": 48123, + "cary": 22844, + "cas": 1671, + "cas": 13831, + "casa": 14643, + "casablanc": 36572, + "casablanca": 41950, + "casc": 36714, + "casca": 43296, + "cascade": 29065, + "cascades": 46454, + "case": 17698, + "case": 2068, + "cases": 6888, + "casey": 24899, + "casey": 12836, + "cash": 11050, + "cash": 5131, + "cashback": 36368, + "cashe": 32233, + "cashew": 39531, + "cashi": 29517, + "cashier": 34547, + "cashmere": 34566, + "casi": 38350, + "casino": 10473, + "casio": 32261, + "cask": 26299, + "casm": 35198, + "casper": 35892, + "cass": 22556, + "cassandra": 35289, + "casser": 31093, + "casserole": 36045, + "cassette": 19717, + "cassi": 14942, + "cassidy": 21757, + "cassie": 29323, + "cassini": 46554, + "cast": 2509, + "cast": 1970, + "caste": 32693, + "casted": 33838, + "castel": 43306, + "castell": 31792, + "caster": 32101, + "caster": 8449, + "casters": 29721, + "castic": 47737, + "castillo": 30813, + "casting": 7087, + "castle": 12496, + "castle": 3540, + "castles": 24766, + "castro": 16950, + "casts": 10595, + "casu": 15345, + "casual": 10129, + "casually": 18840, + "casualties": 30244, + "casualty": 31222, + "cat": 1481, + "cat": 2368, + "cata": 42279, + "catal": 12792, + "catalan": 30532, + "catalina": 36576, + "catalo": 34740, + "catalog": 20036, + "catalogue": 20985, + "catalonia": 27039, + "catalunya": 44132, + "cataly": 15894, + "catalyst": 25387, + "catan": 45893, + "catap": 39514, + "catar": 35801, + "catastro": 22736, + "catastrophe": 41422, + "catastrophic": 34448, + "catch": 18901, + "catch": 3042, + "catcher": 15965, + "catchers": 39060, + "catches": 17213, + "catching": 8617, + "catchy": 37114, + "catday": 32243, + "cate": 6357, + "cate": 24510, + "cated": 31823, + "categor": 17006, + "categori": 40117, + "categories": 19971, + "category": 9432, + "cater": 16634, + "cater": 38101, + "catering": 16697, + "caterpillar": 27111, + "catfish": 26077, + "cath": 9196, + "cath": 30811, + "cathar": 43784, + "cathe": 7174, + "cathedr": 46370, + "cathedral": 7865, + "catherine": 35035, + "catherine": 12339, + "catho": 7595, + "cathol": 16315, + "catholic": 20382, + "catholic": 7757, + "catholics": 36808, + "cathy": 40326, + "cathy": 22731, + "cation": 21367, + "cato": 33558, + "cats": 38800, + "cats": 3989, + "catsofinstagram": 39901, + "catsoftwitter": 17273, + "catt": 37339, + "cattle": 48799, + "cattle": 13644, + "caturday": 20892, + "catwalk": 36565, + "catwoman": 47251, + "cau": 1121, + "cau": 45529, + "caucus": 18847, + "caught": 4520, + "caul": 
23460, + "cauley": 41682, + "caulfield": 44906, + "cauli": 20123, + "cauliflower": 23802, + "cause": 18982, + "cause": 1394, + "caused": 8940, + "causes": 9775, + "causeway": 35034, + "causing": 10779, + "caution": 15656, + "cautious": 36579, + "cav": 4942, + "cav": 45935, + "cava": 48682, + "caval": 24537, + "cavali": 20783, + "cavalier": 44488, + "cavaliers": 30194, + "cavalry": 32467, + "cave": 25441, + "cave": 9654, + "cavendish": 42945, + "caver": 41487, + "caves": 22096, + "cavi": 27360, + "caviar": 31228, + "cavill": 40492, + "cavity": 43156, + "cavs": 16800, + "caw": 38405, + "caw": 43804, + "cawx": 26739, + "cay": 11876, + "cay": 37399, + "cayenne": 43650, + "cayman": 33737, + "caz": 48451, + "cb": 4034, + "cb": 8830, + "cba": 38472, + "cbb": 31487, + "cbc": 14096, + "cbc": 14523, + "cbd": 13176, + "cbe": 43639, + "cbi": 30875, + "cbj": 35608, + "cbn": 26579, + "cbp": 46723, + "cbr": 28762, + "cbs": 16788, + "cbs": 8009, + "cc": 2976, + "cc": 2021, + "cca": 17987, + "ccc": 21856, + "ccd": 48556, + "ccg": 37755, + "cch": 21789, + "cchini": 28467, + "cci": 32942, + "cci": 8196, + "ccl": 43773, + "ccm": 40435, + "cco": 28786, + "ccot": 24950, + "ccp": 43045, + "ccs": 30400, + "cctv": 23097, + "ccu": 49023, + "cd": 4308, + "cd": 4480, + "cda": 45565, + "cdc": 41098, + "cdc": 25779, + "cdn": 8886, + "cdn": 26802, + "cdnpoli": 11645, + "cdo": 47187, + "cdp": 39624, + "cds": 20784, + "cdt": 18455, + "ce": 685, + "ce": 629, + "cea": 28355, + "cean": 34409, + "cean": 37295, + "cease": 32856, + "cease": 25499, + "ceasefire": 38291, + "cebu": 20146, + "cec": 29694, + "cec": 40029, + "cecil": 26987, + "cecil": 27169, + "cecilia": 35440, + "ced": 25634, + "ced": 2323, + "cedar": 24167, + "cedar": 13799, + "cedric": 36608, + "cee": 45966, + "cee": 15015, + "cees": 47914, + "ceil": 27275, + "ceiling": 12374, + "ceilings": 33770, + "cek": 45544, + "cel": 2269, + "cel": 7597, + "cele": 1314, + "celeb": 38862, + "celeb": 19393, + "celebr": 1372, + "celebrate": 31414, + "celebrate": 2694, + "celebrated": 9184, + "celebrates": 7564, + "celebrating": 3382, + "celebration": 4615, + "celebrations": 10825, + "celebratory": 34115, + "celebrities": 17071, + "celebrity": 23981, + "celebrity": 7320, + "celebs": 19803, + "celed": 25741, + "celer": 9621, + "celery": 30990, + "celeste": 29364, + "celesti": 29497, + "celestial": 32669, + "celi": 25567, + "celia": 44489, + "celine": 33644, + "cell": 9316, + "cell": 5533, + "cellar": 24282, + "cellars": 44976, + "cellence": 34687, + "cello": 23013, + "cellphone": 39029, + "cells": 8890, + "cellu": 16791, + "cellular": 23268, + "cels": 24021, + "celsius": 47057, + "celtic": 21897, + "celtic": 10523, + "celticfc": 38612, + "celtics": 16226, + "cem": 41435, + "ceme": 10517, + "cement": 4369, + "cements": 19448, + "cemetery": 11660, + "cen": 1306, + "cen": 30106, + "cena": 21591, + "cence": 24410, + "cency": 41259, + "cene": 30038, + "censor": 24230, + "censor": 44709, + "censored": 30951, + "censorship": 27284, + "census": 23677, + "cent": 1784, + "cent": 3662, + "centenary": 22422, + "centennial": 20895, + "center": 16651, + "center": 2119, + "centered": 24584, + "centers": 14494, + "centi": 48889, + "centime": 48687, + "centr": 2370, + "central": 13448, + "central": 3339, + "centre": 26310, + "centre": 2916, + "centred": 47925, + "centres": 19354, + "centri": 30872, + "centric": 19297, + "centro": 37178, + "cents": 11934, + "centu": 16818, + "centuri": 36816, + "centuries": 19014, + "century": 26134, + "century": 4275, + "ceo": 46340, + "ceo": 3559, + "ceos": 
28332, + "cep": 2632, + "cep": 48714, + "ceph": 44343, + "cept": 3678, + "ception": 12346, + "cer": 1364, + "cer": 1925, + "cera": 34608, + "ceram": 10677, + "ceramic": 15112, + "ceramics": 22438, + "cere": 3984, + "cere": 22085, + "cereal": 17581, + "cereals": 48618, + "cerebral": 39073, + "ceremon": 15796, + "ceremonial": 33281, + "ceremonies": 21547, + "ceremony": 5193, + "cern": 44851, + "cers": 13638, + "cert": 27522, + "certain": 8526, + "certain": 7883, + "certainly": 10883, + "certainty": 20054, + "certi": 4888, + "certific": 9443, + "certificate": 11786, + "certificates": 25281, + "certification": 14735, + "certified": 9288, + "cerv": 25738, + "cervical": 35953, + "ces": 28715, + "ces": 1604, + "cesar": 37025, + "cesar": 28603, + "cess": 2314, + "cess": 1554, + "cessna": 36596, + "cest": 27245, + "cester": 15769, + "cester": 12718, + "cet": 14960, + "cett": 46708, + "ceu": 37457, + "cevic": 48369, + "cey": 20971, + "cf": 10189, + "cf": 11171, + "cfa": 34521, + "cfb": 32931, + "cfc": 11577, + "cfd": 46171, + "cfl": 46320, + "cfl": 22332, + "cfo": 26937, + "cfp": 40756, + "cfr": 44033, + "cfs": 32835, + "cg": 27118, + "cg": 14740, + "cgc": 38775, + "cgi": 30520, + "ch": 540, + "ch": 634, + "cha": 1587, + "cha": 4541, + "chab": 26670, + "chad": 13095, + "chad": 12923, + "chae": 9460, + "chaf": 38123, + "chag": 27989, + "chai": 31590, + "chai": 18919, + "chain": 13898, + "chain": 3946, + "chained": 34402, + "chains": 14438, + "chainsaw": 37617, + "chainz": 39687, + "chair": 4728, + "chair": 4269, + "chaired": 31664, + "chairing": 42205, + "chairman": 6901, + "chairperson": 31584, + "chairs": 12033, + "chak": 13702, + "chak": 41713, + "chakra": 38304, + "chakra": 33241, + "chal": 7397, + "chal": 30809, + "chale": 38099, + "chalet": 37907, + "chalk": 31362, + "chalk": 17846, + "chall": 2073, + "challeng": 4138, + "challenge": 29462, + "challenge": 2836, + "challenged": 17380, + "challenger": 18228, + "challengers": 46404, + "challenges": 6280, + "challenging": 11754, + "chalmers": 47955, + "cham": 1290, + "cham": 19951, + "chamber": 18983, + "chamber": 7642, + "chamberlain": 32756, + "chambers": 16501, + "chamele": 34759, + "chameleon": 41317, + "champ": 36813, + "champ": 6602, + "champag": 10283, + "champagne": 11007, + "champi": 1680, + "champion": 2643, + "champion": 3950, + "champions": 4227, + "championship": 3429, + "championships": 7047, + "championsleague": 27638, + "champs": 6240, + "chan": 1255, + "chan": 6704, + "chana": 48752, + "chanc": 13931, + "chance": 32940, + "chance": 2594, + "chancellor": 15886, + "chances": 10870, + "chand": 7126, + "chand": 41508, + "chandelier": 30570, + "chandi": 12482, + "chandigarh": 34106, + "chandler": 17595, + "chandra": 27082, + "chandra": 25348, + "chanel": 16951, + "chang": 2233, + "chang": 16461, + "change": 11608, + "change": 1799, + "changeable": 41335, + "changed": 4907, + "changer": 18406, + "changers": 35185, + "changes": 4938, + "changing": 40384, + "changing": 5621, + "changmin": 47410, + "chann": 8804, + "channel": 25837, + "channel": 3847, + "channeling": 28197, + "channels": 13961, + "channing": 37417, + "chant": 18165, + "chant": 13521, + "chanting": 32111, + "chants": 22723, + "chanyeol": 18805, + "chao": 31815, + "chaos": 10853, + "chaotic": 33501, + "chap": 3825, + "chap": 21939, + "chapel": 40859, + "chapel": 10137, + "chaplain": 38348, + "chaplin": 32545, + "chapman": 17968, + "chapp": 20634, + "chaps": 36823, + "chapter": 6014, + "chapters": 22936, + "char": 1054, + "char": 16017, + "chara": 35668, + "charac": 2792, + 
"character": 10997, + "character": 4009, + "characterdesign": 38149, + "characteri": 20920, + "characteristic": 44747, + "characteristics": 26037, + "characters": 6564, + "charan": 31851, + "charcoal": 19268, + "chard": 17524, + "chardon": 26599, + "chardonnay": 28161, + "charge": 25032, + "charge": 5948, + "chargeable": 35664, + "charged": 7916, + "charger": 13090, + "chargers": 17352, + "charges": 8962, + "charging": 12514, + "chariot": 38811, + "charis": 24449, + "charisma": 45041, + "charismatic": 37205, + "charitable": 23256, + "charities": 18493, + "charity": 20008, + "charity": 4607, + "charitytuesday": 42794, + "charl": 47736, + "charle": 10217, + "charles": 27983, + "charles": 5127, + "charleston": 15478, + "charley": 38027, + "charli": 21784, + "charli": 49392, + "charlie": 16764, + "charlie": 6393, + "charlotte": 18445, + "charlotte": 7871, + "charlottesville": 32027, + "charlton": 27048, + "charm": 10876, + "charmed": 39790, + "charming": 12177, + "charms": 21944, + "charred": 44085, + "chart": 42685, + "chart": 5053, + "charted": 27939, + "charter": 42345, + "charter": 13569, + "chartered": 31298, + "charters": 46626, + "charting": 39841, + "charts": 10728, + "chas": 10717, + "chas": 29838, + "chase": 21503, + "chase": 3859, + "chased": 30342, + "chaser": 29560, + "chasers": 34158, + "chases": 45011, + "chasing": 46909, + "chasing": 13376, + "chassis": 29188, + "chast": 42176, + "chasu": 41352, + "chat": 5355, + "chat": 2402, + "chatbots": 43994, + "chate": 30377, + "chateau": 44582, + "chateau": 23520, + "chath": 46849, + "chatham": 32030, + "chats": 13263, + "chatt": 21618, + "chattanoo": 28009, + "chattanooga": 29866, + "chatted": 34124, + "chatter": 33473, + "chatter": 41103, + "chatting": 12401, + "chatur": 33839, + "chau": 11263, + "chau": 37536, + "chauffe": 45440, + "chauhan": 46663, + "chav": 28997, + "chavez": 27480, + "chaw": 39639, + "chay": 45317, + "chaz": 47815, + "chc": 36233, + "chd": 41645, + "che": 983, + "che": 3842, + "chea": 39580, + "chead": 48358, + "cheap": 27036, + "cheap": 8678, + "cheape": 26164, + "cheaper": 17776, + "cheapest": 26640, + "cheat": 18180, + "cheated": 34285, + "cheating": 19722, + "chec": 1113, + "check": 7672, + "check": 1217, + "checked": 10387, + "checker": 45883, + "checkers": 48181, + "checking": 7441, + "checklist": 26989, + "checkout": 13101, + "checkpoint": 27531, + "checks": 13737, + "ched": 11341, + "ched": 2146, + "cheddar": 20551, + "chee": 5326, + "chee": 20944, + "cheek": 40000, + "cheek": 21227, + "cheeks": 23019, + "cheeky": 15068, + "cheer": 9733, + "cheer": 6918, + "cheered": 38111, + "cheerful": 28882, + "cheering": 14289, + "cheerleader": 29072, + "cheerleaders": 22343, + "cheerleading": 36366, + "cheers": 6562, + "chees": 15182, + "cheese": 10738, + "cheese": 4108, + "cheeseburger": 41200, + "cheesecake": 17803, + "cheeses": 36076, + "cheesy": 22093, + "cheetah": 27431, + "chef": 12137, + "chef": 4895, + "chefs": 14486, + "chek": 43745, + "chel": 3084, + "chel": 25970, + "chell": 46854, + "chelle": 30141, + "chelms": 34936, + "chelmsford": 39890, + "chelse": 19071, + "chelsea": 6031, + "chelseafc": 25927, + "chelten": 18889, + "cheltenham": 21589, + "chem": 5667, + "chem": 13698, + "chemi": 7179, + "chemical": 39376, + "chemical": 9208, + "chemicals": 17426, + "chemist": 23138, + "chemistry": 8841, + "chemo": 33095, + "chemo": 36348, + "chemotherapy": 41412, + "chemtrails": 46015, + "chen": 5907, + "chen": 8983, + "cheney": 43522, + "cheng": 32512, + "cheng": 30190, + "chenko": 29073, + "chennai": 28948, + 
"chennai": 12791, + "cheon": 11498, + "cheque": 28168, + "cher": 3597, + "cher": 3466, + "cheri": 26471, + "cherish": 20053, + "cherished": 42325, + "cherno": 35376, + "chernobyl": 40554, + "chero": 19844, + "cherokee": 22860, + "cherries": 27248, + "cherry": 21470, + "cherry": 7325, + "chers": 5789, + "chery": 38478, + "cheryl": 37784, + "cheryl": 20600, + "ches": 18346, + "ches": 1910, + "chesa": 28349, + "chesapeake": 32909, + "cheshire": 17130, + "chesney": 48747, + "chess": 27170, + "chess": 8397, + "chest": 18217, + "chest": 10563, + "chester": 10466, + "chester": 3343, + "chesterfield": 32975, + "chestnut": 21834, + "chet": 9663, + "chett": 24695, + "chev": 7152, + "chev": 41145, + "chevro": 12850, + "chevrolet": 13240, + "chevron": 33792, + "chevy": 16581, + "chew": 32645, + "chew": 22642, + "chewan": 23689, + "chewbacca": 49355, + "chewing": 31486, + "chewy": 42940, + "chey": 26968, + "chey": 31208, + "cheyenne": 34805, + "chez": 49183, + "chez": 10556, + "chf": 33021, + "chfield": 41619, + "chhat": 34127, + "chhattisgarh": 44246, + "chi": 1337, + "chi": 4039, + "chia": 19147, + "chiang": 33764, + "chibi": 22306, + "chic": 2627, + "chic": 9091, + "chica": 44190, + "chicag": 16778, + "chicago": 15038, + "chicago": 3530, + "chicagof": 40638, + "chicagofire": 46576, + "chicas": 40664, + "chichester": 43823, + "chick": 3170, + "chick": 11238, + "chicken": 26322, + "chicken": 3717, + "chickens": 21658, + "chickpea": 48109, + "chicks": 17810, + "chico": 30379, + "chie": 40046, + "chie": 12388, + "chief": 16830, + "chief": 3455, + "chiefs": 11419, + "chiev": 47761, + "chiff": 27407, + "chiffon": 31817, + "chig": 42952, + "chihu": 22857, + "chihuahu": 25437, + "chihuahua": 30181, + "chik": 45455, + "chil": 1333, + "child": 4392, + "child": 2913, + "childcare": 31133, + "childhood": 34772, + "childhood": 7551, + "childish": 31939, + "childre": 2135, + "children": 11101, + "children": 2153, + "childrens": 31551, + "childrens": 21553, + "childs": 39521, + "chile": 10022, + "chilean": 33186, + "chili": 13033, + "chill": 6498, + "chill": 6382, + "chilled": 23540, + "chillen": 45160, + "chilli": 26787, + "chilli": 17067, + "chillin": 10347, + "chilling": 10179, + "chillout": 39842, + "chills": 25460, + "chilly": 14450, + "chim": 10543, + "chimney": 26821, + "chimp": 44374, + "chin": 6555, + "chin": 8979, + "china": 38943, + "china": 2817, + "chinatown": 28582, + "chine": 4013, + "chinese": 30568, + "chinese": 4271, + "ching": 34621, + "ching": 1439, + "chino": 47181, + "chino": 27440, + "chinook": 41577, + "chinson": 33786, + "chio": 19650, + "chip": 19271, + "chip": 8730, + "chipmun": 46384, + "chipot": 17702, + "chipotle": 19284, + "chipp": 39854, + "chippe": 46541, + "chipped": 39892, + "chipping": 40323, + "chips": 8855, + "chir": 15564, + "chiro": 23413, + "chiroprac": 25987, + "chiropractic": 34437, + "chis": 19920, + "chistan": 20523, + "chiswick": 47290, + "chit": 13515, + "chit": 45626, + "chita": 49184, + "chitec": 39862, + "chive": 29222, + "chives": 34921, + "chk": 47424, + "chl": 38592, + "chley": 47748, + "chlo": 10374, + "chloe": 39966, + "chloe": 13992, + "chlor": 23135, + "chman": 35835, + "chment": 20848, + "chner": 48277, + "cho": 1327, + "cho": 5150, + "choa": 43077, + "choc": 32772, + "choc": 21983, + "choco": 46285, + "choco": 32692, + "chocol": 3443, + "chocolat": 44631, + "chocolate": 29389, + "chocolate": 3820, + "chocolates": 24120, + "choi": 23749, + "choic": 35606, + "choice": 23857, + "choice": 4051, + "choices": 11016, + "choir": 9214, + "choirs": 43277, + "choke": 
30231, + "choked": 43521, + "choker": 39642, + "choking": 39993, + "chol": 19802, + "cholera": 45999, + "cholester": 26861, + "cholesterol": 27982, + "chom": 25151, + "chon": 20416, + "chon": 21601, + "chondri": 37379, + "chong": 26220, + "choo": 3869, + "choo": 24437, + "chool": 29578, + "chools": 41958, + "choose": 22756, + "choose": 5073, + "chooses": 29923, + "choosing": 13475, + "chop": 10458, + "chop": 16663, + "chopin": 42256, + "chopped": 22580, + "chopper": 24011, + "chopping": 35375, + "chopra": 24258, + "chops": 26321, + "chor": 7567, + "chor": 47795, + "choral": 26684, + "chord": 33005, + "chords": 36152, + "choreo": 17443, + "choreographer": 35952, + "choreography": 32749, + "chores": 40483, + "chori": 25718, + "chorizo": 30802, + "chorus": 20869, + "chos": 26559, + "chose": 11090, + "chosen": 10044, + "chou": 16960, + "chou": 42917, + "choudhary": 45503, + "chow": 20257, + "chow": 21657, + "chowder": 37886, + "chp": 35896, + "chr": 36918, + "chri": 1135, + "chris": 9907, + "chris": 2978, + "chrisbrown": 41035, + "chriss": 46745, + "chrissy": 44762, + "chrissy": 40485, + "christ": 1403, + "christ": 6703, + "christchurch": 27100, + "christen": 31956, + "christensen": 42226, + "christi": 3328, + "christi": 33213, + "christian": 11792, + "christian": 4729, + "christianity": 20000, + "christians": 14842, + "christie": 16084, + "christin": 30189, + "christina": 15925, + "christine": 42610, + "christine": 14712, + "christma": 12039, + "christmas": 18174, + "christmas": 1677, + "christmaseve": 44381, + "christmass": 44873, + "christop": 7917, + "christoph": 47844, + "christophe": 45486, + "christopher": 33349, + "christopher": 9630, + "christy": 28331, + "chro": 13207, + "chromatic": 44207, + "chrome": 24843, + "chrome": 9529, + "chromo": 35809, + "chron": 5577, + "chron": 39781, + "chronic": 10115, + "chronic": 13677, + "chronicle": 20034, + "chronicles": 18905, + "chrono": 29387, + "chronograph": 38397, + "chry": 13508, + "chrysler": 20078, + "chs": 40277, + "chs": 8391, + "chsnews": 44919, + "cht": 11384, + "chter": 47811, + "chu": 3799, + "chu": 13622, + "chubby": 29109, + "chuck": 13211, + "chuck": 9894, + "chuckle": 35733, + "chucky": 42026, + "chuffed": 27233, + "chuk": 25878, + "chuk": 27221, + "chul": 33001, + "chum": 46869, + "chum": 41767, + "chun": 14693, + "chun": 25391, + "chung": 28418, + "chunk": 30275, + "chunks": 45538, + "chunky": 27978, + "chups": 46331, + "chur": 2309, + "church": 14956, + "church": 2735, + "churches": 15539, + "churchill": 17527, + "chus": 36246, + "chut": 28788, + "chutney": 36261, + "chy": 15131, + "chy": 8096, + "chyna": 43398, + "châ": 48669, + "ci": 698, + "ci": 5798, + "cia": 4019, + "cial": 1143, + "cian": 32323, + "ciao": 37677, + "ciara": 31369, + "cible": 28873, + "cic": 14539, + "cic": 21517, + "cid": 27359, + "cide": 34178, + "cider": 13547, + "cides": 41326, + "cie": 19730, + "cier": 24067, + "cies": 6785, + "cif": 35698, + "cigar": 26031, + "cigar": 16525, + "cigare": 13044, + "cigarette": 18548, + "cigarettes": 22750, + "cigars": 20750, + "cii": 42408, + "cil": 9217, + "cil": 2998, + "cilan": 33998, + "cilantro": 34568, + "cili": 18977, + "ciliation": 25294, + "cim": 30021, + "cin": 2396, + "cin": 25367, + "cina": 39467, + "cincin": 13291, + "cincinnati": 14197, + "cinco": 25131, + "cincode": 40930, + "cincodemayo": 42542, + "cincy": 30015, + "cincy": 30286, + "cinde": 20660, + "cinderella": 21515, + "cindy": 34439, + "cindy": 18532, + "cine": 4015, + "cine": 27451, + "cinema": 38251, + "cinema": 6443, + "cinemas": 14845, + 
"cinematic": 25602, + "cinemato": 21919, + "cinematographer": 39059, + "cinematography": 33802, + "ciner": 39882, + "cing": 4014, + "cini": 25699, + "cinnam": 12768, + "cinnamon": 13460, + "cino": 18616, + "cio": 44584, + "cio": 9954, + "cion": 22024, + "ciones": 37155, + "cious": 38466, + "cip": 32884, + "cir": 2459, + "cir": 41135, + "circa": 10411, + "circle": 33574, + "circle": 7117, + "circles": 19411, + "circling": 46036, + "circu": 5143, + "circuit": 35583, + "circuit": 9801, + "circuits": 33260, + "circul": 16618, + "circular": 19733, + "circulare": 39525, + "circulareconomy": 39878, + "circulated": 46258, + "circulating": 42980, + "circulation": 27880, + "circum": 13406, + "circumstances": 18786, + "circus": 11833, + "cirque": 36049, + "cis": 9459, + "cis": 23513, + "cisco": 36689, + "cisco": 19290, + "cise": 19657, + "cisely": 33434, + "cision": 41957, + "cism": 24166, + "cist": 40906, + "cit": 4420, + "cit": 31294, + "citadel": 38036, + "citation": 33581, + "cite": 32641, + "cited": 25069, + "cites": 34490, + "citi": 4280, + "citi": 30270, + "cities": 5441, + "citing": 29088, + "citiz": 5816, + "citizen": 11720, + "citizen": 9814, + "citizens": 7949, + "citizenship": 17386, + "cito": 42636, + "citro": 27941, + "citroen": 35805, + "citrus": 17379, + "city": 5002, + "city": 1305, + "cityfc": 28751, + "cityo": 25709, + "cityof": 11595, + "cityscape": 40808, + "ciu": 39693, + "cius": 42559, + "civ": 40039, + "civic": 32240, + "civic": 11888, + "civil": 6923, + "civil": 6450, + "civilian": 21187, + "civilians": 18076, + "civilization": 22503, + "civilwar": 34524, + "ción": 44700, + "cj": 15238, + "cj": 15205, + "ck": 916, + "ck": 868, + "cke": 25224, + "cke": 40989, + "cked": 3441, + "cken": 25566, + "cker": 15509, + "cker": 4744, + "ckers": 37073, + "cket": 5525, + "ckett": 33899, + "ckey": 15029, + "ckey": 3657, + "cki": 36916, + "cki": 41055, + "cking": 4805, + "cko": 28818, + "cks": 2031, + "cky": 26229, + "cky": 3083, + "cl": 969, + "cl": 6482, + "cla": 940, + "cla": 20636, + "clad": 31606, + "cladding": 46411, + "clai": 29459, + "claim": 4290, + "claim": 6607, + "claimed": 9010, + "claiming": 15286, + "claims": 6852, + "clair": 31441, + "clair": 14039, + "claire": 20410, + "claire": 10460, + "clam": 13588, + "clam": 32598, + "clamation": 21793, + "clamp": 41501, + "clams": 38849, + "clan": 29252, + "clan": 14114, + "clancy": 37227, + "clans": 38279, + "clap": 30037, + "clap": 25546, + "clapham": 43619, + "clapton": 37683, + "clar": 3617, + "clara": 19468, + "clare": 18948, + "clare": 15927, + "claremont": 47789, + "clarence": 29320, + "clari": 15175, + "clarify": 37004, + "clarinet": 41178, + "clarity": 21323, + "clark": 13340, + "clark": 7521, + "clarke": 11548, + "clarkson": 25706, + "clas": 32003, + "clash": 38367, + "clash": 9359, + "clashes": 25193, + "clasico": 43567, + "class": 2876, + "class": 1874, + "classes": 6919, + "classi": 2507, + "classic": 9353, + "classic": 2713, + "classical": 22179, + "classical": 11355, + "classicalmusic": 27806, + "classiccar": 46906, + "classiccars": 21064, + "classics": 10634, + "classification": 26612, + "classified": 22056, + "classmate": 37090, + "classmates": 30062, + "classof": 25345, + "classroom": 9001, + "classrooms": 25768, + "classy": 11615, + "clau": 7526, + "claude": 17461, + "claudi": 39439, + "claudia": 21893, + "claudio": 31230, + "claus": 23317, + "clause": 26151, + "clave": 24111, + "claw": 49230, + "claw": 19106, + "claws": 29161, + "clay": 10402, + "clay": 8823, + "clays": 26128, + "clayton": 46445, + "clayton": 19413, 
+ "clc": 31380, + "cle": 1321, + "cle": 2537, + "clean": 3572, + "clean": 3772, + "cleaned": 17468, + "cleanenergy": 43538, + "cleaner": 15619, + "cleaners": 33258, + "cleaning": 7210, + "cleanliness": 47886, + "cleans": 40827, + "cleanse": 28717, + "cleanser": 44170, + "cleansing": 25931, + "cleanup": 22353, + "clear": 4631, + "clear": 3143, + "clearance": 17959, + "cleared": 14880, + "clearer": 37031, + "clearing": 15481, + "clearly": 7767, + "clears": 29092, + "clearwater": 32124, + "cleary": 44342, + "cleats": 33486, + "cleavage": 44165, + "cled": 12827, + "clegg": 42915, + "clemens": 45896, + "clement": 22592, + "clement": 24714, + "clemente": 42461, + "clementine": 47112, + "clements": 49175, + "clemson": 38170, + "clemson": 19537, + "clen": 35547, + "cleo": 40344, + "cleop": 36287, + "cleopatra": 41212, + "cler": 11828, + "clergy": 42635, + "cleric": 43748, + "clerk": 22230, + "clermont": 47529, + "cles": 8077, + "cleve": 37599, + "clevel": 7701, + "cleveland": 30716, + "cleveland": 8430, + "clever": 30977, + "clever": 13385, + "clg": 47546, + "cli": 1503, + "clich": 44407, + "click": 16676, + "click": 3585, + "clicked": 29015, + "clicking": 26542, + "clicks": 31250, + "client": 48528, + "client": 7467, + "clients": 8114, + "clif": 13182, + "cliff": 23827, + "cliff": 10625, + "cliffe": 15170, + "clifford": 24226, + "cliffs": 20953, + "clifton": 23878, + "climat": 37283, + "climate": 7854, + "climate": 4589, + "climateaction": 31622, + "climatechange": 11055, + "climates": 46022, + "climax": 37033, + "climb": 7421, + "climb": 10649, + "climbed": 22528, + "climber": 36910, + "climbers": 47648, + "climbing": 9877, + "climbs": 29098, + "clin": 2879, + "clinch": 30404, + "clinched": 44064, + "cline": 37460, + "cling": 37068, + "cling": 4760, + "clinic": 7926, + "clinical": 35133, + "clinical": 9148, + "clinicians": 45866, + "clinics": 23330, + "clint": 37542, + "clint": 21160, + "clinton": 34403, + "clinton": 5820, + "clio": 46889, + "clip": 39712, + "clip": 9289, + "clipped": 45524, + "clipper": 42245, + "clippers": 23319, + "clipping": 47484, + "clips": 16594, + "clique": 34983, + "clive": 36086, + "clive": 21509, + "cll": 46091, + "cllr": 45743, + "cllr": 23034, + "clo": 1194, + "cloak": 36528, + "clock": 19878, + "clock": 6716, + "clocked": 49049, + "clocks": 25895, + "clockwise": 46150, + "clockwork": 42297, + "clon": 24477, + "clone": 22854, + "clones": 48047, + "clooney": 33161, + "clos": 48821, + "close": 10603, + "close": 2660, + "closed": 4552, + "closely": 13478, + "closer": 6377, + "closes": 11354, + "closest": 14975, + "closet": 14221, + "closeup": 35439, + "closing": 7101, + "closure": 13249, + "closures": 22923, + "cloth": 14559, + "clothes": 7080, + "clothing": 7425, + "clou": 4069, + "cloud": 12965, + "cloud": 3887, + "cloudcomputing": 41390, + "clouds": 6244, + "cloudy": 13106, + "clough": 42909, + "clover": 39574, + "clover": 22812, + "clow": 18386, + "clown": 15329, + "clowns": 30820, + "cls": 44251, + "clt": 29651, + "clt": 24236, + "clu": 996, + "club": 9642, + "club": 1736, + "clubbing": 48128, + "clubhouse": 26553, + "clubs": 9437, + "clue": 14994, + "clueless": 35350, + "clues": 23764, + "clusive": 41362, + "cluster": 15595, + "clusters": 33217, + "clut": 28507, + "clutch": 13953, + "clutter": 40804, + "cly": 12037, + "clyde": 39557, + "clyde": 18469, + "cm": 10190, + "cm": 3741, + "cma": 30554, + "cma": 31388, + "cmc": 45839, + "cmdr": 48250, + "cme": 34946, + "cmo": 24589, + "cmon": 42904, + "cmp": 46355, + "cms": 22520, + "cmt": 42727, + "cmu": 43046, + "cn": 
3886,
+ "cn": 16200,
+ "cna": 48287,
+ "cnbc": 41242,
+ ... (several thousand further alphabetical "token": id vocabulary entries, one per added line in the source diff, spanning "cnblue" through "django"; representative entries include "coffee": 2453, "community": 1927, "design": 1681) ...
+ "dji": 28379,
+ "djing": 36113,
+ "djo": 
19432, + "djoker": 42721, + "djokernole": 42830, + "djokovic": 27944, + "djs": 18117, + "dk": 20702, + "dk": 16196, + "dl": 12558, + "dl": 9373, + "dlc": 19079, + "dle": 11057, + "dle": 3287, + "dled": 23494, + "dler": 40279, + "dles": 7890, + "dless": 14997, + "dley": 12808, + "dling": 18221, + "dly": 3069, + "dm": 19070, + "dm": 4667, + "dma": 42903, + "dman": 18826, + "dmc": 28991, + "dmit": 31607, + "dmitry": 48326, + "dms": 19955, + "dmv": 27508, + "dmx": 45255, + "dn": 11552, + "dn": 7459, + "dna": 8790, + "dnb": 35422, + "dnc": 20237, + "dnd": 11678, + "dnr": 37051, + "dns": 39245, + "dnt": 26795, + "do": 639, + "do": 818, + "doa": 48332, + "dob": 29640, + "doba": 35605, + "dobbs": 43006, + "dobson": 46888, + "doc": 3009, + "doc": 7251, + "doch": 25101, + "dock": 17311, + "dock": 8997, + "docked": 46784, + "docker": 31152, + "docking": 40845, + "docks": 24091, + "docs": 15157, + "doctor": 7872, + "doctor": 5547, + "doctoral": 23649, + "doctorate": 39134, + "doctors": 9705, + "doctorwho": 12996, + "doctr": 28497, + "doctrine": 35612, + "docu": 4433, + "document": 29293, + "document": 15121, + "documentaries": 44209, + "documentary": 7881, + "documentation": 31560, + "documented": 22310, + "documenting": 37876, + "documents": 14105, + "dod": 13847, + "dod": 30187, + "dodd": 36748, + "dodge": 31263, + "dodge": 12093, + "dodgeball": 43244, + "dodger": 31641, + "dodgers": 12422, + "dodgy": 37727, + "doe": 13296, + "does": 2397, + "does": 1897, + "doesn": 2503, + "doesnt": 17937, + "dof": 8277, + "doff": 20193, + "dofficial": 42516, + "dog": 4326, + "dog": 1929, + "dogcelebration": 41819, + "dogday": 27475, + "doge": 42187, + "dogg": 20749, + "doggie": 32237, + "doggo": 42155, + "doggy": 26359, + "doglo": 40733, + "dogre": 40030, + "dogrescue": 44158, + "dogs": 42182, + "dogs": 3255, + "dogsoftwitter": 19415, + "doh": 23581, + "doha": 20908, + "doherty": 31774, + "doi": 36361, + "doin": 15412, + "doing": 37408, + "doing": 1960, + "doit": 32272, + "doit": 28109, + "doj": 25700, + "dojo": 35901, + "dok": 40547, + "dok": 41034, + "doka": 46528, + "dol": 2287, + "dol": 19170, + "dola": 38005, + "dolan": 27200, + "dolby": 42414, + "dolce": 30033, + "dolce": 30661, + "dole": 41040, + "doll": 27031, + "doll": 9286, + "dollar": 35092, + "dollar": 7474, + "dollars": 10669, + "dolls": 15090, + "dolly": 43281, + "dolly": 23821, + "dolom": 37137, + "dolores": 40741, + "dolph": 8900, + "dolph": 22257, + "dolphin": 42963, + "dolphin": 16464, + "dolphins": 14002, + "dom": 2164, + "dom": 1919, + "domain": 15492, + "domaine": 48744, + "domains": 36358, + "dome": 8515, + "dome": 9827, + "domen": 37584, + "domest": 21936, + "domestic": 28189, + "domestic": 9043, + "domin": 4361, + "dominance": 30546, + "dominant": 20565, + "dominate": 21431, + "dominated": 23048, + "dominates": 34043, + "dominating": 29303, + "domination": 30919, + "domingo": 24882, + "dominic": 39007, + "dominic": 19095, + "dominican": 22934, + "dominion": 27155, + "domino": 30752, + "dominos": 39770, + "domo": 44293, + "doms": 30126, + "don": 1067, + "don": 847, + "dona": 26789, + "donal": 42375, + "donald": 5990, + "donald": 4335, + "donaldson": 37783, + "donaldtrump": 6652, + "donat": 36384, + "donate": 6429, + "donated": 8705, + "donates": 26960, + "donating": 12621, + "donation": 7924, + "donations": 9928, + "doncaster": 38008, + "doncaster": 25352, + "doncasterisgreat": 47333, + "done": 5136, + "done": 1700, + "donegal": 24172, + "donesia": 41281, + "donet": 33724, + "donetsk": 33999, + "dong": 26242, + "dong": 31478, + "dongha": 
28365, + "donghae": 28945, + "donia": 24014, + "donkey": 21415, + "donkeys": 44644, + "donna": 9158, + "donne": 30897, + "donnein": 38308, + "donneinarte": 40193, + "donnell": 35118, + "donnelly": 39070, + "donnie": 47058, + "donnie": 30609, + "donny": 37291, + "donny": 32887, + "dono": 14840, + "donor": 18013, + "donors": 17887, + "donovan": 21499, + "dons": 22127, + "dont": 8094, + "dont": 4632, + "donut": 18471, + "donuts": 13970, + "doo": 4543, + "doo": 11643, + "doodle": 9388, + "doodled": 41030, + "doodles": 22156, + "doodling": 37548, + "dooley": 47609, + "doom": 23263, + "doom": 14344, + "doomed": 33251, + "doomsday": 41791, + "doon": 36612, + "doop": 33886, + "door": 7188, + "door": 2489, + "doors": 4228, + "doorstep": 19533, + "doorway": 46575, + "dop": 42381, + "dop": 31722, + "dope": 42587, + "dope": 10094, + "doping": 30285, + "dopp": 21774, + "doppelg": 45216, + "doppler": 42540, + "dor": 2766, + "dor": 8695, + "dora": 18104, + "dorado": 32350, + "dorchester": 32656, + "dore": 39423, + "dores": 34323, + "dorf": 17296, + "dori": 49270, + "doria": 43186, + "dorian": 44016, + "doris": 24285, + "dork": 36206, + "dorm": 24263, + "doro": 15498, + "doro": 37389, + "dorothy": 20805, + "dors": 31240, + "dorset": 42109, + "dorset": 16047, + "dorsey": 41607, + "dortmund": 24290, + "dory": 36135, + "dos": 44258, + "dos": 5474, + "dose": 11497, + "doses": 37873, + "dossier": 46042, + "dost": 44222, + "dot": 7473, + "dot": 7004, + "dota": 23085, + "dotcom": 12443, + "dote": 31202, + "dothis": 47864, + "dotnet": 43124, + "dotorg": 46587, + "dots": 19019, + "dotted": 47950, + "dou": 1756, + "dou": 23608, + "doub": 19631, + "double": 13013, + "double": 3200, + "doubled": 24948, + "doubleheader": 34668, + "doubles": 12539, + "doubling": 36850, + "doubt": 37071, + "doubt": 8671, + "doubts": 30894, + "douche": 44292, + "doug": 20271, + "doug": 10758, + "dough": 15785, + "dough": 14983, + "doughnut": 32555, + "doughnuts": 31124, + "dougie": 46317, + "dougla": 9140, + "douglas": 10065, + "douglass": 45692, + "doun": 44785, + "dov": 38856, + "dova": 26551, + "dove": 27511, + "dove": 18281, + "dover": 43019, + "dover": 14683, + "doves": 47067, + "dow": 8022, + "dow": 10688, + "dowell": 27344, + "down": 1833, + "down": 1136, + "downe": 46501, + "downed": 35814, + "downer": 42522, + "downers": 43739, + "downey": 29429, + "downfall": 48702, + "downhill": 27387, + "downing": 28140, + "download": 35076, + "download": 3794, + "downloadable": 49105, + "downloaded": 22961, + "downloading": 30519, + "downloads": 26481, + "downpour": 39034, + "downpours": 40160, + "downs": 10706, + "downside": 41937, + "downstairs": 28174, + "downstream": 43822, + "downtime": 41964, + "downton": 45023, + "downton": 42668, + "downtown": 18230, + "downtown": 5061, + "downward": 37430, + "dowski": 43556, + "dox": 44786, + "dox": 14510, + "doyle": 17728, + "doyou": 27256, + "doz": 31106, + "dozen": 16401, + "dozens": 17883, + "dp": 23820, + "dp": 6465, + "dprint": 46644, + "dprinting": 16194, + "dprk": 47920, + "dps": 34288, + "dq": 28741, + "dr": 1084, + "dr": 1701, + "dra": 1114, + "dra": 7402, + "drac": 20168, + "dracing": 41253, + "dracula": 25405, + "draf": 37426, + "draft": 30624, + "draft": 5198, + "drafted": 19129, + "drafting": 33528, + "drafts": 29194, + "drag": 8452, + "drag": 12463, + "dragged": 27884, + "dragging": 37069, + "dragon": 9187, + "dragon": 5471, + "dragonball": 40959, + "dragoncon": 47802, + "dragonfly": 32824, + "dragons": 10203, + "dragrace": 40762, + "drags": 45368, + "drain": 23347, + "drain": 19467, 
+ "drainage": 25953, + "drained": 44630, + "drains": 43638, + "drainthe": 47337, + "drake": 32504, + "drake": 8958, + "dral": 7503, + "dram": 6937, + "dram": 32170, + "drama": 5055, + "dramas": 33467, + "dramati": 43512, + "dramatic": 11240, + "dramatically": 24495, + "drank": 21712, + "draped": 49113, + "drastic": 43159, + "drastically": 35478, + "drau": 18621, + "draw": 17675, + "draw": 4001, + "drawer": 23219, + "drawers": 38975, + "drawing": 36996, + "drawing": 3610, + "drawings": 13397, + "drawn": 8893, + "draws": 12043, + "dray": 25562, + "drayton": 49044, + "drc": 21434, + "dre": 960, + "dre": 14584, + "dread": 17412, + "dread": 31403, + "dreaded": 47227, + "dreadful": 35846, + "dreality": 48367, + "dream": 4595, + "dream": 2984, + "dreambig": 46495, + "dreamcast": 47226, + "dreamed": 27984, + "dreamer": 25692, + "dreamers": 27194, + "dreaming": 11662, + "dreamliner": 49143, + "dreams": 4405, + "dreamt": 43743, + "dreamteam": 40090, + "dreamy": 23517, + "dred": 10903, + "dredge": 48783, + "dren": 29068, + "dren": 47309, + "drenched": 46378, + "dres": 48852, + "dres": 44697, + "dresden": 34836, + "dress": 12622, + "dress": 2595, + "dressage": 36144, + "dressed": 6559, + "dresser": 26346, + "dresses": 8184, + "dressing": 6348, + "drew": 18792, + "drew": 5281, + "drex": 33985, + "drey": 48271, + "dri": 1203, + "dri": 28833, + "drian": 36870, + "dribb": 42153, + "dric": 23448, + "dridge": 22956, + "drie": 40170, + "dried": 16037, + "drier": 39877, + "dries": 33857, + "drif": 33585, + "drift": 18194, + "drifting": 30276, + "drill": 11626, + "drilled": 46338, + "drilling": 18634, + "drills": 24378, + "drin": 3375, + "drin": 47133, + "drink": 14131, + "drink": 3979, + "drinking": 5778, + "drinklocal": 45998, + "drinks": 6732, + "drip": 24050, + "dripping": 38787, + "dris": 35804, + "drive": 11402, + "drive": 2620, + "driven": 9314, + "driver": 27563, + "driver": 4383, + "driverless": 46769, + "drivers": 7384, + "drives": 11441, + "driveway": 26273, + "driving": 37800, + "driving": 4161, + "drizzle": 28240, + "drm": 39674, + "dro": 1494, + "dro": 12442, + "drogba": 49199, + "droid": 38016, + "drome": 9157, + "dron": 43898, + "dron": 23360, + "drone": 33557, + "drone": 9397, + "drones": 14006, + "droo": 30715, + "drool": 41554, + "drooling": 44360, + "drop": 16407, + "drop": 3387, + "dropbox": 47216, + "dropped": 6792, + "dropping": 8339, + "drops": 6437, + "dros": 47033, + "drou": 38558, + "drought": 13935, + "drove": 13753, + "drow": 21159, + "drown": 28571, + "drowned": 34005, + "drowning": 24618, + "drs": 21257, + "dru": 2275, + "dru": 49048, + "drug": 20601, + "drug": 5600, + "drugs": 8021, + "druid": 40297, + "drum": 13353, + "drum": 8698, + "drummer": 13618, + "drummers": 46191, + "drumming": 35480, + "drummond": 42213, + "drums": 11690, + "drun": 15488, + "drunk": 37398, + "drunk": 8232, + "drunken": 28196, + "drupal": 46481, + "drush": 43009, + "drwho": 48342, + "dry": 13544, + "dry": 4501, + "dryer": 24425, + "drying": 23203, + "ds": 3361, + "ds": 646, + "dsa": 47607, + "dsb": 47168, + "dsb": 14257, + "dsburg": 47237, + "dsc": 37240, + "dsd": 45383, + "dsley": 40740, + "dslr": 33740, + "dsm": 39502, + "dson": 40310, + "dsp": 45291, + "dss": 41580, + "dstv": 35027, + "dt": 13104, + "dt": 7427, + "dthe": 13863, + "dtla": 31885, + "dtm": 42407, + "dts": 46233, + "du": 691, + "du": 3686, + "dua": 25244, + "dual": 39739, + "dual": 5347, + "duane": 38946, + "dub": 14526, + "dub": 13144, + "duba": 5485, + "dubai": 32599, + "dubai": 5985, + "dubbed": 27740, + "dublin": 20707, + "dublin": 
6145, + "dubnation": 47329, + "dubois": 48046, + "dubrov": 46709, + "dubrovnik": 48724, + "dubs": 27013, + "dubstep": 38303, + "dubu": 43257, + "duc": 979, + "duc": 36446, + "ducati": 28570, + "ducation": 17197, + "duce": 3660, + "duchess": 21713, + "duck": 12708, + "duck": 6910, + "ducks": 11202, + "duct": 26829, + "dude": 48087, + "dude": 5710, + "dudes": 14449, + "dudley": 27324, + "due": 2887, + "duel": 27143, + "dues": 37646, + "duet": 25457, + "duf": 38713, + "duff": 38071, + "duff": 21934, + "duffy": 23599, + "dug": 22743, + "dug": 21000, + "dugg": 40523, + "duggan": 46169, + "dugout": 36831, + "duh": 26716, + "dui": 29693, + "duk": 14160, + "duke": 18402, + "duke": 7732, + "dukes": 27914, + "dul": 6738, + "dulce": 44872, + "dulil": 32565, + "dulkar": 47980, + "dull": 19433, + "dulu": 28865, + "duluth": 32109, + "dulwich": 47343, + "dum": 13400, + "dum": 11564, + "dumb": 15901, + "dumb": 12464, + "dumbass": 38980, + "dummies": 40899, + "dummy": 34246, + "dump": 12655, + "dump": 17146, + "dumped": 23768, + "dumping": 31707, + "dumplings": 35495, + "dumps": 45804, + "dumpster": 45467, + "dun": 2616, + "dun": 18284, + "dunbar": 41453, + "duncan": 31084, + "duncan": 13502, + "dundal": 38185, + "dundas": 39300, + "dundee": 18619, + "dune": 32833, + "dune": 28208, + "dunedin": 40121, + "dunes": 23526, + "dung": 33712, + "dungeon": 28812, + "dungeon": 22931, + "dungeons": 42572, + "dungeonsand": 34970, + "dungeonsanddragons": 35497, + "dunham": 42501, + "duni": 43454, + "dunk": 17222, + "dunkin": 48022, + "dunkin": 36415, + "dunkirk": 46928, + "dunks": 48977, + "dunlop": 34753, + "dunn": 19185, + "dunne": 38538, + "dunno": 24502, + "duo": 8696, + "dup": 36805, + "dup": 10445, + "duper": 44850, + "duplex": 41186, + "duplic": 28992, + "dupont": 35994, + "dur": 4355, + "dur": 23230, + "dura": 28173, + "dura": 47382, + "durability": 43671, + "durable": 22285, + "duran": 28185, + "durango": 44443, + "durant": 24861, + "duras": 27518, + "duration": 31663, + "durban": 24474, + "dure": 19108, + "durga": 38456, + "durham": 26765, + "durham": 14335, + "during": 1590, + "dus": 9931, + "dusa": 28546, + "dusk": 19708, + "dust": 29723, + "dust": 8349, + "dusted": 38274, + "duster": 46280, + "dustin": 42423, + "dustin": 21235, + "dusting": 41756, + "dusty": 22029, + "dut": 32625, + "dutch": 22277, + "dutch": 7991, + "duter": 21624, + "duterte": 22371, + "duties": 19603, + "dutt": 30081, + "dutton": 42771, + "duty": 6458, + "duval": 42459, + "duvet": 48006, + "dux": 28562, + "dv": 4288, + "dv": 26265, + "dvd": 7170, + "dvds": 36655, + "dvn": 29811, + "dvr": 29210, + "dw": 8455, + "dw": 19997, + "dwar": 13487, + "dwarf": 22643, + "dwayne": 31395, + "dwell": 27549, + "dwell": 18755, + "dwelling": 37098, + "dwight": 22473, + "dwp": 46976, + "dwts": 30220, + "dwyer": 43878, + "dx": 22717, + "dx": 15679, + "dy": 1444, + "dy": 907, + "dyce": 48325, + "dye": 37159, + "dye": 15997, + "dyed": 24906, + "dyer": 29495, + "dyes": 39874, + "dying": 5115, + "dyk": 12142, + "dyke": 32632, + "dylan": 21004, + "dylan": 9900, + "dyn": 44289, + "dyn": 30669, + "dynam": 5735, + "dynamic": 10057, + "dynamics": 14329, + "dynamite": 29003, + "dynamo": 28281, + "dynasty": 14593, + "dyne": 42756, + "dyou": 11484, + "dyour": 22525, + "dys": 11022, + "dys": 38384, + "dysfunction": 36865, + "dysfunctional": 40757, + "dysle": 33681, + "dyslexia": 43199, + "dyson": 34475, + "dyssey": 17435, + "dystop": 28276, + "dystopian": 38915, + "dz": 24421, + "dz": 22913, + "dé": 25466, + "dü": 46948, + "dÃŃ": 46988, + "e": 68, + "e": 324, + "ea": 
2150, + "ea": 8100, + "eable": 20693, + "each": 31442, + "each": 2416, + "eachother": 40792, + "ead": 42556, + "ead": 45523, + "eae": 27446, + "eag": 3743, + "eager": 21551, + "eagerly": 30094, + "eagle": 20207, + "eagle": 7517, + "eagles": 6920, + "eal": 48872, + "ealing": 40484, + "eames": 49072, + "eamon": 45954, + "ean": 13327, + "ear": 1055, + "ear": 8373, + "earbuds": 47807, + "eared": 9127, + "earl": 30573, + "earl": 14235, + "earle": 40292, + "earlier": 4297, + "earliest": 22097, + "early": 15840, + "early": 2090, + "earn": 33977, + "earn": 8465, + "earned": 8898, + "earnest": 45422, + "earning": 14550, + "earnings": 15912, + "earns": 16760, + "earp": 35296, + "earphones": 44905, + "earring": 28664, + "earrings": 9136, + "ears": 9861, + "eart": 7086, + "earth": 5184, + "earth": 3475, + "earthand": 34229, + "earthandclouds": 34480, + "earthday": 19481, + "earthquake": 10060, + "earthquakes": 32895, + "earthy": 47139, + "earts": 38824, + "eas": 5740, + "ease": 13574, + "easier": 8817, + "easiest": 26314, + "easily": 8197, + "easing": 44825, + "easport": 42251, + "east": 5022, + "east": 2602, + "eastbound": 28827, + "eastbourne": 38455, + "eastenders": 23545, + "easter": 14783, + "easter": 4811, + "eastern": 34522, + "eastern": 6311, + "eastman": 48280, + "easton": 29619, + "eastside": 42650, + "eastwood": 28270, + "easy": 18308, + "easy": 3176, + "eat": 5418, + "eat": 3384, + "eaten": 16750, + "eater": 24060, + "eaters": 37645, + "eatery": 46559, + "eating": 4371, + "eatlocal": 42868, + "eaton": 28462, + "eats": 13188, + "eau": 17608, + "eazy": 36536, + "eb": 12283, + "eb": 8677, + "eba": 40889, + "ebay": 34412, + "ebay": 4099, + "eber": 34020, + "ebo": 46635, + "ebola": 15864, + "ebon": 22013, + "ebony": 30651, + "ebook": 13122, + "ebooks": 25774, + "ec": 747, + "ec": 10879, + "eca": 18465, + "ecar": 34500, + "ecb": 26205, + "ecc": 33128, + "eccc": 47401, + "eccentric": 43228, + "eccle": 27494, + "ece": 2163, + "eces": 5905, + "ecg": 45983, + "ech": 15797, + "ech": 31147, + "echel": 41233, + "echo": 17366, + "echo": 13989, + "echoes": 32564, + "eci": 31936, + "eck": 25866, + "eck": 15969, + "ecker": 39661, + "ecker": 40890, + "ecla": 47806, + "eclec": 25114, + "eclectic": 28382, + "eclip": 30841, + "eclipse": 11505, + "eclub": 38983, + "eco": 5106, + "eco": 10077, + "ecofriendly": 43412, + "ecol": 22706, + "ecological": 25127, + "ecology": 18578, + "ecommerce": 15529, + "econ": 26755, + "econ": 21158, + "econom": 2768, + "economic": 36649, + "economic": 5259, + "economical": 48782, + "economically": 39406, + "economics": 12625, + "economies": 27136, + "economist": 18836, + "economists": 43701, + "economy": 5644, + "ecor": 28962, + "ecosystem": 15788, + "ecosystems": 28725, + "ecoun": 27924, + "ecr": 48572, + "ecraft": 11439, + "ecs": 23485, + "ecstasy": 47286, + "ecstatic": 36244, + "ect": 25168, + "ecu": 13087, + "ecu": 32919, + "ecuador": 19813, + "ecz": 43530, + "ed": 843, + "ed": 538, + "eda": 10804, + "edad": 44724, + "eday": 39258, + "edc": 21245, + "edchat": 14702, + "edd": 35431, + "eddi": 42930, + "eddie": 22748, + "eddie": 9517, + "eddy": 25959, + "ede": 29632, + "eded": 19555, + "edel": 20460, + "edelman": 48139, + "eden": 23621, + "eden": 13741, + "eder": 16249, + "edes": 36247, + "edfringe": 27402, + "edg": 35955, + "edgar": 33543, + "edgar": 17914, + "edge": 16914, + "edge": 5461, + "edged": 39188, + "edges": 20938, + "edgy": 35393, + "edi": 8750, + "edi": 27148, + "edible": 19795, + "edic": 25184, + "edics": 30641, + "edin": 6524, + "edinburgh": 27574, + "edinburgh": 
8068, + "eding": 5742, + "edison": 25846, + "edit": 8239, + "edit": 8013, + "edited": 13945, + "edith": 28597, + "editing": 10178, + "edition": 3062, + "editions": 21664, + "editor": 7661, + "editorial": 12325, + "editors": 19486, + "edits": 24945, + "edm": 37843, + "edm": 13539, + "edmon": 11275, + "edmond": 41581, + "edmonds": 46520, + "edmonton": 37311, + "edmonton": 15058, + "edmun": 36561, + "edmund": 27567, + "edna": 39002, + "edo": 29145, + "edo": 18096, + "edon": 41467, + "edor": 30184, + "edou": 47678, + "edp": 46066, + "eds": 1941, + "edsheeran": 30386, + "edt": 15071, + "edtech": 41825, + "edtech": 15262, + "edu": 11757, + "edu": 11799, + "eduardo": 30604, + "educ": 2200, + "educate": 17563, + "educated": 21447, + "education": 22358, + "education": 2806, + "educational": 10400, + "educator": 19875, + "educators": 15420, + "edwar": 27586, + "edward": 26184, + "edward": 7450, + "edwards": 12627, + "edwin": 48718, + "edwin": 22471, + "edy": 17072, + "edy": 4144, + "ee": 2644, + "ee": 4708, + "eed": 17513, + "eee": 24632, + "eee": 9361, + "eeee": 11696, + "eeee": 17570, + "eeeee": 26938, + "eeeeee": 41407, + "eek": 46591, + "eel": 27462, + "eels": 44416, + "eem": 27236, + "een": 47490, + "een": 21230, + "eer": 35409, + "eer": 31846, + "eera": 36664, + "eerie": 33846, + "ees": 40308, + "eet": 48935, + "eez": 39033, + "ef": 1490, + "ef": 1829, + "efa": 16999, + "eface": 48804, + "efan": 33556, + "efc": 22065, + "efcc": 46087, + "efer": 26199, + "eff": 20548, + "eff": 21715, + "effe": 2808, + "effec": 3943, + "effect": 5436, + "effective": 6837, + "effectively": 17516, + "effectiveness": 26847, + "effects": 7331, + "effic": 36004, + "efficacy": 39937, + "effici": 6670, + "efficiency": 11823, + "efficient": 11334, + "efficiently": 32915, + "effor": 6356, + "effort": 40078, + "effort": 6255, + "effortless": 41639, + "effortlessly": 42320, + "efforts": 6847, + "efish": 35813, + "efl": 27172, + "efron": 48111, + "efs": 7389, + "eg": 8053, + "eg": 14599, + "ega": 41193, + "egan": 42943, + "eger": 46704, + "eger": 22767, + "egg": 13778, + "egg": 5911, + "eggplant": 34906, + "eggs": 7099, + "ego": 34712, + "ego": 14250, + "egos": 43992, + "egre": 27044, + "egret": 42002, + "egy": 5224, + "egyp": 10250, + "egypt": 7267, + "egyptian": 12428, + "eh": 9277, + "eh": 9135, + "eha": 48563, + "ehealth": 48617, + "ehr": 45271, + "ehs": 44648, + "ei": 4006, + "ei": 18264, + "eic": 40251, + "eid": 28038, + "eid": 13979, + "eidmubarak": 46275, + "eiffel": 29720, + "eigh": 13468, + "eight": 7910, + "eighteen": 49316, + "eighth": 21237, + "eighty": 47449, + "eil": 29457, + "eileen": 31468, + "ein": 29944, + "ein": 24524, + "eindhoven": 47172, + "eing": 7702, + "einstein": 20587, + "eira": 47708, + "eis": 13802, + "eisen": 25273, + "eisenhower": 35562, + "either": 6036, + "ej": 19887, + "ej": 25009, + "ejec": 29771, + "ek": 4212, + "ek": 2092, + "el": 544, + "el": 832, + "ela": 11284, + "ela": 3787, + "elab": 38866, + "elabor": 26034, + "elaborate": 33855, + "elaine": 22523, + "elan": 17763, + "elan": 18399, + "eland": 24930, + "eland": 6275, + "elas": 41078, + "elast": 27479, + "elastic": 30282, + "elba": 48598, + "elbow": 21965, + "eld": 5684, + "elder": 11791, + "elder": 14416, + "elderly": 15455, + "elders": 28617, + "eldest": 33503, + "elding": 28223, + "elds": 13466, + "ele": 2084, + "ele": 9766, + "eleague": 36577, + "eleanor": 18604, + "elearning": 29969, + "elec": 1564, + "elec": 38768, + "elect": 15336, + "elected": 8828, + "election": 19312, + "election": 4247, + "electionday": 40540, + 
"elections": 6949, + "elector": 16465, + "electoral": 19544, + "electr": 3654, + "electra": 48959, + "electri": 23927, + "electric": 19547, + "electric": 5031, + "electrical": 12176, + "electrician": 46422, + "electricity": 10950, + "electrifying": 48843, + "electro": 11648, + "electro": 23244, + "electromagnetic": 46530, + "electron": 33396, + "electronic": 33865, + "electronic": 9273, + "electronica": 43119, + "electronics": 13081, + "eled": 20357, + "elee": 44112, + "eleg": 8075, + "elegance": 19146, + "elegant": 11124, + "elek": 34559, + "elem": 25406, + "element": 14909, + "elementary": 8143, + "elements": 10925, + "elen": 30654, + "elen": 39164, + "elena": 19421, + "eleng": 48180, + "eleph": 7554, + "elephant": 10299, + "elephants": 16871, + "eler": 24646, + "eless": 15244, + "eless": 30837, + "elets": 19400, + "elev": 7921, + "elevate": 26736, + "elevated": 23967, + "elevation": 23826, + "elevator": 19021, + "eleven": 31617, + "eleven": 17795, + "elf": 45961, + "elf": 11924, + "elfie": 39955, + "elg": 28790, + "elgin": 31868, + "eli": 1018, + "eli": 6292, + "elia": 10956, + "elian": 42508, + "elias": 47274, + "elias": 29902, + "elic": 34743, + "elic": 13492, + "elie": 38677, + "elie": 26501, + "elier": 14634, + "elife": 37429, + "elife": 12719, + "eligibility": 34937, + "eligible": 16978, + "elijah": 26065, + "elike": 48913, + "elim": 9296, + "elimin": 11386, + "eliminate": 19655, + "eliminated": 29075, + "eliminating": 36619, + "elimination": 24176, + "elin": 25353, + "elin": 13458, + "eline": 46199, + "eline": 7153, + "eling": 9990, + "elio": 47943, + "elion": 30682, + "elions": 44159, + "eliot": 33326, + "elis": 23411, + "elis": 48021, + "elisa": 25610, + "elisa": 44051, + "elisabeth": 33127, + "elise": 27124, + "elit": 40882, + "elite": 32277, + "elite": 6553, + "elited": 43943, + "elitedangerous": 47138, + "elites": 35975, + "elius": 35623, + "elive": 49338, + "elive": 23505, + "elives": 49174, + "elix": 32926, + "elixir": 42887, + "eliz": 42844, + "eliza": 6132, + "eliza": 29992, + "elizabeth": 22397, + "elizabeth": 7026, + "elk": 34013, + "elk": 21896, + "ell": 826, + "ell": 812, + "ella": 20692, + "ella": 2957, + "elland": 43326, + "ellar": 38443, + "ellas": 37053, + "elle": 12818, + "elle": 4765, + "elled": 13146, + "ellen": 14007, + "ellen": 12312, + "ellenshow": 34812, + "eller": 20927, + "eller": 4465, + "ellers": 19010, + "elles": 24431, + "elli": 3367, + "elli": 6673, + "ellic": 38905, + "ellie": 16769, + "ellier": 44054, + "ellin": 40374, + "elling": 2220, + "ellington": 34477, + "ellini": 43256, + "elliot": 20761, + "elliott": 44456, + "elliott": 13788, + "ellip": 44816, + "ellis": 11553, + "ellison": 32295, + "ello": 2512, + "ellor": 14594, + "ells": 2433, + "ellu": 35560, + "elly": 8041, + "elly": 20355, + "elm": 25199, + "elm": 22082, + "elman": 33622, + "elmer": 45958, + "elmo": 32150, + "elo": 6170, + "elo": 13490, + "elon": 26381, + "elon": 20406, + "elondon": 47377, + "elong": 44363, + "elonmusk": 37076, + "elope": 23367, + "eloqu": 37795, + "elos": 44733, + "elot": 43490, + "elove": 43319, + "elove": 19165, + "elover": 21732, + "elovers": 33946, + "els": 35958, + "els": 1645, + "elsa": 22050, + "else": 18857, + "else": 3344, + "elsewhere": 22906, + "elson": 19624, + "elt": 18692, + "elton": 20758, + "elu": 14208, + "elusive": 28903, + "elves": 29111, + "elvi": 47008, + "elvis": 47359, + "elvis": 14498, + "elxn": 37726, + "ely": 12189, + "ely": 1273, + "elyn": 29691, + "elyn": 18126, + "em": 908, + "em": 2270, + "ema": 7002, + "ema": 11131, + "emabiggest": 
23101, + "emabiggestfans": 29587, + "email": 33537, + "email": 4462, + "emailed": 40470, + "emailmarketing": 40188, + "emails": 12871, + "eman": 24416, + "eman": 36868, + "emancip": 42996, + "emanuel": 35232, + "emb": 3692, + "embar": 8266, + "embaras": 48019, + "embark": 33953, + "embarra": 11382, + "embarrass": 27183, + "embarrassed": 28217, + "embarrassing": 19653, + "embarrassment": 41346, + "embassy": 13598, + "embe": 46041, + "embed": 19703, + "embedded": 22046, + "embelli": 32144, + "embellished": 46992, + "ember": 47049, + "emblem": 21163, + "embo": 23065, + "embr": 35267, + "embrac": 16928, + "embrace": 12118, + "embraced": 35739, + "embraces": 38404, + "embracing": 22196, + "embro": 12550, + "embroi": 18667, + "embroide": 21530, + "embroidered": 22381, + "embroidery": 20823, + "emc": 20897, + "emc": 31602, + "emcee": 42038, + "eme": 22910, + "eme": 21548, + "emea": 40352, + "emed": 11028, + "emen": 22033, + "ement": 40841, + "ement": 2057, + "ements": 11058, + "emer": 3132, + "emer": 25727, + "emerald": 46878, + "emerald": 16980, + "emerge": 22182, + "emerged": 26425, + "emergen": 24096, + "emergence": 39867, + "emergencies": 35759, + "emergency": 44038, + "emergency": 5897, + "emerges": 30801, + "emerging": 38174, + "emerging": 11113, + "emeritus": 35333, + "emerson": 24147, + "emery": 32678, + "emi": 44327, + "emi": 18525, + "emil": 26794, + "emil": 40624, + "emile": 43926, + "emili": 20709, + "emilia": 34238, + "emilio": 39722, + "emily": 14545, + "emily": 7640, + "emin": 17227, + "emin": 23995, + "eminem": 22129, + "eminent": 33779, + "eming": 40398, + "emir": 13337, + "emir": 47613, + "emirates": 47244, + "emirates": 17867, + "emission": 27761, + "emissions": 14172, + "emit": 49043, + "emma": 18177, + "emma": 7445, + "emmanuel": 48045, + "emmanuel": 20411, + "emmett": 45779, + "emmy": 35625, + "emmy": 17089, + "emmys": 21875, + "emo": 3738, + "emo": 19381, + "emoji": 16327, + "emojis": 27870, + "emon": 34406, + "emor": 45034, + "emory": 44274, + "emotion": 17464, + "emotional": 7357, + "emotionally": 24088, + "emotions": 12904, + "emp": 3831, + "emp": 41004, + "empathy": 22420, + "emper": 12522, + "emperor": 13828, + "empha": 16237, + "emphasi": 47176, + "emphasis": 29588, + "empire": 26212, + "empire": 7614, + "empires": 46510, + "emplo": 3409, + "employ": 37290, + "employ": 39626, + "employe": 5037, + "employed": 26567, + "employee": 36631, + "employee": 9560, + "employees": 7377, + "employer": 21296, + "employers": 17647, + "employment": 10959, + "empor": 27386, + "emporium": 48541, + "empower": 13612, + "empower": 17230, + "empowered": 29087, + "empowering": 20086, + "empowerment": 15747, + "empowers": 46206, + "empress": 26656, + "empty": 41203, + "empty": 7893, + "emra": 39259, + "ems": 2858, + "emt": 46360, + "emu": 48149, + "emu": 29296, + "emul": 23272, + "emy": 31076, + "en": 524, + "en": 576, + "ena": 3452, + "enab": 17308, + "enable": 15642, + "enabled": 23666, + "enables": 23417, + "enabling": 23590, + "enam": 41486, + "enamel": 22746, + "enary": 13132, + "enas": 34536, + "enation": 20860, + "enberg": 15658, + "enburg": 28430, + "enc": 33169, + "enca": 37774, + "encan": 30345, + "encapsul": 40874, + "ence": 6495, + "ence": 954, + "enced": 6549, + "ences": 3777, + "enchan": 17290, + "enchanted": 28258, + "enchanting": 32531, + "enchil": 47396, + "enci": 32207, + "encia": 30068, + "encies": 18729, + "encing": 10326, + "enclosed": 43243, + "enclosure": 37419, + "encom": 44026, + "encore": 20549, + "encoun": 17309, + "encounter": 13164, + "encountered": 32492, + 
"encounters": 25399, + "encoura": 6169, + "encourage": 12090, + "encouraged": 20299, + "encouragement": 24959, + "encourages": 23848, + "encouraging": 15875, + "encro": 45822, + "encry": 28600, + "encryp": 42928, + "encrypted": 48710, + "encryption": 31423, + "ency": 3484, + "encyclo": 32104, + "encyclopedia": 38376, + "end": 945, + "end": 806, + "enda": 6735, + "endale": 20290, + "endange": 13990, + "endangered": 14931, + "ende": 11373, + "ende": 40306, + "endeav": 18134, + "endeavor": 40502, + "endeavors": 44394, + "endeavour": 38035, + "ended": 2622, + "endemic": 41241, + "endent": 16265, + "ender": 48106, + "ender": 12383, + "enders": 7418, + "endez": 43850, + "endgame": 23042, + "endi": 31359, + "ending": 2695, + "endings": 36516, + "endish": 38841, + "endless": 12688, + "endlessly": 45145, + "endment": 45894, + "endo": 13476, + "endo": 15830, + "endocr": 36486, + "endof": 40786, + "endome": 46996, + "endon": 48018, + "endor": 8092, + "endorf": 37249, + "endorse": 28819, + "endorsed": 24307, + "endorsement": 21205, + "endorses": 34603, + "endorsing": 46779, + "endow": 45895, + "endra": 22321, + "ends": 1339, + "endthe": 46256, + "endu": 26032, + "endur": 19557, + "endurance": 21027, + "endure": 32419, + "enduring": 30851, + "enduro": 47042, + "ene": 3297, + "ene": 6049, + "ened": 2494, + "eneed": 45137, + "enegger": 33235, + "enei": 48906, + "enemies": 15824, + "enemy": 10310, + "enen": 45113, + "ener": 2244, + "ener": 13600, + "energ": 39451, + "energetic": 24197, + "energi": 23044, + "energies": 42374, + "energized": 48635, + "energy": 14974, + "energy": 2650, + "energye": 32271, + "energyefficiency": 40586, + "eners": 48208, + "enes": 42066, + "eness": 11806, + "enet": 46336, + "enew": 29672, + "enews": 13442, + "eney": 20706, + "enez": 33110, + "enf": 38167, + "enfield": 27808, + "enfor": 10592, + "enforce": 40224, + "enforced": 44597, + "enforcement": 12460, + "eng": 1035, + "eng": 6730, + "enga": 22297, + "engag": 6793, + "engage": 11089, + "engaged": 11475, + "engagement": 7281, + "engaging": 13060, + "enge": 26279, + "enge": 2742, + "engel": 38265, + "engen": 48286, + "enger": 6618, + "engers": 7533, + "engine": 3355, + "engine": 5857, + "engineer": 40151, + "engineer": 8517, + "engineered": 26580, + "engineering": 5273, + "engineers": 11494, + "engines": 14487, + "england": 20904, + "england": 3595, + "english": 15942, + "english": 3469, + "engra": 17560, + "engraved": 29421, + "engraving": 33309, + "engul": 43655, + "engv": 28401, + "enh": 7449, + "enhall": 48781, + "enham": 24592, + "enhan": 26827, + "enhance": 13993, + "enhanced": 16070, + "enhancement": 35601, + "enhances": 38259, + "enhancing": 25986, + "eni": 4395, + "eni": 17538, + "enic": 46780, + "enic": 28292, + "enig": 19754, + "enig": 48730, + "enight": 32848, + "enight": 20640, + "enigma": 34998, + "ening": 1133, + "enium": 34380, + "enix": 25720, + "enjo": 1498, + "enjoy": 12981, + "enjoy": 2218, + "enjoyable": 17444, + "enjoyed": 5045, + "enjoying": 3603, + "enjoyment": 34905, + "enjoys": 17024, + "enka": 43942, + "enko": 25312, + "enlar": 38136, + "enligh": 21364, + "enlighten": 28200, + "enlightened": 44032, + "enlightening": 44005, + "enlightenment": 29255, + "enlisted": 43555, + "enly": 43023, + "enn": 43563, + "enna": 8095, + "enne": 21176, + "enne": 11518, + "ennedy": 46266, + "ennes": 43613, + "enni": 7049, + "ennial": 14220, + "ennis": 48923, + "ennis": 26309, + "eno": 9429, + "eno": 12843, + "enoch": 47917, + "enor": 13955, + "enormous": 20129, + "enos": 44759, + "enote": 44955, + "enough": 2744, + 
"enow": 26876, + "enqu": 28417, + "enqui": 22810, + "enquire": 46658, + "enquiries": 31901, + "enquiry": 45141, + "enri": 18915, + "enrich": 20058, + "enrich": 45504, + "enriched": 45166, + "enrichment": 32903, + "enrique": 25489, + "enrol": 44279, + "enroll": 23739, + "enroll": 30366, + "enrolled": 36853, + "enrollment": 24875, + "enroute": 40548, + "ens": 41799, + "ens": 1323, + "ense": 12657, + "ense": 27658, + "ensemble": 14843, + "ensis": 32842, + "ensla": 37535, + "enslaved": 48675, + "ensure": 7492, + "ensures": 29707, + "ensuring": 19403, + "ent": 724, + "ent": 621, + "enta": 17681, + "ental": 32342, + "ental": 6168, + "entary": 9833, + "entation": 37412, + "ente": 17433, + "ente": 9935, + "ented": 3800, + "entennial": 43088, + "enter": 2963, + "enter": 3819, + "entered": 10679, + "entering": 12580, + "enterpri": 7339, + "enterprise": 9220, + "enterprises": 21219, + "enters": 15287, + "entertain": 5566, + "entertain": 23510, + "entertained": 30631, + "entertainer": 28674, + "entertaining": 13897, + "entertainment": 6166, + "entes": 24213, + "enthr": 36202, + "enthusi": 9631, + "enthusiasm": 20525, + "enthusiast": 27153, + "enthusiastic": 22068, + "enthusiasts": 27514, + "enti": 1938, + "ential": 5194, + "entially": 37695, + "entic": 10340, + "entine": 49212, + "enting": 20526, + "entire": 4709, + "entirely": 13911, + "entirety": 43242, + "entit": 15209, + "entities": 38134, + "entitled": 18680, + "entity": 28455, + "ently": 2922, + "ento": 21917, + "ento": 8762, + "entom": 31676, + "entourage": 47893, + "entr": 7129, + "entrance": 9129, + "entrata": 27304, + "entre": 34188, + "entre": 19600, + "entren": 46959, + "entrepre": 4583, + "entreprene": 4789, + "entrepreneu": 26784, + "entrepreneur": 12119, + "entrepreneur": 8033, + "entrepreneurial": 28261, + "entrepreneurs": 11054, + "entrepreneurship": 12858, + "entries": 13766, + "entry": 5362, + "ents": 870, + "entu": 6650, + "enty": 5657, + "enu": 23430, + "env": 32280, + "env": 39207, + "envel": 20052, + "envelope": 27358, + "envir": 3512, + "enviro": 46200, + "environ": 3599, + "environment": 33039, + "environment": 5501, + "environmental": 7831, + "environmentally": 32855, + "environments": 19577, + "envision": 49031, + "envoy": 29263, + "envy": 21017, + "eny": 20482, + "enya": 36509, + "enyc": 39520, + "enz": 25805, + "enz": 31873, + "enza": 25239, + "enzie": 14839, + "enzo": 31543, + "enzyme": 40348, + "enzymes": 47465, + "eo": 16054, + "eo": 11712, + "eoin": 48634, + "eon": 31915, + "eos": 17805, + "ep": 1178, + "ep": 1117, + "epa": 15866, + "epage": 26931, + "epaper": 33584, + "epcot": 32524, + "eper": 43071, + "eph": 45752, + "eph": 41240, + "ephe": 25129, + "epi": 7219, + "epi": 34641, + "epic": 12683, + "epic": 4991, + "epiconetsy": 49222, + "epide": 17382, + "epidemi": 44447, + "epidemic": 21522, + "epile": 23150, + "epilepsy": 29547, + "epilo": 31291, + "epilots": 39766, + "epiph": 40561, + "epiphany": 43251, + "epis": 24616, + "episcop": 28037, + "episcopal": 31221, + "episo": 2708, + "episode": 2965, + "episodes": 11837, + "epit": 21967, + "epitome": 35114, + "epl": 25950, + "epo": 25810, + "epp": 39054, + "epp": 39593, + "eps": 4090, + "epsilon": 40019, + "epsom": 40364, + "epstein": 34688, + "eq": 39331, + "eq": 33692, + "equ": 2563, + "equal": 17373, + "equal": 10433, + "equality": 48981, + "equality": 9578, + "equally": 18172, + "equals": 30278, + "equation": 28591, + "equations": 38225, + "eque": 19518, + "equestrian": 24728, + "equi": 8752, + "equili": 43262, + "equine": 33801, + "equinox": 32652, + "equip": 6526, 
+ "equip": 36979, + "equipment": 6893, + "equipo": 45688, + "equipped": 18331, + "equitable": 44717, + "equities": 44015, + "equity": 11293, + "equivalent": 19489, + "er": 517, + "er": 528, + "era": 30548, + "era": 2072, + "erable": 18801, + "erad": 24194, + "eradic": 36346, + "eradicate": 46164, + "eral": 6222, + "eran": 13069, + "eras": 19325, + "eras": 39090, + "erase": 33893, + "erased": 46762, + "erasmus": 38935, + "erc": 5360, + "erc": 32382, + "erd": 25645, + "erdo": 21112, + "erdogan": 24453, + "ere": 17907, + "ere": 642, + "erec": 21526, + "erected": 39365, + "ered": 9097, + "eres": 15751, + "ergon": 38120, + "ergy": 19550, + "eri": 2769, + "eri": 9509, + "eria": 11634, + "erial": 5409, + "eric": 1206, + "eric": 5396, + "erica": 13208, + "erich": 26070, + "erick": 27434, + "erick": 36959, + "erickson": 45286, + "ericsson": 39645, + "eridge": 45408, + "erie": 7005, + "eries": 9099, + "erik": 22805, + "erik": 16532, + "erika": 25531, + "erin": 17532, + "erin": 11333, + "erina": 25176, + "ering": 1785, + "erit": 23335, + "eritrea": 30738, + "erjee": 41665, + "erly": 14380, + "erm": 31649, + "erman": 17990, + "ern": 6992, + "ern": 12140, + "ernal": 20868, + "ernan": 34617, + "ernation": 48796, + "erne": 33930, + "ernest": 23006, + "ernie": 23636, + "ernity": 14653, + "erno": 40812, + "ernst": 30099, + "ero": 3211, + "ero": 3732, + "erock": 38206, + "eron": 32837, + "eroom": 46690, + "eros": 30597, + "erose": 48657, + "erosion": 30174, + "erotic": 30708, + "erotica": 39126, + "erous": 6384, + "eroy": 36461, + "erp": 28268, + "err": 22479, + "err": 25346, + "erra": 48446, + "errands": 45485, + "error": 12097, + "errors": 21195, + "erry": 45236, + "erry": 24124, + "ers": 4840, + "ers": 612, + "ersfc": 37925, + "ership": 2884, + "erson": 25780, + "erson": 6811, + "ert": 40325, + "ert": 3112, + "erta": 32007, + "erton": 26245, + "erts": 12921, + "eru": 36068, + "erun": 41642, + "erup": 17093, + "erupted": 48862, + "eruption": 33705, + "erville": 37557, + "erwin": 43724, + "ery": 12467, + "ery": 1692, + "erz": 38711, + "es": 957, + "es": 542, + "esa": 46834, + "esa": 12489, + "esanders": 23099, + "esc": 3330, + "esc": 28420, + "escal": 15902, + "escap": 11499, + "escape": 32484, + "escape": 7568, + "escaped": 18707, + "escapes": 29916, + "escaping": 21767, + "escar": 39229, + "escence": 37972, + "esch": 46760, + "esch": 41945, + "esco": 32482, + "escobar": 48807, + "escor": 24360, + "escort": 24976, + "escorted": 47667, + "escorts": 48574, + "escu": 36517, + "esday": 19553, + "ese": 18766, + "ese": 2260, + "esg": 41674, + "esh": 17119, + "esh": 13407, + "esha": 28799, + "eshop": 38451, + "eshop": 45570, + "eshopsuk": 39349, + "esi": 30064, + "esis": 12414, + "esk": 19359, + "esl": 26201, + "eso": 29890, + "eso": 28921, + "esof": 17047, + "eson": 46845, + "esp": 3849, + "esp": 13870, + "espa": 37301, + "espan": 41731, + "españa": 41118, + "especially": 4878, + "esper": 29216, + "espino": 46633, + "espionage": 43498, + "espn": 22917, + "espn": 7540, + "espnu": 47747, + "espo": 34381, + "esports": 16035, + "espresso": 17098, + "esq": 47352, + "esqu": 34616, + "esque": 25877, + "ess": 3118, + "ess": 9764, + "essa": 39125, + "essay": 12751, + "essays": 27328, + "esse": 22305, + "essen": 30489, + "essence": 17830, + "essenti": 11163, + "essential": 47264, + "essential": 6895, + "essentially": 30042, + "essentials": 16191, + "essex": 30563, + "essex": 11623, + "est": 2291, + "est": 1509, + "esta": 41449, + "esta": 10135, + "estab": 7010, + "establi": 8412, + "establish": 19709, + "established": 
13143, + "establishing": 29420, + "establishment": 20213, + "estas": 39072, + "estate": 47130, + "estate": 6159, + "estates": 26054, + "este": 12968, + "este": 20579, + "esteban": 48381, + "esteem": 31541, + "esteemed": 36293, + "ester": 45808, + "esthe": 18468, + "esther": 24393, + "estim": 8904, + "estimate": 21883, + "estimated": 16665, + "estimates": 21957, + "esto": 31589, + "esto": 23958, + "estonia": 26260, + "estonian": 48895, + "estrada": 48116, + "estre": 31271, + "estu": 26272, + "estuary": 35269, + "esur": 35758, + "esville": 39187, + "esy": 46268, + "et": 1169, + "et": 875, + "eta": 8761, + "etal": 25221, + "etary": 13074, + "etc": 5353, + "etched": 40411, + "etching": 41375, + "ete": 38820, + "ete": 40245, + "eter": 8587, + "eter": 17007, + "eternal": 13732, + "eternally": 48486, + "eternity": 23832, + "eters": 18392, + "etf": 31661, + "eth": 4819, + "eth": 5927, + "ethan": 24245, + "ethan": 15958, + "ethanol": 38166, + "ethe": 21312, + "ethel": 45921, + "ether": 23349, + "ethere": 18705, + "ethereal": 40925, + "ethereum": 19612, + "ethernet": 35026, + "ethi": 10327, + "ethic": 39104, + "ethical": 47041, + "ethical": 17679, + "ethics": 13355, + "ethiop": 10897, + "ethiopia": 13920, + "ethiopian": 24507, + "ethnic": 30522, + "ethnic": 16344, + "ethnicity": 46787, + "ethno": 34225, + "ethos": 48768, + "eti": 11188, + "eti": 30394, + "etienne": 46118, + "eties": 15137, + "etihad": 38489, + "etiquette": 37957, + "etis": 38216, + "etisation": 39733, + "etna": 41940, + "eto": 27829, + "eto": 33837, + "eton": 44339, + "etour": 41462, + "etr": 23012, + "etres": 42838, + "ets": 3442, + "etsy": 13237, + "etsy": 6282, + "etsym": 22902, + "etsymntt": 25416, + "etsyshop": 44643, + "ett": 32729, + "ett": 24998, + "etta": 30466, + "ette": 19981, + "ette": 5212, + "ettes": 35326, + "etto": 44219, + "etty": 40759, + "etu": 36593, + "etv": 49155, + "etv": 20325, + "etwork": 20585, + "ety": 25920, + "ety": 2746, + "etz": 36181, + "etz": 25301, + "eu": 1506, + "eu": 3238, + "eucalyp": 41068, + "eucalyptus": 42351, + "euchar": 38362, + "eugen": 30678, + "eugene": 17760, + "eul": 46749, + "eun": 16431, + "eun": 26219, + "eunhyuk": 47526, + "eup": 44435, + "euph": 21386, + "euphoria": 41051, + "eur": 18343, + "eur": 12018, + "eura": 32605, + "eure": 25311, + "euref": 48017, + "eureka": 31686, + "euro": 2039, + "euro": 8463, + "euroleague": 46821, + "europa": 18290, + "europale": 42473, + "europaleague": 44029, + "europarl": 44922, + "europe": 4198, + "europe": 3848, + "european": 26712, + "european": 4759, + "europeans": 37082, + "euros": 22274, + "eurovision": 17593, + "eurozone": 42555, + "eurusd": 40895, + "eus": 44214, + "euston": 46905, + "euthan": 43280, + "euve": 40652, + "eux": 25019, + "ev": 776, + "ev": 10133, + "eva": 6845, + "evacu": 13187, + "evacuated": 26806, + "evacuation": 27353, + "eval": 25139, + "eval": 9703, + "evalu": 10314, + "evaluate": 27174, + "evaluating": 34541, + "evaluation": 17640, + "evan": 12821, + "evan": 12847, + "evangel": 20518, + "evangeli": 21372, + "evangelical": 36151, + "evangelist": 42275, + "evankirstel": 46581, + "evans": 8836, + "evansville": 44782, + "evapor": 33352, + "evasion": 48795, + "eve": 5732, + "eve": 1866, + "eved": 19820, + "evel": 39315, + "evelyn": 26687, + "evement": 8210, + "even": 6359, + "even": 1427, + "evening": 34487, + "evening": 2285, + "evenings": 19994, + "evenly": 45974, + "event": 10612, + "event": 1655, + "eventful": 45628, + "evento": 38155, + "eventprofs": 24980, + "events": 3667, + "eventu": 14055, + "eventual": 45321, + 
"eventually": 14397, + "ever": 888, + "ever": 1247, + "everest": 21722, + "everett": 25456, + "everglades": 46294, + "evergreen": 23852, + "everlasting": 32849, + "evers": 31914, + "everton": 13315, + "every": 1091, + "every": 1505, + "everybody": 5901, + "everyday": 25049, + "everyday": 5160, + "everyone": 1584, + "everything": 36376, + "everything": 2410, + "everytime": 16911, + "everywhere": 6364, + "eves": 7323, + "evi": 5348, + "evi": 36989, + "evic": 21336, + "eviction": 37111, + "eviden": 46220, + "evidence": 6439, + "evident": 34529, + "evie": 47195, + "evil": 23218, + "evil": 6006, + "eville": 16143, + "eving": 24729, + "evo": 17962, + "evo": 13169, + "evoc": 43133, + "evol": 5350, + "evolu": 7725, + "evolution": 8902, + "evolutionary": 30629, + "evolve": 23406, + "evolved": 22613, + "evolving": 23675, + "evp": 46154, + "evs": 33576, + "ew": 11942, + "ew": 15428, + "ewan": 40247, + "ewe": 48438, + "ewing": 38873, + "ews": 9878, + "ex": 659, + "ex": 4118, + "exac": 5460, + "exact": 12651, + "exactly": 5840, + "exagger": 29766, + "exal": 49324, + "exam": 4428, + "exam": 8785, + "examination": 20970, + "examine": 25728, + "examined": 44004, + "examiner": 29149, + "examines": 28160, + "examining": 30616, + "example": 6228, + "examples": 14790, + "exams": 14028, + "exas": 47536, + "exc": 1302, + "excav": 20733, + "excavation": 45909, + "exce": 10999, + "exceed": 32521, + "exceeded": 36221, + "exceeding": 47213, + "exceeds": 49353, + "excel": 28351, + "excel": 18754, + "excell": 3298, + "excellence": 8171, + "excellency": 36503, + "excellent": 4239, + "excelsi": 47315, + "excep": 8882, + "except": 8541, + "exception": 25018, + "exceptional": 13425, + "exceptionally": 29306, + "excer": 17737, + "excerpt": 20586, + "excess": 22491, + "excessive": 21332, + "exchange": 6616, + "exchanged": 48919, + "exchanges": 29730, + "exchanging": 47760, + "excit": 10510, + "excite": 47711, + "excited": 1889, + "excitement": 11407, + "exciting": 4300, + "exclu": 3114, + "exclude": 49235, + "excluded": 46216, + "excluding": 44326, + "exclusion": 40219, + "exclusive": 3747, + "exclusively": 13565, + "exclusives": 47149, + "excu": 7324, + "excur": 27533, + "excursion": 34869, + "excuse": 9266, + "excuses": 19388, + "exe": 3554, + "exe": 48027, + "exec": 15052, + "execs": 35728, + "execu": 4360, + "execute": 36405, + "executed": 20432, + "execution": 18085, + "executive": 5944, + "executives": 24357, + "exem": 19753, + "exemp": 28602, + "exempl": 36371, + "exemplary": 39123, + "exempli": 41934, + "exempt": 44278, + "exemption": 47481, + "exer": 40295, + "exerc": 5932, + "exercise": 7016, + "exercises": 19669, + "exercising": 39036, + "exeter": 32137, + "exeter": 18837, + "exfoli": 38823, + "exhau": 11154, + "exhaust": 21812, + "exhausted": 21741, + "exhausting": 40035, + "exhaustion": 49221, + "exhi": 3022, + "exhib": 3783, + "exhibit": 24992, + "exhibit": 8209, + "exhibiting": 23889, + "exhibition": 4219, + "exhibitions": 28311, + "exhibitor": 44192, + "exhibitors": 38542, + "exhibits": 30093, + "exhilar": 40262, + "exhilarating": 49289, + "exi": 5297, + "exico": 38712, + "exile": 28566, + "exist": 10899, + "exist": 9645, + "existed": 23198, + "existence": 13832, + "existent": 43541, + "existential": 38752, + "existing": 12886, + "exists": 14608, + "exit": 9374, + "exited": 37581, + "exiting": 39577, + "exits": 34943, + "exmoor": 48260, + "exo": 15600, + "exo": 5842, + "exodus": 30098, + "exol": 42856, + "exop": 35288, + "exoplan": 37980, + "exor": 24506, + "exorcist": 46309, + "exotic": 15639, + "exp": 
9923, + "exp": 19066, + "expan": 7512, + "expand": 10382, + "expand": 13141, + "expanded": 18390, + "expanding": 15755, + "expands": 22223, + "expanse": 46886, + "expansion": 10138, + "expansive": 49261, + "expat": 43900, + "expe": 2560, + "expect": 9802, + "expect": 5716, + "expectation": 34273, + "expectations": 12529, + "expected": 5573, + "expecting": 12525, + "expects": 24536, + "expedition": 16761, + "expeditions": 49327, + "expelled": 48834, + "expen": 7216, + "expend": 29302, + "expenditure": 47044, + "expense": 28473, + "expenses": 21797, + "expensive": 9649, + "exper": 1533, + "experi": 4723, + "experience": 31867, + "experience": 2415, + "experienced": 10417, + "experiences": 8233, + "experiencing": 16643, + "experiential": 44952, + "experim": 6697, + "experiment": 13079, + "experimental": 16539, + "experimenting": 28263, + "experiments": 21077, + "expert": 6284, + "expertise": 16555, + "experts": 6960, + "expi": 26850, + "expir": 35077, + "expire": 49315, + "expired": 30200, + "expires": 34739, + "expl": 3261, + "expla": 3517, + "explain": 48918, + "explain": 7304, + "explained": 14229, + "explaining": 13136, + "explains": 6655, + "explan": 13294, + "explanation": 16577, + "explanations": 34383, + "explic": 21011, + "explicit": 33228, + "explo": 3586, + "explode": 31262, + "exploded": 28947, + "explodes": 38119, + "exploding": 34683, + "exploit": 36953, + "exploited": 48554, + "explor": 11958, + "exploration": 14043, + "explore": 10405, + "explore": 5147, + "explorebc": 38754, + "explorecanada": 36600, + "explored": 25016, + "explorer": 15776, + "explorers": 28491, + "explores": 13996, + "exploring": 7584, + "explosion": 13785, + "explosions": 38646, + "explosive": 18888, + "explosives": 44705, + "expo": 7820, + "expo": 6344, + "expon": 27905, + "export": 14444, + "exporting": 47433, + "exports": 20088, + "expose": 23181, + "exposed": 12180, + "exposes": 33575, + "exposing": 28362, + "exposition": 36943, + "exposure": 11903, + "expre": 6085, + "express": 18553, + "express": 5642, + "expressed": 20777, + "expresses": 31931, + "expressing": 30207, + "expression": 11357, + "expressions": 20314, + "expressive": 42060, + "expressway": 31658, + "exquis": 16575, + "exquisite": 17958, + "ext": 5711, + "ext": 20072, + "exten": 5555, + "extend": 14492, + "extended": 9614, + "extending": 25652, + "extends": 20688, + "extension": 10275, + "extensions": 24525, + "extensive": 16870, + "extensively": 47365, + "extent": 24913, + "exter": 9797, + "exterior": 19352, + "extermin": 41671, + "external": 15028, + "extin": 13553, + "extinct": 24488, + "extinction": 21186, + "extingui": 38567, + "extor": 35620, + "extr": 29082, + "extra": 6416, + "extra": 4231, + "extrac": 18550, + "extract": 18962, + "extraction": 28789, + "extracts": 45576, + "extraordin": 23628, + "extraordinaire": 30909, + "extraordinary": 10982, + "extras": 29817, + "extravag": 22299, + "extravaganza": 29461, + "extre": 3978, + "extreme": 38357, + "extreme": 8331, + "extremely": 6519, + "extremism": 31493, + "extremist": 36383, + "extremists": 41425, + "extru": 43010, + "ey": 1541, + "ey": 1477, + "eyang": 28915, + "eye": 5034, + "eye": 3272, + "eyebrow": 34250, + "eyebrows": 19923, + "eyed": 15512, + "eyeing": 34916, + "eyel": 17075, + "eyelashes": 42074, + "eyeliner": 33354, + "eyeon": 25126, + "eyes": 3095, + "eyeshadow": 35213, + "eyewear": 30165, + "eyewitness": 36258, + "eyou": 31996, + "eyour": 40229, + "eyre": 44115, + "ez": 10082, + "ez": 8387, + "eze": 25993, + "eze": 27229, + "ezekiel": 41428, + "ezra": 27552, + "f": 
+ … (machine-generated vocabulary mapping continues: alphabetical "token": id pairs from "f": 325 through "hbd": 25277, with ids ranging up to ~49400; the flattened raw dump is omitted) …
"hbd": 13594, + "hbo": 15252, + "hc": 15831, + "hc": 7821, + "hcs": 46850, + "hd": 11601, + "hd": 4414, + "hdd": 40508, + "hdmi": 33302, + "hdr": 28065, + "he": 651, + "he": 797, + "hea": 27150, + "hea": 32790, + "head": 1603, + "head": 1375, + "headache": 23849, + "headaches": 38025, + "headband": 28556, + "headed": 6153, + "header": 11077, + "heading": 4409, + "headless": 45219, + "headlights": 42422, + "headline": 10891, + "headliner": 38880, + "headlines": 14706, + "headlining": 26971, + "headphone": 37524, + "headphones": 14906, + "headquarters": 13041, + "heads": 5174, + "headset": 23883, + "headshot": 34890, + "heal": 1231, + "heal": 13833, + "healed": 31456, + "healer": 38328, + "healey": 38985, + "healing": 9295, + "heals": 32384, + "health": 2145, + "health": 1728, + "healthand": 43704, + "healthcare": 42500, + "healthcare": 6023, + "healthier": 18242, + "healthtech": 42694, + "healthy": 10330, + "healthy": 3782, + "healthye": 31532, + "healthyeating": 33761, + "healthyfood": 39996, + "healthylifestyle": 46254, + "healthyliving": 27293, + "healy": 34299, + "heap": 34781, + "heaps": 44446, + "hear": 2749, + "hear": 2584, + "heard": 4063, + "hearing": 46353, + "hearing": 5541, + "hearings": 33175, + "hearn": 36613, + "hears": 25395, + "heart": 4975, + "heart": 1936, + "heartbeat": 29154, + "heartbreak": 29281, + "heartbreaking": 21322, + "heartbroken": 35383, + "hearted": 21679, + "heartfelt": 22904, + "hearth": 31563, + "hearthstone": 34054, + "hearti": 29345, + "hearties": 44572, + "heartland": 31923, + "heartless": 47022, + "heartnews": 40426, + "hearts": 5516, + "heartw": 30002, + "heartwarming": 34080, + "hearty": 26994, + "heat": 12175, + "heat": 4403, + "heated": 17057, + "heater": 23246, + "heath": 12794, + "heath": 11719, + "heather": 20230, + "heather": 12470, + "heathrow": 24171, + "heating": 12478, + "heaton": 34557, + "heats": 36106, + "heatwave": 25726, + "heav": 2409, + "heaven": 15520, + "heaven": 5545, + "heavenly": 19117, + "heavens": 26026, + "heavier": 31253, + "heaviest": 33268, + "heavily": 14123, + "heavy": 12048, + "heavy": 4200, + "heavymetal": 39804, + "heavyweight": 17448, + "heb": 24700, + "heb": 34515, + "hebdo": 41817, + "hebrew": 27298, + "hebrides": 45121, + "hebron": 45725, + "hec": 18932, + "heck": 22985, + "heck": 14427, + "hectares": 44162, + "hectic": 37245, + "hector": 25852, + "hed": 18271, + "hedge": 16229, + "hedge": 20294, + "hedgehog": 21940, + "hedges": 41345, + "hee": 18364, + "hee": 15773, + "heechul": 42487, + "heed": 15118, + "heel": 33646, + "heel": 16861, + "heels": 10909, + "heem": 30061, + "heer": 40473, + "hef": 29473, + "heff": 48756, + "hefty": 48584, + "heg": 41995, + "heh": 25834, + "hehe": 48723, + "hehe": 10658, + "hehehe": 24138, + "hei": 6101, + "hei": 29051, + "heidel": 42927, + "heidelberg": 48445, + "heidi": 44860, + "heidi": 23867, + "heifer": 48219, + "heigh": 43883, + "height": 10788, + "heights": 8418, + "heim": 10931, + "heim": 9768, + "heimer": 39517, + "hein": 15487, + "hein": 43206, + "heine": 28742, + "heineken": 36874, + "heinrich": 47877, + "heinz": 32359, + "heir": 27083, + "heir": 34007, + "heirloom": 34232, + "heirs": 43834, + "heis": 21849, + "heisman": 34537, + "heist": 31035, + "heit": 37255, + "hel": 919, + "hel": 11579, + "hela": 48212, + "held": 4042, + "hele": 46129, + "helen": 17576, + "helen": 11291, + "helena": 23109, + "helene": 41591, + "helens": 45940, + "heli": 33874, + "heli": 40183, + "helicop": 10035, + "helicopter": 11956, + "helicopters": 26922, + "helium": 46505, + "helix": 35247, + 
"hell": 8410, + "hell": 4141, + "hella": 19800, + "hellboy": 48428, + "helle": 48600, + "helle": 46968, + "hellenic": 42544, + "heller": 44464, + "hello": 12887, + "hello": 3306, + "hells": 47989, + "helly": 48690, + "helm": 47970, + "helm": 19520, + "helmet": 11122, + "helmets": 21843, + "help": 8641, + "help": 1318, + "helped": 4845, + "helper": 29321, + "helpers": 36316, + "helpful": 12695, + "helping": 3875, + "helpless": 47638, + "helpline": 43101, + "helps": 5144, + "helsin": 17842, + "helsinki": 19626, + "hem": 20270, + "hem": 11148, + "hemi": 14256, + "hemi": 46856, + "heming": 30819, + "hemingway": 33470, + "hemisphere": 32767, + "hemmings": 34882, + "hemo": 43788, + "hemp": 28225, + "hemp": 18467, + "hems": 32451, + "hemsworth": 39428, + "hen": 2385, + "hen": 8047, + "hence": 23640, + "hend": 11560, + "hender": 49248, + "henderson": 14348, + "hendrick": 45296, + "hendricks": 37588, + "hendrix": 23605, + "henge": 33104, + "henley": 27853, + "henna": 39455, + "hennessy": 42667, + "henri": 19431, + "henri": 21610, + "henrik": 35772, + "henry": 16018, + "henry": 5508, + "hens": 31742, + "henson": 32935, + "hep": 17724, + "hep": 48791, + "hepat": 23767, + "hepatitis": 32169, + "hepburn": 26348, + "her": 1223, + "her": 899, + "hera": 38724, + "heral": 37809, + "herald": 27625, + "herald": 12851, + "herb": 26116, + "herb": 15302, + "herbal": 21868, + "herbali": 44087, + "herbalife": 48364, + "herbert": 19935, + "herbs": 17320, + "hercules": 26539, + "herd": 36142, + "herd": 18589, + "here": 9134, + "here": 763, + "hered": 47976, + "hereford": 35543, + "heres": 13566, + "hereto": 47673, + "heri": 31392, + "herit": 4720, + "heritag": 38273, + "heritage": 20962, + "heritage": 5455, + "herman": 31890, + "herman": 21568, + "hermann": 40942, + "hermes": 34563, + "hermi": 35265, + "hermione": 45502, + "hermit": 43953, + "hermitage": 47706, + "hermo": 40967, + "hermosa": 42531, + "hern": 30571, + "hern": 43576, + "hernandez": 17707, + "hero": 7338, + "hero": 3756, + "heroes": 38010, + "heroes": 5506, + "heroic": 24255, + "heroin": 23841, + "heroine": 27420, + "heron": 22593, + "heros": 37642, + "herr": 38537, + "herrera": 27755, + "herring": 30211, + "hers": 25359, + "herself": 9207, + "hersh": 20379, + "hershey": 29734, + "hert": 26744, + "hertfordshire": 41070, + "herts": 35784, + "herty": 23454, + "hertz": 49383, + "hes": 30553, + "hes": 12784, + "hesit": 23933, + "hesitate": 34967, + "hess": 41888, + "hester": 31105, + "het": 37527, + "het": 19678, + "hetero": 26405, + "heu": 20105, + "heughan": 32298, + "hew": 48141, + "hew": 43051, + "hewitt": 28871, + "hex": 16255, + "hex": 31241, + "hey": 10759, + "hey": 2189, + "hez": 34591, + "hezbollah": 37636, + "hf": 26606, + "hf": 20603, + "hfx": 47297, + "hg": 23986, + "hg": 26237, + "hgtv": 47657, + "hh": 3280, + "hh": 5180, + "hhh": 8281, + "hhhh": 19391, + "hhhh": 13121, + "hhhhh": 24246, + "hhhhhh": 37278, + "hhs": 27006, + "hi": 677, + "hi": 1883, + "hia": 20672, + "hiatus": 27823, + "hib": 15922, + "hiber": 38799, + "hibis": 36226, + "hibiscus": 36460, + "hibition": 24658, + "hibs": 42814, + "hic": 3549, + "hic": 38079, + "hick": 14813, + "hickman": 49148, + "hickory": 29905, + "hicks": 23429, + "hid": 15552, + "hid": 14451, + "hidalgo": 47464, + "hidden": 28305, + "hidden": 7029, + "hiddleston": 31444, + "hide": 17725, + "hide": 9379, + "hideous": 46588, + "hides": 30800, + "hiding": 11371, + "hie": 15763, + "hier": 23433, + "hier": 29913, + "hierarchy": 44442, + "hifi": 38168, + "hig": 38108, + "higgins": 21783, + "high": 1487, + "high": 
1400, + "higher": 5321, + "highered": 27072, + "highest": 5317, + "highland": 32244, + "highland": 16062, + "highlander": 46251, + "highlanders": 40445, + "highlands": 16883, + "highlight": 8264, + "highlighted": 22252, + "highlighter": 45460, + "highlighting": 17344, + "highlights": 6173, + "highly": 5302, + "highness": 38694, + "highs": 15144, + "highschool": 23102, + "highway": 45344, + "highway": 7620, + "highways": 28007, + "higu": 39115, + "hihi": 36240, + "hii": 42315, + "hijab": 31407, + "hika": 41356, + "hikari": 44624, + "hike": 9404, + "hiked": 36471, + "hiker": 40947, + "hikers": 46090, + "hikes": 27076, + "hiking": 9118, + "hiko": 48708, + "hil": 3508, + "hil": 17927, + "hila": 38837, + "hilar": 37337, + "hilari": 7784, + "hilarious": 8358, + "hilariously": 43476, + "hilary": 45898, + "hilary": 25415, + "hilde": 45382, + "hill": 3671, + "hill": 2682, + "hillary": 13257, + "hillary": 7074, + "hillaryclinton": 15357, + "hilli": 32513, + "hills": 24178, + "hills": 5289, + "hillsborough": 32157, + "hillside": 37194, + "hilltop": 45858, + "hilly": 32483, + "hilton": 33621, + "hilton": 14012, + "him": 4128, + "him": 1269, + "himach": 29132, + "himachal": 35461, + "himalay": 17552, + "himalayan": 30318, + "himalayas": 32872, + "hime": 45892, + "himself": 4530, + "himss": 41730, + "hin": 1676, + "hin": 37930, + "hina": 40571, + "hinakhan": 45518, + "hinch": 49320, + "hind": 34460, + "hind": 23293, + "hindi": 14967, + "hinds": 47859, + "hindu": 17587, + "hindu": 12053, + "hinduism": 40592, + "hindus": 25701, + "hindustan": 46553, + "hines": 37462, + "hing": 37968, + "hini": 33564, + "hino": 45343, + "hint": 11868, + "hinton": 47165, + "hints": 20594, + "hio": 32897, + "hip": 11725, + "hip": 6584, + "hipho": 8819, + "hiphop": 26598, + "hiphop": 10914, + "hipp": 13607, + "hippie": 28637, + "hippo": 28398, + "hippo": 36729, + "hips": 30191, + "hipstamatic": 31002, + "hipster": 19987, + "hipsters": 48265, + "hir": 4959, + "hir": 14728, + "hira": 42577, + "hire": 32356, + "hire": 8243, + "hired": 17602, + "hires": 24133, + "hiring": 7835, + "hiro": 17396, + "hiro": 20588, + "hiroshima": 33867, + "hirsch": 46967, + "his": 15211, + "his": 787, + "hism": 23502, + "hispan": 16843, + "hispanic": 22676, + "hist": 21710, + "hist": 13779, + "histo": 33479, + "histor": 2993, + "historia": 46010, + "historian": 20697, + "historians": 35200, + "historic": 30195, + "historic": 5726, + "historical": 34154, + "historical": 8039, + "historically": 30445, + "histories": 34736, + "history": 11142, + "history": 1695, + "historymonth": 19356, + "historyof": 35905, + "hit": 5453, + "hit": 2341, + "hitch": 22937, + "hitch": 36203, + "hitler": 16518, + "hitman": 33290, + "hits": 4712, + "hitter": 23538, + "hitters": 39724, + "hitting": 7957, + "hiv": 44410, + "hiv": 11018, + "hive": 38162, + "hive": 18521, + "hiya": 42393, + "hk": 22648, + "hk": 12307, + "hl": 8297, + "hl": 5956, + "hle": 32389, + "hler": 35418, + "hm": 17913, + "hm": 7631, + "hmm": 13725, + "hmmm": 17032, + "hmmmm": 34598, + "hms": 14625, + "hmu": 21630, + "hmv": 49288, + "hn": 22905, + "hn": 7478, + "hns": 48412, + "ho": 606, + "ho": 2971, + "hoa": 37517, + "hoar": 31628, + "hoax": 33438, + "hob": 18212, + "hobart": 31646, + "hobb": 16175, + "hobbies": 36370, + "hobbit": 23207, + "hobbs": 34343, + "hobby": 41120, + "hobby": 17557, + "hobo": 34613, + "hobo": 41334, + "hoboken": 41568, + "hoc": 35880, + "hoch": 43772, + "hock": 34914, + "hock": 46574, + "hockey": 16499, + "hockey": 4111, + "hoco": 34771, + "hod": 31062, + "hodg": 23660, + 
"hodge": 40585, + "hodges": 35061, + "hodgson": 37044, + "hoe": 32502, + "hoe": 11262, + "hoek": 40073, + "hoes": 21164, + "hof": 20186, + "hof": 12789, + "hofer": 38654, + "hoff": 32860, + "hoff": 22751, + "hofficial": 41949, + "hoffman": 22026, + "hog": 12075, + "hog": 13255, + "hogan": 19757, + "hogg": 42005, + "hogs": 23242, + "hogwarts": 29168, + "hoh": 43947, + "hoi": 39295, + "hok": 26942, + "hok": 47167, + "hokies": 35168, + "hokkaido": 49145, + "hol": 1187, + "hol": 7349, + "hola": 28724, + "hold": 36496, + "hold": 3254, + "holden": 21869, + "holder": 7862, + "holders": 10074, + "holding": 5050, + "holdings": 24832, + "holds": 7286, + "hole": 47242, + "hole": 5341, + "holes": 11266, + "holi": 2093, + "holi": 21926, + "holic": 16348, + "holics": 29782, + "holiday": 13168, + "holiday": 2878, + "holidays": 5372, + "holiness": 37259, + "holistic": 26300, + "holl": 27699, + "holla": 26500, + "holland": 31608, + "holland": 9978, + "hollande": 47690, + "holler": 49047, + "holli": 24019, + "holliday": 41624, + "hollow": 41221, + "hollow": 16691, + "holloway": 29435, + "holly": 12731, + "holly": 11923, + "hollyo": 41525, + "hollyoaks": 43352, + "hollywood": 24655, + "hollywood": 5518, + "holm": 34758, + "holm": 12739, + "holme": 46149, + "holmes": 12756, + "holo": 10317, + "holocau": 14688, + "holocaust": 16476, + "hols": 33344, + "holt": 18868, + "holtz": 44743, + "holy": 13910, + "holy": 4874, + "hom": 906, + "hom": 47397, + "homa": 9557, + "homage": 17746, + "home": 2143, + "home": 1137, + "homebrew": 35046, + "homec": 33869, + "homecoming": 9008, + "homedecor": 15695, + "homedepot": 38707, + "homegrown": 32554, + "homeitems": 42972, + "homeland": 21633, + "homeless": 18403, + "homeless": 9661, + "homelessness": 19851, + "homemade": 7889, + "homeof": 48856, + "homeowner": 37267, + "homeowners": 29882, + "homepage": 29828, + "homer": 29307, + "homer": 16931, + "homers": 38333, + "homes": 19480, + "homes": 5416, + "homeschool": 40994, + "homestead": 32609, + "homeswee": 46298, + "hometown": 12238, + "homework": 12495, + "homicide": 21520, + "homie": 12540, + "homies": 18893, + "homme": 26193, + "homo": 18129, + "homo": 30504, + "homophobia": 37875, + "homophobic": 40975, + "homosexual": 44288, + "homosexuality": 46720, + "homs": 45413, + "hon": 1279, + "hon": 10296, + "honda": 8553, + "honduras": 29715, + "hone": 38640, + "honest": 7814, + "honest": 9602, + "honestly": 9155, + "honesty": 24939, + "honey": 9843, + "honey": 6406, + "honeycomb": 48583, + "honeymoon": 22527, + "hong": 12144, + "hong": 8598, + "hongkong": 16659, + "honi": 17918, + "honolulu": 28096, + "honor": 9206, + "honor": 3402, + "honorable": 19498, + "honorary": 15675, + "honore": 25868, + "honored": 5494, + "honoree": 38993, + "honorees": 43012, + "honoring": 10771, + "honors": 10248, + "honour": 8240, + "honourable": 29855, + "honoured": 11945, + "honouring": 37754, + "honours": 22558, + "hoo": 2300, + "hoo": 7920, + "hood": 18681, + "hood": 3222, + "hooded": 33631, + "hoodie": 13444, + "hoodies": 25974, + "hoods": 16664, + "hoof": 44555, + "hook": 30488, + "hook": 10395, + "hookah": 34214, + "hooked": 18138, + "hookem": 31465, + "hooker": 37891, + "hooking": 35240, + "hooks": 25068, + "hooligans": 48176, + "hoon": 21368, + "hooo": 44538, + "hoop": 31516, + "hoop": 19573, + "hooper": 35221, + "hoops": 9351, + "hoor": 22155, + "hooray": 24940, + "hoos": 46462, + "hoosier": 48886, + "hoosiers": 42780, + "hoot": 29164, + "hoover": 25691, + "hop": 10848, + "hop": 5833, + "hope": 5263, + "hope": 1683, + "hoped": 30628, + 
"hopeful": 21453, + "hopefully": 7602, + "hopeless": 35586, + "hopes": 10018, + "hoping": 7207, + "hopkins": 17821, + "hopp": 48839, + "hopped": 34220, + "hopper": 21748, + "hopping": 27606, + "hoppy": 38359, + "hops": 21137, + "hor": 1407, + "hor": 33847, + "hora": 26013, + "horace": 39282, + "horan": 26857, + "horde": 44947, + "hore": 15380, + "horiz": 8144, + "horizon": 17924, + "horizon": 11920, + "horizons": 29685, + "horizontal": 25775, + "hormon": 27096, + "hormone": 31283, + "hormones": 35162, + "horn": 15771, + "horn": 9607, + "horne": 38143, + "horned": 34526, + "hornet": 28739, + "hornets": 20124, + "horns": 22109, + "horny": 32622, + "horo": 21500, + "horoscope": 38453, + "horowitz": 44669, + "horri": 8656, + "horrible": 13726, + "horribly": 45484, + "horrific": 25314, + "horrifying": 38901, + "horror": 13787, + "horror": 5032, + "horrormovies": 46682, + "horrors": 33321, + "horse": 8562, + "horse": 4558, + "horseback": 43673, + "horseman": 48885, + "horsepower": 36882, + "horser": 23096, + "horseracing": 30693, + "horses": 8809, + "horseshoe": 29242, + "horst": 37182, + "hort": 19482, + "horticul": 27141, + "horticulture": 39998, + "horton": 25945, + "hortons": 38422, + "horus": 29794, + "hos": 44320, + "hos": 25008, + "hosa": 44618, + "hose": 19662, + "hoseok": 38817, + "hosp": 2847, + "hosp": 37853, + "hospice": 20533, + "hospit": 7180, + "hospital": 29399, + "hospital": 3851, + "hospitality": 11657, + "hospitalized": 36915, + "hospitals": 13816, + "host": 17403, + "host": 3953, + "hostage": 26119, + "hoste": 31700, + "hosted": 6017, + "hostel": 27225, + "hostess": 39692, + "hostile": 28074, + "hosting": 4857, + "hosts": 8718, + "hot": 2851, + "hot": 2069, + "hota": 43289, + "hotdog": 43758, + "hotel": 14591, + "hotel": 2738, + "hotels": 8654, + "hotline": 30516, + "hotmail": 46427, + "hotness": 39803, + "hotra": 27109, + "hotro": 47823, + "hotspot": 36606, + "hotspur": 35176, + "hotter": 23591, + "hottest": 8279, + "hottie": 22804, + "hotties": 46027, + "hou": 1011, + "hou": 10122, + "hough": 44529, + "houghton": 36133, + "houn": 39273, + "houn": 33607, + "hound": 33996, + "hound": 13561, + "hounds": 21178, + "hounews": 48373, + "hour": 14930, + "hour": 2232, + "hourly": 30918, + "hours": 2382, + "house": 4107, + "house": 1212, + "housed": 37518, + "household": 12412, + "households": 27167, + "housel": 48685, + "housemusic": 28468, + "houseof": 19928, + "houses": 7791, + "housewives": 38523, + "housing": 32924, + "housing": 5734, + "houston": 16564, + "houston": 5663, + "hov": 40291, + "hove": 29674, + "hoven": 35559, + "hover": 36252, + "hover": 49016, + "hovering": 43437, + "how": 7470, + "how": 829, + "howar": 37672, + "howard": 25447, + "howard": 7632, + "howdy": 42216, + "howe": 8179, + "howe": 24614, + "howell": 25297, + "hower": 32920, + "however": 8467, + "howi": 47883, + "howie": 42939, + "howl": 40332, + "howling": 41771, + "howto": 38191, + "howto": 44060, + "hoy": 39625, + "hoy": 13278, + "hoya": 40978, + "hp": 23753, + "hp": 6371, + "hpa": 30983, + "hpc": 39936, + "hpe": 33787, + "hpv": 45765, + "hq": 33571, + "hq": 4693, + "hr": 4810, + "hr": 4086, + "hra": 21320, + "hra": 17212, + "hrc": 18139, + "hrh": 29103, + "hri": 21068, + "hrithik": 45371, + "hrs": 7157, + "hru": 24127, + "hrw": 25064, + "hs": 9343, + "hs": 2466, + "hsbc": 31508, + "hsc": 43510, + "hse": 34057, + "hsfb": 29539, + "hsv": 47311, + "ht": 11123, + "ht": 7801, + "hta": 23452, + "hta": 49384, + "htafc": 42821, + "htc": 48942, + "htc": 17635, + "html": 18231, + "hts": 43710, + "htt": 10620, + 
"http": 15066, + "https": 30901, + "httr": 49372, + "httweets": 43198, + "hu": 845, + "hu": 5949, + "hua": 22138, + "huan": 41405, + "huang": 32013, + "huar": 46916, + "huawe": 17709, + "huawei": 21128, + "hub": 18775, + "hub": 7028, + "hubb": 23183, + "hubbard": 33288, + "hubble": 30421, + "hubby": 16947, + "hubert": 40699, + "hubs": 29327, + "huck": 22909, + "huckabee": 43666, + "hud": 7169, + "hud": 28563, + "hudder": 22629, + "huddersfield": 24220, + "huddle": 33435, + "hudson": 25873, + "hudson": 11260, + "hue": 48380, + "hue": 21465, + "hues": 38003, + "huey": 39663, + "huff": 18746, + "huff": 44999, + "huffpost": 45887, + "hug": 40790, + "hug": 10359, + "huge": 2699, + "hugely": 24648, + "hugged": 41333, + "hugging": 27058, + "hugh": 8723, + "hugh": 15385, + "hughes": 11418, + "hugo": 43935, + "hugo": 17132, + "hugs": 14248, + "huh": 13348, + "huhu": 32134, + "hui": 29978, + "hul": 7911, + "hula": 40145, + "hulk": 17637, + "hull": 25154, + "hull": 10375, + "hulu": 24666, + "hum": 5823, + "hum": 16283, + "human": 3175, + "human": 2751, + "humane": 20220, + "humanitarian": 14170, + "humanities": 24949, + "humanity": 9420, + "humanright": 44385, + "humanrights": 14148, + "humans": 8324, + "humb": 9988, + "humber": 30602, + "humber": 38063, + "humble": 38703, + "humble": 10889, + "humbled": 19682, + "humbling": 39757, + "humbold": 24739, + "humboldt": 31389, + "hume": 38197, + "humid": 14778, + "humid": 27447, + "humidi": 47666, + "humidity": 15469, + "humil": 27205, + "humili": 25332, + "humility": 28535, + "humming": 26515, + "hummingbird": 33072, + "hummus": 31785, + "humor": 29369, + "humor": 11186, + "humorous": 38173, + "humour": 19161, + "hump": 16673, + "hump": 24529, + "humpback": 47662, + "humpday": 27693, + "humph": 19767, + "humphrey": 31549, + "hun": 1616, + "hun": 10795, + "hundre": 8505, + "hundred": 11898, + "hundreds": 8879, + "hung": 13825, + "hungar": 19420, + "hungarian": 23325, + "hungary": 17232, + "hunger": 25565, + "hunger": 10184, + "hungergames": 47507, + "hungover": 41110, + "hungry": 44845, + "hungry": 8451, + "hunk": 33912, + "hunt": 16498, + "hunt": 5774, + "hunted": 37373, + "hunter": 16531, + "hunter": 6099, + "hunters": 16115, + "hunting": 27830, + "hunting": 7507, + "huntington": 23521, + "hunts": 34041, + "huntsville": 34544, + "hur": 2305, + "hur": 34523, + "hurd": 44915, + "hurdle": 27486, + "hurdles": 25440, + "huri": 42486, + "hurley": 30166, + "hurling": 24738, + "huron": 36147, + "hurrah": 40599, + "hurric": 6543, + "hurrican": 36105, + "hurricane": 24051, + "hurricane": 8782, + "hurricanes": 22357, + "hurry": 10921, + "hurst": 44742, + "hurst": 11760, + "hurt": 7413, + "hurting": 24017, + "hurts": 13059, + "hus": 5111, + "hus": 35853, + "husband": 6179, + "husbands": 33612, + "hush": 28728, + "husk": 19246, + "huskers": 26946, + "huskies": 20988, + "husky": 20421, + "huss": 13733, + "hussain": 17940, + "hussein": 31336, + "hust": 27279, + "hustle": 15709, + "huston": 46480, + "hut": 20924, + "hut": 16503, + "hutch": 31018, + "hutch": 33203, + "hutchinson": 35721, + "hutto": 27662, + "hutton": 38321, + "hv": 17209, + "hv": 18593, + "hvac": 27492, + "hw": 27491, + "hw": 18876, + "hwa": 32352, + "hwan": 44390, + "hwang": 46775, + "hwy": 13812, + "hy": 1441, + "hy": 17827, + "hya": 31600, + "hyacin": 47263, + "hyatt": 44856, + "hyatt": 25146, + "hybri": 9084, + "hybrid": 10156, + "hyd": 42382, + "hyde": 46484, + "hyde": 16343, + "hyder": 13960, + "hyderabad": 14801, + "hydr": 8031, + "hydra": 44414, + "hydra": 40420, + "hydrange": 43298, + 
"hydrate": 29628, + "hydrated": 23300, + "hydrating": 47653, + "hydration": 24174, + "hydrau": 26017, + "hydraulic": 26189, + "hydro": 8368, + "hydro": 22595, + "hydrogen": 20974, + "hye": 32724, + "hye": 25792, + "hygi": 16277, + "hygiene": 19591, + "hymn": 41350, + "hyo": 38960, + "hyo": 35078, + "hyp": 16964, + "hype": 30353, + "hype": 11111, + "hyped": 22507, + "hyper": 7997, + "hyper": 22146, + "hypertension": 40698, + "hypno": 23355, + "hypnosis": 48138, + "hypnoti": 40440, + "hypo": 10252, + "hypocr": 30711, + "hypocri": 25606, + "hypocrisy": 26296, + "hypocrite": 44125, + "hypothe": 46966, + "hypothesis": 44956, + "hyster": 24235, + "hysteria": 45965, + "hysterical": 48627, + "hyuk": 20452, + "hyun": 11831, + "hyun": 8589, + "hyundai": 17094, + "hyung": 46901, + "hyung": 16551, + "hz": 32533, + "i": 72, + "i": 328, + "ia": 12486, + "ia": 1073, + "iac": 32838, + "iac": 44063, + "iaf": 40789, + "iah": 35052, + "iain": 30103, + "ial": 11530, + "ial": 1974, + "ials": 20940, + "iam": 3579, + "iam": 11415, + "iambic": 43668, + "iambicpent": 43891, + "iamsrk": 15103, + "ian": 7723, + "ian": 1800, + "ians": 6451, + "iansomerhalder": 47077, + "iart": 18413, + "iartg": 18669, + "ias": 32303, + "ias": 14620, + "ib": 3962, + "ib": 13554, + "iba": 39763, + "ibadan": 44691, + "iban": 47145, + "ibc": 49014, + "ibd": 40732, + "iber": 23814, + "ibi": 12337, + "ibis": 47048, + "ibiza": 13853, + "ible": 37792, + "ibles": 44102, + "ibm": 23415, + "ibm": 13918, + "ibn": 25729, + "ibooks": 46887, + "ibra": 15476, + "ibrahi": 40350, + "ibrahim": 20816, + "ibrox": 46883, + "ibs": 41993, + "ibu": 43587, + "ibu": 46117, + "ic": 535, + "ic": 1029, + "ica": 2576, + "icago": 37492, + "ical": 6082, + "ical": 1110, + "ically": 3161, + "icals": 13999, + "ican": 17653, + "ican": 5246, + "icans": 20511, + "icar": 37211, + "ication": 21629, + "icc": 12945, + "ice": 2739, + "ice": 733, + "iceberg": 33662, + "icec": 13636, + "icecream": 21334, + "iced": 8049, + "icelan": 34114, + "iceland": 46716, + "iceland": 11935, + "icelandic": 34705, + "ices": 1931, + "ich": 5333, + "ich": 1232, + "icha": 31453, + "iche": 28972, + "iche": 21143, + "ichi": 21669, + "ichi": 14647, + "ichick": 45022, + "ichiro": 43787, + "ici": 948, + "ici": 22189, + "icia": 11774, + "icial": 17543, + "icial": 6397, + "ician": 40522, + "ician": 5374, + "icians": 6264, + "iciary": 21329, + "icic": 46006, + "icide": 6558, + "icides": 28253, + "icing": 7676, + "icio": 24207, + "icion": 45905, + "icious": 3325, + "icist": 21165, + "icists": 42171, + "icity": 7243, + "ick": 1168, + "ick": 1068, + "icked": 39799, + "icker": 40357, + "ickers": 30701, + "icki": 35468, + "icking": 6619, + "icks": 3727, + "icky": 11587, + "icn": 44516, + "ico": 13697, + "ico": 3040, + "icom": 17693, + "icom": 29796, + "icon": 13843, + "icon": 5646, + "iconic": 6959, + "icons": 15553, + "icop": 9389, + "icos": 32002, + "ics": 1324, + "ict": 6349, + "icted": 36515, + "iction": 40560, + "icton": 36548, + "icu": 45118, + "icu": 30443, + "icular": 40660, + "icus": 31459, + "icy": 28780, + "icy": 3495, + "icymi": 5315, + "icz": 46387, + "id": 1568, + "id": 1014, + "ida": 11032, + "ida": 11600, + "idad": 22462, + "idaho": 48817, + "idaho": 15165, + "idal": 39684, + "idan": 17929, + "idc": 22386, + "ide": 1909, + "ide": 14104, + "idea": 3612, + "ideal": 8789, + "ideally": 48247, + "ideals": 45096, + "ideas": 4452, + "ident": 7113, + "identi": 6009, + "identical": 25587, + "identification": 23337, + "identified": 15217, + "identifies": 35712, + "identify": 10949, + "identifying": 
23589, + "identities": 34292, + "identity": 8892, + "ideology": 25840, + "iders": 8980, + "ides": 31791, + "idf": 28987, + "idge": 35567, + "idh": 44325, + "idi": 9611, + "idi": 14264, + "idio": 15994, + "idiot": 14087, + "idiots": 20856, + "idk": 8972, + "idle": 34754, + "idlib": 36199, + "ido": 6763, + "ido": 29641, + "idol": 24866, + "idol": 8884, + "idols": 21398, + "idr": 10106, + "idri": 46435, + "idris": 41312, + "ids": 6111, + "idu": 28655, + "idy": 33058, + "idyl": 44879, + "idyllic": 46632, + "ie": 6789, + "ie": 1718, + "iec": 44773, + "ied": 10059, + "ieee": 39860, + "iel": 27875, + "iel": 22729, + "ience": 1542, + "ient": 13115, + "ier": 33173, + "ier": 5912, + "iers": 45060, + "ies": 27912, + "ies": 963, + "iest": 10818, + "if": 8063, + "if": 878, + "ifa": 37574, + "ifc": 36524, + "ife": 41172, + "ife": 19590, + "iff": 35753, + "ification": 35755, + "ified": 41403, + "ift": 31143, + "iftar": 35153, + "ifu": 41523, + "ify": 32807, + "ig": 1089, + "ig": 3072, + "iga": 16493, + "igan": 27468, + "igans": 25419, + "igbo": 44591, + "ige": 10806, + "igen": 33070, + "iger": 30758, + "iger": 20685, + "igers": 40755, + "igers": 48928, + "iggy": 46219, + "iggy": 27604, + "igh": 2712, + "igh": 5451, + "ight": 14571, + "ight": 897, + "ighton": 35292, + "igi": 21901, + "igle": 29912, + "iglesias": 39432, + "ign": 7303, + "ign": 2326, + "ignati": 37573, + "ignatius": 48318, + "igne": 45843, + "ignite": 25210, + "ignition": 36115, + "igno": 15375, + "ignor": 7653, + "ignorance": 22735, + "ignorant": 26933, + "ignore": 12304, + "ignored": 20428, + "ignores": 40129, + "ignoring": 23969, + "igor": 33024, + "igs": 31344, + "igu": 21279, + "ih": 12162, + "ih": 34135, + "ihear": 13043, + "iheart": 30332, + "iheartawards": 18811, + "iheartradio": 25934, + "ihop": 45511, + "ihri": 39108, + "ihrithik": 39326, + "ii": 5103, + "ii": 2329, + "iii": 46236, + "iii": 6572, + "iiii": 20133, + "iiii": 45393, + "iiot": 30704, + "iit": 39330, + "iit": 33238, + "ij": 7337, + "ija": 42802, + "ik": 3903, + "ik": 10177, + "ika": 18188, + "ike": 12329, + "ike": 19696, + "ikea": 20528, + "iker": 38653, + "ikh": 44655, + "ikh": 12758, + "iklan": 32028, + "iklan": 29584, + "iko": 35659, + "iko": 39272, + "ikon": 38543, + "ikon": 19156, + "iku": 17780, + "il": 543, + "il": 958, + "ila": 4344, + "ilah": 32211, + "ilan": 13889, + "ilan": 28076, + "iland": 20957, + "ilation": 16180, + "ilay": 45093, + "ild": 22278, + "ild": 17164, + "ile": 18398, + "ile": 989, + "iled": 3358, + "iler": 22446, + "iler": 3615, + "ilers": 8975, + "iles": 42274, + "ili": 2076, + "ili": 19601, + "ilia": 14855, + "ilian": 10272, + "iliary": 32585, + "ilife": 42835, + "ilike": 44989, + "ilinan": 48497, + "iling": 3299, + "ilio": 47256, + "ilion": 12561, + "ilis": 43442, + "ilit": 11178, + "ilities": 5446, + "ility": 1787, + "ilive": 26478, + "ill": 828, + "ill": 660, + "illa": 8877, + "illa": 3043, + "illac": 17218, + "illage": 48922, + "illard": 21920, + "illary": 33667, + "illas": 23404, + "ille": 18213, + "ille": 5559, + "illed": 2527, + "illeg": 35808, + "illegal": 7983, + "illegally": 24466, + "illegals": 40490, + "iller": 23341, + "iller": 2956, + "illers": 30547, + "illery": 14514, + "illes": 20037, + "illi": 1086, + "illi": 25187, + "illia": 48776, + "illiams": 30301, + "illian": 48775, + "illian": 17355, + "illic": 37152, + "illicit": 40998, + "illie": 26083, + "illin": 35868, + "illing": 2803, + "illini": 28957, + "illino": 8920, + "illinois": 9414, + "illion": 35542, + "illion": 2035, + "illness": 11145, + "illnesses": 33861, + "illo": 
34153, + "illo": 7588, + "illon": 20516, + "ills": 1900, + "illu": 3025, + "illumin": 11446, + "illuminate": 43261, + "illuminated": 28814, + "illuminati": 34551, + "illuminating": 46601, + "illumination": 43680, + "illus": 41386, + "illusion": 20318, + "illusions": 47429, + "illustr": 6268, + "illustrate": 37468, + "illustrated": 13151, + "illustrates": 38129, + "illustrating": 43322, + "illustration": 6052, + "illustrations": 17852, + "illustrator": 16649, + "illustri": 43116, + "illustrious": 44304, + "illy": 11707, + "illy": 9532, + "ilm": 36326, + "ilo": 4220, + "ilo": 14835, + "ilove": 7183, + "ilove": 32914, + "iloveart": 41114, + "ilovemy": 28863, + "iloveyou": 28829, + "ils": 1543, + "ilt": 25334, + "ilton": 28494, + "ilu": 27337, + "ilwx": 43777, + "ily": 4881, + "ily": 1026, + "ilya": 33377, + "ilysm": 29228, + "im": 732, + "im": 1496, + "ima": 2414, + "ima": 6432, + "imac": 40675, + "imacele": 47281, + "imag": 2316, + "image": 24101, + "image": 2867, + "imagery": 22828, + "images": 4952, + "imagin": 18178, + "imaginary": 30417, + "imagination": 13783, + "imaginative": 47233, + "imagine": 35752, + "imagine": 4826, + "imagined": 18478, + "imagines": 47379, + "imaging": 14231, + "imagining": 27384, + "imam": 37552, + "imam": 19024, + "iman": 45684, + "iman": 16247, + "imation": 44566, + "imax": 32066, + "imc": 45616, + "imdanielpadilla": 36357, + "imdb": 30407, + "ime": 44937, + "ime": 31151, + "imel": 31594, + "iment": 37157, + "imer": 21802, + "imes": 47744, + "imf": 28403, + "img": 24157, + "imi": 23559, + "imin": 23942, + "imit": 23462, + "imitation": 41630, + "imma": 19487, + "immac": 25085, + "immaculate": 29649, + "immature": 45531, + "immedi": 7366, + "immediate": 14440, + "immediately": 10108, + "immen": 17278, + "immense": 22722, + "immensely": 35013, + "immer": 13954, + "immerse": 46240, + "immersion": 31861, + "immersive": 27521, + "immigr": 5851, + "immigrant": 16474, + "immigrants": 14460, + "immigration": 9588, + "imminent": 27299, + "immort": 39244, + "immortal": 24717, + "immun": 8961, + "immune": 15606, + "immuni": 44571, + "immunity": 26254, + "immuno": 24361, + "immunology": 44483, + "immunotherapy": 39185, + "imo": 26349, + "imo": 13738, + "imp": 3335, + "imp": 31037, + "impac": 7573, + "impact": 33036, + "impact": 3844, + "impacted": 21424, + "impactful": 41631, + "impacting": 29359, + "impacts": 15069, + "impair": 36451, + "impaired": 28028, + "impairment": 44501, + "impala": 36641, + "impe": 23612, + "impeach": 16874, + "impeach": 43497, + "impeachment": 32979, + "impeachtrump": 38006, + "impecc": 34511, + "impeccable": 40111, + "impending": 34486, + "imper": 7727, + "imperative": 39833, + "imperfect": 46034, + "imperi": 30911, + "imperial": 32425, + "imperial": 12361, + "imperialism": 48855, + "imperson": 25551, + "implant": 33106, + "implants": 32202, + "imple": 7423, + "implement": 17966, + "implementation": 15102, + "implemented": 24315, + "implementing": 22862, + "implic": 15269, + "implications": 19229, + "implo": 40337, + "impo": 45704, + "import": 2336, + "import": 16294, + "importance": 6821, + "important": 2829, + "importantly": 21580, + "imported": 28798, + "imports": 25286, + "impose": 35879, + "imposed": 25871, + "imposing": 42289, + "impossible": 9815, + "impre": 3763, + "impress": 20015, + "impressed": 9689, + "impression": 14468, + "impressionism": 36114, + "impressionist": 44904, + "impressions": 22276, + "impressive": 6634, + "imprint": 43863, + "imprison": 22141, + "imprisoned": 32999, + "imprisonment": 39024, + "impro": 2531, + 
"impromp": 28100, + "impromptu": 28611, + "improv": 22868, + "improve": 4971, + "improved": 9446, + "improvement": 10790, + "improvements": 16320, + "improves": 18035, + "improving": 10381, + "improvis": 32343, + "improvised": 40886, + "impulse": 29683, + "impy": 42690, + "imran": 19647, + "imran": 19212, + "imrankhan": 25956, + "imrankhanpti": 26688, + "ims": 17800, + "imsa": 37262, + "imv": 35731, + "imvkohli": 37136, + "imwith": 26822, + "imwithher": 32651, + "in": 512, + "in": 530, + "ina": 18026, + "ina": 1366, + "inability": 47517, + "inaccurate": 49192, + "inaction": 41916, + "inactive": 49274, + "inadequate": 43403, + "inak": 46549, + "inal": 19178, + "inals": 26438, + "inan": 26204, + "inappropriate": 26722, + "inari": 48620, + "inary": 11337, + "inas": 36731, + "inas": 12362, + "inated": 38530, + "ination": 4706, + "inau": 10832, + "inaugu": 11309, + "inaugur": 11448, + "inaugural": 11340, + "inaugurated": 29011, + "inauguration": 16805, + "inbound": 24420, + "inbox": 18683, + "inc": 14570, + "inc": 4438, + "incan": 45964, + "incar": 18070, + "incarcer": 26334, + "incarcerated": 49178, + "incarceration": 39887, + "incase": 30463, + "ince": 44303, + "incen": 13259, + "incense": 35059, + "incentive": 29024, + "incentives": 29813, + "inception": 36653, + "inch": 6523, + "incheon": 30645, + "inches": 10809, + "inci": 5747, + "incidence": 43371, + "incident": 10103, + "incidents": 22120, + "incindia": 26161, + "inciner": 46434, + "incl": 27857, + "incl": 13338, + "inclined": 45470, + "inclu": 1738, + "include": 5942, + "included": 7414, + "includes": 6197, + "including": 2814, + "inclusion": 12079, + "inclusive": 13393, + "income": 8044, + "incoming": 15416, + "incomparable": 36027, + "incompetent": 45069, + "incomplete": 34040, + "incon": 42372, + "inconvenience": 40563, + "incorpor": 19335, + "incorporate": 34168, + "incorporated": 29494, + "incorporating": 40303, + "incorrect": 31872, + "incre": 1870, + "increase": 5230, + "increased": 9156, + "increases": 13797, + "increasing": 10270, + "increasingly": 16106, + "incredi": 2883, + "incredible": 22128, + "incredible": 3457, + "incredibleindia": 24680, + "incredibles": 48641, + "incredibly": 9513, + "incu": 38830, + "incub": 24587, + "incubator": 35736, + "incumb": 32246, + "incumbent": 38038, + "incur": 42356, + "ind": 5386, + "ind": 4655, + "inda": 15710, + "inde": 2645, + "indeed": 10031, + "indefin": 29501, + "indefinitely": 43750, + "independ": 4147, + "independence": 23117, + "independence": 7955, + "independenceday": 25971, + "independent": 33844, + "independent": 7088, + "independently": 39831, + "inder": 29225, + "index": 35209, + "index": 9458, + "indhoven": 44229, + "indi": 1098, + "indi": 46536, + "india": 27067, + "india": 1762, + "indian": 7685, + "indian": 3606, + "indiana": 8615, + "indianapolis": 17196, + "indianfootball": 45979, + "indians": 10271, + "indic": 7136, + "indicate": 26679, + "indicated": 39416, + "indicates": 29412, + "indication": 38539, + "indicator": 24776, + "indicators": 30054, + "indicted": 34992, + "indictment": 42278, + "indie": 5260, + "indie": 9383, + "indiedev": 10863, + "indiefilm": 22588, + "indiegame": 17969, + "indiegamedev": 40466, + "indiegames": 35864, + "indiegogo": 38057, + "indies": 23618, + "indiffe": 41372, + "indigen": 8348, + "indigenous": 9303, + "indigo": 21002, + "indira": 43887, + "indirec": 26398, + "indirect": 35416, + "indivi": 5649, + "individu": 9574, + "individual": 8512, + "individually": 33782, + "individuals": 11990, + "indo": 26303, + "indo": 18297, + "indom": 
42926, + "indone": 6180, + "indonesia": 7229, + "indonesian": 19593, + "indoor": 44478, + "indoor": 9546, + "indoors": 22973, + "indore": 46143, + "indu": 2298, + "induc": 7973, + "induced": 24103, + "inducted": 20596, + "inductee": 39558, + "inductees": 44796, + "induction": 18338, + "indul": 19402, + "indulg": 28388, + "indulge": 24851, + "indulgence": 40856, + "indulgent": 49147, + "industri": 5082, + "industrial": 30853, + "industrial": 7520, + "industries": 11700, + "industry": 47407, + "industry": 3318, + "indv": 16942, + "indy": 9821, + "indy": 10098, + "indycar": 20484, + "indyref": 22569, + "ine": 855, + "ine": 715, + "ineau": 38122, + "inec": 45214, + "ined": 2038, + "inee": 43252, + "inee": 7986, + "inees": 13056, + "ineffe": 47202, + "inely": 18234, + "inem": 48876, + "inema": 29232, + "inen": 44365, + "inequalities": 45507, + "inequality": 17372, + "iner": 17438, + "iner": 5155, + "iners": 41863, + "ines": 2137, + "inese": 35966, + "iness": 1463, + "inet": 8121, + "inette": 38911, + "inev": 19527, + "inevit": 45871, + "inevitable": 25004, + "inews": 24300, + "inexpensive": 38614, + "iney": 30254, + "inez": 12700, + "inf": 1529, + "inf": 35241, + "infamous": 18688, + "infan": 17219, + "infant": 19192, + "infantry": 21655, + "infants": 34726, + "infe": 7164, + "infec": 26088, + "infected": 26136, + "infection": 14774, + "infections": 22227, + "infectious": 29157, + "infeld": 25035, + "infer": 16258, + "inferno": 31290, + "infertility": 40701, + "infield": 48933, + "infiltr": 28683, + "infin": 6246, + "infinite": 12748, + "infiniti": 34644, + "infinity": 34863, + "infinity": 12895, + "infl": 7627, + "inflam": 16080, + "inflammation": 24893, + "inflammatory": 26831, + "inflatable": 30135, + "inflation": 17497, + "inflicted": 48188, + "influ": 4835, + "influen": 13229, + "influence": 9199, + "influenced": 21183, + "influencer": 25013, + "influencers": 29891, + "influences": 24926, + "influencing": 45126, + "influential": 17553, + "influenza": 39897, + "info": 5680, + "info": 2222, + "infographic": 10076, + "infographics": 33172, + "infor": 31773, + "inform": 10241, + "inform": 19449, + "informal": 25705, + "informat": 29625, + "informatics": 35685, + "information": 3204, + "informative": 19364, + "informed": 13876, + "informing": 45388, + "informs": 48440, + "infosec": 17863, + "infr": 29718, + "infra": 7312, + "infra": 45877, + "infrared": 22867, + "infrastructure": 9034, + "infringe": 44882, + "infringement": 48712, + "infront": 37668, + "infu": 15048, + "infuri": 48461, + "infused": 21461, + "infusion": 43464, + "ing": 653, + "ing": 519, + "inga": 15233, + "ingco": 40444, + "ingday": 16561, + "ingdon": 38731, + "inge": 11790, + "inge": 7071, + "inged": 30046, + "ingen": 19088, + "ingeni": 36884, + "inger": 33883, + "inger": 3541, + "ingfor": 33430, + "ingh": 9170, + "ingh": 30495, + "ingham": 24497, + "ingham": 4291, + "inghamshire": 39289, + "inghour": 42728, + "inging": 4066, + "ingl": 45662, + "ingle": 22228, + "ingle": 17005, + "ingles": 24490, + "ingley": 44428, + "inglis": 46327, + "ingly": 4796, + "ingnow": 34766, + "ingo": 30175, + "ingo": 9012, + "ingra": 45165, + "ingrad": 44124, + "ingram": 26998, + "ingredi": 9272, + "ingredient": 19799, + "ingredients": 11788, + "ingrid": 33496, + "ings": 895, + "ingthe": 20170, + "ingtips": 39373, + "ington": 11846, + "ington": 2156, + "ingu": 8714, + "ingual": 22795, + "ingue": 36838, + "ingui": 12788, + "inguish": 36146, + "inha": 32612, + "inhabit": 36189, + "inhabitants": 44968, + "inhal": 30786, + "inhe": 32617, + "inher": 
24611, + "inherent": 47327, + "inherit": 34322, + "inheritance": 39341, + "inherited": 39111, + "inhi": 25557, + "inhibit": 32196, + "inho": 12984, + "ini": 6154, + "ini": 3581, + "inian": 36638, + "inim": 38717, + "inindia": 34021, + "ining": 1389, + "inist": 30976, + "init": 42670, + "initi": 4580, + "initial": 13980, + "initially": 28123, + "initials": 48794, + "initiated": 27756, + "initiation": 41009, + "initiative": 8152, + "initiatives": 16549, + "inity": 22126, + "inj": 5112, + "injec": 13688, + "injection": 21438, + "inju": 5006, + "injured": 7505, + "injuries": 9481, + "injury": 6223, + "injustice": 20541, + "ink": 4547, + "ink": 967, + "inka": 40685, + "inked": 29356, + "inki": 46176, + "inkigayo": 47882, + "inking": 37586, + "inks": 20966, + "inktober": 9387, + "inland": 21943, + "inlet": 35161, + "inline": 45004, + "inlove": 28415, + "inmate": 32341, + "inmates": 28216, + "inmy": 42657, + "inn": 27260, + "inn": 5569, + "inna": 35088, + "inner": 24512, + "inner": 6955, + "inning": 4415, + "innings": 11580, + "innis": 44059, + "inno": 7961, + "innocence": 26383, + "innocent": 11241, + "innov": 2890, + "innovate": 24549, + "innovation": 33063, + "innovation": 4272, + "innovations": 18817, + "innovative": 8494, + "innovator": 34735, + "innovators": 27834, + "ino": 4211, + "ino": 2691, + "inoa": 25649, + "inos": 21828, + "inous": 47801, + "inox": 22698, + "input": 16952, + "inputs": 48763, + "inqu": 10628, + "inqui": 18527, + "inquirer": 45172, + "inquiries": 29469, + "inquiry": 15865, + "inquis": 31171, + "inr": 36325, + "ins": 12786, + "ins": 1041, + "insan": 7875, + "insane": 10260, + "insanely": 27846, + "insanity": 26645, + "inscribed": 49168, + "inscription": 41127, + "insec": 15744, + "insect": 21297, + "insects": 18714, + "insecure": 35112, + "insecurity": 36964, + "inser": 13830, + "insert": 18807, + "insi": 3453, + "inside": 19141, + "inside": 2912, + "insider": 13300, + "insiders": 32171, + "insig": 40503, + "insight": 8795, + "insightful": 20354, + "insights": 8729, + "insignia": 48864, + "insist": 35504, + "insisted": 40423, + "insists": 27255, + "inski": 32630, + "insky": 24607, + "insol": 42366, + "insom": 21755, + "insomni": 42040, + "insomnia": 30598, + "inson": 21007, + "insp": 1597, + "inspec": 7915, + "inspect": 40815, + "inspecting": 40565, + "inspection": 15142, + "inspections": 39513, + "inspector": 20514, + "inspir": 2573, + "inspiration": 4195, + "inspirational": 41936, + "inspirational": 9855, + "inspirations": 35093, + "inspire": 27901, + "inspire": 8583, + "inspired": 39849, + "inspired": 3516, + "inspires": 17245, + "inspiring": 41847, + "inspiring": 5705, + "inspo": 26897, + "inst": 1264, + "inst": 1581, + "insta": 22411, + "insta": 11694, + "instability": 41377, + "instac": 46678, + "instaf": 33800, + "instag": 14612, + "instagood": 23718, + "instagram": 27910, + "instagram": 2659, + "instal": 38805, + "install": 6940, + "install": 11168, + "installation": 9358, + "installations": 27909, + "installed": 8807, + "installing": 18301, + "installment": 25315, + "installs": 45568, + "instalment": 47766, + "instance": 34572, + "instant": 38810, + "instant": 10635, + "instantly": 17703, + "instap": 23758, + "instapic": 34378, + "instaweather": 43078, + "instaweatherpro": 43150, + "inste": 3571, + "instead": 4191, + "instein": 13421, + "instem": 27030, + "instin": 23382, + "instinct": 30544, + "institu": 4257, + "institute": 5861, + "institutes": 43674, + "institution": 18823, + "institutional": 27442, + "institutions": 15207, + "instore": 41679, + "instru": 
4544, + "instruc": 19648, + "instruction": 19407, + "instructional": 31022, + "instructions": 17040, + "instructor": 16087, + "instructors": 31998, + "instrument": 42196, + "instrument": 15806, + "instrumental": 23041, + "instruments": 14793, + "instyle": 41321, + "insu": 8805, + "insul": 9615, + "insulated": 42051, + "insulation": 28194, + "insulin": 29311, + "insult": 26673, + "insulting": 39646, + "insults": 40451, + "insur": 5024, + "insurance": 5870, + "insured": 31321, + "insurers": 43142, + "insurtech": 28716, + "int": 1828, + "int": 1207, + "inta": 38314, + "intact": 26870, + "intake": 19539, + "intan": 47695, + "inte": 1598, + "inte": 41900, + "intech": 26504, + "inted": 6147, + "integr": 5151, + "integral": 27018, + "integrate": 25735, + "integrated": 12797, + "integrating": 31555, + "integration": 12583, + "integrity": 14791, + "intel": 11778, + "intel": 11426, + "intellec": 13281, + "intellect": 47828, + "intellectu": 31966, + "intellectual": 18069, + "intelli": 5324, + "intellig": 5632, + "intelligence": 6846, + "intelligent": 14063, + "inten": 2967, + "intend": 36674, + "intended": 16812, + "intense": 10258, + "intensi": 22928, + "intensity": 19956, + "intensive": 21049, + "intent": 18881, + "intention": 26786, + "intentional": 29536, + "intentionally": 31215, + "intentions": 26710, + "inter": 1006, + "inter": 10093, + "interact": 21736, + "interacting": 35045, + "interaction": 17650, + "interactions": 22162, + "interactive": 9456, + "intercep": 23676, + "interception": 48762, + "interceptions": 45313, + "interchange": 34222, + "intercontinental": 31983, + "interdisciplinary": 38132, + "intere": 2008, + "interest": 5095, + "interested": 4620, + "interesting": 3628, + "interests": 16425, + "interface": 18753, + "interfaith": 38399, + "interference": 29099, + "interim": 19509, + "interior": 10700, + "interior": 7305, + "interiordesign": 12902, + "interiors": 14836, + "intermedi": 20246, + "intermediate": 24304, + "intermission": 44805, + "intermitt": 44946, + "intern": 9976, + "intern": 14068, + "internal": 11285, + "internally": 41134, + "internation": 42534, + "international": 8566, + "international": 2436, + "internationaldayof": 41518, + "internationally": 24059, + "internationalwomensday": 17682, + "interne": 32713, + "internet": 30180, + "internet": 4757, + "internetof": 44449, + "internetofthings": 45925, + "interns": 19902, + "internship": 16661, + "internships": 39410, + "interoper": 45754, + "interpre": 11162, + "interpret": 49154, + "interpret": 40459, + "interpretation": 20652, + "interpreted": 42157, + "interpreting": 46525, + "interro": 29548, + "interrup": 21609, + "interrupt": 48449, + "interrupted": 30288, + "intersec": 45246, + "intersection": 19210, + "interstate": 21963, + "interstellar": 41506, + "interval": 36032, + "intervals": 44884, + "interven": 18245, + "intervention": 16804, + "interventions": 28848, + "interview": 2885, + "interviewed": 11688, + "interviewing": 16399, + "interviews": 9910, + "intestin": 37938, + "intestinal": 38896, + "inthe": 7486, + "inti": 14459, + "intim": 38832, + "intimacy": 46430, + "intimate": 16382, + "intimid": 24041, + "intimidating": 44405, + "intimidation": 49258, + "inting": 15571, + "intl": 38186, + "intl": 14224, + "intment": 9020, + "intments": 21420, + "into": 35235, + "into": 1095, + "intoler": 28534, + "intolerance": 37808, + "intothe": 38511, + "intra": 20922, + "intrac": 46195, + "intram": 40956, + "intre": 29397, + "intrepid": 39127, + "intri": 15421, + "intric": 23763, + "intricate": 29616, + "intrigu": 
18856, + "intrigue": 45140, + "intrigued": 40034, + "intriguing": 24334, + "intrin": 45181, + "intro": 2999, + "intro": 13224, + "introduc": 3621, + "introduce": 9813, + "introduced": 10446, + "introduces": 12933, + "introducing": 6256, + "introduction": 11812, + "introductory": 38121, + "intru": 22949, + "ints": 2514, + "intu": 17225, + "intuition": 40897, + "intuitive": 35224, + "inu": 21131, + "inuit": 41250, + "inus": 45857, + "inv": 2279, + "inv": 43786, + "inva": 10084, + "invade": 34609, + "invaded": 32596, + "invaders": 35188, + "invading": 40101, + "invali": 31592, + "invalid": 46998, + "invaluable": 33976, + "invasi": 38100, + "invasion": 13378, + "invasive": 19554, + "inve": 2024, + "inven": 26233, + "invent": 11665, + "invent": 23558, + "invented": 14100, + "invention": 23607, + "inventions": 44914, + "inventor": 22836, + "inventory": 19444, + "inver": 12061, + "inverness": 33080, + "inverte": 46397, + "inverted": 40709, + "invest": 4180, + "invest": 9716, + "invested": 22536, + "investig": 4626, + "investigate": 15703, + "investigated": 29180, + "investigates": 29621, + "investigating": 13713, + "investigation": 8194, + "investigations": 24020, + "investigative": 30233, + "investigator": 30528, + "investigators": 24121, + "investin": 40195, + "investing": 10554, + "investment": 5605, + "investments": 14675, + "investor": 15490, + "investors": 10486, + "invests": 38378, + "invic": 25253, + "invigor": 48722, + "invin": 30252, + "invincible": 38052, + "invisible": 16093, + "invit": 12454, + "invitation": 15032, + "invitational": 14511, + "invitations": 40120, + "invite": 8109, + "invited": 7731, + "invites": 16034, + "inviting": 14349, + "invo": 29417, + "invol": 4000, + "involve": 26325, + "involved": 5320, + "involvement": 19502, + "involves": 22652, + "involving": 14786, + "inwx": 35674, + "iny": 23257, + "inyour": 47954, + "io": 3167, + "io": 3752, + "ioc": 43018, + "iom": 33000, + "iom": 31135, + "ion": 14871, + "ion": 3668, + "ions": 26289, + "ior": 7354, + "ior": 2498, + "iority": 46016, + "iors": 6427, + "ios": 6614, + "iot": 32694, + "iot": 6627, + "iota": 37294, + "ious": 6994, + "iously": 38233, + "iow": 7439, + "iowa": 38847, + "iowa": 8290, + "ip": 1719, + "ip": 8600, + "ipa": 11199, + "ipad": 39067, + "ipad": 7491, + "ipads": 35281, + "ipc": 41981, + "iphone": 26030, + "iphone": 4314, + "iphones": 37561, + "ipl": 13440, + "ipment": 37824, + "ipo": 40218, + "ipo": 24090, + "ipod": 17889, + "ipp": 31706, + "ips": 26910, + "ipsw": 22221, + "ipswich": 24494, + "iq": 15554, + "iq": 19996, + "iqbal": 33553, + "ir": 582, + "ir": 742, + "ira": 4923, + "ira": 5371, + "irah": 35724, + "iran": 19273, + "iran": 5075, + "irandeal": 46533, + "irani": 37984, + "iranian": 14158, + "iraq": 8543, + "iraqi": 18617, + "irc": 41527, + "ird": 2770, + "ire": 3013, + "ire": 1454, + "ired": 32728, + "ired": 2995, + "ireland": 32806, + "ireland": 4157, + "irene": 21600, + "ires": 12435, + "irez": 21581, + "irgc": 47942, + "iri": 2155, + "iri": 13880, + "irical": 33366, + "irie": 42979, + "irina": 46664, + "iring": 10169, + "iris": 16437, + "irish": 9386, + "irish": 4889, + "irl": 34494, + "irl": 8570, + "irling": 26493, + "irls": 24344, + "irma": 22406, + "irn": 42603, + "iro": 23209, + "iro": 7280, + "iron": 7699, + "iron": 5391, + "ironic": 24518, + "ironically": 36779, + "ironing": 46655, + "ironman": 20330, + "irons": 30032, + "irony": 20681, + "irport": 27769, + "irr": 24641, + "irrational": 47413, + "irregular": 38692, + "irrelevant": 34677, + "irresi": 31200, + "irresistible": 35252, 
+ "irresponsible": 44714, + "irri": 21484, + "irrigation": 23761, + "irrit": 24218, + "irs": 6086, + "irst": 32701, + "iru": 48206, + "irvin": 47053, + "irvine": 24201, + "irving": 19738, + "irwin": 23750, + "iry": 7239, + "is": 595, + "is": 533, + "isa": 11034, + "isa": 6536, + "isaac": 37544, + "isaac": 13659, + "isab": 13357, + "isabel": 27466, + "isabella": 26192, + "isabelle": 31072, + "isable": 46631, + "isai": 15365, + "isaiah": 17952, + "isak": 40619, + "isance": 46893, + "isation": 7194, + "isback": 43811, + "isc": 39316, + "isch": 47888, + "isco": 5736, + "iscoming": 26458, + "isd": 46816, + "isd": 12002, + "ise": 7669, + "ise": 1479, + "ised": 2861, + "iselle": 48491, + "iser": 23080, + "iser": 5626, + "isers": 34879, + "ises": 5153, + "isf": 44036, + "isgreat": 34595, + "ish": 6844, + "ish": 1061, + "isha": 28050, + "ishable": 37949, + "ished": 35341, + "ishere": 46053, + "ishi": 26224, + "ishq": 27996, + "ishqba": 32503, + "ishqbaaaz": 36591, + "isi": 7233, + "isi": 17880, + "isil": 34636, + "isin": 37676, + "ising": 3426, + "isis": 7531, + "isk": 30171, + "isl": 31368, + "isla": 22807, + "islam": 6003, + "islam": 8770, + "islamabad": 19959, + "islamic": 31627, + "islamic": 9552, + "islamist": 38798, + "islamophobia": 43459, + "island": 13408, + "island": 2619, + "islander": 45651, + "islanders": 27804, + "islands": 7145, + "islay": 49279, + "isle": 19082, + "isle": 11849, + "isleof": 24718, + "isles": 21816, + "islife": 26433, + "islington": 34945, + "ism": 47730, + "ism": 1935, + "isma": 43937, + "ismail": 36140, + "isme": 43570, + "ismo": 41926, + "isms": 18700, + "isn": 2923, + "isner": 48246, + "isnow": 43694, + "isnt": 19416, + "iso": 2462, + "iso": 12263, + "isol": 11414, + "isolated": 19044, + "isolation": 26400, + "ison": 12949, + "ison": 4553, + "isons": 33318, + "isoo": 35857, + "isp": 31397, + "isp": 39041, + "isra": 3591, + "israel": 20837, + "israel": 4779, + "israeli": 8994, + "israelis": 45713, + "isreal": 47147, + "isro": 44841, + "iss": 11738, + "iss": 4950, + "issa": 38579, + "issa": 7560, + "issan": 49358, + "issance": 40828, + "issant": 38828, + "isse": 18986, + "ission": 37946, + "issu": 2049, + "issue": 3202, + "issued": 9246, + "issues": 4082, + "issuing": 37226, + "ist": 9751, + "ist": 2304, + "istanbul": 12258, + "istandwith": 33820, + "iste": 32563, + "ister": 14555, + "isthe": 46748, + "istic": 29556, + "ists": 8426, + "isu": 17030, + "isu": 23328, + "it": 529, + "it": 585, + "ita": 36920, + "ita": 2864, + "itable": 8915, + "ital": 2306, + "ital": 1660, + "itali": 11644, + "italia": 11025, + "italian": 20264, + "italian": 5175, + "italians": 44744, + "italk": 32894, + "italy": 4052, + "itan": 18383, + "itans": 40711, + "itar": 47161, + "itarian": 11599, + "itary": 17604, + "itas": 31634, + "itas": 13436, + "itate": 42457, + "itated": 36744, + "itation": 5070, + "itative": 22892, + "itc": 36449, + "itch": 2387, + "itch": 8147, + "itchen": 32664, + "itchy": 41980, + "ite": 2732, + "ite": 802, + "iteam": 37828, + "itec": 3099, + "itec": 43936, + "itech": 44215, + "itech": 23040, + "ited": 8603, + "ited": 1108, + "itel": 44638, + "itely": 4605, + "item": 8532, + "items": 6207, + "iter": 7938, + "iter": 19773, + "iteracy": 39634, + "iterate": 43106, + "iteration": 38790, + "ites": 2454, + "itez": 42131, + "itf": 35436, + "itfc": 36519, + "ith": 6133, + "ith": 1757, + "ithaca": 46257, + "iti": 760, + "iti": 6165, + "itia": 22634, + "itian": 23365, + "itic": 11950, + "itical": 48767, + "itics": 33967, + "ities": 41423, + "ities": 1480, + "itim": 15676, + 
"itiner": 32803, + "itinerary": 41564, + "iting": 1257, + "ition": 25263, + "ition": 1104, + "itions": 5540, + "itious": 13329, + "itis": 33539, + "itis": 8388, + "itive": 3067, + "itly": 42240, + "ito": 22167, + "ito": 4661, + "iton": 21119, + "itor": 47267, + "itor": 4584, + "itors": 22005, + "itos": 24560, + "its": 7140, + "its": 902, + "itsa": 45032, + "itself": 7290, + "itsme": 41125, + "itss": 47040, + "itt": 1031, + "itt": 11228, + "itta": 21233, + "itte": 31962, + "itted": 24429, + "itten": 30014, + "itten": 4343, + "itter": 11456, + "itters": 13082, + "itti": 28629, + "ittin": 25646, + "itting": 3147, + "ittle": 24208, + "ittle": 21366, + "ittles": 38989, + "itton": 25707, + "itty": 35096, + "itu": 1668, + "itu": 32128, + "itude": 43382, + "itude": 5012, + "itudes": 20459, + "itunes": 7007, + "itup": 35838, + "iture": 25547, + "itus": 24364, + "itutes": 32883, + "itv": 20159, + "itv": 12805, + "ity": 2480, + "ity": 696, + "itya": 32055, + "itz": 14544, + "itz": 7807, + "iu": 14292, + "iu": 15575, + "ium": 10762, + "ius": 6740, + "iv": 6775, + "iv": 9315, + "iva": 42463, + "ivan": 15544, + "ivan": 15689, + "ivanka": 37914, + "ive": 26885, + "ive": 8653, + "ived": 15654, + "iver": 36849, + "iver": 44254, + "ives": 27333, + "ivf": 39159, + "iving": 45136, + "ivory": 16776, + "ivote": 45835, + "ivy": 36939, + "ivy": 16045, + "iw": 13058, + "iw": 46604, + "iwant": 42747, + "iwd": 16815, + "iwm": 44237, + "ix": 13272, + "ix": 8756, + "iy": 13704, + "iya": 18595, + "iyaki": 48395, + "iz": 2845, + "iz": 8407, + "iza": 37704, + "ization": 10847, + "ize": 10885, + "ized": 7690, + "izen": 34776, + "izer": 23895, + "izes": 45434, + "izing": 17354, + "izo": 46910, + "izz": 31779, + "izz": 46128, + "izzy": 28861, + "j": 73, + "j": 329, + "ja": 1586, + "ja": 2641, + "jaan": 25052, + "jab": 8059, + "jab": 9439, + "jac": 2293, + "jac": 30198, + "jace": 43286, + "jack": 2679, + "jack": 3267, + "jacked": 27923, + "jacket": 6164, + "jackets": 14745, + "jacki": 47418, + "jackie": 28023, + "jackie": 11716, + "jacking": 40929, + "jackman": 35723, + "jackpot": 23926, + "jacks": 19649, + "jackson": 12321, + "jackson": 4363, + "jacksonville": 19263, + "jaco": 6840, + "jacob": 14385, + "jacob": 9222, + "jacobs": 17482, + "jacobson": 46826, + "jacqu": 14495, + "jacqueline": 22843, + "jacques": 17799, + "jad": 12976, + "jad": 38691, + "jada": 37416, + "jade": 25123, + "jade": 14513, + "jaden": 37174, + "jadine": 37445, + "jae": 16869, + "jae": 15765, + "jaejoong": 43610, + "jaf": 19362, + "jag": 7984, + "jag": 36236, + "jagan": 48530, + "jagger": 30835, + "jags": 31086, + "jagu": 10096, + "jaguar": 44777, + "jaguar": 14757, + "jaguars": 21854, + "jah": 20067, + "jah": 11084, + "jahan": 44404, + "jahan": 47827, + "jai": 10542, + "jai": 13819, + "jail": 18574, + "jail": 9332, + "jailbreak": 45990, + "jailed": 19456, + "jails": 47833, + "jaime": 24716, + "jain": 21999, + "jaipur": 23593, + "jais": 48607, + "jait": 28910, + "jaitley": 32776, + "jak": 9225, + "jak": 30589, + "jakarta": 15471, + "jake": 13140, + "jake": 7419, + "jakob": 47358, + "jal": 8380, + "jal": 26773, + "jalan": 27270, + "jalap": 49081, + "jalape": 34263, + "jalapeño": 43017, + "jalen": 33548, + "jam": 1434, + "jam": 5201, + "jama": 8977, + "jama": 35366, + "jamaica": 13019, + "jamaican": 25144, + "jamal": 26108, + "jambo": 35599, + "jamboree": 38506, + "jame": 12341, + "james": 6963, + "james": 2392, + "jamesbond": 44704, + "jamesc": 47004, + "jameson": 31731, + "jami": 15092, + "jamie": 16454, + "jamie": 8078, + "jamiedor": 34310, + 
"jamiedornan": 34896, + "jammed": 35590, + "jammin": 35223, + "jamming": 25862, + "jammu": 25926, + "jams": 20243, + "jan": 1891, + "jan": 3334, + "jana": 18182, + "jane": 12389, + "jane": 6736, + "janeiro": 31740, + "janet": 29665, + "janet": 15872, + "jang": 41526, + "jang": 22074, + "jani": 22606, + "janice": 36048, + "janine": 46896, + "janis": 44233, + "jann": 35377, + "jans": 22578, + "jansen": 45354, + "janu": 3623, + "january": 3697, + "jap": 2299, + "jap": 49062, + "japan": 4502, + "japan": 3400, + "japanese": 27211, + "japanese": 4925, + "japs": 42121, + "jar": 5120, + "jar": 10837, + "jard": 25778, + "jardin": 37371, + "jare": 17654, + "jared": 35597, + "jared": 12571, + "jaredle": 36739, + "jaredleto": 37106, + "jaro": 35505, + "jarpad": 44497, + "jarre": 23385, + "jarrett": 30531, + "jars": 27583, + "jarvis": 29286, + "jas": 4492, + "jas": 17559, + "jasmin": 42989, + "jasmin": 47700, + "jasmine": 17056, + "jason": 10009, + "jason": 5395, + "jasper": 19827, + "jat": 26106, + "jau": 26932, + "jauregui": 48175, + "jav": 6234, + "java": 12918, + "javascri": 16289, + "javascript": 16423, + "jave": 46218, + "javed": 42268, + "javelin": 41701, + "javi": 47627, + "javier": 23307, + "jaw": 14804, + "jaw": 17307, + "jawa": 44790, + "jaws": 25491, + "jax": 22348, + "jax": 12390, + "jay": 3427, + "jay": 4155, + "jaya": 21960, + "jayanti": 37732, + "jaye": 45703, + "jayne": 35228, + "jays": 12393, + "jaz": 3465, + "jaz": 32874, + "jazeera": 38260, + "jazz": 11488, + "jazz": 4528, + "jazzfest": 36683, + "jazzy": 28191, + "jb": 21915, + "jb": 13637, + "jc": 14991, + "jc": 11517, + "jd": 18289, + "jd": 14125, + "jdm": 42013, + "je": 1013, + "je": 8776, + "jeal": 9964, + "jealous": 11093, + "jealousy": 37654, + "jean": 13943, + "jean": 6473, + "jeanette": 48167, + "jeanne": 29201, + "jeans": 10157, + "jeb": 35101, + "jec": 1347, + "ject": 6070, + "jed": 12166, + "jed": 38748, + "jeddah": 40982, + "jedi": 16681, + "jee": 29250, + "jee": 14870, + "jeep": 16593, + "jeep": 11286, + "jeeplife": 43100, + "jeet": 45542, + "jeet": 30944, + "jef": 10276, + "jeff": 6245, + "jeff": 5550, + "jefferson": 44711, + "jefferson": 13976, + "jeffery": 41470, + "jeffree": 45994, + "jeffrey": 32886, + "jeffrey": 16027, + "jeho": 42437, + "jeky": 43893, + "jekyll": 49405, + "jel": 9794, + "jelena": 48218, + "jelly": 19110, + "jelly": 13762, + "jellyfish": 30988, + "jem": 46326, + "jem": 37530, + "jen": 2554, + "jen": 12997, + "jenkins": 16162, + "jenn": 33921, + "jenn": 29869, + "jenna": 17125, + "jenner": 14260, + "jenni": 6774, + "jennie": 28875, + "jennifer": 19786, + "jennifer": 8613, + "jennings": 21564, + "jenny": 20165, + "jenny": 13414, + "jens": 40806, + "jensen": 35558, + "jensen": 19004, + "jensenackles": 41011, + "jeon": 45200, + "jeon": 43337, + "jeong": 47146, + "jeong": 39264, + "jeopar": 22988, + "jeopardy": 29613, + "jer": 2310, + "jer": 35307, + "jere": 5614, + "jeremi": 22362, + "jeremiah": 27301, + "jeremy": 14656, + "jeremy": 8127, + "jeremycorbyn": 37484, + "jeric": 25084, + "jericho": 28892, + "jerk": 23917, + "jerky": 40079, + "jermaine": 40722, + "jerome": 19876, + "jerry": 18163, + "jerry": 9164, + "jersey": 21921, + "jersey": 4471, + "jerseys": 15518, + "jerus": 12257, + "jerusalem": 12557, + "jes": 7686, + "jes": 35826, + "jess": 5313, + "jess": 13758, + "jesse": 23112, + "jesse": 11770, + "jessi": 24373, + "jessic": 14881, + "jessica": 45421, + "jessica": 8178, + "jessie": 19424, + "jester": 44225, + "jesu": 19777, + "jesuit": 33234, + "jesus": 4070, + "jet": 11515, + "jet": 6565, + 
"jetblue": 45021, + "jeter": 38450, + "jets": 38584, + "jets": 10025, + "jett": 44541, + "jetty": 46382, + "jew": 27450, + "jewel": 4880, + "jewel": 17591, + "jewell": 9777, + "jewellers": 46265, + "jewellery": 11192, + "jewelry": 28018, + "jewelry": 6039, + "jewels": 20205, + "jewish": 29594, + "jewish": 9104, + "jews": 14200, + "jf": 31130, + "jf": 33718, + "jfc": 43652, + "jfk": 18486, + "jg": 41986, + "jg": 35138, + "jh": 24858, + "jh": 21485, + "jha": 47012, + "jha": 38092, + "jhal": 45695, + "jhar": 31546, + "jharkhand": 39001, + "jhb": 34631, + "ji": 3252, + "ji": 2697, + "jia": 32907, + "jian": 33427, + "jiang": 43309, + "jiang": 25762, + "jic": 48350, + "jic": 40215, + "jid": 24403, + "jie": 40005, + "jig": 15136, + "jig": 47430, + "jigsaw": 32987, + "jiha": 23194, + "jihad": 29637, + "jihoon": 44765, + "jil": 36225, + "jill": 24136, + "jill": 15254, + "jillian": 37820, + "jim": 3190, + "jim": 4550, + "jima": 20679, + "jimcantore": 43950, + "jimenez": 35947, + "jimi": 30565, + "jimin": 16286, + "jimmie": 45679, + "jimmy": 12215, + "jimmy": 6817, + "jimmyfallon": 45265, + "jin": 7927, + "jin": 8485, + "jind": 40609, + "jing": 34933, + "jing": 28607, + "jingle": 28699, + "jinnah": 43141, + "jinping": 39308, + "jinx": 42977, + "jinyoung": 38051, + "jio": 40501, + "jis": 25988, + "jis": 23515, + "jisoo": 43070, + "jit": 11947, + "jit": 20308, + "jitsu": 24530, + "jiu": 43351, + "jiu": 44123, + "jj": 12502, + "jj": 12790, + "jk": 20189, + "jk": 9702, + "jkt": 21494, + "jl": 25027, + "jl": 22911, + "jlo": 31017, + "jm": 24044, + "jm": 18657, + "jn": 24576, + "jn": 21717, + "jnr": 37145, + "jnu": 47142, + "jo": 683, + "jo": 3804, + "joachim": 48979, + "joan": 28064, + "joan": 12710, + "joann": 35484, + "joanna": 25357, + "joanne": 43736, + "joanne": 25092, + "joao": 45666, + "joaqu": 25140, + "joaquin": 30745, + "job": 13114, + "job": 2075, + "jobs": 3735, + "jobsearch": 45459, + "joburg": 39343, + "jocel": 36879, + "jocelyn": 47259, + "jock": 34485, + "jockey": 20126, + "jodh": 48689, + "jodi": 36812, + "jodi": 26888, + "jodie": 33100, + "jody": 32959, + "joe": 9309, + "joe": 3305, + "joel": 19819, + "joel": 11429, + "joes": 34756, + "joey": 16281, + "joey": 10455, + "jog": 37967, + "jog": 31691, + "jogging": 37922, + "joh": 1201, + "johan": 17416, + "johan": 27789, + "johann": 31180, + "johanna": 41494, + "johannes": 37779, + "johannesburg": 28377, + "johansson": 41512, + "johar": 34871, + "john": 2004, + "john": 1742, + "johncena": 46820, + "johnnie": 47947, + "johnny": 14464, + "johnny": 6904, + "johns": 14515, + "johnson": 26036, + "johnson": 4010, + "johnston": 19791, + "johnstone": 40766, + "johor": 34750, + "join": 14737, + "join": 1384, + "joined": 4954, + "joining": 5118, + "joins": 5681, + "joint": 6640, + "jointhe": 30422, + "jointly": 37471, + "joints": 27204, + "jojo": 41484, + "jojo": 22075, + "joke": 7198, + "joker": 18200, + "jokers": 44101, + "jokes": 11336, + "joking": 26112, + "joko": 44975, + "jol": 9174, + "jol": 36470, + "jolie": 31633, + "jolla": 46109, + "jolly": 21516, + "jom": 32152, + "jon": 3026, + "jon": 6139, + "jona": 6629, + "jonah": 47934, + "jonah": 27556, + "jonas": 42373, + "jonas": 13650, + "jonathan": 19026, + "jonathan": 7762, + "jone": 33934, + "jones": 19091, + "jones": 3538, + "jong": 20214, + "jong": 14726, + "jonghyun": 29023, + "jongin": 36957, + "joni": 43177, + "jonny": 28454, + "jonny": 21895, + "joo": 25807, + "joo": 27680, + "joom": 47543, + "joon": 18547, + "joong": 26544, + "jop": 30486, + "joplin": 42688, + "jor": 2482, + "jor": 
31595, + "jordan": 14644, + "jordan": 4388, + "jordani": 46898, + "jordi": 44795, + "jorge": 48761, + "jorge": 18225, + "jos": 20560, + "jos": 19661, + "jose": 4647, + "jose": 7075, + "josef": 36584, + "josel": 47800, + "joseph": 14163, + "joseph": 6478, + "josephine": 34866, + "josh": 9998, + "josh": 5679, + "joshi": 24786, + "joshu": 9112, + "joshua": 11852, + "josi": 33583, + "josie": 33167, + "joss": 42834, + "josé": 27922, + "jou": 19921, + "jou": 32029, + "jour": 2078, + "jour": 17142, + "journ": 4563, + "journal": 6626, + "journalism": 10123, + "journalist": 9914, + "journalists": 12249, + "journals": 24391, + "journe": 48833, + "journey": 32156, + "journey": 3749, + "journeys": 23329, + "journo": 37034, + "journos": 46437, + "jovi": 33866, + "joy": 6308, + "joy": 4273, + "joyce": 43753, + "joyce": 15275, + "joye": 34052, + "joyeux": 41876, + "joyful": 24139, + "joyous": 32245, + "joyride": 46949, + "joys": 22996, + "jp": 18249, + "jp": 10557, + "jpg": 36950, + "jpn": 36212, + "jr": 13973, + "jr": 3605, + "js": 46243, + "js": 8006, + "jst": 26523, + "jt": 39480, + "jt": 18119, + "ju": 669, + "ju": 9970, + "jual": 38720, + "juan": 17148, + "juan": 9274, + "juana": 9081, + "jubi": 15485, + "jubil": 47743, + "jubilee": 16907, + "juco": 31570, + "jud": 8363, + "juda": 32478, + "judah": 41066, + "judaism": 42217, + "judas": 39532, + "judd": 29770, + "judg": 20012, + "judge": 16824, + "judge": 5656, + "judged": 33453, + "judgement": 25246, + "judges": 12575, + "judging": 16570, + "judgment": 24191, + "judi": 42546, + "judice": 28032, + "judicial": 19579, + "judiciary": 24545, + "judith": 24047, + "judo": 27011, + "judy": 34663, + "judy": 16510, + "jug": 27619, + "jugg": 38628, + "juic": 38761, + "juice": 37954, + "juice": 6916, + "juices": 36757, + "juicy": 17623, + "juju": 43020, + "juke": 32519, + "jukebox": 36411, + "jul": 34662, + "jul": 15975, + "jule": 40819, + "jules": 21996, + "juli": 3614, + "juli": 49160, + "julia": 10207, + "julian": 25459, + "julian": 12643, + "juliana": 46059, + "julie": 22534, + "julie": 10505, + "julien": 32595, + "juliet": 20641, + "juliette": 44804, + "julio": 24888, + "julius": 20870, + "july": 2272, + "jum": 20791, + "jumbo": 24678, + "jume": 45989, + "jump": 5519, + "jump": 6423, + "jumped": 16901, + "jumper": 16558, + "jumpers": 36485, + "jumping": 11476, + "jumpman": 48803, + "jumps": 18911, + "jumpsuit": 31044, + "jun": 1637, + "jun": 7719, + "junction": 11320, + "june": 23188, + "june": 2345, + "jung": 13086, + "jung": 13031, + "jungkook": 20040, + "jungle": 42421, + "jungle": 10865, + "juni": 4029, + "junior": 21167, + "junior": 5027, + "juniors": 16811, + "juniper": 33829, + "junk": 16000, + "junkie": 27613, + "junkies": 41207, + "juno": 28845, + "junto": 34282, + "jupit": 15270, + "jupiter": 16212, + "jur": 15896, + "jura": 14715, + "jurassic": 28844, + "jurassic": 21255, + "jurgen": 39263, + "juris": 37010, + "jurisdic": 37714, + "jury": 12931, + "jus": 14999, + "just": 1770, + "just": 761, + "justi": 14700, + "justic": 30399, + "justice": 16904, + "justice": 3604, + "justicefor": 25812, + "justiceleague": 41929, + "justices": 44356, + "justified": 34546, + "justify": 28192, + "justin": 7537, + "justin": 4394, + "justinbieber": 12501, + "justine": 34418, + "justintrudeau": 32184, + "justsaying": 42922, + "juve": 47717, + "juve": 23092, + "juven": 12944, + "juvenile": 19333, + "juvent": 13908, + "juventus": 47378, + "juventus": 16208, + "jux": 33552, + "juxta": 34964, + "jv": 37932, + "jv": 11805, + "jw": 30221, + "jw": 24215, + "jy": 20979, + 
"jyo": 27378, + "jyoti": 48696, + "jä": 45381, + "k": 74, + "k": 330, + "ka": 1595, + "ka": 1525, + "kaa": 34496, + "kab": 6554, + "kab": 45134, + "kabaddi": 41749, + "kabir": 38619, + "kabo": 47974, + "kabul": 26160, + "kac": 21693, + "kach": 14341, + "kad": 10901, + "kade": 41130, + "kaduna": 38053, + "kae": 22542, + "kaeper": 30070, + "kaepernick": 30713, + "kaf": 19870, + "kag": 13666, + "kag": 31003, + "kah": 16068, + "kah": 15463, + "kahn": 35397, + "kai": 12752, + "kai": 9601, + "kaido": 40255, + "kail": 23623, + "kaine": 39028, + "kair": 33027, + "kaiser": 43685, + "kaiser": 29960, + "kait": 19326, + "kaitlyn": 34948, + "kaj": 44788, + "kaj": 40381, + "kak": 10401, + "kak": 40128, + "kaka": 47689, + "kaku": 30900, + "kal": 4187, + "kal": 18712, + "kala": 45453, + "kala": 33105, + "kalam": 40142, + "kalamaz": 42328, + "kalamazoo": 46264, + "kalb": 34483, + "kale": 17162, + "kale": 16625, + "kaleido": 41144, + "kali": 17844, + "kali": 26964, + "kalin": 42776, + "kalyan": 23825, + "kam": 4104, + "kam": 26011, + "kamal": 31371, + "kamal": 28619, + "kamala": 45003, + "kame": 45235, + "kamen": 40738, + "kami": 28707, + "kamloops": 36602, + "kamp": 35179, + "kamp": 29522, + "kampala": 37134, + "kan": 2532, + "kan": 8101, + "kana": 35178, + "kand": 17478, + "kane": 32218, + "kane": 9765, + "kang": 12226, + "kang": 20789, + "kangar": 20622, + "kangaroo": 25513, + "kani": 40907, + "kani": 41948, + "kann": 18533, + "kannada": 30053, + "kano": 28201, + "kans": 34012, + "kansas": 25507, + "kansas": 6539, + "kansascity": 46134, + "kant": 39923, + "kant": 47132, + "kanth": 24427, + "kanu": 44565, + "kany": 13590, + "kanye": 29680, + "kanye": 14965, + "kanyewest": 31943, + "kap": 6804, + "kap": 45279, + "kapam": 48561, + "kapil": 32337, + "kapil": 42709, + "kapilshar": 48978, + "kaplan": 37401, + "kapoor": 9117, + "kapp": 36717, + "kappa": 20239, + "kapur": 42371, + "kar": 1813, + "kar": 5933, + "kara": 12552, + "karab": 40916, + "karachi": 13671, + "karak": 40372, + "karan": 20077, + "karan": 20931, + "karanjohar": 47621, + "karao": 16262, + "karaoke": 16640, + "karate": 21211, + "kardashi": 13619, + "kardashian": 14578, + "kare": 14310, + "kare": 38354, + "kareem": 38885, + "kareena": 41569, + "karen": 17719, + "karen": 10349, + "kari": 15339, + "kari": 15161, + "karim": 33477, + "karin": 43917, + "karina": 40250, + "karl": 20967, + "karl": 13134, + "karla": 42309, + "karma": 17658, + "karnat": 13994, + "karnataka": 15515, + "karo": 45305, + "kart": 47841, + "kart": 21310, + "karthik": 41397, + "karti": 23053, + "kartikeyan": 32584, + "karting": 41655, + "kas": 6119, + "kas": 14372, + "kasa": 46111, + "kash": 6954, + "kash": 21371, + "kashi": 47945, + "kashmir": 20251, + "kashmir": 10783, + "kashmiri": 35331, + "kasi": 45870, + "kasi": 32819, + "kasich": 39666, + "kat": 2844, + "kat": 9341, + "kata": 14558, + "kate": 11620, + "kate": 6699, + "katelyn": 45963, + "kath": 7386, + "kath": 19745, + "katharine": 41473, + "katherine": 17687, + "kathle": 18721, + "kathleen": 21709, + "kathmandu": 34456, + "kathniel": 36159, + "kathr": 14905, + "kathryn": 33142, + "kathryn": 19999, + "kathy": 34775, + "kathy": 18795, + "kati": 6515, + "kati": 29928, + "katic": 48058, + "katie": 24117, + "katie": 9076, + "katniss": 47916, + "kato": 27573, + "katrin": 31282, + "katrina": 21397, + "katrinakaif": 45845, + "kats": 44213, + "katsu": 49296, + "katsu": 43712, + "katy": 17609, + "katy": 14435, + "katyperry": 28309, + "katz": 30790, + "kau": 9299, + "kau": 36895, + "kauai": 44050, + "kaufman": 37188, + "kaur": 
30518, + "kav": 10228, + "kavan": 18576, + "kavanaugh": 20252, + "kaw": 10842, + "kaw": 42719, + "kawa": 33244, + "kawaii": 26891, + "kawasaki": 28227, + "kawhi": 41220, + "kay": 4673, + "kay": 9862, + "kaya": 22752, + "kayak": 27043, + "kayaking": 28977, + "kaye": 33003, + "kayla": 17139, + "kaylee": 47215, + "kayo": 37021, + "kaz": 8812, + "kaz": 39622, + "kazakh": 25451, + "kazakhstan": 26720, + "kazan": 47641, + "kb": 27381, + "kb": 19960, + "kbs": 27418, + "kc": 10869, + "kc": 8638, + "kca": 14347, + "kcon": 39970, + "kcr": 46181, + "kd": 21826, + "kd": 15597, + "kday": 31074, + "kdrama": 48628, + "ke": 643, + "ke": 618, + "kea": 47926, + "kean": 43288, + "keane": 28635, + "keanu": 40608, + "kear": 21562, + "kearney": 36435, + "keating": 40045, + "keaton": 29975, + "kebab": 36497, + "ked": 11730, + "ked": 1243, + "kee": 9724, + "kee": 6760, + "keef": 42323, + "keefe": 46965, + "keegan": 31122, + "keel": 48376, + "keen": 17714, + "keen": 13218, + "keenan": 36276, + "keep": 2924, + "keep": 1726, + "keeper": 7650, + "keepers": 16130, + "keepin": 41712, + "keeping": 38371, + "keeping": 4873, + "keepit": 28044, + "keeps": 6333, + "keer": 27412, + "keerth": 47500, + "keerthyofficial": 48185, + "kees": 10791, + "keg": 32785, + "keh": 41272, + "keh": 36983, + "kei": 18735, + "kei": 24835, + "keith": 18762, + "keith": 8252, + "kej": 15674, + "kejri": 16617, + "kejriwal": 17334, + "keke": 39195, + "kel": 2825, + "kel": 7553, + "kele": 41765, + "kell": 16082, + "kell": 40103, + "keller": 21407, + "kelley": 23776, + "kelli": 45852, + "kelli": 46190, + "kellie": 49224, + "kellogg": 44218, + "kelly": 13417, + "kelly": 5220, + "kelown": 31708, + "kelowna": 32963, + "kelsey": 42295, + "kelsey": 23018, + "kelvin": 32859, + "kem": 31013, + "kem": 17349, + "kemp": 18302, + "kemp": 25325, + "ken": 1838, + "ken": 1702, + "kend": 7497, + "kendal": 44836, + "kendall": 34607, + "kendall": 16238, + "kendra": 36074, + "kendrick": 41787, + "kendrick": 21953, + "kendricklamar": 47020, + "kenne": 6209, + "kennedy": 38631, + "kennedy": 9004, + "kennel": 39595, + "kenneth": 46900, + "kenneth": 17839, + "kenney": 41373, + "kenny": 20185, + "kenny": 9595, + "kens": 29765, + "kensing": 21505, + "kensington": 24988, + "kent": 13875, + "kent": 8214, + "kentu": 9045, + "kentucky": 32230, + "kentucky": 10014, + "keny": 17374, + "kenya": 6181, + "kenyan": 22624, + "kenyans": 36263, + "kenyatta": 31012, + "kenzie": 38087, + "keo": 43062, + "kept": 7737, + "ker": 2352, + "ker": 1485, + "keral": 35122, + "kerala": 11881, + "kered": 26690, + "kerel": 32232, + "keri": 43447, + "kermit": 40908, + "kern": 40150, + "kernel": 40684, + "kerr": 20491, + "kerri": 41849, + "kerry": 24795, + "kerry": 13097, + "kers": 30347, + "kers": 2880, + "kershaw": 40785, + "kerson": 42810, + "kerswednesday": 48152, + "kert": 47279, + "kes": 38398, + "kes": 1115, + "kesh": 19751, + "kesha": 36526, + "kest": 15080, + "ket": 2715, + "ket": 1236, + "ketball": 38240, + "ketch": 22590, + "ketch": 35371, + "ketchup": 26724, + "kete": 25404, + "keted": 41396, + "keting": 15951, + "keto": 27485, + "keto": 28754, + "kets": 1632, + "kett": 23124, + "kett": 10312, + "kettering": 43779, + "kettle": 41992, + "kettle": 24303, + "kev": 22758, + "kev": 29419, + "kevin": 9419, + "kevin": 4685, + "kew": 38014, + "kew": 31409, + "kex": 30251, + "key": 2891, + "key": 1458, + "keyan": 27617, + "keyboard": 13017, + "keyboards": 49237, + "keychain": 31050, + "keye": 40516, + "keye": 20635, + "keyes": 18336, + "keynes": 32462, + "keynote": 7556, + "keys": 48912, + "keys": 
6355, + "keystone": 30688, + "keyword": 42284, + "keywords": 48122, + "kf": 33308, + "kf": 42119, + "kfc": 22032, + "kg": 36772, + "kg": 7817, + "kgs": 46629, + "kh": 2166, + "kh": 7452, + "kha": 7333, + "kha": 18929, + "khair": 43742, + "khaki": 41646, + "khal": 13070, + "khaled": 29343, + "khali": 11324, + "khalid": 27166, + "khalifa": 21389, + "khalil": 36229, + "kham": 24892, + "khan": 13318, + "khan": 3873, + "khand": 43384, + "khand": 31110, + "khanna": 29931, + "khar": 18340, + "khar": 28578, + "khart": 37458, + "khat": 43290, + "khe": 26360, + "kher": 43843, + "khi": 39062, + "khi": 42925, + "khil": 34101, + "khloe": 45312, + "kho": 14022, + "kho": 28774, + "khou": 30656, + "khs": 21239, + "khtar": 45593, + "khu": 14041, + "khur": 32083, + "khy": 40917, + "khz": 45604, + "ki": 848, + "ki": 2608, + "kia": 8712, + "kian": 43961, + "kian": 25708, + "kians": 44010, + "kib": 43108, + "kiba": 37207, + "kic": 24003, + "kic": 27633, + "kicchasu": 44665, + "kicchasudeep": 45560, + "kick": 4102, + "kick": 4289, + "kickass": 39299, + "kickboxing": 36041, + "kicked": 12479, + "kicker": 26338, + "kickin": 34597, + "kicking": 7802, + "kickoff": 10245, + "kicks": 6989, + "kickstart": 40780, + "kickstarter": 13228, + "kid": 3948, + "kid": 3551, + "kidd": 24082, + "kidding": 14535, + "kiddo": 36360, + "kiddos": 29205, + "kidlit": 39064, + "kidlit": 33515, + "kidlitart": 41600, + "kidman": 44931, + "kidnap": 45100, + "kidnapp": 16183, + "kidnapped": 24737, + "kidnapping": 32361, + "kidney": 37835, + "kidney": 14610, + "kids": 15561, + "kids": 1911, + "kidz": 41938, + "kie": 8544, + "kie": 3094, + "kiefer": 48026, + "kiel": 40940, + "kiel": 25509, + "kien": 28782, + "kier": 20403, + "kier": 35575, + "kieran": 29231, + "kies": 36601, + "kies": 4993, + "kiest": 29755, + "kiev": 24585, + "kiewicz": 47574, + "kigali": 40278, + "kii": 39340, + "kik": 36176, + "kiki": 23962, + "kiko": 40861, + "kil": 4912, + "kil": 39337, + "kildare": 45541, + "kili": 24386, + "kilig": 49172, + "kilimanjaro": 43470, + "kilkenny": 33805, + "kill": 6163, + "kill": 4367, + "killa": 41355, + "killarney": 48813, + "killed": 3733, + "killer": 28230, + "killer": 6613, + "killers": 17614, + "killin": 25903, + "killing": 37977, + "killing": 5923, + "killings": 24918, + "kills": 9795, + "kiln": 44150, + "kilo": 39281, + "kilom": 26285, + "kilometers": 39192, + "kilometres": 43278, + "kilt": 49319, + "kim": 4639, + "kim": 4606, + "kimber": 16796, + "kimberley": 39859, + "kimberly": 27465, + "kimchi": 41027, + "kimi": 31536, + "kimkardashian": 35400, + "kimmel": 27820, + "kimono": 40024, + "kin": 1442, + "kin": 2667, + "kina": 28518, + "kind": 7204, + "kind": 3044, + "kinda": 6612, + "kinder": 12711, + "kinder": 24159, + "kindergarten": 16749, + "kindle": 24704, + "kindle": 10746, + "kindleunlimited": 32164, + "kindly": 13952, + "kindness": 45112, + "kindness": 10614, + "kinds": 14879, + "kine": 17607, + "kineni": 49080, + "kinetic": 37699, + "king": 2365, + "king": 674, + "kingdom": 21870, + "kingdom": 7364, + "kingdomhearts": 48570, + "kingdoms": 43890, + "kingfisher": 34330, + "kingjames": 33153, + "kingly": 33642, + "kingof": 27878, + "kings": 18590, + "kings": 4232, + "kingsley": 41807, + "kingston": 40736, + "kingston": 15393, + "kini": 41644, + "kinky": 37006, + "kinney": 37233, + "kino": 39000, + "kins": 31060, + "kins": 4386, + "kinson": 12095, + "kio": 28210, + "kio": 39401, + "kiosk": 39146, + "kip": 27636, + "kip": 15986, + "kipp": 43329, + "kir": 3476, + "kir": 32949, + "kira": 33038, + "kiran": 43234, + "kiran": 36603, + 
"kirby": 17065, + "kiri": 34170, + "kiri": 45826, + "kirk": 10639, + "kirk": 11508, + "kirkland": 43061, + "kiro": 39749, + "kirstel": 46483, + "kirsten": 31813, + "kirsty": 37787, + "kis": 3199, + "kis": 22796, + "kish": 25662, + "kiss": 43757, + "kiss": 5946, + "kissed": 22561, + "kisses": 47876, + "kisses": 11220, + "kissing": 18637, + "kistan": 29580, + "kit": 4566, + "kit": 4274, + "kita": 29961, + "kitch": 3850, + "kitchen": 18131, + "kitchen": 4485, + "kitchener": 34428, + "kitchens": 28301, + "kite": 47777, + "kite": 19867, + "kites": 45829, + "kits": 13730, + "kitt": 10840, + "kitten": 13063, + "kittens": 17216, + "kitties": 36013, + "kitty": 25067, + "kitty": 8417, + "kiwan": 38709, + "kiwanis": 46513, + "kiwi": 22440, + "kiwis": 48108, + "kiya": 41610, + "kj": 27385, + "kj": 28238, + "kja": 41048, + "kjv": 37387, + "kk": 4390, + "kk": 10849, + "kka": 19002, + "kke": 44239, + "kker": 32399, + "kki": 44672, + "kkk": 20073, + "kkkk": 15834, + "kkkk": 47160, + "kkkkkkkk": 31042, + "kko": 43965, + "kkr": 40855, + "kl": 8498, + "kl": 14134, + "kla": 11249, + "klan": 46935, + "klar": 41374, + "klaus": 31788, + "kle": 7612, + "kle": 7432, + "klein": 33475, + "klein": 17579, + "kley": 18594, + "kli": 31640, + "klin": 44809, + "klin": 41647, + "kline": 47580, + "kling": 40270, + "klm": 38859, + "klo": 15296, + "klopp": 26446, + "kltu": 25978, + "klu": 21852, + "kly": 45090, + "km": 29954, + "km": 4590, + "kman": 33312, + "kms": 24996, + "kn": 4825, + "kn": 23693, + "knapp": 33945, + "kne": 6358, + "knee": 9897, + "knees": 19115, + "kner": 31578, + "knew": 5009, + "kni": 6312, + "knick": 33286, + "knicks": 17657, + "knife": 44176, + "knife": 8960, + "knigh": 43099, + "knight": 17949, + "knight": 7355, + "knights": 10385, + "knit": 18745, + "knit": 14313, + "knitted": 28151, + "knitting": 18863, + "knives": 20910, + "kno": 1482, + "kno": 25362, + "knob": 29736, + "knobs": 47504, + "knock": 14195, + "knock": 11583, + "knocked": 15325, + "knocking": 20380, + "knockout": 22602, + "knocks": 24296, + "knoll": 43882, + "knot": 18412, + "knots": 32428, + "know": 4179, + "know": 1038, + "knowing": 9267, + "knowledge": 27864, + "knowledge": 5510, + "knowledgeable": 43391, + "knowles": 32631, + "known": 3102, + "knows": 4309, + "knowyour": 30773, + "knox": 18630, + "knox": 21833, + "knoxville": 23232, + "knu": 14812, + "knuck": 21333, + "knuckle": 42023, + "knuckles": 40127, + "knw": 40803, + "ko": 1313, + "ko": 2448, + "koala": 36654, + "kobe": 42644, + "kobe": 14470, + "kobo": 42390, + "koch": 25331, + "kochi": 36710, + "kodak": 30425, + "kodi": 46611, + "kof": 17528, + "koff": 47303, + "kofi": 40400, + "koh": 13379, + "koh": 31216, + "kohl": 48479, + "kohli": 17549, + "koi": 28150, + "kojima": 46419, + "kok": 32045, + "kok": 11225, + "koko": 42426, + "koko": 40003, + "kol": 7142, + "kol": 31023, + "kolkata": 18011, + "kom": 6686, + "kom": 24181, + "kombat": 29670, + "kombucha": 48615, + "komo": 31820, + "kon": 5743, + "kon": 29519, + "kona": 30203, + "kong": 31784, + "kong": 6506, + "konstant": 46583, + "koo": 12225, + "koo": 40472, + "kook": 16003, + "kool": 36755, + "kool": 26444, + "kop": 16623, + "kop": 38999, + "kor": 6428, + "kor": 24175, + "kore": 3919, + "korea": 5915, + "korean": 31949, + "korean": 8034, + "kori": 42842, + "korn": 45412, + "korn": 31492, + "kors": 34535, + "kos": 47438, + "kos": 22951, + "kosh": 45233, + "kosher": 36502, + "koso": 23892, + "kosovo": 28343, + "kot": 23323, + "kot": 20701, + "kota": 21735, + "koto": 40945, + "koto": 29977, + "kou": 18502, + "kou": 39614, + 
"kour": 34134, + "kov": 17733, + "kov": 15156, + "kova": 26185, + "koval": 47903, + "kovic": 16886, + "kovich": 44794, + "kovsky": 33384, + "kow": 29764, + "kow": 23919, + "kowski": 17649, + "koz": 29598, + "kp": 16174, + "kp": 16894, + "kpa": 38759, + "kph": 41138, + "kpk": 42094, + "kpmg": 38243, + "kpop": 29534, + "kpop": 15859, + "kprc": 47832, + "kprs": 46253, + "kr": 7309, + "kr": 14107, + "kra": 5762, + "kraft": 28057, + "kraja": 29016, + "kraken": 48408, + "krakow": 40033, + "kram": 19075, + "kramer": 27495, + "kran": 33243, + "kranti": 47969, + "krat": 30470, + "kre": 8362, + "kreme": 43140, + "kremlin": 33979, + "kri": 3679, + "kris": 35251, + "kris": 12261, + "krish": 11487, + "krishna": 15863, + "krishnan": 46535, + "krispy": 49292, + "krist": 16490, + "kristen": 28881, + "kristen": 16644, + "kristi": 26895, + "kristin": 35408, + "kristin": 26785, + "kristina": 33180, + "krit": 36265, + "kro": 16193, + "kroger": 36344, + "kron": 25999, + "kru": 10609, + "kruger": 32948, + "krun": 43084, + "kry": 13995, + "krystal": 36554, + "ks": 10470, + "ks": 662, + "ksa": 25439, + "ksh": 36594, + "kst": 17420, + "kstate": 48590, + "ksu": 43496, + "kswx": 36180, + "kt": 17238, + "kt": 7792, + "ktm": 33989, + "ktn": 42170, + "kton": 37848, + "kts": 48577, + "ktv": 36444, + "ku": 1836, + "ku": 4827, + "kuala": 30336, + "kubball": 48995, + "kuber": 41336, + "kubernetes": 45144, + "kubrick": 37032, + "kuch": 39394, + "kud": 40818, + "kudos": 14481, + "kul": 11325, + "kul": 31514, + "kum": 18086, + "kum": 28148, + "kuma": 43139, + "kuma": 33920, + "kumar": 22329, + "kumar": 7674, + "kumb": 31391, + "kun": 6849, + "kun": 21842, + "kung": 39656, + "kung": 22347, + "kunst": 37881, + "kup": 39023, + "kups": 27240, + "kur": 4862, + "kurdi": 23504, + "kurdish": 21644, + "kurdistan": 24459, + "kurds": 20888, + "kuri": 46375, + "kuro": 28239, + "kuro": 47826, + "kurt": 31903, + "kurt": 14527, + "kus": 27618, + "kus": 27505, + "kush": 22264, + "kush": 24594, + "kushner": 36716, + "kut": 17283, + "kut": 36965, + "kuwait": 19679, + "kuya": 34815, + "kuz": 33253, + "kv": 27594, + "kv": 34249, + "kw": 10072, + "kw": 18339, + "kwa": 32784, + "kwa": 48576, + "kwame": 46681, + "kwan": 37100, + "kwan": 39447, + "kwang": 40260, + "kwe": 26050, + "kwi": 35327, + "kwon": 36369, + "kx": 28190, + "kx": 46442, + "ky": 2018, + "ky": 2383, + "kya": 29142, + "kyc": 37758, + "kyiv": 36422, + "kyle": 15847, + "kyle": 7539, + "kylie": 28282, + "kylie": 17983, + "kyliejenner": 47232, + "kylo": 47704, + "kyo": 13150, + "kyo": 6281, + "kyoto": 23223, + "kyr": 26329, + "kyrgy": 40013, + "kyrgyz": 48346, + "kyrie": 21857, + "kyu": 28296, + "kyu": 25490, + "kyuhyun": 37229, + "kyung": 41058, + "kyungsoo": 30280, + "kywx": 39940, + "kz": 48743, + "kz": 36848, + "kzn": 38264, + "kö": 32437, + "l": 75, + "l": 331, + "la": 572, + "la": 1210, + "laa": 44642, + "lab": 3537, + "lab": 4352, + "labe": 25749, + "label": 12235, + "label": 9093, + "labeled": 32720, + "labeling": 36825, + "labelled": 45188, + "labels": 17413, + "lable": 31879, + "labor": 11201, + "labor": 7878, + "laboratories": 43421, + "laboratory": 17664, + "laborday": 39324, + "labou": 32700, + "labour": 19586, + "labour": 6019, + "labourdoorstep": 37008, + "labout": 35961, + "labra": 37067, + "labrador": 25409, + "labs": 12021, + "laby": 29131, + "labyrin": 31782, + "labyrinth": 35594, + "lac": 4477, + "lac": 16189, + "lace": 30012, + "lace": 5421, + "laced": 36800, + "laces": 23281, + "lacey": 31754, + "lach": 30558, + "lack": 24915, + "lack": 8069, + "lacking": 30080, + 
"lacks": 34388, + "laco": 45882, + "lacrosse": 12915, + "lacy": 38645, + "lad": 15991, + "lad": 10707, + "ladak": 42312, + "ladakh": 45295, + "ladder": 16637, + "ladders": 47125, + "lade": 26447, + "laden": 28634, + "ladi": 12934, + "ladies": 28932, + "ladies": 3431, + "lads": 9803, + "lady": 7275, + "lady": 2909, + "ladybird": 43389, + "ladybug": 40038, + "ladygaga": 21232, + "laf": 47555, + "lafayette": 22683, + "lag": 30932, + "lag": 20394, + "laga": 30161, + "lage": 24369, + "lager": 36811, + "lager": 22989, + "lagh": 37237, + "laghate": 47565, + "laghateparth": 48780, + "lagi": 39786, + "lago": 42698, + "lago": 31476, + "lagoon": 22753, + "lagos": 12728, + "lagun": 18500, + "laguna": 23609, + "lah": 27315, + "lah": 4299, + "lahat": 42164, + "lahore": 16733, + "lai": 23947, + "laid": 42560, + "laid": 11160, + "lain": 46958, + "lain": 17151, + "laine": 35860, + "lair": 31981, + "lais": 34923, + "lak": 12890, + "lak": 26793, + "lake": 6441, + "lake": 2553, + "lakedistrict": 26437, + "lakel": 26133, + "lakeland": 34306, + "laker": 45717, + "lakers": 13570, + "lakes": 9265, + "lakeshore": 42595, + "lakeside": 30915, + "lakewood": 36417, + "lakh": 21487, + "lakhs": 37985, + "lakings": 34289, + "lakota": 45510, + "laksh": 24937, + "lakshmi": 39682, + "lal": 12301, + "lal": 19430, + "lala": 33661, + "lali": 21726, + "laliga": 32383, + "lam": 2022, + "lam": 5704, + "lama": 26049, + "lamar": 28678, + "lamar": 17284, + "lamb": 19863, + "lamb": 10034, + "lambda": 36687, + "lambert": 14574, + "lambeth": 43410, + "lambo": 45464, + "lamborgh": 18709, + "lamborghini": 19462, + "lambs": 30361, + "lame": 23192, + "lamin": 22337, + "laminated": 49079, + "lamo": 41461, + "lamont": 46719, + "lamp": 26700, + "lamp": 10725, + "lampard": 39989, + "lamps": 23424, + "lan": 1193, + "lan": 4872, + "lana": 15406, + "lanapar": 47437, + "lanaparrilla": 47819, + "lanc": 11872, + "lanca": 15694, + "lancashire": 20939, + "lancaster": 16446, + "lance": 26025, + "lance": 11609, + "lancer": 38195, + "lancers": 46392, + "lancia": 48698, + "lancs": 47540, + "land": 1567, + "land": 973, + "lande": 36556, + "landed": 9873, + "lander": 37247, + "lander": 9666, + "landers": 20019, + "landfall": 38465, + "landfill": 34947, + "landia": 41384, + "landing": 8292, + "landings": 46104, + "landlord": 28938, + "landlords": 35283, + "landmark": 15208, + "landmarks": 30393, + "lando": 25463, + "lando": 7065, + "landon": 32748, + "landrover": 38125, + "landry": 36137, + "lands": 40223, + "lands": 2961, + "landsc": 4384, + "landscape": 21123, + "landscape": 5727, + "landscapephotography": 28125, + "landscapes": 15344, + "landscaping": 25642, + "landslide": 31954, + "lane": 25534, + "lane": 3980, + "lanes": 10345, + "laney": 38552, + "lang": 7969, + "lang": 8578, + "lange": 32021, + "langford": 45615, + "langley": 28595, + "langu": 4095, + "language": 46103, + "language": 4781, + "languages": 13527, + "lani": 22964, + "lanka": 16221, + "lankan": 40531, + "lannister": 49056, + "lans": 43550, + "lansing": 30805, + "lant": 44504, + "lanta": 44768, + "lantern": 17185, + "lanterns": 33676, + "lantic": 32601, + "lantic": 27678, + "lants": 38425, + "lanyard": 46808, + "lao": 32475, + "lao": 29521, + "laos": 34353, + "lap": 7213, + "lap": 8639, + "lapd": 32557, + "lapel": 47961, + "lapland": 43633, + "laps": 18711, + "lapse": 33365, + "laptop": 10464, + "laptops": 32189, + "laq": 45026, + "lar": 1592, + "lar": 1652, + "lara": 19435, + "lard": 40347, + "lare": 22415, + "laredo": 48427, + "large": 40234, + "large": 3638, + "largely": 21418, + 
"larger": 12567, + "largest": 4960, + "largo": 44161, + "lari": 34676, + "lark": 43164, + "lark": 23536, + "larkin": 34769, + "larry": 18642, + "larry": 8242, + "lars": 8669, + "larsen": 39721, + "larson": 27973, + "larvae": 44840, + "las": 8295, + "las": 2552, + "lasag": 31210, + "lasagna": 40683, + "lasalle": 43866, + "laser": 25607, + "laser": 9885, + "lasers": 37060, + "lash": 31995, + "lash": 18480, + "lashes": 21015, + "lass": 24203, + "lass": 18263, + "lassic": 39430, + "last": 10600, + "last": 952, + "lasted": 25711, + "lasting": 13434, + "lastnight": 30159, + "lasts": 20141, + "lasvegas": 17789, + "lat": 1591, + "lat": 28437, + "lata": 47114, + "latam": 40012, + "late": 13267, + "late": 2325, + "latel": 49035, + "lately": 11824, + "latepost": 48328, + "later": 24109, + "later": 2941, + "lateral": 26646, + "latest": 46805, + "latest": 2053, + "latex": 27520, + "lati": 16357, + "latimes": 43356, + "latin": 16695, + "latin": 9888, + "latina": 27936, + "latino": 45734, + "latino": 19470, + "latinos": 40233, + "lation": 6191, + "latitude": 37392, + "lative": 15719, + "lator": 9291, + "lators": 28278, + "latt": 33561, + "latte": 17697, + "latter": 26198, + "latvia": 30034, + "lau": 1853, + "lau": 23090, + "lauderdale": 24352, + "laugh": 4969, + "laugh": 6332, + "laughed": 16746, + "laughing": 8301, + "laughs": 14322, + "laughter": 10722, + "laun": 2944, + "launch": 31168, + "launch": 2904, + "launched": 6125, + "launcher": 35782, + "launches": 7023, + "launching": 8565, + "laundering": 34079, + "laundry": 14797, + "laur": 15256, + "laura": 17091, + "laura": 7763, + "laure": 16932, + "laureate": 25675, + "laurel": 43370, + "laurel": 19942, + "lauren": 10456, + "lauren": 7634, + "laurence": 29353, + "laurent": 23226, + "laurie": 20326, + "laus": 38895, + "laus": 28111, + "lause": 22269, + "laut": 47688, + "lav": 13767, + "lav": 26919, + "lava": 16765, + "laven": 15047, + "lavender": 16033, + "laver": 28188, + "lavish": 35443, + "law": 2874, + "law": 2606, + "lawful": 33845, + "lawler": 47862, + "lawless": 39468, + "lawmaker": 37169, + "lawmakers": 21190, + "lawn": 31675, + "lawn": 11024, + "lawrence": 32221, + "lawrence": 8820, + "laws": 7306, + "lawson": 22152, + "lawsuit": 14346, + "lawsuits": 44331, + "lawyer": 10552, + "lawyers": 14232, + "lax": 17750, + "lax": 10024, + "lay": 7205, + "lay": 6360, + "laye": 25995, + "layer": 12411, + "layered": 28520, + "layers": 15900, + "laying": 12333, + "layla": 45050, + "layne": 48721, + "layo": 21738, + "layoffs": 29019, + "layout": 17314, + "lays": 19546, + "layton": 38061, + "laz": 18806, + "lazar": 33075, + "lazarus": 49126, + "laze": 41559, + "lazer": 43735, + "lazio": 33010, + "lazy": 32614, + "lazy": 10753, + "lb": 21958, + "lb": 7422, + "lbc": 37694, + "lbj": 45683, + "lbloggers": 48695, + "lbs": 8912, + "lc": 9584, + "lc": 7225, + "lcd": 21356, + "lcfc": 25339, + "lcs": 32279, + "ld": 1431, + "ld": 730, + "lder": 6945, + "lders": 43221, + "ldn": 37050, + "ldn": 2517, + "ldnont": 25827, + "ldnt": 21690, + "ldr": 37279, + "lds": 31235, + "le": 534, + "le": 579, + "lea": 2246, + "lea": 13324, + "leach": 35527, + "lead": 1328, + "lead": 2784, + "leader": 14806, + "leader": 3236, + "leaderboard": 34519, + "leaders": 3546, + "leadership": 36876, + "leadership": 3652, + "leading": 3833, + "leads": 5335, + "leaf": 9377, + "leaf": 7232, + "leaflet": 38289, + "leaflets": 39014, + "leafs": 16688, + "leafy": 42616, + "leagu": 13317, + "league": 16635, + "league": 2313, + "leagueof": 26022, + "leagueoflegends": 31737, + "leagues": 19888, + "leah": 
24350, + "leah": 19308, + "leak": 42900, + "leak": 15489, + "leaked": 14353, + "leaking": 34097, + "leaks": 15657, + "leam": 39606, + "lean": 12447, + "lean": 8208, + "leaning": 24411, + "leanne": 41448, + "leans": 9357, + "leap": 29129, + "leap": 15392, + "leaps": 48080, + "lear": 1146, + "lear": 27663, + "learn": 16959, + "learn": 1768, + "learned": 6048, + "learnenglish": 49040, + "learner": 33547, + "learners": 19572, + "learning": 22632, + "learning": 2378, + "learns": 17569, + "learnt": 18959, + "leary": 36051, + "lease": 49041, + "lease": 14394, + "leased": 48352, + "leash": 36192, + "leasing": 29160, + "least": 3651, + "leather": 21417, + "leather": 5862, + "leau": 26498, + "leav": 3198, + "leave": 37512, + "leave": 3258, + "leaves": 5579, + "leaving": 5216, + "leban": 9360, + "lebanese": 23819, + "lebanon": 11695, + "leblanc": 46381, + "lebo": 44184, + "lebron": 11971, + "lebu": 47030, + "lec": 944, + "lec": 35374, + "leche": 46197, + "lect": 45392, + "lection": 18252, + "lections": 30995, + "lecture": 6617, + "lecturer": 23795, + "lectures": 21118, + "led": 8767, + "led": 912, + "ledge": 23647, + "ledge": 4815, + "ledger": 26817, + "leds": 36763, + "lee": 6224, + "lee": 2592, + "leed": 16483, + "leed": 40206, + "leeds": 38900, + "leeds": 7420, + "leek": 34585, + "leeminho": 37831, + "leen": 35311, + "leen": 15940, + "leep": 48875, + "leep": 10191, + "lees": 29324, + "lees": 34056, + "lef": 9152, + "left": 33949, + "left": 1823, + "leftist": 35143, + "lefto": 17437, + "leftover": 26414, + "leftovers": 28481, + "lefty": 33935, + "leg": 1211, + "leg": 4924, + "lega": 38674, + "legacy": 44108, + "legacy": 6447, + "legal": 17743, + "legal": 3998, + "legalization": 40584, + "legalize": 42921, + "legally": 14152, + "legate": 46009, + "lege": 8065, + "legen": 6105, + "legend": 5480, + "legend": 3539, + "legendary": 6053, + "legendof": 47915, + "legends": 6396, + "leges": 15356, + "legg": 18474, + "legg": 32511, + "legged": 25830, + "leggings": 22895, + "leggo": 43441, + "legi": 11183, + "legion": 35503, + "legion": 14525, + "legis": 7200, + "legislat": 16486, + "legislation": 14143, + "legislative": 16755, + "legislators": 31572, + "legislature": 22309, + "legit": 12563, + "legitim": 17656, + "legitimate": 24491, + "lego": 28117, + "lego": 7849, + "legos": 45359, + "legs": 7072, + "leh": 19105, + "leh": 29298, + "lehead": 28090, + "lehigh": 34527, + "lehman": 46094, + "lei": 15828, + "lei": 21830, + "leia": 32723, + "leic": 35073, + "leica": 30206, + "leice": 10026, + "leicester": 28795, + "leicester": 11510, + "leicestershire": 45358, + "leigh": 14849, + "leigh": 9292, + "leighton": 30782, + "leila": 41342, + "lein": 20026, + "lein": 28551, + "leinster": 32242, + "leip": 36401, + "leipzig": 41860, + "leis": 13133, + "leisure": 15849, + "leit": 35446, + "leith": 34141, + "lek": 26626, + "lek": 36535, + "lel": 46623, + "lele": 26075, + "lem": 10213, + "lem": 8428, + "leman": 24478, + "lemans": 26694, + "lement": 9693, + "lements": 15833, + "lemme": 23318, + "lemon": 12272, + "lemon": 7184, + "lemonade": 18884, + "lemons": 29576, + "lemore": 41147, + "len": 3687, + "len": 2159, + "lena": 22038, + "lend": 45397, + "lend": 24987, + "lender": 44734, + "lenders": 42443, + "lending": 20209, + "lene": 17628, + "leness": 36551, + "leng": 7861, + "length": 10130, + "lengths": 31858, + "lengthy": 32624, + "lenin": 41760, + "lennon": 18360, + "lennox": 45748, + "lenny": 48448, + "lenny": 30124, + "leno": 45357, + "lenovo": 25886, + "lens": 8666, + "lenses": 21264, + "lent": 20943, + "lent": 22605, + 
"lentil": 41511, + "lentils": 44269, + "leo": 24008, + "leo": 8312, + "leon": 6581, + "leon": 9763, + "leonard": 43849, + "leonard": 13142, + "leonardo": 20282, + "leone": 22864, + "leop": 11234, + "leopard": 15931, + "leopards": 40996, + "leopold": 45501, + "lep": 48884, + "leppard": 41656, + "lepre": 45641, + "ler": 5587, + "ler": 1803, + "lero": 15067, + "lerosis": 35455, + "leroy": 32441, + "lers": 6247, + "lery": 38184, + "les": 4339, + "les": 840, + "lesbian": 17419, + "lesbians": 43182, + "lesh": 32282, + "lesley": 25506, + "lesli": 13649, + "leslie": 16244, + "lesn": 39568, + "lesnar": 42223, + "less": 3242, + "less": 1285, + "lesser": 20369, + "lessly": 13103, + "lessness": 24847, + "lesson": 7714, + "lessons": 7199, + "lest": 24372, + "lest": 6794, + "lester": 23157, + "lester": 24023, + "lestwe": 29726, + "lestweforget": 30273, + "let": 1898, + "let": 1094, + "leta": 34319, + "lete": 34078, + "letes": 6815, + "leth": 30022, + "leth": 42462, + "lethal": 21905, + "lethbridge": 48390, + "leti": 34176, + "letics": 14504, + "letit": 46423, + "leto": 32203, + "leton": 37674, + "leton": 7462, + "lets": 10448, + "lets": 3243, + "letsgo": 16967, + "letsgo": 29789, + "letstalk": 35591, + "lett": 22428, + "lett": 9778, + "lette": 41798, + "lette": 10301, + "letter": 15567, + "letter": 4861, + "lettering": 26382, + "letterman": 38447, + "letters": 9181, + "letting": 9510, + "letto": 35449, + "lettu": 17933, + "lettuce": 18573, + "leu": 15691, + "leuke": 31031, + "leukemia": 32097, + "leum": 21571, + "leur": 45806, + "lev": 17022, + "lev": 29950, + "levan": 42543, + "leve": 36271, + "level": 21682, + "level": 2931, + "leveled": 48453, + "levels": 6295, + "leven": 44792, + "leven": 34729, + "lever": 20178, + "lever": 23094, + "leverage": 24030, + "leveraging": 37948, + "levi": 25630, + "levi": 19113, + "leviathan": 41736, + "levin": 36949, + "levine": 26594, + "levit": 22715, + "levy": 17147, + "lew": 5063, + "lew": 25329, + "lewan": 48349, + "lewd": 45241, + "lewes": 40431, + "lewi": 19589, + "lewis": 22043, + "lewis": 6020, + "lewisham": 37385, + "lewisham": 47633, + "lewishamilton": 42960, + "lewood": 37951, + "lex": 6586, + "lex": 9658, + "lexa": 48259, + "lexi": 44231, + "lexi": 24679, + "lexington": 22308, + "lexus": 20694, + "ley": 2565, + "ley": 1066, + "leye": 37061, + "leys": 45609, + "leys": 14834, + "leyton": 46573, + "lez": 26442, + "lf": 33960, + "lf": 22078, + "lfc": 37826, + "lfc": 8267, + "lfw": 28514, + "lg": 4546, + "lg": 11368, + "lga": 39348, + "lgb": 25401, + "lgbt": 11743, + "lgbt": 9592, + "lgbti": 42730, + "lgbtq": 47625, + "lgbtq": 14939, + "lgm": 39389, + "lh": 27794, + "lh": 31159, + "lhp": 45092, + "lhs": 33170, + "li": 554, + "li": 4250, + "lia": 26118, + "lia": 6964, + "liability": 29139, + "liaison": 39294, + "liam": 5258, + "liam": 7167, + "lian": 18058, + "liance": 40864, + "liar": 16334, + "liars": 23863, + "lias": 46021, + "lib": 10249, + "lib": 13345, + "libby": 36832, + "libdems": 40869, + "liber": 3425, + "liberal": 48032, + "liberal": 9985, + "liberalism": 40018, + "liberals": 15981, + "liberated": 38690, + "liberation": 19507, + "liberia": 32208, + "libertarian": 35067, + "liberties": 48623, + "liberty": 23397, + "liberty": 8480, + "libr": 2856, + "libra": 43038, + "librarian": 25148, + "librarians": 37806, + "libraries": 14277, + "library": 25713, + "library": 3519, + "libre": 49210, + "libre": 31681, + "libs": 26401, + "liby": 36390, + "libya": 16417, + "libyan": 42319, + "lic": 2508, + "lic": 3376, + "lice": 45691, + "licen": 6706, + "licence": 
20550, + "license": 10337, + "licensed": 18752, + "licenses": 36414, + "licensing": 24219, + "lich": 23979, + "lich": 25875, + "lick": 29197, + "lick": 17541, + "licking": 33013, + "licks": 42117, + "lics": 44552, + "lid": 39369, + "lid": 17678, + "lidge": 45558, + "lido": 35683, + "lids": 41609, + "lie": 6570, + "lie": 2538, + "lieb": 45387, + "liebe": 37749, + "lied": 6486, + "lief": 38428, + "lien": 45716, + "lier": 3626, + "liers": 19303, + "lies": 37236, + "lies": 3205, + "liest": 14020, + "liet": 41107, + "lieu": 20401, + "lieu": 35313, + "lieutenant": 22538, + "lif": 16456, + "life": 2666, + "life": 970, + "lifeat": 27801, + "lifeboat": 37404, + "lifecycle": 49171, + "lifein": 48447, + "lifeis": 24824, + "lifeisgood": 46433, + "lifel": 15025, + "lifeline": 38438, + "lifelong": 21358, + "lifeof": 36061, + "lifesaving": 48016, + "lifespan": 49257, + "lifestyle": 46512, + "lifestyle": 7037, + "lifestyles": 48521, + "lifetime": 48737, + "lifetime": 9107, + "liff": 34404, + "liffe": 38942, + "lift": 33146, + "lift": 6779, + "lifted": 16783, + "lifter": 38555, + "lifting": 10857, + "lifts": 18291, + "lig": 19915, + "lig": 38493, + "liga": 16802, + "ligam": 31077, + "ligament": 48705, + "ligan": 27962, + "ligans": 42133, + "ligh": 7510, + "light": 3885, + "light": 1395, + "lighted": 18404, + "lighten": 32717, + "lightening": 28170, + "lighter": 14102, + "lighthouse": 13717, + "lighting": 5799, + "lightly": 26878, + "lightning": 7756, + "lightroom": 41454, + "lights": 3073, + "lightweight": 16278, + "ligu": 42920, + "ligue": 29196, + "lik": 4831, + "lik": 18495, + "like": 9175, + "like": 789, + "liked": 7112, + "likefor": 48444, + "likeli": 40666, + "likelihood": 48158, + "likely": 5256, + "liken": 36084, + "likes": 4724, + "liking": 16810, + "lil": 6012, + "lil": 4461, + "lilac": 33647, + "lili": 26686, + "lili": 48411, + "lilies": 38110, + "lillard": 47016, + "lille": 38705, + "lilli": 40920, + "lillian": 41563, + "lilly": 47825, + "lilly": 21815, + "lily": 23803, + "lily": 10647, + "lim": 2377, + "lim": 17204, + "lima": 17589, + "limb": 27061, + "limb": 32363, + "limbo": 46179, + "limbs": 34886, + "lime": 17385, + "lime": 11193, + "limel": 48658, + "limer": 16915, + "limerick": 19501, + "limestone": 27272, + "limit": 18933, + "limit": 9973, + "limitations": 32730, + "limited": 49229, + "limited": 3472, + "limiting": 35812, + "limitless": 35833, + "limits": 11966, + "limo": 33166, + "limous": 47287, + "limpopo": 47175, + "lin": 1254, + "lin": 2424, + "lina": 26110, + "lincol": 6239, + "lincoln": 16957, + "lincoln": 7454, + "lincolnshire": 29014, + "lind": 6492, + "linda": 45410, + "linda": 10760, + "linden": 44076, + "linden": 34832, + "lindo": 38467, + "lindsay": 29846, + "lindsay": 16858, + "lindsey": 29475, + "lindsey": 18128, + "line": 3674, + "line": 1148, + "linear": 19816, + "linebacker": 29848, + "lined": 11842, + "lineman": 31501, + "linen": 20032, + "liner": 11618, + "liners": 24463, + "lines": 3418, + "liness": 28633, + "lineup": 7316, + "lineups": 33589, + "ling": 4851, + "ling": 1358, + "linger": 29593, + "lingerie": 18473, + "lingering": 46494, + "lings": 11390, + "lington": 27673, + "lington": 9002, + "lingu": 34449, + "lingui": 29942, + "linguistic": 46847, + "linguistics": 48651, + "lining": 11589, + "link": 18433, + "link": 2468, + "linke": 15088, + "linked": 11059, + "linkedin": 16302, + "linkin": 40287, + "linkin": 49291, + "linking": 23296, + "links": 8113, + "linn": 37431, + "lino": 41189, + "lino": 34995, + "lins": 6567, + "linson": 15401, + "linton": 36479, + 
"linus": 49303, + "linux": 14061, + "lio": 19395, + "lion": 8872, + "lion": 5567, + "lionel": 19441, + "lions": 7093, + "lip": 8630, + "lip": 8546, + "lipo": 38795, + "lipp": 38074, + "lips": 8847, + "lipse": 10351, + "lipstick": 15618, + "liqu": 6310, + "lique": 32680, + "liqueur": 43612, + "liqui": 33817, + "liquid": 18366, + "liquid": 10158, + "liquidity": 42812, + "liquor": 17828, + "lis": 7297, + "lis": 12749, + "lisa": 25236, + "lisa": 7424, + "lisam": 43072, + "lisboa": 40052, + "lisbon": 17708, + "lish": 12658, + "lish": 2354, + "lished": 22620, + "lisle": 21529, + "lism": 34390, + "liss": 45489, + "liss": 35433, + "lisse": 49309, + "list": 1734, + "list": 1998, + "lista": 37812, + "listed": 6457, + "listen": 17454, + "listen": 2672, + "listened": 15347, + "listener": 34819, + "listeners": 26901, + "listening": 3656, + "listens": 25912, + "lister": 45109, + "listing": 8145, + "listings": 21987, + "liston": 48041, + "lists": 12281, + "lit": 2213, + "lit": 4350, + "lita": 30100, + "lite": 29273, + "lite": 13694, + "litecoin": 39063, + "liter": 3085, + "liter": 34904, + "literacy": 12841, + "literal": 24269, + "literally": 4719, + "literary": 13586, + "literature": 11072, + "litfest": 40369, + "lith": 37005, + "lithium": 22794, + "litho": 31088, + "lithograph": 49022, + "lithu": 21045, + "lithuania": 27068, + "liti": 24292, + "litigation": 31769, + "lito": 47381, + "litre": 25786, + "litres": 39919, + "litt": 1216, + "litt": 47583, + "litter": 45431, + "litter": 17118, + "litters": 45300, + "little": 7024, + "little": 1274, + "littlemix": 29731, + "littlest": 48969, + "litur": 36830, + "litz": 30357, + "liu": 20466, + "liv": 13895, + "liv": 19901, + "livan": 12785, + "live": 3215, + "live": 1064, + "lived": 8867, + "livel": 17973, + "liveli": 26566, + "livelihood": 46497, + "livelihoods": 47716, + "lively": 19663, + "liveme": 35396, + "livemusic": 15688, + "liven": 41057, + "liveon": 22815, + "livepd": 38742, + "livepd": 31899, + "liver": 4755, + "liver": 12639, + "liverpool": 29778, + "liverpool": 5366, + "livery": 23248, + "lives": 3247, + "livesmatter": 20348, + "livestock": 22079, + "livestream": 16844, + "livetweet": 38546, + "livin": 28061, + "living": 10965, + "living": 2815, + "livingston": 30551, + "lix": 45068, + "liz": 8632, + "liz": 12242, + "liza": 28787, + "lizard": 17221, + "lizards": 41991, + "lizasober": 44487, + "lizasoberano": 45076, + "lizz": 34430, + "lizzie": 29530, + "lizzy": 32306, + "lj": 34211, + "lj": 32273, + "lju": 44562, + "lk": 39110, + "lk": 26596, + "lka": 21881, + "ll": 1657, + "ll": 865, + "lla": 15419, + "llama": 36679, + "llan": 17281, + "llan": 38728, + "lland": 31150, + "llc": 17161, + "lle": 26550, + "lle": 29732, + "llen": 41197, + "ller": 7722, + "llers": 26426, + "lli": 47015, + "lli": 13368, + "llis": 25518, + "lll": 27177, + "llll": 34874, + "llll": 43485, + "llo": 19293, + "lloy": 10092, + "lloyd": 33339, + "lloyd": 12400, + "llp": 28042, + "lls": 40535, + "lly": 26379, + "lm": 6981, + "lm": 15282, + "lma": 4493, + "lmao": 5121, + "lmaoo": 32623, + "lmaooo": 33362, + "lmaoooo": 45232, + "lmfa": 8928, + "lmfao": 11068, + "lmfaooo": 47658, + "lmp": 43575, + "lms": 30381, + "ln": 31644, + "ln": 18654, + "lng": 22339, + "lnp": 39679, + "lo": 549, + "lo": 2982, + "loa": 39678, + "load": 4515, + "load": 2834, + "loaded": 6756, + "loader": 28492, + "loading": 9975, + "loads": 8691, + "loaf": 26467, + "loaf": 18273, + "loan": 28431, + "loan": 8176, + "loans": 14206, + "lob": 11197, + "lob": 46606, + "lobal": 34574, + "lobb": 27698, + "lobby": 
12449, + "lobbying": 36047, + "lobe": 46325, + "lobes": 24148, + "lobo": 39323, + "lobos": 36586, + "lobster": 13793, + "loc": 1378, + "loc": 25826, + "local": 9202, + "local": 2029, + "localized": 49399, + "locally": 15603, + "locals": 15041, + "locate": 20490, + "located": 5677, + "location": 4372, + "locations": 9580, + "loch": 20188, + "loch": 14101, + "lock": 7201, + "lock": 4381, + "lockdown": 35636, + "locke": 29698, + "locked": 8371, + "locker": 14053, + "lockhart": 48642, + "lockheed": 36637, + "locking": 19978, + "locks": 13212, + "lockscreen": 42439, + "loco": 25555, + "locom": 22798, + "locomo": 46147, + "locomotive": 30439, + "locu": 33635, + "locust": 46237, + "lod": 45650, + "lodge": 10504, + "loe": 30113, + "loe": 25484, + "loeb": 49334, + "lof": 15011, + "loff": 31008, + "loft": 35707, + "loft": 20049, + "loftus": 46689, + "log": 3239, + "log": 7383, + "logan": 20655, + "logan": 10569, + "logans": 40752, + "logg": 43002, + "logged": 31457, + "logger": 39089, + "logging": 24444, + "logi": 3177, + "logia": 48031, + "logic": 10670, + "logical": 4791, + "logically": 24782, + "logie": 33445, + "logies": 7378, + "login": 31121, + "logist": 7407, + "logistics": 14755, + "logists": 12233, + "logne": 19911, + "logo": 31480, + "logo": 5750, + "logos": 24879, + "logs": 22745, + "logue": 27785, + "logy": 22721, + "logy": 1659, + "loh": 49129, + "loh": 37983, + "loi": 35128, + "loid": 31408, + "loin": 21760, + "loire": 46040, + "lois": 27040, + "lok": 19908, + "lok": 23575, + "loki": 24435, + "lol": 10721, + "lol": 1824, + "lola": 19065, + "lolita": 42615, + "lolla": 45483, + "lolli": 27906, + "lollipop": 34605, + "lolly": 48264, + "lolo": 16895, + "lolo": 37481, + "lolol": 25280, + "lololol": 34738, + "lolz": 35260, + "lom": 9279, + "loma": 42889, + "lombar": 25493, + "lombard": 46461, + "lombardi": 44346, + "lomond": 48941, + "lon": 1235, + "lon": 6507, + "london": 6835, + "london": 1789, + "londonmarathon": 35018, + "lone": 22220, + "lone": 13576, + "lonel": 28872, + "loneliness": 30310, + "lonely": 34509, + "lonely": 12368, + "lonelyplanet": 44984, + "long": 4792, + "long": 1538, + "longe": 25793, + "longer": 5349, + "longest": 10731, + "longevity": 35354, + "longh": 20286, + "longhorn": 41047, + "longhorns": 38295, + "longing": 38482, + "longlive": 47840, + "longs": 43618, + "longtime": 19685, + "loo": 731, + "loo": 11804, + "look": 8874, + "look": 1012, + "lookalike": 38307, + "lookbook": 39184, + "looked": 4913, + "lookin": 11254, + "looking": 36898, + "looking": 1312, + "lookout": 18330, + "looks": 1606, + "lool": 33125, + "loom": 37440, + "loom": 17199, + "looming": 35384, + "looms": 30550, + "loon": 28222, + "loona": 48137, + "looney": 45315, + "looo": 20902, + "loool": 36016, + "looool": 47038, + "looooo": 31484, + "loop": 19606, + "loop": 10408, + "loops": 21625, + "loos": 45723, + "loose": 43815, + "loose": 9786, + "loot": 21518, + "lop": 36734, + "lop": 17066, + "lopes": 49269, + "lopez": 12982, + "lor": 2179, + "lor": 11335, + "lord": 18896, + "lord": 3486, + "lorde": 35483, + "lords": 14969, + "lore": 12880, + "lore": 27218, + "loren": 13602, + "loren": 33398, + "lorenzo": 21342, + "lores": 34510, + "loretta": 40863, + "lori": 20164, + "lori": 23095, + "lorna": 46316, + "lorraine": 27602, + "lorry": 31354, + "los": 32217, + "los": 3087, + "losange": 14037, + "losangeles": 14638, + "lose": 43318, + "lose": 5354, + "loser": 18168, + "losers": 23201, + "loses": 14263, + "losing": 7918, + "loss": 34761, + "loss": 4327, + "losses": 16909, + "lost": 14258, + "lost": 2624, + 
"lostdog": 48482, + "lot": 5132, + "lot": 1954, + "loth": 43625, + "lothian": 31360, + "lothing": 42058, + "lotion": 25260, + "lotr": 34165, + "lots": 2958, + "lott": 42854, + "lotta": 29125, + "lotte": 16535, + "lotte": 7274, + "lottery": 16975, + "lottie": 48517, + "lotto": 28265, + "lotus": 13824, + "lou": 2207, + "lou": 9745, + "loubout": 38369, + "loud": 22884, + "loud": 7464, + "louder": 25904, + "loudest": 49214, + "loudly": 39256, + "lough": 21927, + "lough": 28045, + "loughborough": 49153, + "loui": 42173, + "louie": 25790, + "louis": 8916, + "louis": 4459, + "louisa": 40011, + "louise": 32275, + "louise": 13076, + "louisi": 12187, + "louisiana": 12946, + "louisville": 13860, + "louisvuitton": 44911, + "loun": 6466, + "lounge": 7141, + "lounging": 45430, + "lour": 29383, + "lourdes": 45071, + "louvre": 36995, + "lov": 8923, + "lov": 21229, + "lova": 37394, + "lovable": 38565, + "lovato": 18960, + "love": 2618, + "love": 793, + "lovecraft": 42405, + "loved": 3249, + "lovefl": 38884, + "loveher": 38306, + "lovehim": 45733, + "loveis": 30931, + "loveisland": 30970, + "loveislove": 43603, + "loveit": 24764, + "lovel": 8999, + "lovelies": 31412, + "lovelondon": 46493, + "lovely": 33250, + "lovely": 2165, + "lovemy": 20041, + "lovemyjob": 40130, + "loven": 33754, + "lover": 28508, + "lover": 7168, + "lovers": 48416, + "lovers": 5973, + "loves": 37773, + "loves": 3925, + "lovethe": 33040, + "lovethem": 48298, + "lovett": 47095, + "lovewins": 47687, + "loveyou": 39226, + "loveyou": 25964, + "loveyour": 26462, + "lovin": 33442, + "lovin": 16354, + "loving": 29568, + "loving": 3721, + "lovingly": 44100, + "low": 1049, + "low": 1042, + "loway": 16104, + "lowe": 17910, + "lowed": 22733, + "lowell": 24458, + "lower": 32578, + "lower": 4909, + "lowered": 34968, + "lowering": 35261, + "lowers": 36398, + "lowes": 38515, + "lowest": 12098, + "lowing": 8283, + "lowkey": 29481, + "lowry": 27444, + "lows": 4406, + "lox": 41725, + "loy": 4519, + "loy": 23929, + "loyal": 13032, + "loyalty": 14686, + "loyd": 44212, + "loyed": 29279, + "loyment": 18307, + "loyola": 32569, + "lp": 22282, + "lp": 6392, + "lpc": 44092, + "lpg": 47905, + "lpga": 34295, + "lps": 32094, + "lr": 20572, + "lr": 7041, + "lrt": 32996, + "ls": 19051, + "ls": 1268, + "lsd": 43766, + "lse": 46127, + "lse": 43886, + "lsu": 35428, + "lsu": 15672, + "lt": 13642, + "lt": 3333, + "ltc": 27664, + "ltd": 6802, + "lte": 25202, + "lton": 14237, + "lu": 664, + "lu": 9657, + "lub": 22469, + "lub": 11836, + "lubbock": 37660, + "lubric": 40963, + "luc": 7013, + "luc": 28014, + "luca": 21053, + "lucas": 23425, + "lucas": 10225, + "lucci": 45849, + "luce": 46217, + "lucent": 41552, + "lucer": 36042, + "luch": 36646, + "lucha": 38449, + "luci": 8787, + "lucia": 22290, + "luciano": 46365, + "lucid": 44540, + "lucie": 39461, + "lucifer": 46224, + "lucifer": 27687, + "lucille": 47454, + "lucin": 27523, + "luck": 9647, + "luck": 2820, + "luckiest": 42469, + "luckily": 20100, + "lucknow": 29407, + "lucky": 20495, + "lucky": 4133, + "lucrative": 41485, + "lucy": 17262, + "lucy": 10120, + "lud": 14288, + "lude": 28755, + "ludo": 40141, + "ludwig": 30633, + "lue": 45199, + "luf": 25264, + "lufc": 17818, + "luffy": 39047, + "lufthan": 37769, + "lufthansa": 39145, + "lug": 45521, + "lugg": 19673, + "luggage": 20138, + "luhan": 20975, + "luigi": 28444, + "luis": 25231, + "luis": 11339, + "luiz": 39633, + "lujah": 31639, + "luk": 21652, + "luka": 34878, + "lukaku": 37177, + "lukas": 37941, + "luke": 11970, + "luke": 5652, + "lul": 20861, + "lulla": 37019, + 
"lullaby": 41676, + "lulu": 32052, + "lulu": 26935, + "lum": 18112, + "lum": 5997, + "lumb": 36231, + "lumber": 27421, + "lumber": 34692, + "lumi": 41437, + "lumia": 31912, + "lumin": 15867, + "luminous": 37913, + "lump": 38704, + "lumpur": 34411, + "lun": 3221, + "lun": 49390, + "luna": 14425, + "lunar": 16043, + "lunatic": 45874, + "lunch": 10954, + "lunch": 2772, + "luncheon": 15104, + "lunches": 29705, + "lunchtime": 14330, + "lund": 30975, + "lund": 20181, + "lunes": 35648, + "lung": 38479, + "lung": 16271, + "lungs": 27366, + "lup": 27413, + "lupita": 49352, + "lupus": 36017, + "lur": 14439, + "lure": 31376, + "lures": 46747, + "lurking": 29941, + "lus": 7158, + "lusci": 38004, + "luscious": 39935, + "lush": 40382, + "lush": 16263, + "lust": 42071, + "lust": 12662, + "lustre": 46673, + "luther": 21848, + "luther": 17208, + "lutheran": 27341, + "luton": 28288, + "luv": 24726, + "luv": 8502, + "lux": 3439, + "lux": 16704, + "luxe": 26373, + "luxemb": 21314, + "luxembour": 22712, + "luxembourg": 23949, + "luxu": 16112, + "luxurious": 17292, + "luxury": 12083, + "luxury": 5247, + "luxurytravel": 29010, + "luz": 41008, + "lv": 10862, + "lv": 11184, + "lvl": 31256, + "lw": 40515, + "lw": 35115, + "lx": 30789, + "ly": 1251, + "ly": 597, + "lydia": 24316, + "lyf": 43688, + "lyfe": 30787, + "lyft": 32944, + "lying": 7175, + "lyk": 46376, + "lyle": 36828, + "lym": 20087, + "lyme": 31167, + "lymph": 30073, + "lymphoma": 37648, + "lyn": 3957, + "lyn": 5054, + "lynch": 31586, + "lynch": 13560, + "lynd": 33416, + "lynda": 42959, + "lyndon": 48518, + "lynn": 25303, + "lynn": 10667, + "lynne": 26900, + "lynx": 28941, + "lyon": 17176, + "lyons": 29453, + "lyric": 24366, + "lyric": 21291, + "lyrical": 33358, + "lyricist": 49013, + "lyrics": 9551, + "lyrix": 46814, + "lys": 45054, + "lyte": 40059, + "lywood": 4012, + "lz": 30818, + "lé": 39641, + "m": 76, + "m": 332, + "ma": 577, + "ma": 1226, + "maa": 42774, + "maa": 21555, + "maan": 33668, + "maar": 48927, + "maas": 43332, + "mab": 35639, + "mabel": 47319, + "mable": 23001, + "mably": 40082, + "mabu": 44682, + "mac": 1961, + "mac": 4945, + "macar": 21558, + "macaroni": 41824, + "macarthur": 36785, + "macau": 43984, + "macau": 33370, + "macbeth": 36321, + "macbook": 20617, + "macdonald": 20315, + "mace": 44869, + "maced": 21102, + "macedonia": 27071, + "macfar": 45374, + "macfarlane": 48825, + "mach": 2637, + "mach": 35091, + "machado": 42318, + "mache": 43220, + "macher": 29330, + "machi": 41783, + "machin": 17972, + "machine": 11539, + "machine": 4169, + "machinelearning": 13621, + "machinery": 21858, + "machines": 11108, + "machining": 45562, + "macho": 43977, + "macht": 45225, + "macin": 36533, + "mack": 8590, + "mack": 12145, + "mackay": 32497, + "macken": 48057, + "mackenzie": 22351, + "mackerel": 35002, + "mackin": 26010, + "macklemore": 41758, + "macle": 33843, + "maclean": 47137, + "macleod": 43684, + "macmillan": 36364, + "macmillan": 35191, + "macon": 35818, + "macos": 45469, + "macqu": 38365, + "macquarie": 40858, + "macro": 20891, + "macro": 16626, + "macron": 24859, + "macs": 46548, + "macy": 17113, + "macys": 47652, + "mad": 2740, + "mad": 3843, + "mada": 37799, + "madagas": 24758, + "madagascar": 25744, + "madam": 33634, + "madam": 27538, + "madame": 23507, + "madd": 31717, + "madden": 19093, + "maddie": 39959, + "maddie": 18875, + "maddow": 32644, + "maddy": 31734, + "made": 5388, + "made": 1105, + "madein": 13670, + "madeira": 33810, + "madel": 34532, + "madele": 29831, + "madeleine": 33264, + "madeline": 33905, + "madewith": 28627, 
+ "madewithunity": 43190, + "madhu": 23000, + "madhuri": 38346, + "madhuridixit": 43889, + "madhya": 48302, + "madi": 6527, + "madi": 27282, + "madison": 24798, + "madison": 8791, + "madmen": 45452, + "madness": 8755, + "madon": 44852, + "madonna": 14137, + "madra": 27416, + "madras": 42046, + "madre": 42130, + "madri": 5529, + "madrid": 5909, + "mads": 41201, + "madu": 34913, + "madurai": 49159, + "maduro": 32912, + "mae": 16898, + "mae": 17339, + "maer": 47088, + "maestro": 24140, + "mafi": 47164, + "mafia": 14890, + "mag": 1191, + "mag": 4508, + "maga": 8694, + "magaz": 2974, + "magazine": 3113, + "magazines": 22253, + "magdal": 29673, + "mage": 46568, + "mage": 10923, + "magee": 43872, + "magenta": 38091, + "magento": 42442, + "mages": 31059, + "maggi": 29611, + "maggie": 41443, + "maggie": 14524, + "maggio": 49087, + "magh": 45555, + "magi": 19270, + "magic": 13061, + "magic": 3778, + "magical": 36408, + "magical": 7823, + "magician": 26368, + "magin": 42678, + "maging": 41310, + "magn": 10290, + "magna": 34076, + "magne": 9921, + "magnesium": 36379, + "magnet": 18240, + "magnetic": 13838, + "magnets": 33030, + "magni": 24297, + "magnific": 9725, + "magnificent": 10724, + "magnitude": 22955, + "magno": 21184, + "magnolia": 27123, + "magnu": 45198, + "magnum": 23496, + "magnus": 26275, + "magpie": 45973, + "mags": 31021, + "maguire": 26470, + "mah": 7206, + "mah": 10801, + "maha": 12237, + "maha": 33983, + "mahal": 22301, + "mahan": 45191, + "mahar": 11635, + "maharaj": 38488, + "maharashtra": 19328, + "mahat": 32434, + "mahatma": 40530, + "mahe": 15756, + "maher": 29826, + "mahesh": 33448, + "mahesh": 22095, + "mahi": 32529, + "mahi": 38659, + "mahin": 24113, + "mahindra": 31285, + "mahmoud": 41361, + "mahog": 30804, + "mahogany": 33084, + "mahon": 45864, + "mahon": 20371, + "mahone": 26634, + "mai": 7138, + "mai": 14595, + "maia": 46585, + "maid": 23148, + "maid": 10226, + "maidan": 37346, + "maiden": 37011, + "maiden": 13809, + "maids": 27305, + "maidstone": 44395, + "mail": 10478, + "mail": 2614, + "mailbox": 31482, + "mailed": 42314, + "mailing": 26680, + "mailonline": 26021, + "mails": 45213, + "main": 3904, + "main": 2623, + "maine": 18639, + "maine": 7836, + "mained": 15609, + "mainedcm": 15845, + "mainland": 27629, + "mainly": 15280, + "mains": 33656, + "mainst": 42102, + "mainstream": 18034, + "maintain": 12954, + "maintained": 26665, + "maintaining": 21964, + "maintains": 38335, + "mainten": 9399, + "maintenance": 9610, + "mais": 28153, + "maisie": 47355, + "maison": 37065, + "maison": 27626, + "mait": 26387, + "maize": 35386, + "maj": 2948, + "maj": 28723, + "maja": 47498, + "maje": 9852, + "majestic": 15335, + "majesty": 21188, + "major": 8008, + "major": 3350, + "majority": 10508, + "majors": 23597, + "mak": 11271, + "mak": 19253, + "makar": 42242, + "makati": 39402, + "make": 3232, + "make": 1078, + "makeaw": 45859, + "makeinindia": 42739, + "makeit": 26308, + "maken": 47093, + "makeover": 17926, + "maker": 15196, + "maker": 4836, + "makers": 6577, + "makerspace": 42400, + "makes": 2088, + "makeshift": 43274, + "makeu": 41707, + "makeup": 26402, + "makeup": 5853, + "makeyourown": 34090, + "makeyourownlane": 34823, + "maki": 34514, + "makin": 43096, + "makin": 22407, + "making": 17976, + "making": 1665, + "makk": 39852, + "maknae": 44118, + "mako": 49061, + "mal": 1662, + "mal": 3796, + "mala": 28290, + "malade": 36928, + "malaga": 35395, + "malala": 41137, + "malam": 48956, + "malaria": 24929, + "malawi": 23405, + "malay": 5323, + "malay": 42430, + "malayalam": 34860, + 
"malaysi": 39668, + "malaysia": 8146, + "malaysian": 21136, + "malbec": 47741, + "malcol": 12645, + "malcolm": 14139, + "maldives": 16795, + "male": 11326, + "male": 2801, + "males": 14426, + "malhotra": 28866, + "mali": 6701, + "mali": 22669, + "malia": 46714, + "malibu": 21723, + "malicious": 42147, + "malign": 41122, + "malik": 11394, + "mall": 10984, + "mall": 6220, + "mallorca": 28082, + "mallory": 38968, + "malls": 36447, + "malm": 44071, + "malnutrition": 41153, + "malo": 43518, + "malone": 19852, + "maloney": 45897, + "mals": 25370, + "malt": 21688, + "malta": 16989, + "maltese": 39838, + "malvern": 39356, + "malware": 24153, + "mam": 4404, + "mam": 17778, + "mama": 7133, + "mamamoo": 36012, + "mamas": 42395, + "mamba": 44189, + "mament": 45690, + "mami": 43858, + "mamma": 34893, + "mammal": 33385, + "mammals": 31987, + "mammoth": 28022, + "man": 723, + "man": 786, + "mana": 29467, + "mana": 15837, + "manafort": 40108, + "manag": 1830, + "manage": 9770, + "managed": 7928, + "management": 3319, + "manager": 3898, + "managerial": 44261, + "managers": 12853, + "manages": 29699, + "managing": 10892, + "manas": 44188, + "manatee": 46558, + "mance": 2324, + "manchester": 24424, + "manchester": 4651, + "mancini": 47681, + "mancity": 31538, + "mancrush": 36945, + "mancrushmonday": 39307, + "mand": 4325, + "mand": 27244, + "mandala": 41106, + "mandarin": 26455, + "mandate": 26228, + "mandatory": 19934, + "mandel": 34960, + "mandela": 16280, + "mandi": 38961, + "mandir": 35815, + "mando": 34006, + "mands": 12340, + "mandu": 31440, + "mandy": 41505, + "mandy": 24302, + "mane": 44471, + "mane": 16044, + "maneu": 33216, + "mang": 25616, + "mang": 31096, + "manga": 11873, + "mangal": 43027, + "manger": 48251, + "mango": 43831, + "mango": 13962, + "mangrove": 47180, + "manhatt": 10152, + "manhattan": 10961, + "mani": 5654, + "mani": 10718, + "mania": 8435, + "maniac": 31814, + "maniacs": 41444, + "manian": 40077, + "manic": 23017, + "manic": 37825, + "manicure": 33637, + "manife": 14379, + "manifest": 34422, + "manifestation": 48348, + "manifesto": 20907, + "manil": 38827, + "manila": 10969, + "manipu": 40261, + "manipul": 19237, + "manipulation": 30277, + "manipur": 47757, + "manish": 41759, + "manish": 44720, + "manit": 15693, + "manitoba": 20342, + "manjaro": 41489, + "mankind": 24155, + "manly": 25194, + "mann": 19396, + "mann": 4783, + "manne": 30160, + "manned": 26139, + "mannequin": 43388, + "manner": 20700, + "manners": 31693, + "manning": 15996, + "manny": 37054, + "manny": 20933, + "mano": 15753, + "mano": 24016, + "manoj": 41146, + "manor": 41830, + "manor": 13614, + "mans": 28422, + "mans": 7746, + "mansfield": 25543, + "manship": 15460, + "mansion": 13404, + "manslaughter": 48632, + "manson": 26715, + "mant": 25122, + "mant": 27037, + "manta": 41431, + "mantis": 39946, + "mantle": 22159, + "mantra": 25162, + "manu": 3404, + "manu": 25799, + "manual": 12268, + "manuel": 29171, + "manuel": 9567, + "manufac": 5105, + "manufacture": 27741, + "manufactured": 24010, + "manufacturer": 15668, + "manufacturers": 18763, + "manufacturing": 8386, + "manure": 47907, + "manus": 28181, + "manuscript": 24365, + "manuscripts": 40765, + "manutd": 20994, + "many": 28484, + "many": 1346, + "manziel": 40637, + "mao": 47447, + "mao": 25605, + "maori": 43400, + "map": 25180, + "map": 3923, + "maple": 21980, + "maple": 10570, + "mapleleafs": 41257, + "mapoli": 28768, + "mapp": 36894, + "mapped": 41596, + "mapping": 15231, + "maps": 8765, + "mapu": 42082, + "mar": 675, + "mar": 3091, + "mara": 15655, + 
"marais": 47913, + "maran": 44732, + "marath": 16274, + "marathi": 34102, + "marathon": 40764, + "marathon": 5910, + "marau": 38475, + "marbella": 36182, + "marble": 45429, + "marble": 13071, + "marbles": 42931, + "marc": 14054, + "marc": 9075, + "marca": 38242, + "marcel": 17726, + "marcel": 24652, + "marcelo": 35939, + "march": 10638, + "march": 2227, + "marche": 36173, + "marched": 37976, + "marches": 38249, + "marchfor": 31721, + "marching": 15082, + "marchmadness": 28555, + "marci": 36698, + "marcia": 41075, + "marck": 47733, + "marco": 24719, + "marco": 10924, + "marcor": 39945, + "marcorubio": 41143, + "marcos": 21696, + "marcu": 20760, + "marcus": 48955, + "marcus": 9895, + "mardi": 39728, + "mardi": 29229, + "mardigras": 43343, + "mare": 26512, + "mare": 8870, + "mares": 19724, + "marg": 44014, + "margar": 16838, + "margare": 10232, + "margaret": 12185, + "margarita": 25958, + "margaritas": 42679, + "margate": 37428, + "margin": 19464, + "margin": 21357, + "marginal": 38320, + "margins": 33763, + "margot": 37144, + "mari": 2603, + "mari": 19322, + "maria": 41109, + "maria": 6595, + "mariachi": 44299, + "mariah": 31214, + "mariah": 24789, + "mariahcarey": 36538, + "marian": 41129, + "marian": 24677, + "mariana": 44224, + "marianne": 32214, + "mariano": 43988, + "marie": 20657, + "marie": 7864, + "marietta": 46634, + "marig": 41002, + "marijuana": 9864, + "maril": 14611, + "marilyn": 38959, + "marilyn": 18489, + "marin": 8910, + "marin": 23992, + "marina": 12060, + "marinated": 33406, + "marine": 20674, + "marine": 5746, + "mariner": 39972, + "mariners": 19086, + "marines": 15018, + "marino": 30878, + "mario": 39176, + "mario": 7600, + "marion": 37765, + "marion": 18397, + "maris": 21512, + "maris": 33093, + "marisa": 42938, + "mariska": 44703, + "marissa": 31219, + "marist": 48223, + "mariti": 13124, + "maritime": 14331, + "marj": 38639, + "mark": 3805, + "mark": 2110, + "marke": 2399, + "marked": 12360, + "marker": 18170, + "markers": 23664, + "market": 11614, + "market": 2196, + "marketer": 33482, + "marketers": 23682, + "marketing": 19535, + "marketing": 2905, + "marketplace": 18241, + "markets": 7292, + "markham": 39817, + "marking": 14705, + "markings": 41046, + "markle": 32672, + "marko": 38338, + "marks": 5466, + "markus": 33725, + "marl": 24922, + "marlborough": 43515, + "marlene": 45117, + "marley": 16504, + "marlin": 34275, + "marlins": 23309, + "marlon": 32995, + "marmalade": 39068, + "marnock": 48305, + "maro": 27029, + "maroon": 20501, + "marqu": 20704, + "marque": 13012, + "marquee": 27725, + "marquette": 37624, + "marquez": 27317, + "marquis": 33530, + "marr": 32871, + "marrake": 37125, + "marrakech": 39006, + "marri": 3839, + "marriage": 38047, + "marriage": 7040, + "marriages": 38190, + "married": 6791, + "marries": 46283, + "marriott": 19211, + "marrow": 31030, + "marry": 13288, + "marrying": 40507, + "mars": 41469, + "mars": 7496, + "marsden": 43344, + "marse": 26577, + "marseille": 30365, + "marsh": 9237, + "marsh": 13505, + "marsha": 21491, + "marshal": 26608, + "marshall": 30939, + "marshall": 9811, + "marshals": 44175, + "marshes": 43450, + "marshmal": 21069, + "marshmallow": 28530, + "marshmallows": 39471, + "mart": 2348, + "mart": 7772, + "marta": 32858, + "martens": 43211, + "marth": 34493, + "martha": 16427, + "marti": 20577, + "martial": 17088, + "martialarts": 35895, + "martian": 30214, + "martin": 6929, + "martin": 3690, + "martina": 34393, + "martinez": 13913, + "marting": 47570, + "martini": 22199, + "martino": 41675, + "martins": 30569, + "marty": 
9926, + "marty": 17169, + "martyn": 44075, + "martyr": 36155, + "martyr": 26067, + "martyrdom": 43110, + "martyred": 39114, + "martyrs": 24707, + "maru": 37413, + "maru": 31838, + "marvel": 13835, + "marvel": 5996, + "marvelcomics": 46897, + "marvell": 26576, + "marvellous": 28402, + "marvelous": 25487, + "marvin": 19675, + "marx": 30559, + "marx": 26001, + "marxist": 45205, + "mary": 5146, + "mary": 2676, + "maryam": 33636, + "maryam": 36393, + "maryland": 11379, + "marys": 40905, + "marys": 40228, + "mas": 5226, + "mas": 1412, + "masa": 24995, + "masa": 41868, + "masala": 31483, + "masc": 23564, + "mascar": 46984, + "mascara": 31635, + "mascot": 13983, + "mascots": 43266, + "mascul": 25589, + "masculine": 48269, + "masculinity": 40465, + "mase": 49128, + "maser": 25798, + "maserati": 30442, + "mash": 12317, + "mash": 15680, + "mashable": 41026, + "mashed": 27395, + "mashup": 27079, + "masi": 35965, + "masjid": 31420, + "mask": 19262, + "mask": 8306, + "masked": 25757, + "masking": 47046, + "masks": 19055, + "maslow": 44359, + "mason": 17424, + "mason": 9699, + "masonic": 36491, + "masonry": 30764, + "masons": 37195, + "masqu": 26593, + "masquer": 29604, + "masquerade": 36944, + "mass": 4636, + "mass": 4854, + "massach": 14484, + "massachuse": 14577, + "massachusetts": 14756, + "massacre": 14696, + "massage": 13055, + "masse": 41735, + "masses": 22978, + "massey": 29868, + "massi": 17239, + "massimo": 45821, + "massive": 4818, + "massively": 34297, + "mast": 45916, + "mast": 27920, + "master": 4534, + "master": 3498, + "mastercard": 40542, + "masterchef": 34809, + "masterclass": 17529, + "mastered": 32616, + "masterful": 46823, + "mastering": 28326, + "mastermind": 34029, + "masterpiece": 12066, + "masterpieces": 37596, + "masters": 6913, + "mastery": 34800, + "mastiff": 42311, + "maswar": 47887, + "mat": 905, + "mat": 9063, + "mata": 17270, + "match": 7733, + "match": 2439, + "matcha": 32433, + "matchday": 15947, + "matched": 17792, + "matches": 8609, + "matching": 11840, + "matchup": 19355, + "matchups": 49162, + "mate": 6137, + "mate": 2936, + "mated": 33813, + "mateo": 34991, + "mater": 23724, + "materi": 7084, + "material": 7118, + "materials": 8161, + "maternal": 26131, + "maternity": 23894, + "mates": 5817, + "math": 13277, + "math": 6025, + "mathe": 8725, + "mathemat": 11901, + "mathematical": 25609, + "mathematician": 41036, + "mathematics": 20113, + "mathew": 36333, + "mathews": 37120, + "mathi": 23014, + "mathieu": 40417, + "maths": 14763, + "mati": 12716, + "mati": 32268, + "matic": 36859, + "matic": 7900, + "matically": 38282, + "matics": 23634, + "matil": 26751, + "matilda": 36308, + "matin": 44849, + "matinee": 38525, + "mating": 34346, + "mation": 11701, + "matisse": 43446, + "mato": 13127, + "matologist": 48842, + "matology": 27940, + "matory": 25519, + "matri": 27041, + "matrix": 18078, + "mats": 22259, + "matsu": 30242, + "matt": 7972, + "matt": 3972, + "mattb": 42791, + "matte": 31237, + "matte": 19771, + "mattel": 35365, + "matteo": 33120, + "matter": 30471, + "matter": 3828, + "matters": 5708, + "matth": 41846, + "matthe": 5116, + "matthew": 17588, + "matthew": 7008, + "matthews": 16739, + "matthi": 29853, + "matthias": 45104, + "matti": 39840, + "mattress": 23438, + "matty": 31233, + "matty": 29176, + "matu": 40616, + "matur": 22897, + "mature": 14417, + "maturity": 28047, + "mau": 8134, + "mau": 23033, + "maui": 20463, + "maul": 30725, + "maur": 10574, + "maure": 25191, + "maureen": 31723, + "maurice": 20200, + "mauricio": 39066, + "mauriti": 28406, + "mauritius": 
29305, + "mauro": 41691, + "mav": 25697, + "maver": 16700, + "maverick": 27425, + "mavericks": 30092, + "mavs": 30665, + "maw": 39351, + "maw": 42271, + "mawards": 37682, + "max": 4898, + "max": 3902, + "maxi": 8554, + "maxi": 23266, + "maxim": 19892, + "maxim": 38574, + "maximize": 28673, + "maximum": 13162, + "maximus": 44312, + "maxine": 38468, + "maxwell": 19611, + "maxx": 37466, + "may": 1686, + "may": 1270, + "maya": 45783, + "maya": 12987, + "mayan": 37952, + "maybe": 3746, + "mayday": 29957, + "mayer": 21196, + "mayfair": 35171, + "mayfield": 33933, + "mayhem": 21502, + "maymay": 26600, + "maymay": 33853, + "maymayentrata": 30480, + "maynard": 32487, + "mayne": 35771, + "mayo": 22449, + "mayo": 11280, + "mayor": 15429, + "mayor": 4676, + "mayoral": 28983, + "mayorof": 43533, + "mayors": 28501, + "mays": 35445, + "maythe": 42281, + "mayward": 45751, + "mayward": 23519, + "mayweather": 22774, + "maz": 9177, + "maz": 36215, + "mazda": 18506, + "maze": 21988, + "mazz": 29439, + "mañ": 37059, + "mañana": 39354, + "mb": 758, + "mb": 3996, + "mba": 8329, + "mban": 46685, + "mbar": 44452, + "mbb": 10736, + "mbc": 20137, + "mbe": 38395, + "mbe": 27004, + "mber": 5467, + "mber": 1034, + "mberg": 26372, + "mbers": 5443, + "mbi": 45347, + "mble": 20310, + "mble": 4756, + "mbles": 28693, + "mbling": 28604, + "mbo": 25733, + "mbo": 11319, + "mbps": 44896, + "mbs": 10370, + "mbta": 38979, + "mbu": 42228, + "mbuhari": 36752, + "mc": 1278, + "mc": 4126, + "mca": 40570, + "mca": 14635, + "mcal": 28663, + "mcar": 43776, + "mcbride": 35080, + "mcc": 21192, + "mccabe": 37628, + "mccaf": 47385, + "mccain": 20397, + "mccall": 34844, + "mccann": 27140, + "mccar": 9570, + "mccarthy": 16974, + "mccartney": 19958, + "mccl": 24709, + "mccla": 43672, + "mccle": 40139, + "mcclure": 44945, + "mcco": 46152, + "mccon": 32638, + "mccor": 23057, + "mccormack": 45164, + "mccormick": 39088, + "mccoy": 20218, + "mccr": 41996, + "mccre": 25393, + "mccul": 38833, + "mccull": 41782, + "mcd": 28930, + "mcder": 27355, + "mcdermott": 34504, + "mcdon": 12171, + "mcdonald": 10741, + "mcdonalds": 17674, + "mcdonnell": 34360, + "mcdowell": 34119, + "mce": 26864, + "mcel": 28752, + "mcen": 47423, + "mcfad": 36976, + "mcfadden": 42105, + "mcfar": 29020, + "mcfarlane": 47174, + "mcfc": 16416, + "mcfly": 38211, + "mcg": 42507, + "mcg": 27995, + "mcgee": 29223, + "mcgill": 46524, + "mcgill": 35511, + "mcgin": 29596, + "mcgowan": 40462, + "mcgr": 25169, + "mcgra": 29367, + "mcgrath": 28759, + "mcgraw": 40950, + "mcgregor": 19642, + "mcgu": 34294, + "mcguinness": 45299, + "mcguire": 32635, + "mci": 46212, + "mci": 45491, + "mcil": 30481, + "mcin": 18770, + "mcintosh": 45353, + "mcintyre": 33369, + "mck": 6781, + "mckay": 33611, + "mcke": 27424, + "mckee": 43529, + "mcken": 42619, + "mckenna": 24924, + "mckenzie": 25502, + "mckin": 15437, + "mckinley": 39891, + "mckinney": 33554, + "mckinnon": 48736, + "mckinsey": 48143, + "mcl": 49021, + "mcla": 12565, + "mclaren": 37381, + "mclaren": 16789, + "mclau": 32285, + "mclaughlin": 35346, + "mcle": 25299, + "mclean": 28666, + "mcleod": 40259, + "mcm": 12251, + "mcmahon": 24026, + "mcmaster": 42703, + "mcmillan": 45603, + "mcn": 42919, + "mcnam": 32682, + "mcnamara": 37506, + "mcne": 42545, + "mco": 33723, + "mcqueen": 22544, + "mcr": 29884, + "mcr": 16966, + "mcs": 27020, + "mcu": 30403, + "md": 8637, + "md": 4732, + "mdc": 38773, + "mdc": 41761, + "mds": 48746, + "mdt": 40822, + "me": 613, + "me": 614, + "mea": 46045, + "mea": 17711, + "mead": 12134, + "mead": 21567, + "meade": 37218, + 
"meado": 16402, + "meadow": 25213, + "meadow": 17195, + "meadows": 17178, + "meal": 29662, + "meal": 5478, + "meals": 11229, + "mean": 4189, + "mean": 3450, + "meand": 48015, + "meaning": 14586, + "meaning": 8342, + "meaningful": 17480, + "meaningless": 48932, + "meanings": 45814, + "means": 3494, + "meant": 8674, + "meantime": 27499, + "meanwhile": 9650, + "meas": 5867, + "measles": 38230, + "measurable": 48010, + "measure": 15261, + "measure": 10579, + "measured": 23154, + "measurement": 20973, + "measurements": 29894, + "measures": 11936, + "measuring": 18064, + "meat": 10805, + "meat": 6480, + "meatball": 43642, + "meatballs": 29233, + "meath": 37920, + "meatless": 48085, + "meats": 29558, + "mec": 27432, + "mecca": 36095, + "mech": 38305, + "mechan": 6715, + "mechanic": 24582, + "mechanical": 14467, + "mechanics": 20536, + "mechanism": 22576, + "mechanisms": 28610, + "meck": 41908, + "med": 1948, + "med": 2177, + "meda": 33614, + "medal": 29714, + "medal": 6974, + "medalist": 21040, + "medalists": 43397, + "medalli": 31349, + "medallion": 43469, + "medallist": 41472, + "medals": 14710, + "mede": 48225, + "meded": 27627, + "medi": 1436, + "media": 22064, + "media": 1895, + "mediac": 37490, + "median": 30491, + "mediation": 42829, + "medic": 3602, + "medic": 35441, + "medicaid": 25421, + "medical": 18432, + "medical": 4116, + "medicare": 23710, + "medication": 23771, + "medications": 37181, + "medicinal": 28772, + "medicine": 5616, + "medicines": 26541, + "medics": 46688, + "medieval": 38956, + "medieval": 10789, + "medina": 27281, + "mediocre": 41170, + "medit": 19130, + "meditate": 38039, + "meditation": 10827, + "mediter": 14194, + "mediterran": 14358, + "mediterranean": 15327, + "medium": 8675, + "medley": 24793, + "meds": 25075, + "medtech": 42044, + "medusa": 44216, + "medway": 42286, + "mee": 1725, + "mee": 14075, + "meek": 28935, + "meen": 37940, + "meen": 46515, + "meer": 26714, + "meer": 27555, + "meet": 5714, + "meet": 1633, + "meeting": 48566, + "meeting": 2071, + "meetings": 9980, + "meets": 5972, + "meetthe": 27575, + "meetup": 15430, + "meg": 11500, + "meg": 16186, + "mega": 15979, + "mega": 9068, + "megab": 38103, + "megadeth": 46741, + "megal": 37650, + "megam": 26073, + "megan": 19127, + "megan": 11503, + "megap": 33624, + "megat": 35581, + "megh": 31192, + "meghan": 39939, + "meghan": 18261, + "meh": 10512, + "meh": 22211, + "mehta": 25031, + "mei": 22564, + "mei": 25198, + "meier": 29812, + "mein": 28857, + "mein": 21466, + "meister": 28407, + "mek": 44645, + "mel": 1902, + "mel": 6834, + "mela": 35032, + "melan": 22261, + "melanch": 44818, + "melancholy": 47821, + "melani": 34031, + "melania": 32796, + "melanie": 22153, + "melanoma": 40862, + "melb": 47007, + "melb": 28980, + "melbourne": 28387, + "melbourne": 6995, + "melee": 45108, + "meli": 28885, + "melinda": 46303, + "melis": 18913, + "melissa": 41866, + "melissa": 13030, + "mell": 22531, + "mell": 41583, + "mello": 47594, + "mellon": 45162, + "mellow": 32034, + "melo": 10354, + "melo": 22374, + "melodic": 41877, + "melodies": 38412, + "melody": 19119, + "melon": 12146, + "melrose": 36296, + "melt": 22209, + "melt": 15957, + "meltdown": 30613, + "melted": 23037, + "melting": 19247, + "melton": 46062, + "melts": 31446, + "melville": 46030, + "melvin": 31544, + "mely": 6373, + "mem": 4937, + "mem": 34944, + "memb": 2114, + "member": 29566, + "member": 1640, + "members": 2567, + "membership": 11562, + "membrane": 34088, + "meme": 35157, + "meme": 9169, + "memes": 12828, + "memo": 15967, + "memo": 19334, + 
"memoir": 20532, + "memoirs": 45311, + "memor": 1858, + "memorab": 26271, + "memorabilia": 27488, + "memorable": 13172, + "memorial": 16285, + "memorial": 4642, + "memorialday": 21598, + "memoriam": 48191, + "memories": 4304, + "memory": 44766, + "memory": 5137, + "memph": 10285, + "memphis": 38432, + "memphis": 11298, + "men": 1552, + "men": 1656, + "mena": 23052, + "menace": 29949, + "mend": 8151, + "mend": 46927, + "mendel": 49268, + "mendes": 18060, + "mendez": 48275, + "mendo": 19327, + "mendoza": 23680, + "meng": 37102, + "meng": 37450, + "mening": 46428, + "menon": 38255, + "menopau": 34974, + "menopause": 46026, + "mens": 16924, + "mens": 10495, + "mensfashion": 27578, + "menstru": 28345, + "menstrual": 40915, + "menswear": 18803, + "ment": 1585, + "ment": 777, + "mental": 8611, + "mental": 3448, + "mentalhealth": 20593, + "mentalhealth": 13022, + "mentality": 26647, + "mentally": 14307, + "mentary": 4468, + "mentation": 9512, + "mentday": 40397, + "mente": 40302, + "mente": 36396, + "mented": 9249, + "menting": 14471, + "mention": 43881, + "mention": 6762, + "mentioned": 11948, + "mentioning": 34290, + "mentions": 12334, + "mento": 30582, + "mentor": 45342, + "mentor": 11642, + "mentoring": 19610, + "mentors": 20945, + "mentorship": 33878, + "ments": 1827, + "menu": 6225, + "menus": 33534, + "meo": 30792, + "meow": 39965, + "meow": 17246, + "mep": 27095, + "mer": 1316, + "mer": 2452, + "mera": 20028, + "merc": 34357, + "merc": 44399, + "mercado": 45479, + "merce": 8409, + "mercede": 34959, + "mercedes": 26403, + "mercedes": 10685, + "mercedesam": 40107, + "mercedesbenz": 32347, + "mercen": 40301, + "mercer": 21632, + "merch": 11504, + "merchandi": 14954, + "merchandise": 16808, + "merchandising": 49196, + "merchant": 19563, + "merchants": 34427, + "merci": 23364, + "merci": 29378, + "mercur": 11471, + "mercury": 45203, + "mercury": 12653, + "mercy": 33249, + "mercy": 10815, + "mere": 29657, + "mere": 10342, + "mered": 24657, + "mered": 32297, + "meredith": 25103, + "merely": 28718, + "merge": 30406, + "merged": 46492, + "merger": 24744, + "merging": 49256, + "meri": 17993, + "meri": 36109, + "meria": 48433, + "meric": 27097, + "merica": 30561, + "meridi": 37901, + "meridian": 31195, + "mering": 41060, + "meringue": 41661, + "merino": 42648, + "merit": 20830, + "merkel": 24715, + "merle": 48586, + "merlin": 26517, + "merlot": 40424, + "mermaid": 16064, + "mermaids": 43617, + "mero": 19097, + "merr": 48288, + "merri": 21462, + "merrill": 47713, + "merritt": 36462, + "merry": 14167, + "merry": 5779, + "merrychristmas": 19672, + "mers": 4199, + "mersal": 36711, + "mersey": 25248, + "mersey": 46239, + "merseyside": 35382, + "mert": 48496, + "merton": 35315, + "mery": 40873, + "meryl": 35787, + "mes": 28432, + "mes": 3029, + "mesa": 18956, + "mese": 42018, + "mesh": 15030, + "mesm": 18695, + "mesmer": 38435, + "mesmeri": 25985, + "mesmerizing": 35637, + "meso": 25537, + "mesqu": 46819, + "mess": 2490, + "mess": 8188, + "message": 3918, + "messages": 9390, + "messaging": 23234, + "messe": 40391, + "messed": 23580, + "messenger": 17389, + "messi": 19394, + "messi": 11252, + "messiah": 28737, + "messing": 23144, + "messy": 15987, + "mest": 23780, + "mester": 47349, + "mesut": 49177, + "met": 5249, + "met": 2340, + "meta": 14803, + "meta": 22701, + "metab": 16150, + "metabol": 48389, + "metaboli": 25573, + "metabolic": 34311, + "metabolism": 27824, + "metal": 8935, + "metal": 4044, + "metall": 19084, + "metallic": 17257, + "metallica": 24079, + "metals": 21375, + "metam": 28862, + 
"metamor": 39030, + "metamorpho": 47601, + "metaph": 24189, + "metaphor": 34233, + "metast": 41973, + "mete": 11226, + "meteor": 26429, + "meteor": 26823, + "meteoro": 25948, + "meteorologist": 42849, + "meter": 10104, + "meters": 13247, + "metgala": 30089, + "meth": 21867, + "meth": 26177, + "methane": 37565, + "metho": 5770, + "method": 10284, + "methodist": 25165, + "methodo": 28488, + "methodology": 37316, + "methods": 12200, + "methyl": 48999, + "metmuseum": 28207, + "meto": 25679, + "metoo": 24722, + "metr": 15086, + "metre": 27889, + "metres": 19798, + "metric": 19950, + "metrical": 40704, + "metrics": 24396, + "metro": 7257, + "metro": 6784, + "metroid": 39957, + "metropolis": 40476, + "metropolitan": 19013, + "metry": 20039, + "mets": 9633, + "mett": 28081, + "metz": 40506, + "meu": 34520, + "mew": 40368, + "mex": 3213, + "mex": 18387, + "mexic": 31728, + "mexican": 37442, + "mexican": 8186, + "mexicans": 47729, + "mexico": 31834, + "mexico": 4604, + "mey": 28584, + "mey": 27777, + "meyer": 13963, + "meyers": 32326, + "mez": 30615, + "mez": 46833, + "mezz": 38771, + "mf": 18199, + "mf": 11067, + "mfa": 24107, + "mfc": 39474, + "mfg": 21912, + "mfw": 27309, + "mg": 10003, + "mg": 8014, + "mga": 23954, + "mgm": 27572, + "mgmt": 22288, + "mgr": 31500, + "mgs": 48073, + "mgt": 48663, + "mh": 9962, + "mh": 10834, + "mha": 41944, + "mhealth": 41225, + "mhs": 28815, + "mhz": 31550, + "mi": 714, + "mi": 2251, + "mia": 5852, + "miam": 31053, + "miami": 15106, + "miami": 4891, + "mian": 24792, + "miaw": 36046, + "mib": 48178, + "mic": 1213, + "mic": 3816, + "mica": 41551, + "micah": 33870, + "mice": 19030, + "mich": 25628, + "mich": 23029, + "micha": 2083, + "michael": 6051, + "michael": 2511, + "michaela": 41897, + "michaeljackson": 33532, + "michaels": 23868, + "michal": 47144, + "miche": 37966, + "micheal": 43709, + "michel": 5158, + "michel": 17153, + "michelangelo": 41245, + "michele": 20642, + "michelin": 26330, + "michelle": 19028, + "michelle": 8625, + "michi": 5658, + "michigan": 32344, + "michigan": 6296, + "mick": 15171, + "mick": 12592, + "mickey": 41813, + "mickey": 13053, + "micky": 43011, + "micro": 3160, + "micro": 11374, + "microbes": 44671, + "microbi": 19496, + "microbial": 30335, + "microbiology": 35348, + "microbiome": 35148, + "micron": 48742, + "microphone": 24643, + "micropoetry": 35997, + "microscope": 29114, + "microscopy": 38431, + "microsof": 42424, + "microsoft": 38650, + "microsoft": 7254, + "microwave": 24240, + "mics": 16554, + "mid": 2192, + "mid": 4734, + "midcentury": 48988, + "midd": 2983, + "midday": 23390, + "middle": 9849, + "middle": 3694, + "middleeast": 32783, + "middles": 29769, + "middlesbrough": 32436, + "middlesex": 39154, + "middleton": 23627, + "middleweight": 35829, + "midfield": 28116, + "midfielder": 13423, + "midget": 30734, + "midi": 39496, + "midi": 27326, + "midland": 24822, + "midlands": 18062, + "midnight": 35746, + "midnight": 6302, + "mids": 40821, + "midst": 24752, + "midsummer": 35234, + "midterm": 34365, + "midterms": 32015, + "midtown": 26069, + "midway": 26536, + "midweek": 29120, + "midwest": 16627, + "midwi": 44802, + "midwife": 37681, + "midwives": 42355, + "mie": 20865, + "mie": 10555, + "miento": 46482, + "mier": 36490, + "mies": 8840, + "miff": 49398, + "mig": 28743, + "might": 2727, + "mighty": 26632, + "mighty": 7815, + "mign": 41678, + "migos": 44640, + "migr": 3736, + "migra": 28186, + "migraine": 35360, + "migrant": 18902, + "migrants": 15814, + "migrate": 41804, + "migrating": 43604, + "migration": 11891, + "migu": 
12279, + "miguel": 33672, + "miguel": 14436, + "miho": 46870, + "mii": 39896, + "mik": 15096, + "mik": 46203, + "mika": 28609, + "mika": 25185, + "mike": 5884, + "mike": 3178, + "mikel": 48865, + "mikequind": 33508, + "mikequindazzi": 33551, + "mikey": 34934, + "mikey": 23368, + "mikha": 30999, + "mikhail": 38327, + "miki": 48863, + "miko": 35413, + "miku": 37703, + "mil": 1469, + "mil": 12826, + "mila": 26183, + "milan": 30380, + "milan": 8552, + "milano": 18585, + "milb": 42248, + "mild": 16085, + "mildly": 49059, + "mile": 7833, + "mile": 6243, + "mileage": 30579, + "miler": 44680, + "miles": 3446, + "milestone": 13485, + "milestones": 34025, + "miley": 25336, + "miley": 14321, + "mileycyrus": 28528, + "milf": 45386, + "milford": 35840, + "mili": 16698, + "miliband": 41440, + "milit": 3715, + "militant": 33629, + "militants": 23974, + "military": 24498, + "military": 4323, + "militi": 46625, + "militia": 32114, + "milk": 13409, + "milk": 5205, + "milkshake": 29066, + "milky": 37320, + "milky": 21120, + "milkyway": 43246, + "mill": 4221, + "mill": 6637, + "milla": 49381, + "millan": 34930, + "millan": 22188, + "millar": 41851, + "mille": 34066, + "millen": 48501, + "millenni": 10406, + "millennial": 28357, + "millennials": 18804, + "millennium": 21116, + "miller": 21699, + "miller": 5733, + "milli": 5340, + "millie": 29283, + "milling": 39133, + "million": 13154, + "million": 2506, + "millionaire": 25179, + "millionaires": 47159, + "millions": 8492, + "mills": 10331, + "millwall": 35902, + "milly": 45794, + "milne": 44590, + "milner": 45230, + "milo": 24548, + "milton": 39004, + "milton": 17360, + "milwau": 13452, + "milwaukee": 14259, + "mim": 39379, + "mimi": 27086, + "mimic": 47116, + "mimic": 46519, + "mimo": 45551, + "min": 771, + "min": 3331, + "mina": 15281, + "minaj": 25136, + "minal": 40222, + "minat": 33275, + "mince": 32396, + "mind": 5890, + "mind": 2575, + "mindanao": 44228, + "minded": 21330, + "mindful": 28457, + "mindfulness": 15707, + "minding": 45337, + "minds": 9244, + "mindset": 14217, + "mindy": 46875, + "mindy": 38551, + "mine": 20149, + "mine": 3347, + "minecraft": 15678, + "mined": 48034, + "minent": 12533, + "miner": 14109, + "miner": 26572, + "mineral": 17692, + "minerals": 21169, + "miners": 22119, + "mines": 16211, + "ming": 10868, + "ming": 2107, + "mingham": 7590, + "mingle": 38437, + "mingly": 36909, + "mington": 49283, + "mington": 23119, + "minh": 48734, + "minho": 21318, + "mini": 1810, + "mini": 3954, + "miniature": 44298, + "miniature": 16377, + "miniatures": 38816, + "minic": 31522, + "minim": 10005, + "minimal": 18458, + "minimalism": 42594, + "minimalist": 26641, + "minimize": 38697, + "minimum": 12244, + "minindia": 28458, + "mining": 8473, + "minion": 28622, + "minions": 27035, + "minis": 33409, + "minis": 35976, + "minister": 25688, + "minister": 3569, + "ministerial": 33008, + "ministers": 16406, + "ministries": 27895, + "ministry": 8742, + "mink": 42017, + "minn": 45991, + "minn": 47318, + "minne": 7083, + "minneapolis": 16977, + "minneso": 9380, + "minnesota": 9968, + "minnie": 24493, + "mino": 22791, + "minogue": 44202, + "minor": 8522, + "minorities": 28119, + "minority": 16210, + "minors": 36789, + "mins": 6196, + "minsk": 46151, + "minster": 11189, + "mint": 48084, + "mint": 7506, + "minted": 49377, + "minton": 20050, + "minu": 29064, + "minus": 15358, + "minute": 28931, + "minute": 4497, + "minutes": 3056, + "mio": 26366, + "mir": 2750, + "mir": 6585, + "mira": 21665, + "mira": 22762, + "mirac": 13685, + "miracle": 49208, + "miracle": 
11543, + "miracles": 23478, + "miraculous": 38671, + "mirage": 28679, + "mirai": 49060, + "mirand": 32367, + "miranda": 17590, + "mire": 38140, + "mire": 30140, + "miri": 22273, + "miriam": 30950, + "miro": 34851, + "miro": 48317, + "mirren": 47600, + "mirro": 48500, + "mirror": 29823, + "mirror": 7220, + "mirrors": 21823, + "mirza": 36440, + "mis": 866, + "mis": 11239, + "mischief": 33896, + "misconceptions": 48681, + "misconduct": 30601, + "mise": 46567, + "mise": 17267, + "miser": 33394, + "miserable": 26196, + "misery": 28360, + "mises": 24390, + "misfits": 42708, + "mish": 15494, + "mish": 20981, + "misha": 35434, + "mishra": 33042, + "misleading": 30862, + "mism": 15948, + "miso": 27657, + "miso": 33441, + "misogy": 31315, + "misogyny": 48415, + "miss": 6984, + "miss": 1526, + "missal": 38337, + "missed": 3955, + "misses": 15844, + "missi": 3008, + "missile": 14411, + "missiles": 27868, + "missin": 36209, + "missing": 23509, + "missing": 3423, + "mission": 12738, + "mission": 2406, + "missionaries": 40580, + "missionary": 27915, + "missions": 6990, + "mississ": 26483, + "mississauga": 28393, + "mississi": 11687, + "mississippi": 12232, + "missou": 30710, + "missoula": 48549, + "missouri": 11835, + "missuni": 26347, + "missuniverse": 28766, + "missy": 48105, + "missy": 31515, + "missyou": 45799, + "mist": 12610, + "mist": 11946, + "mistak": 20478, + "mistake": 11303, + "mistaken": 29182, + "mistakenly": 48494, + "mistakes": 12824, + "mister": 26949, + "mister": 18895, + "mistle": 46800, + "mistletoe": 48569, + "mistre": 42039, + "mistress": 24349, + "mists": 28636, + "misty": 18799, + "misunderstood": 41574, + "misuse": 40970, + "mit": 3303, + "mit": 4551, + "mita": 47514, + "mitage": 27964, + "mitch": 6969, + "mitch": 14150, + "mitchell": 39339, + "mitchell": 9007, + "mite": 26929, + "mith": 21752, + "mith": 17948, + "miti": 17857, + "mitigate": 42273, + "mitigation": 35514, + "mito": 38254, + "mitochondri": 42132, + "mitra": 47703, + "mits": 24086, + "mitsu": 17905, + "mitsubi": 21604, + "mitsubishi": 23030, + "mitt": 17321, + "mitt": 21341, + "mitted": 10307, + "mitting": 27938, + "mitz": 41827, + "mium": 35891, + "miwx": 43941, + "mix": 3210, + "mix": 3285, + "mixed": 29376, + "mixed": 6780, + "mixer": 17200, + "mixers": 39175, + "mixes": 19061, + "mixing": 15588, + "mixtape": 11044, + "mixture": 28286, + "miy": 25695, + "miya": 36257, + "miz": 20881, + "miz": 30795, + "mize": 19076, + "mized": 43418, + "mizing": 38715, + "mizz": 19985, + "mizzou": 26165, + "mj": 13117, + "mj": 14733, + "mk": 11581, + "mk": 8937, + "mke": 36642, + "mkt": 24814, + "ml": 3627, + "ml": 5780, + "mla": 16723, + "mlas": 48464, + "mlb": 21039, + "mlb": 7482, + "mley": 40329, + "mlg": 45801, + "mlin": 24556, + "mlk": 17941, + "mlkday": 39905, + "mlm": 37611, + "mln": 18971, + "mlp": 23620, + "mlpfi": 45475, + "mlpfim": 45640, + "mls": 13077, + "mm": 1028, + "mm": 2848, + "mma": 34140, + "mma": 6096, + "mmc": 44253, + "mme": 13105, + "mmed": 19570, + "mmer": 35717, + "mmer": 7508, + "mmers": 28128, + "mmes": 42862, + "mmi": 34147, + "mming": 21038, + "mming": 16507, + "mmings": 31357, + "mmit": 41050, + "mmj": 43015, + "mmm": 37908, + "mmm": 7641, + "mmmm": 36312, + "mmmm": 13180, + "mmmmm": 21808, + "mmmmmm": 43740, + "mmo": 30418, + "mmon": 41131, + "mmor": 36657, + "mmorpg": 39476, + "mms": 37803, + "mmva": 42666, + "mmy": 28837, + "mmy": 8722, + "mn": 5086, + "mn": 4057, + "mna": 34877, + "mnd": 44776, + "mnet": 34129, + "mnf": 41105, + "mnl": 32980, + "mnleg": 42653, + "mns": 39040, + "mnt": 21477, + 
"mntwins": 45448, + "mnwild": 39044, + "mnwx": 39592, + "mo": 617, + "mo": 2080, + "moa": 33174, + "moana": 43241, + "mob": 2818, + "mob": 12754, + "mobi": 9451, + "mobil": 26343, + "mobil": 29815, + "mobile": 12935, + "mobile": 3451, + "mobiles": 44302, + "mobili": 20770, + "mobility": 12546, + "mobilization": 48916, + "moby": 47219, + "moc": 41439, + "moc": 36992, + "mocha": 28425, + "mochi": 47973, + "mock": 15641, + "mock": 12759, + "mocked": 47400, + "mocking": 28692, + "mocking": 37870, + "mocks": 35142, + "mod": 6362, + "mod": 10893, + "moda": 25814, + "modal": 33157, + "mode": 20402, + "mode": 6493, + "model": 4591, + "model": 2863, + "modeled": 39527, + "modeling": 13706, + "modelling": 19946, + "models": 6176, + "moder": 2894, + "moderate": 16435, + "moderated": 27928, + "moderating": 34242, + "moderator": 32659, + "modern": 11706, + "modern": 4077, + "modernart": 34417, + "moderni": 24328, + "modernism": 39601, + "modernist": 36773, + "modernization": 47294, + "modes": 30454, + "modest": 25436, + "modi": 9047, + "modi": 7774, + "modification": 37630, + "modified": 17964, + "modo": 36820, + "mods": 23843, + "modu": 9036, + "modular": 22437, + "module": 16757, + "modules": 30575, + "moe": 38655, + "moe": 17938, + "mof": 30798, + "moff": 27160, + "mog": 42362, + "moga": 41732, + "mogadishu": 45133, + "mogul": 41320, + "moh": 18979, + "moh": 35388, + "moha": 46892, + "moham": 7923, + "mohamed": 18472, + "mohammad": 19926, + "mohammed": 16168, + "mohan": 26521, + "mohan": 23586, + "mohawk": 34942, + "mohd": 49094, + "mohsin": 48861, + "moi": 20691, + "moi": 21825, + "moil": 30349, + "moines": 32091, + "moist": 19831, + "moist": 33263, + "moisture": 20412, + "moisturi": 25942, + "moj": 34505, + "moja": 49055, + "mojito": 46830, + "mojo": 25204, + "mok": 49146, + "mol": 4246, + "mol": 31582, + "mold": 21846, + "molding": 46274, + "moldova": 47317, + "mole": 9927, + "mole": 23529, + "molecular": 19370, + "molecule": 39233, + "molecules": 35643, + "molina": 34201, + "mollie": 48203, + "molly": 24368, + "molly": 12573, + "molo": 41510, + "mology": 32255, + "molten": 46071, + "moly": 47083, + "mom": 1614, + "mom": 2543, + "moma": 33605, + "mombasa": 40340, + "moment": 12197, + "moment": 2495, + "momento": 30078, + "moments": 5251, + "momentum": 15722, + "momlife": 43825, + "momma": 14508, + "mommy": 12456, + "momo": 48490, + "momo": 25980, + "moms": 28446, + "moms": 10042, + "momsdemand": 33744, + "mon": 749, + "mon": 2173, + "mona": 19143, + "monaco": 14938, + "monaghan": 39797, + "monarch": 27235, + "monarch": 22619, + "monarchs": 36750, + "monarchy": 47503, + "monaster": 19422, + "monastery": 21850, + "monc": 34847, + "moncton": 44962, + "mond": 14522, + "mond": 4475, + "monday": 6205, + "monday": 2098, + "mondaymorning": 40089, + "mondaymotiv": 45488, + "mondaymotivation": 8198, + "mondaymotivaton": 47034, + "mondays": 13815, + "monde": 29339, + "mondo": 36207, + "monds": 20317, + "mone": 25990, + "monet": 24499, + "monetary": 26394, + "moneti": 38056, + "money": 12743, + "money": 2327, + "mong": 43566, + "monger": 38928, + "mongers": 27670, + "mongo": 20680, + "mongolia": 27144, + "mongolian": 46335, + "moni": 46851, + "monia": 31161, + "monic": 30893, + "monica": 13540, + "monit": 9014, + "monitor": 10198, + "monitored": 45828, + "monitoring": 11030, + "monitors": 30478, + "monk": 30557, + "monk": 16424, + "monkey": 29597, + "monkey": 9465, + "monkeys": 15781, + "monks": 29090, + "monmouth": 36929, + "mono": 8220, + "mono": 22537, + "monochrome": 25576, + "monogram": 39665, + 
"monologue": 47776, + "monopoly": 25241, + "monoxide": 49314, + "monro": 45750, + "monroe": 13625, + "mons": 19885, + "monsanto": 37592, + "monsi": 46677, + "monsieur": 48879, + "monsoon": 18872, + "monsta": 30718, + "monstax": 45631, + "monste": 47045, + "monster": 14454, + "monster": 6060, + "monsters": 11546, + "mont": 5186, + "mont": 5382, + "montag": 37202, + "montage": 32325, + "montal": 42126, + "montan": 28405, + "montana": 11436, + "monte": 8711, + "monte": 14667, + "montene": 28538, + "montenegro": 30378, + "monter": 36673, + "monterey": 23388, + "monterrey": 45254, + "montess": 43205, + "montessori": 45443, + "montgom": 13852, + "montgomery": 14951, + "month": 7680, + "month": 1924, + "monthly": 8764, + "months": 3109, + "monthsary": 42420, + "monton": 41961, + "montp": 39523, + "montre": 8434, + "montreal": 9262, + "montrose": 42347, + "monty": 43997, + "monty": 24038, + "monu": 9748, + "monument": 12019, + "monumental": 31297, + "monuments": 26916, + "mony": 4117, + "monza": 40380, + "moo": 4953, + "moo": 24626, + "mood": 42358, + "mood": 5394, + "moods": 43727, + "moody": 17170, + "moom": 36887, + "moon": 6334, + "moon": 3293, + "mooney": 37942, + "moonlight": 20001, + "moons": 29887, + "moonshine": 46706, + "moor": 14817, + "moor": 11877, + "moore": 28613, + "moore": 6708, + "moors": 32577, + "moose": 37562, + "moose": 17338, + "moot": 46895, + "mop": 33900, + "mopar": 41166, + "mor": 657, + "mor": 18614, + "mora": 29262, + "moral": 11246, + "morale": 39404, + "morales": 27117, + "morality": 34133, + "morally": 42519, + "morals": 46223, + "moran": 21557, + "moray": 44569, + "more": 5434, + "more": 750, + "morecam": 37305, + "morecambe": 43414, + "mored": 20195, + "moreland": 44135, + "moreno": 24826, + "morethan": 30889, + "morg": 34284, + "morgan": 15432, + "morgan": 6075, + "morgen": 35106, + "mori": 25710, + "mori": 29514, + "moris": 43131, + "moritz": 45594, + "morley": 40439, + "mormon": 27715, + "morn": 22393, + "mornin": 28327, + "morning": 10769, + "morning": 1119, + "mornings": 12106, + "moro": 31613, + "moroc": 11996, + "moroccan": 27546, + "morocco": 15228, + "moron": 31875, + "morons": 46477, + "morow": 40779, + "morph": 23915, + "morph": 41700, + "morphe": 38978, + "morpho": 38622, + "morrha": 43044, + "morri": 9876, + "morris": 22560, + "morris": 9090, + "morrison": 40961, + "morrison": 14094, + "morrisons": 40965, + "morrissey": 30040, + "morro": 48363, + "morrow": 21611, + "mors": 13064, + "morse": 25282, + "mort": 24257, + "mort": 30583, + "mortal": 31883, + "mortal": 14680, + "mortality": 20347, + "mortar": 27258, + "mortg": 12069, + "mortgage": 13988, + "mortgages": 45391, + "mortimer": 47836, + "morton": 20698, + "morty": 37391, + "mory": 22633, + "mos": 28658, + "mos": 9593, + "mosa": 14164, + "mosa": 23809, + "mosaic": 17506, + "mosch": 47003, + "mosco": 9840, + "moscow": 10371, + "moseley": 47080, + "moses": 18451, + "mosley": 46228, + "mosqu": 15215, + "mosque": 12694, + "mosques": 41214, + "mosquit": 39699, + "mosquito": 25083, + "mosquitoes": 41870, + "moss": 25107, + "moss": 12815, + "most": 7034, + "most": 1096, + "mostly": 8829, + "mosul": 29165, + "mot": 16352, + "mot": 15452, + "mota": 42499, + "motd": 46232, + "motel": 26191, + "moth": 33208, + "moth": 11736, + "mother": 7455, + "mother": 3050, + "motherhood": 32274, + "motherland": 46774, + "mothers": 10546, + "mothersday": 15583, + "motherwell": 48104, + "moths": 29086, + "moti": 38210, + "motif": 35373, + "motion": 32139, + "motion": 7860, + "motiv": 3183, + "motivate": 26771, + 
"motivated": 16521, + "motivates": 44684, + "motivating": 37720, + "motivation": 26117, + "motivation": 4193, + "motivational": 32832, + "motivational": 20472, + "motivationmonday": 28703, + "motive": 36669, + "motley": 42553, + "motm": 41192, + "moto": 10646, + "moto": 11431, + "motocross": 34562, + "motogp": 16615, + "motor": 3975, + "motor": 7659, + "motorbike": 33341, + "motorcycle": 10297, + "motorcycles": 24869, + "motoring": 44491, + "motorists": 32766, + "motorola": 33738, + "motors": 14989, + "motorsport": 18371, + "motorsports": 24264, + "motorway": 31808, + "motown": 32685, + "mott": 44570, + "mott": 21708, + "motto": 23338, + "mou": 2809, + "mou": 25289, + "moud": 37698, + "moul": 25725, + "mould": 36743, + "moulin": 47656, + "moun": 2023, + "mound": 21414, + "mount": 20553, + "mount": 5532, + "mountain": 14547, + "mountain": 3965, + "mountaine": 24841, + "mountaineer": 49255, + "mountains": 5873, + "mounted": 17897, + "mounting": 29910, + "mounts": 36767, + "mour": 9053, + "mour": 42446, + "moured": 29555, + "mourinho": 18536, + "mourn": 33592, + "mourning": 24169, + "mourns": 42811, + "mous": 24837, + "mous": 17425, + "mouse": 33032, + "mouse": 9301, + "mousse": 31869, + "moustache": 32795, + "mouth": 15152, + "mouth": 4932, + "mouths": 38518, + "mov": 23950, + "move": 16624, + "move": 2783, + "moved": 6997, + "movember": 23474, + "movement": 5208, + "movements": 19665, + "mover": 37673, + "movers": 33957, + "moves": 6880, + "movi": 1707, + "movic": 43838, + "movie": 11247, + "movie": 2016, + "movies": 4772, + "moving": 32160, + "moving": 3584, + "mow": 31006, + "mow": 36329, + "mower": 30895, + "mowing": 46424, + "mowx": 44263, + "moy": 27276, + "moy": 34205, + "moyes": 37119, + "moz": 14761, + "moz": 43738, + "mozam": 26648, + "mozambique": 28831, + "mozart": 22132, + "mozz": 26317, + "mozzarella": 27845, + "mp": 1037, + "mp": 1246, + "mpa": 30749, + "mpc": 38560, + "mpd": 33814, + "mped": 28134, + "mper": 22803, + "mpg": 39830, + "mpg": 37454, + "mpgvip": 42149, + "mph": 5306, + "mpi": 43263, + "mping": 27999, + "mple": 21139, + "mplo": 47071, + "mpls": 34298, + "mpo": 33674, + "mpp": 39570, + "mps": 5504, + "mption": 9717, + "mpton": 27448, + "mpu": 47156, + "mpus": 25864, + "mpy": 17192, + "mq": 19103, + "mqm": 24687, + "mr": 3139, + "mr": 1982, + "mra": 44568, + "mrc": 25897, + "mri": 24773, + "mrs": 25003, + "mrs": 4255, + "mrt": 30256, + "mru": 22370, + "mrw": 15303, + "ms": 3525, + "ms": 988, + "msa": 36306, + "msc": 31826, + "msc": 20529, + "msd": 25804, + "msd": 36407, + "msdhoni": 32850, + "msf": 36239, + "msg": 44430, + "msg": 10928, + "msh": 41751, + "msi": 43597, + "msi": 45278, + "msk": 38501, + "msl": 42736, + "msm": 22210, + "msn": 18824, + "msn": 41042, + "msnbc": 20245, + "mson": 27773, + "mson": 12298, + "msp": 41445, + "msp": 22318, + "mss": 42136, + "mss": 48610, + "mst": 26335, + "msu": 26763, + "msu": 17298, + "mswx": 42957, + "msy": 43919, + "mt": 4252, + "mt": 3284, + "mta": 28691, + "mtb": 48306, + "mtb": 18747, + "mtc": 42482, + "mtg": 49142, + "mtg": 13648, + "mth": 48151, + "mtl": 22135, + "mtn": 26041, + "mtn": 18953, + "mtr": 46650, + "mts": 38751, + "mtv": 8099, + "mtv": 12555, + "mtvbr": 47258, + "mtvhottest": 16751, + "mtvstars": 19948, + "mu": 670, + "mu": 6411, + "mua": 21395, + "muay": 44910, + "muaythai": 47763, + "mubarak": 17957, + "muc": 49115, + "much": 14300, + "much": 1238, + "mucha": 42191, + "muchas": 26278, + "mucho": 19864, + "muck": 44731, + "muck": 45330, + "mud": 17491, + "mud": 11673, + "mudder": 49104, + "muddy": 21524, + 
"mue": 44383, + "mue": 40717, + "mueller": 46863, + "mueller": 14719, + "muen": 48646, + "muer": 33840, + "muf": 33852, + "mufc": 9013, + "muffin": 22696, + "muffins": 25922, + "mufti": 44930, + "mug": 16339, + "mug": 9722, + "mugabe": 36441, + "mughal": 37508, + "mugs": 22852, + "mugshot": 40028, + "muh": 36335, + "muh": 46475, + "muham": 10043, + "muhammad": 12259, + "muir": 44650, + "muir": 24745, + "muj": 44635, + "muk": 17327, + "muk": 32600, + "mukher": 34575, + "mukherjee": 37862, + "mul": 1899, + "mul": 43193, + "mula": 40937, + "mulator": 17463, + "mulberry": 39221, + "mule": 28695, + "mull": 17313, + "mull": 35310, + "mulled": 44641, + "mullen": 30797, + "muller": 33956, + "mullet": 35010, + "mulligan": 44336, + "mullins": 41265, + "mult": 34219, + "multi": 3947, + "multi": 6400, + "multic": 21683, + "multicul": 28004, + "multicultural": 34667, + "multil": 27975, + "multimedia": 27977, + "multin": 38996, + "multinational": 46540, + "multip": 40314, + "multiplayer": 27460, + "multiple": 6470, + "multipurpose": 47665, + "multit": 27814, + "multitasking": 48684, + "mulus": 26180, + "mum": 15565, + "mum": 4030, + "mumb": 5850, + "mumbai": 24279, + "mumbai": 6971, + "mumford": 46184, + "mummy": 16301, + "mums": 17868, + "mun": 2617, + "mun": 21059, + "muna": 48424, + "munch": 23587, + "munch": 33299, + "munchies": 44324, + "munchkin": 41305, + "mund": 14244, + "mundo": 20990, + "muni": 27327, + "muni": 39795, + "munich": 13526, + "munici": 12159, + "municipal": 43667, + "municipal": 16600, + "municipality": 29987, + "munition": 32668, + "munro": 36501, + "munster": 27201, + "mup": 21966, + "muppet": 40598, + "muppets": 40187, + "mups": 42195, + "mur": 2144, + "mur": 18293, + "mura": 45176, + "mural": 12315, + "murals": 31499, + "murder": 28136, + "murder": 5787, + "murdered": 13158, + "murderer": 26956, + "murderers": 48472, + "murdering": 36055, + "murders": 22409, + "murdoch": 29037, + "murphy": 48976, + "murphy": 8914, + "murray": 31978, + "murray": 7513, + "murs": 38783, + "mus": 2198, + "mus": 8103, + "musa": 30540, + "musc": 5696, + "muscat": 33322, + "muscle": 27323, + "muscle": 9269, + "muscles": 16786, + "muscular": 30606, + "muse": 2369, + "muse": 15686, + "museo": 36457, + "muses": 48243, + "museu": 27087, + "museum": 15602, + "museum": 2786, + "museums": 15542, + "museumweek": 37996, + "mush": 7635, + "mushroom": 13011, + "mushrooms": 14730, + "musi": 15628, + "music": 4110, + "music": 1179, + "musica": 26668, + "musical": 36002, + "musical": 5173, + "musically": 48893, + "musicals": 36974, + "musichistory": 37890, + "musician": 11179, + "musicians": 12498, + "musicislife": 43311, + "musicmonday": 35887, + "musicvideo": 26764, + "musik": 32986, + "musings": 44961, + "musique": 42250, + "musk": 32143, + "musk": 19063, + "muskete": 32775, + "musketeers": 37993, + "musko": 34987, + "muskoka": 40832, + "musli": 4958, + "muslim": 43795, + "muslim": 7060, + "muslims": 10513, + "muss": 41493, + "mussels": 33393, + "must": 6783, + "must": 2048, + "mustache": 23451, + "mustaf": 23596, + "mustafa": 29000, + "mustang": 42361, + "mustang": 13309, + "mustangs": 22500, + "mustard": 15794, + "muster": 47361, + "mustread": 28978, + "mut": 12598, + "mut": 22839, + "mutant": 28384, + "mutation": 38626, + "mutations": 39651, + "mute": 31252, + "muted": 48028, + "muth": 34280, + "mutil": 39950, + "mutt": 45924, + "mutu": 17574, + "mutual": 15055, + "mutuals": 31158, + "muy": 44625, + "mv": 10580, + "mv": 8269, + "mvc": 40549, + "mvp": 8905, + "mw": 16725, + "mw": 11206, + "mwc": 24289, + 
"mwf": 48565, + "mx": 21947, + "mx": 9575, + "my": 1152, + "my": 607, + "mya": 31401, + "myal": 42735, + "myan": 13761, + "myanmar": 14764, + "myart": 38826, + "myco": 48362, + "mydayin": 41896, + "mydayinla": 42801, + "mydubai": 43475, + "mye": 27551, + "myel": 40084, + "myers": 15993, + "myjaps": 47939, + "myle": 43700, + "myles": 25511, + "mylife": 30537, + "mylittle": 37757, + "mylittlepony": 45107, + "myo": 16206, + "myr": 20272, + "myra": 35694, + "myri": 34972, + "myrt": 47785, + "myrtle": 27768, + "mys": 11724, + "myself": 3245, + "mysore": 44924, + "myspace": 41382, + "myster": 4669, + "mysteries": 20605, + "mysterious": 12650, + "mystery": 39828, + "mystery": 6711, + "mysti": 28711, + "mystic": 36264, + "mystic": 23722, + "mystical": 34122, + "myth": 20322, + "myth": 13878, + "mythical": 34377, + "mytho": 43857, + "mythology": 22496, + "myths": 18675, + "mz": 29509, + "mz": 33400, + "mzan": 36322, + "mzansi": 43301, + "má": 36842, + "mé": 21890, + "méxico": 46159, + "mü": 28142, + "mün": 41235, + "n": 77, + "n": 333, + "na": 1097, + "na": 1272, + "naa": 37738, + "naacp": 32176, + "nab": 6951, + "nab": 19440, + "nabe": 35111, + "naby": 24800, + "nac": 14557, + "nac": 18950, + "nach": 12168, + "nach": 43622, + "nacho": 35647, + "nachos": 32847, + "nacht": 37261, + "nacional": 38782, + "nad": 6204, + "nad": 43928, + "nada": 31683, + "nadal": 20814, + "nade": 24908, + "nadi": 30512, + "nadia": 27487, + "nadine": 23356, + "nadu": 20936, + "nae": 19374, + "naf": 16161, + "naf": 45956, + "nafta": 43123, + "nag": 6694, + "nag": 23902, + "naga": 45953, + "naga": 38997, + "nagar": 17490, + "nage": 41219, + "nago": 38349, + "nagoya": 43303, + "nagpur": 43328, + "nah": 26421, + "nah": 11129, + "nahi": 35244, + "nai": 6230, + "nai": 10692, + "naia": 31340, + "naidu": 42429, + "naija": 16326, + "naik": 34424, + "nail": 19459, + "nail": 9059, + "nailart": 43532, + "nailed": 19035, + "nails": 8469, + "nair": 27107, + "naira": 39450, + "naire": 48892, + "nairobi": 17756, + "nais": 46396, + "naissance": 44761, + "naive": 43362, + "naj": 30985, + "naji": 32589, + "nak": 9248, + "nak": 25550, + "naked": 46371, + "naked": 11478, + "naku": 39864, + "nal": 14132, + "nal": 3119, + "nale": 27198, + "nall": 32869, + "nally": 26158, + "nam": 1410, + "nam": 12344, + "nama": 39586, + "naman": 27635, + "namaste": 35549, + "name": 18160, + "name": 1981, + "named": 3194, + "nameis": 40831, + "nament": 3916, + "naments": 16540, + "names": 6130, + "namesake": 41298, + "nami": 20393, + "namibia": 23731, + "naming": 19367, + "namjoon": 31986, + "namm": 35524, + "namo": 46013, + "namo": 24854, + "nan": 4375, + "nan": 7750, + "nana": 18761, + "nanaimo": 40518, + "nancy": 21511, + "nancy": 11425, + "nand": 20435, + "nandez": 12764, + "nando": 46044, + "nang": 48148, + "nani": 27980, + "nanny": 31104, + "nano": 15835, + "nano": 22006, + "nanop": 34177, + "nanotechnology": 42235, + "nanow": 46734, + "nant": 22526, + "nantes": 47533, + "nantucket": 41573, + "nao": 39319, + "naom": 34955, + "naomi": 20173, + "nap": 6568, + "nap": 11012, + "napa": 20545, + "napier": 40875, + "napkin": 38930, + "naples": 23560, + "napo": 18715, + "napol": 20122, + "napoleon": 24969, + "napoli": 22445, + "napp": 11359, + "napping": 37657, + "naps": 31317, + "naq": 46453, + "nar": 2977, + "nar": 20145, + "nara": 33823, + "narcis": 25229, + "narcissi": 35442, + "narco": 38461, + "nard": 18216, + "nare": 34853, + "naren": 8468, + "narendr": 9807, + "narendra": 25848, + "narendramodi": 9853, + "narnia": 48693, + "narr": 11845, + "narrated": 
43609, + "narrative": 15933, + "narratives": 35117, + "narrator": 46529, + "narrow": 24006, + "narrow": 16652, + "narrowly": 29747, + "naruto": 22732, + "nas": 3090, + "nas": 15250, + "nasa": 6841, + "nasal": 42853, + "nascar": 25723, + "nascar": 7868, + "nasdaq": 26629, + "nash": 6771, + "nash": 13620, + "nasheed": 49176, + "nashgrier": 33372, + "nashville": 45356, + "nashville": 8585, + "nasi": 47987, + "nasir": 47509, + "nassau": 34048, + "nasser": 43559, + "nasty": 32930, + "nasty": 8709, + "nat": 1276, + "nat": 11310, + "nata": 39392, + "natal": 28516, + "natali": 20296, + "natalia": 32978, + "natalie": 36634, + "natalie": 13595, + "natash": 48701, + "natasha": 23093, + "nate": 26643, + "nate": 7587, + "natgeo": 33009, + "natgeo": 25046, + "nath": 22203, + "nath": 19843, + "nathan": 13028, + "nathan": 9711, + "nathanfillion": 47422, + "nathaniel": 32667, + "nati": 1060, + "nati": 13384, + "natic": 44944, + "natin": 44358, + "nation": 2317, + "nation": 2670, + "national": 3126, + "national": 1362, + "nationalbestfriend": 42222, + "nationaldogday": 32227, + "nationalism": 29867, + "nationalist": 25058, + "nationality": 44451, + "nationally": 15130, + "nationalpark": 33060, + "nationalparks": 41204, + "nationals": 10784, + "nationaltrust": 34051, + "nations": 7654, + "nationwide": 13795, + "native": 20639, + "native": 4562, + "natives": 36060, + "nativity": 33988, + "natl": 39225, + "natl": 34465, + "nato": 13139, + "nats": 21106, + "natu": 2775, + "natur": 6800, + "natural": 13198, + "natural": 3288, + "naturally": 12995, + "naturals": 44686, + "nature": 9382, + "nature": 2625, + "naturelovers": 41514, + "naturephotography": 22533, + "natures": 15616, + "natureuk": 46193, + "nau": 5955, + "nau": 32878, + "naught": 41001, + "naughty": 47255, + "naughty": 15101, + "nautical": 31660, + "nav": 3413, + "nav": 25308, + "navajo": 35523, + "naval": 44725, + "naval": 13273, + "navar": 24848, + "navarro": 37104, + "nave": 42704, + "naveen": 43837, + "naver": 32534, + "navi": 16159, + "navi": 44848, + "navig": 12507, + "navigate": 24400, + "navigating": 33134, + "navigation": 20148, + "navigator": 38910, + "navis": 36377, + "navratri": 45428, + "navy": 28414, + "navy": 5598, + "naw": 16259, + "naw": 30500, + "nawaz": 49161, + "nawaz": 19523, + "nax": 38299, + "nay": 11704, + "nay": 16182, + "naya": 38917, + "nayanth": 38157, + "nayanthara": 45184, + "naz": 6363, + "naz": 35534, + "nazi": 12972, + "nazis": 21778, + "nb": 6459, + "nb": 6813, + "nba": 22524, + "nba": 5139, + "nbad": 43458, + "nbaf": 30127, + "nbafinals": 33803, + "nbap": 41956, + "nbaplayoffs": 43860, + "nbat": 46291, + "nbc": 9352, + "nbc": 8799, + "nbd": 24526, + "nbl": 42652, + "nc": 5021, + "nc": 4911, + "nca": 6921, + "ncaa": 9418, + "ncbd": 47221, + "ncc": 33195, + "ncc": 36686, + "ncds": 47573, + "ncfc": 31274, + "ncis": 33617, + "ncpol": 40562, + "ncr": 38474, + "ncs": 42689, + "nct": 27723, + "nct": 20319, + "ncwx": 36166, + "nd": 5625, + "nd": 1764, + "nda": 32862, + "ndc": 47564, + "ndi": 48229, + "ndp": 19257, + "nds": 31347, + "ndtv": 26261, + "ne": 557, + "ne": 1422, + "nea": 24068, + "neal": 33652, + "neal": 16730, + "near": 11296, + "near": 2252, + "nearby": 13314, + "nearest": 18985, + "nearing": 26571, + "nearly": 4816, + "nears": 37710, + "neat": 43201, + "neat": 15465, + "neath": 18315, + "neau": 31559, + "neb": 40209, + "nebra": 13371, + "nebraska": 14565, + "nebu": 49295, + "nebula": 22532, + "nec": 25109, + "nec": 22992, + "necess": 6961, + "necessarily": 25853, + "necessary": 8955, + "necessities": 43483, + 
"necessity": 33163, + "neck": 6066, + "neck": 6906, + "necklace": 7385, + "necklaces": 32276, + "necks": 29701, + "nectar": 33683, + "ned": 16030, + "ned": 1369, + "nederland": 49058, + "nee": 20494, + "nee": 10601, + "need": 3229, + "need": 1262, + "needed": 4049, + "needing": 22894, + "needle": 44490, + "needle": 19886, + "needles": 27250, + "needless": 39984, + "needs": 2536, + "needy": 30150, + "neel": 33092, + "neel": 46043, + "neer": 34245, + "nees": 47248, + "neet": 46362, + "neg": 5513, + "negan": 42623, + "negative": 8869, + "negatively": 40254, + "negativity": 34658, + "neglec": 18827, + "neglect": 33680, + "neglected": 31893, + "negli": 32594, + "negligence": 45658, + "negoti": 10216, + "negotiate": 32969, + "negotiating": 35510, + "negotiation": 36504, + "negotiations": 20433, + "negr": 42190, + "negro": 26554, + "neh": 40416, + "neh": 41697, + "neha": 44463, + "nehru": 30316, + "nei": 9366, + "neigh": 4061, + "neighb": 6534, + "neighbor": 7759, + "neighbor": 14485, + "neighborhood": 9471, + "neighborhoods": 26713, + "neighboring": 44754, + "neighbors": 13037, + "neighbour": 15858, + "neighbour": 23719, + "neighbourhood": 20312, + "neighbours": 17594, + "neil": 13591, + "neil": 8030, + "neilhimself": 45682, + "neill": 19324, + "neither": 14398, + "nek": 47727, + "neko": 47066, + "nel": 5476, + "nel": 2693, + "nell": 27081, + "nell": 8117, + "nelly": 21166, + "nels": 19296, + "nelson": 24774, + "nelson": 8586, + "nem": 45153, + "neman": 48553, + "neme": 30993, + "nemesis": 37811, + "nemo": 30441, + "nen": 17817, + "nen": 15451, + "nene": 44167, + "neo": 14562, + "neo": 11017, + "neon": 21043, + "neon": 13919, + "neonatal": 46464, + "neop": 49069, + "nep": 20739, + "nep": 41960, + "nepal": 25597, + "nepal": 10066, + "nepali": 47579, + "neph": 27926, + "nephe": 41810, + "nephew": 11689, + "nephews": 43747, + "nephro": 43054, + "neptune": 30566, + "ner": 2064, + "ner": 998, + "nerd": 24452, + "nerd": 12273, + "nerds": 22609, + "nerdy": 33124, + "nered": 17583, + "nerf": 42914, + "nering": 20226, + "nero": 29048, + "ners": 2129, + "nerve": 18571, + "nerves": 27813, + "nervous": 13928, + "nery": 48597, + "nes": 5457, + "nes": 4980, + "nesburg": 27159, + "nese": 32220, + "ness": 7187, + "ness": 1294, + "nesses": 20107, + "nessy": 32939, + "nest": 20302, + "nest": 8719, + "nesting": 28860, + "nestle": 43967, + "nestled": 38107, + "nests": 41133, + "net": 1851, + "net": 2315, + "netany": 23137, + "netanyahu": 23583, + "netball": 19761, + "netes": 44335, + "netfli": 6304, + "netflix": 35325, + "netflix": 6600, + "nether": 9946, + "netherlands": 11060, + "neti": 43980, + "netneutrality": 47794, + "nets": 8582, + "nett": 23403, + "nett": 6975, + "nette": 13271, + "network": 23285, + "network": 3304, + "networking": 9818, + "networks": 10004, + "neu": 3855, + "neu": 43342, + "neue": 45764, + "neur": 19001, + "neur": 31976, + "neural": 26388, + "neuro": 7401, + "neuro": 36000, + "neurological": 41718, + "neurology": 43197, + "neurons": 40442, + "neuroscience": 23381, + "neutr": 17207, + "neutral": 17011, + "neutrality": 26511, + "neutron": 44056, + "nev": 10236, + "nev": 43645, + "neva": 43304, + "nevada": 13499, + "neve": 44099, + "neve": 44023, + "never": 6746, + "never": 1426, + "neveragain": 45053, + "neverforget": 19242, + "nevergiveup": 42497, + "neverland": 41483, + "nevertheless": 48355, + "nevertrump": 47494, + "neville": 19269, + "nevis": 43670, + "new": 1218, + "new": 686, + "newark": 20240, + "newbie": 45427, + "newborn": 18320, + "newbury": 34169, + "newcastle": 41955, + 
"newcastle": 9302, + "newcomer": 30648, + "newcomers": 44037, + "newe": 40068, + "newell": 41436, + "newer": 33099, + "newest": 4990, + "newfound": 25250, + "newfoundland": 28079, + "newh": 18546, + "newin": 31911, + "newjersey": 32621, + "newly": 42186, + "newly": 7056, + "newman": 15815, + "newmarket": 38617, + "newmexico": 35238, + "newmusic": 32510, + "newmusic": 17201, + "newor": 25969, + "neworleans": 31205, + "newport": 42580, + "newport": 14846, + "newprofile": 14633, + "newprofilepic": 14754, + "newrelease": 34793, + "news": 6216, + "news": 1120, + "newsat": 43979, + "newsc": 28656, + "newscast": 45031, + "newsle": 10727, + "newsletter": 11069, + "newsnow": 48650, + "newsp": 7109, + "newspaper": 8786, + "newspapers": 22423, + "newsroom": 23200, + "newt": 37224, + "newton": 33122, + "newton": 12606, + "newtown": 31747, + "newyear": 22161, + "newyear": 12999, + "newyearseve": 37587, + "newyork": 18140, + "newyork": 10454, + "newyorkcity": 30460, + "newyorker": 39732, + "newzealand": 21117, + "nex": 6897, + "nex": 39720, + "next": 12434, + "next": 1131, + "nextgen": 41933, + "nexus": 19053, + "ney": 3857, + "ney": 1438, + "neymar": 21878, + "neys": 12616, + "nez": 27388, + "nf": 15195, + "nf": 25643, + "nfamily": 20098, + "nfc": 23695, + "nffc": 27893, + "nfl": 11219, + "nfl": 4691, + "nfldraft": 25002, + "ng": 10352, + "ng": 5215, + "nga": 35477, + "ngc": 29046, + "ngo": 38740, + "ngo": 24821, + "ngos": 34627, + "nguyen": 29947, + "nh": 3760, + "nh": 10803, + "nhc": 44817, + "nhl": 12290, + "nhl": 8167, + "nhlbruins": 39081, + "nhljets": 49357, + "nhm": 39483, + "nhpolitics": 36125, + "nhq": 42368, + "nhra": 30052, + "nhs": 23282, + "nhs": 7695, + "ni": 697, + "ni": 3256, + "nia": 3098, + "niag": 18071, + "niagar": 39298, + "niagara": 18965, + "niall": 41354, + "niall": 8327, + "niallo": 22855, + "niallofficial": 23084, + "niam": 39347, + "nian": 46003, + "nib": 31049, + "nic": 2109, + "nic": 6651, + "nica": 29040, + "nicar": 25119, + "nicaragua": 28423, + "nice": 28386, + "nice": 1805, + "nicely": 12303, + "nicer": 29488, + "nicest": 22967, + "niche": 25279, + "nichol": 7668, + "nicholas": 39814, + "nicholas": 13148, + "nicholls": 38846, + "nichols": 22730, + "nicholson": 28745, + "nick": 4209, + "nick": 4253, + "nickel": 22034, + "nickelo": 28668, + "nickelodeon": 33279, + "nicki": 17738, + "nickimin": 27390, + "nickiminaj": 27593, + "nickjonas": 43862, + "nickname": 24731, + "nicknamed": 45190, + "nicks": 15049, + "nicky": 28893, + "nicky": 22091, + "nico": 20850, + "nico": 17779, + "nicol": 9919, + "nicol": 48274, + "nicola": 21791, + "nicolas": 43813, + "nicolas": 18918, + "nicole": 21246, + "nicole": 10000, + "nicot": 45099, + "nicotine": 46697, + "nie": 9524, + "nie": 3501, + "niece": 12795, + "nieces": 44877, + "niel": 19109, + "niel": 26837, + "niels": 37154, + "nielsen": 28372, + "nier": 13014, + "nies": 10586, + "niest": 15007, + "nieu": 29781, + "nific": 4748, + "nifty": 25604, + "nig": 27933, + "nig": 28099, + "nigan": 48516, + "nigel": 33919, + "nigel": 15153, + "niger": 4524, + "niger": 29920, + "nigeri": 40913, + "nigeria": 6106, + "nigerian": 12167, + "nigerians": 25358, + "nigh": 13525, + "nigh": 48157, + "night": 3870, + "night": 930, + "nightclub": 20418, + "nighter": 41349, + "nighting": 36211, + "nightingale": 40696, + "nightlife": 28823, + "nightly": 28868, + "nightmare": 12867, + "nightmares": 24032, + "nightout": 44257, + "nights": 4296, + "nighttime": 38147, + "nightw": 39956, + "nih": 25783, + "nik": 5126, + "nik": 13705, + "nike": 16300, + "nike": 5783, + 
"nikeplus": 43154, + "niki": 36136, + "nikita": 37118, + "nikk": 38596, + "nikki": 23156, + "nikki": 16689, + "niko": 43771, + "nikol": 27430, + "nikola": 42146, + "nikon": 25488, + "nikon": 13849, + "nikov": 43960, + "nil": 16852, + "nil": 35030, + "nile": 24252, + "nim": 30402, + "nim": 42093, + "nima": 42586, + "nin": 5794, + "nin": 14145, + "nina": 13891, + "nine": 16213, + "nine": 7330, + "ninety": 48214, + "ning": 6050, + "ning": 762, + "ningham": 23395, + "ningly": 43537, + "nings": 4588, + "nington": 26214, + "ninj": 23225, + "ninja": 11969, + "ninjas": 42796, + "nino": 25633, + "ninten": 6184, + "nintendo": 13969, + "nintendo": 7886, + "nintendoswitch": 16404, + "ninth": 22770, + "nip": 33889, + "nip": 22333, + "nipp": 24634, + "nipple": 45987, + "nipples": 44774, + "nippon": 47960, + "nips": 49241, + "nir": 15503, + "nir": 40057, + "nireland": 45763, + "niro": 47373, + "nirvana": 28300, + "nis": 5609, + "nis": 3786, + "nish": 19834, + "nish": 13256, + "nished": 24141, + "nishi": 32386, + "nishings": 49247, + "nison": 45700, + "niss": 39043, + "nissan": 37635, + "nissan": 11082, + "nist": 17782, + "nister": 36640, + "nit": 4087, + "nit": 19011, + "nite": 8427, + "niti": 43964, + "niti": 45355, + "nitin": 37529, + "nitro": 30726, + "nitrogen": 30706, + "niture": 7840, + "nity": 12707, + "niu": 48187, + "niv": 47300, + "niversary": 29643, + "nix": 48552, + "nix": 32278, + "nixon": 20671, + "nj": 8343, + "nj": 6672, + "njcaa": 48992, + "njpw": 38992, + "nk": 22708, + "nk": 17456, + "nko": 36353, + "nl": 12057, + "nl": 7655, + "nli": 37502, + "nlp": 35680, + "nlwx": 49260, + "nm": 15956, + "nm": 11370, + "nmd": 43331, + "nme": 40454, + "nmwx": 47967, + "nn": 8947, + "nn": 12925, + "nnn": 26277, + "nnnn": 41420, + "no": 578, + "no": 871, + "noaa": 27557, + "noah": 28806, + "noah": 11519, + "nobel": 33742, + "nobel": 15605, + "nobelprize": 46074, + "noble": 29430, + "noble": 12051, + "nobody": 7009, + "noc": 16988, + "noc": 44420, + "nocchi": 46359, + "noch": 38672, + "noche": 29689, + "noches": 44166, + "nock": 16993, + "noctur": 26291, + "nocturnal": 41738, + "nod": 18648, + "nodapl": 39079, + "node": 31434, + "node": 24871, + "nodejs": 39262, + "nodes": 40534, + "noel": 38406, + "noel": 17496, + "nof": 29505, + "noff": 46979, + "nofilter": 16418, + "nog": 31157, + "noh": 40775, + "noi": 43115, + "noi": 39889, + "noida": 33404, + "noir": 39291, + "noir": 12953, + "nois": 22057, + "noise": 41018, + "noise": 9307, + "noises": 31575, + "noisse": 45686, + "noisy": 33495, + "nokia": 17731, + "nol": 8055, + "nola": 13289, + "nolan": 17323, + "nold": 40322, + "nole": 34654, + "noles": 40569, + "nollywood": 43145, + "nology": 42221, + "nom": 2981, + "nom": 12799, + "nomad": 27849, + "noman": 45592, + "nomin": 5643, + "nominate": 17122, + "nominated": 8710, + "nominating": 45747, + "nomination": 14136, + "nominations": 17124, + "nominee": 14122, + "nominees": 17873, + "nomnom": 26962, + "nomore": 35126, + "noms": 35706, + "non": 4282, + "non": 3353, + "none": 29644, + "none": 8906, + "nonetheless": 39675, + "nonfiction": 31654, + "nonprofit": 19315, + "nonprofits": 37935, + "nonsense": 19136, + "nonstop": 30300, + "nont": 25207, + "noo": 6759, + "noo": 46672, + "noodle": 19521, + "noodles": 15782, + "nook": 30088, + "noon": 37693, + "noon": 2347, + "noor": 46978, + "noor": 31323, + "nope": 15625, + "nor": 1062, + "nor": 6190, + "nora": 25890, + "norcal": 41970, + "nord": 19261, + "nord": 36067, + "nordic": 36439, + "nordic": 20734, + "nordstrom": 38562, + "norfolk": 30232, + "norfolk": 12202, 
+ "norm": 10990, + "norm": 22457, + "norma": 35757, + "normal": 28748, + "normal": 5967, + "normali": 45157, + "normally": 15870, + "norman": 22027, + "norman": 11338, + "normandy": 23840, + "normani": 44596, + "norms": 33011, + "norris": 21814, + "norse": 36559, + "norte": 35638, + "north": 3468, + "north": 2188, + "northampton": 49246, + "northampton": 26175, + "northan": 37081, + "northbound": 24228, + "northcarolina": 43386, + "northe": 24675, + "northeast": 42673, + "northeast": 13009, + "northeastern": 28297, + "northeasthour": 42869, + "norther": 26908, + "northern": 17210, + "northern": 5049, + "northernlights": 48940, + "northkorea": 38495, + "northside": 45957, + "northumber": 22295, + "northumberland": 22922, + "northwales": 49371, + "northwest": 12894, + "northwestern": 23685, + "norton": 18032, + "norway": 8780, + "norwe": 14414, + "norwegian": 15971, + "norwich": 37629, + "norwich": 15812, + "norwood": 37889, + "nos": 13420, + "nose": 24192, + "nose": 8231, + "noses": 48163, + "nostal": 12076, + "nostalgia": 16622, + "nostalgic": 24468, + "not": 2534, + "not": 783, + "notable": 22023, + "notch": 19476, + "notdead": 42059, + "note": 10910, + "note": 3246, + "notebook": 16365, + "notebooks": 37623, + "noted": 22501, + "notes": 5795, + "nothin": 24291, + "nothing": 28412, + "nothing": 2586, + "noti": 10686, + "notic": 6915, + "notice": 6683, + "noticeable": 40857, + "noticed": 9324, + "notices": 33459, + "noticias": 47759, + "noticing": 37571, + "notification": 22512, + "notifications": 23169, + "notified": 39454, + "noting": 38649, + "notion": 37856, + "notjust": 33212, + "notjustlakes": 45803, + "notmy": 39301, + "noto": 29878, + "noton": 48258, + "notor": 21711, + "notori": 44065, + "notorious": 22489, + "notre": 24397, + "notre": 15306, + "notredame": 34077, + "notsorry": 34361, + "nott": 9333, + "nott": 34989, + "notte": 47308, + "nottingham": 12852, + "notts": 25598, + "nou": 8751, + "nou": 30953, + "noun": 33663, + "nouri": 23796, + "nourish": 46025, + "nourished": 48354, + "nous": 29485, + "nouveau": 29948, + "nouvel": 34215, + "nov": 2264, + "nov": 4293, + "nova": 11236, + "novak": 26465, + "novasco": 33785, + "novascotia": 34744, + "novation": 39753, + "nove": 30507, + "novel": 15044, + "novel": 6080, + "novelist": 27314, + "novella": 42770, + "novels": 16040, + "novelty": 37750, + "november": 3680, + "nover": 37465, + "novi": 47957, + "novice": 33743, + "novo": 27504, + "novo": 36581, + "now": 2040, + "now": 692, + "nowadays": 26155, + "nowhere": 14108, + "nowplaying": 3708, + "nowwatching": 30852, + "nox": 27406, + "noxi": 39304, + "noxious": 42833, + "noy": 32787, + "np": 18205, + "np": 6314, + "npa": 42378, + "npc": 33966, + "npr": 39941, + "npr": 24078, + "nps": 22025, + "npt": 47231, + "nr": 6574, + "nr": 9713, + "nra": 17286, + "nrc": 45786, + "nrf": 47982, + "nrg": 48662, + "nrl": 27142, + "nrl": 18127, + "ns": 12405, + "ns": 1373, + "nsa": 23004, + "nsc": 32792, + "nsd": 36659, + "nsf": 34180, + "nsfw": 19847, + "nsi": 47824, + "nsw": 21301, + "nsw": 11693, + "nswpol": 44434, + "nt": 10902, + "nt": 3207, + "ntr": 30845, + "nts": 43775, + "ntt": 22859, + "ntv": 24807, + "ntv": 45304, + "nu": 1156, + "nu": 9444, + "nucle": 25693, + "nuclear": 34136, + "nuclear": 7279, + "nude": 16630, + "nudes": 32122, + "nue": 22834, + "nuestra": 45649, + "nuestro": 38590, + "nuev": 47861, + "nueva": 48810, + "nuevo": 30265, + "nufc": 15720, + "nuff": 37324, + "nug": 13471, + "nugent": 47457, + "nugget": 25448, + "nuggets": 18970, + "nuh": 45950, + "nuit": 38815, + "nuk": 
39228, + "nuke": 39399, + "nul": 29358, + "null": 47376, + "num": 17896, + "num": 30534, + "numb": 34639, + "numb": 39427, + "number": 44078, + "number": 2842, + "numbered": 25975, + "numbers": 6121, + "numer": 11442, + "numerous": 17082, + "numis": 39100, + "nun": 12511, + "nun": 28540, + "nunavut": 48626, + "nunes": 40697, + "nuns": 44061, + "nup": 46757, + "nur": 3920, + "nur": 33493, + "nure": 42480, + "nurse": 37547, + "nurse": 10058, + "nursery": 15540, + "nurses": 12938, + "nursing": 11126, + "nurture": 38865, + "nurturing": 45229, + "nus": 25157, + "nus": 18239, + "nut": 10358, + "nut": 6491, + "nutcracker": 36733, + "nutella": 27312, + "nutr": 6198, + "nutri": 15470, + "nutrient": 32900, + "nutrients": 24668, + "nutriti": 17978, + "nutrition": 41546, + "nutrition": 7989, + "nutritional": 26457, + "nutritious": 30387, + "nuts": 8644, + "nutshell": 26659, + "nutty": 39846, + "nv": 17217, + "nv": 16985, + "nvi": 22847, + "nvidia": 27325, + "nw": 7826, + "nw": 7030, + "nwa": 34237, + "nwo": 40976, + "nws": 23333, + "nws": 30998, + "nwsl": 48394, + "nwt": 25029, + "nx": 18810, + "nx": 16997, + "nxt": 35037, + "nxt": 17804, + "ny": 1383, + "ny": 1350, + "nya": 24165, + "nyc": 13304, + "nyc": 2832, + "nycc": 27187, + "nycfc": 47497, + "nye": 40723, + "nye": 13416, + "nyfw": 21089, + "nyk": 46841, + "nylon": 25915, + "nyo": 41534, + "nyo": 44586, + "nypd": 42293, + "nypd": 18279, + "nyr": 32538, + "nyrd": 47936, + "nys": 36375, + "nys": 23423, + "nyse": 32650, + "nyt": 46311, + "nyt": 12816, + "nytimes": 13772, + "nyu": 43143, + "nyu": 31355, + "nz": 10142, + "nz": 7082, + "o": 78, + "o": 334, + "oa": 11994, + "oahu": 37790, + "oak": 6010, + "oak": 7221, + "oakland": 42663, + "oakland": 12077, + "oakley": 27810, + "oaks": 16734, + "oakville": 38500, + "oasis": 18185, + "oat": 20095, + "oat": 34132, + "oates": 47094, + "oath": 20108, + "oatmeal": 26374, + "oats": 24150, + "oax": 43090, + "oaxaca": 47818, + "ob": 1411, + "ob": 14908, + "oba": 42902, + "oba": 15147, + "obam": 13174, + "obama": 4276, + "obamacare": 18005, + "obe": 11897, + "obe": 29117, + "obedience": 48921, + "ober": 15284, + "obese": 41757, + "obesity": 19499, + "obey": 26926, + "obi": 21454, + "obi": 18414, + "obile": 20513, + "obitu": 39218, + "obituary": 43580, + "objec": 7970, + "object": 14115, + "objective": 23663, + "objectives": 30238, + "objects": 13770, + "obl": 31452, + "oblast": 42672, + "obli": 11416, + "obligation": 34473, + "obligations": 38232, + "obligatory": 35020, + "oblivion": 45323, + "obo": 46001, + "obo": 26618, + "obrien": 31946, + "obs": 39162, + "obsc": 20392, + "obscure": 33337, + "obse": 8433, + "observ": 9050, + "observation": 20250, + "observations": 27409, + "observatory": 21236, + "observe": 23217, + "observed": 21267, + "observer": 22077, + "observers": 47544, + "observing": 28359, + "obsessed": 9744, + "obsession": 15718, + "obsi": 47323, + "obsole": 35561, + "obsolete": 40628, + "obst": 29398, + "obstac": 24075, + "obstacle": 29751, + "obstacles": 24480, + "obste": 49103, + "obstru": 44876, + "obstruc": 38762, + "obstruction": 40240, + "obtain": 26555, + "obtained": 29322, + "obvious": 13959, + "obviously": 10068, + "oc": 1566, + "oc": 6603, + "oca": 31120, + "ocal": 38148, + "occ": 43940, + "occa": 8530, + "occasion": 12280, + "occasional": 33059, + "occasionally": 32479, + "occasions": 26154, + "occer": 20804, + "occi": 42994, + "occu": 7863, + "occult": 42529, + "occup": 11152, + "occupation": 18624, + "occupational": 30644, + "occupied": 17271, + "occupy": 22453, + "occupy": 24210, + 
"occur": 11264, + "occur": 21813, + "occurred": 19850, + "occurrence": 40615, + "occurring": 31335, + "occurs": 26563, + "ocd": 35904, + "oce": 3509, + "ocean": 12941, + "ocean": 4918, + "oceans": 16792, + "och": 29334, + "och": 32011, + "oche": 33045, + "oci": 9891, + "ocity": 46039, + "ock": 33579, + "ock": 21313, + "ocks": 22410, + "oclock": 36274, + "oco": 32553, + "ocon": 33090, + "ocr": 45813, + "ocre": 40320, + "ocs": 27297, + "oct": 4565, + "octa": 23444, + "octag": 37768, + "octagon": 49167, + "octane": 43040, + "octavia": 47416, + "octo": 31032, + "october": 3481, + "octopus": 22327, + "ocu": 22709, + "oculus": 30082, + "od": 4886, + "od": 9719, + "oda": 24777, + "oday": 41954, + "odd": 15525, + "odd": 11387, + "oddly": 34213, + "odds": 11555, + "ode": 19125, + "ode": 19639, + "odell": 41556, + "odessa": 43574, + "odi": 12223, + "odi": 18853, + "odin": 35175, + "odisha": 15737, + "odo": 49188, + "odo": 40993, + "odor": 39509, + "odu": 35095, + "odu": 39904, + "odyssey": 19991, + "oe": 24251, + "oe": 11667, + "oec": 24288, + "oecd": 30816, + "oem": 29650, + "oes": 3643, + "of": 684, + "of": 539, + "ofa": 29774, + "ofc": 19877, + "ofe": 30000, + "ofer": 47322, + "off": 892, + "off": 1007, + "offe": 8261, + "offee": 34059, + "offen": 7231, + "offence": 34594, + "offences": 33972, + "offended": 30765, + "offender": 48294, + "offenders": 35878, + "offense": 15253, + "offensive": 11037, + "offer": 20607, + "offer": 3271, + "offered": 9395, + "offering": 6896, + "offerings": 24535, + "offers": 4679, + "offic": 3276, + "office": 18033, + "office": 2171, + "officeof": 38750, + "officeofrg": 47100, + "officer": 4683, + "officers": 6335, + "offices": 10933, + "offici": 1401, + "official": 5768, + "official": 1868, + "officially": 4226, + "officials": 7658, + "officiel": 26548, + "offl": 16851, + "offline": 22724, + "offro": 32198, + "offroad": 37173, + "offs": 23987, + "offseason": 25485, + "offset": 28843, + "offshore": 15496, + "offside": 49347, + "offspring": 38635, + "offthe": 38189, + "ofi": 36692, + "ofi": 49090, + "oficial": 18061, + "oft": 16693, + "oftball": 39768, + "often": 4864, + "ofthe": 7592, + "oftheday": 6988, + "oftheweek": 20654, + "oftheyear": 33975, + "og": 11542, + "og": 8555, + "oga": 47312, + "ogden": 42011, + "ogil": 39013, + "ography": 22399, + "ogue": 24761, + "ogun": 48970, + "oh": 5648, + "oh": 1779, + "ohana": 48330, + "ohh": 23076, + "ohhh": 27697, + "ohhhh": 40201, + "ohi": 5207, + "ohio": 18951, + "ohio": 6155, + "ohiostate": 41324, + "ohl": 45547, + "ohl": 41095, + "ohmy": 29758, + "ohn": 48043, + "ohs": 39542, + "ohwx": 47993, + "oi": 27357, + "oi": 13934, + "oic": 45554, + "oid": 14758, + "oids": 21847, + "oil": 11973, + "oil": 2870, + "oiland": 32316, + "oilandgas": 34130, + "oilers": 21627, + "oilpainting": 34279, + "oils": 17886, + "oily": 47550, + "oir": 48079, + "oir": 37113, + "ois": 23262, + "oit": 18453, + "oitnb": 34865, + "oj": 30986, + "oj": 34553, + "ok": 1944, + "ok": 2481, + "oka": 42258, + "oka": 19092, + "okan": 41263, + "okanagan": 43233, + "okay": 4917, + "okc": 42418, + "okc": 18357, + "oke": 26636, + "oke": 23598, + "oki": 20390, + "okin": 30687, + "okinawa": 35877, + "okla": 9431, + "oklahoma": 10170, + "oko": 26892, + "oko": 26095, + "okstate": 36356, + "oktoberfest": 32026, + "oku": 45010, + "oku": 43829, + "okwx": 27336, + "ol": 562, + "ol": 2985, + "ola": 20499, + "ola": 3373, + "olaf": 39709, + "olan": 48489, + "olan": 24227, + "oland": 26452, + "olas": 40800, + "old": 4931, + "old": 896, + "olde": 37731, + "older": 7700, + 
"oldest": 9285, + "oldham": 29929, + "oldie": 35280, + "oldies": 36278, + "oldman": 48614, + "olds": 8580, + "oldschool": 44384, + "oldschool": 25133, + "oldsmobile": 45396, + "ole": 9089, + "ole": 1947, + "oled": 46768, + "oler": 24069, + "oles": 16962, + "olf": 16346, + "olga": 34779, + "oli": 3811, + "oli": 8810, + "olic": 31341, + "oligar": 46185, + "olim": 47769, + "olin": 37823, + "olin": 18283, + "olina": 34711, + "oline": 17441, + "oling": 38033, + "olini": 36040, + "olis": 49397, + "olithic": 35574, + "olive": 22486, + "olive": 9898, + "oliver": 22882, + "oliver": 9261, + "olives": 27149, + "olivi": 20773, + "olivia": 11697, + "olivier": 23891, + "oll": 32270, + "oll": 15510, + "olla": 31908, + "ollie": 24434, + "olls": 42697, + "olly": 23998, + "olo": 14628, + "olo": 7606, + "ological": 12345, + "ologist": 23442, + "ologists": 30912, + "ology": 4627, + "olor": 29245, + "olph": 25077, + "ols": 2236, + "olsen": 26307, + "olson": 28046, + "olt": 46252, + "olu": 16502, + "olu": 46302, + "olulu": 27645, + "oly": 20323, + "oly": 24823, + "olym": 3594, + "olympi": 13597, + "olympia": 23965, + "olympiad": 47694, + "olympian": 25420, + "olympians": 44583, + "olympic": 26099, + "olympic": 6388, + "olympics": 7629, + "olympus": 30960, + "om": 547, + "om": 3932, + "oma": 44603, + "oma": 5358, + "omaha": 16509, + "oman": 22088, + "oman": 10871, + "omar": 19488, + "omar": 13367, + "omars": 37099, + "omas": 36023, + "omat": 40788, + "omb": 34447, + "ombe": 35967, + "omd": 49346, + "ome": 3693, + "ome": 5832, + "omed": 16835, + "omega": 13465, + "omelette": 38789, + "omen": 9969, + "omen": 25469, + "oment": 43683, + "omeo": 39844, + "omer": 24087, + "omer": 17902, + "omes": 25736, + "ometer": 20060, + "ometric": 38702, + "omez": 12541, + "omf": 47496, + "omfg": 12523, + "omg": 35233, + "omg": 3186, + "omi": 24097, + "omi": 10341, + "omic": 40536, + "omic": 12793, + "omics": 15138, + "omile": 46915, + "omin": 16457, + "omination": 42571, + "oming": 10796, + "ominous": 40914, + "omni": 18793, + "omni": 39489, + "omnibus": 44760, + "omnic": 48383, + "omo": 14478, + "omo": 11066, + "omon": 48758, + "omor": 29431, + "oms": 3770, + "omusic": 38965, + "omy": 40805, + "omy": 6884, + "on": 521, + "on": 525, + "ona": 2687, + "onair": 29511, + "onal": 918, + "onboard": 21689, + "once": 16331, + "once": 2654, + "onceupon": 28122, + "onceuponatime": 33505, + "onco": 46700, + "oncology": 24593, + "ond": 27918, + "ond": 2636, + "onda": 32643, + "onday": 29864, + "onde": 44532, + "ondo": 29529, + "ondon": 42043, + "ondon": 11851, + "one": 1980, + "one": 637, + "onec": 27746, + "oned": 28012, + "oned": 4698, + "onedirection": 16245, + "onee": 44433, + "oneill": 44808, + "onelove": 47417, + "onent": 12147, + "onents": 11709, + "oneof": 48478, + "onep": 20440, + "onepiece": 43153, + "oneplus": 25981, + "oner": 30055, + "oner": 6071, + "oners": 12324, + "ones": 20757, + "ones": 1575, + "oneself": 46874, + "onesie": 33237, + "oness": 25379, + "onet": 36058, + "oneteam": 41094, + "onetsy": 33392, + "onew": 43848, + "onews": 18696, + "onex": 49116, + "oney": 44498, + "oney": 9408, + "onf": 41790, + "onfox": 29874, + "ong": 2787, + "ong": 846, + "onga": 30259, + "ongchang": 35071, + "ongi": 21754, + "ongo": 31226, + "ongoing": 10393, + "ongs": 12143, + "oni": 4385, + "oni": 8048, + "onia": 8001, + "onial": 27599, + "onian": 21090, + "onic": 15838, + "onic": 3711, + "onica": 14631, + "onics": 9779, + "onie": 35249, + "onies": 22601, + "onimo": 41271, + "oning": 5197, + "onion": 10985, + "onions": 15255, + "onist": 
10099, + "onists": 19659, + "onix": 27370, + "onized": 43657, + "onlin": 31103, + "online": 12940, + "online": 2027, + "onlinemarketing": 41820, + "onlineshopping": 38587, + "only": 11646, + "only": 1033, + "onlyin": 32947, + "onna": 25438, + "onna": 35458, + "onnaise": 48934, + "onne": 23466, + "onnell": 45613, + "ono": 28165, + "ono": 14388, + "onom": 48014, + "onomy": 36873, + "onpoli": 20708, + "ons": 26076, + "ons": 708, + "onsale": 36324, + "onset": 30527, + "onsite": 37336, + "onstage": 21821, + "onstorm": 49333, + "ont": 34303, + "ont": 11157, + "ontari": 6739, + "ontario": 42766, + "ontario": 7436, + "onte": 34723, + "onthe": 12241, + "onther": 46563, + "ontheroad": 47516, + "onthisday": 6862, + "onto": 11745, + "onto": 3141, + "ontology": 37364, + "ontour": 32155, + "onu": 44142, + "onward": 34827, + "onwards": 20682, + "ony": 9490, + "ony": 2926, + "onym": 11483, + "onymous": 13038, + "onyx": 31353, + "oo": 574, + "oo": 2822, + "ood": 16429, + "ood": 738, + "oodle": 45289, + "oods": 44660, + "oof": 42270, + "ooh": 16806, + "ook": 22326, + "ook": 8394, + "ooks": 31082, + "ool": 37702, + "ool": 929, + "oom": 22786, + "oom": 15002, + "oomf": 40607, + "oon": 35651, + "oon": 7100, + "ooo": 9571, + "oooh": 28927, + "oooo": 4002, + "oooo": 13643, + "ooooo": 12532, + "oooooo": 43590, + "oooooo": 20372, + "ooooooo": 30859, + "oooooooo": 15473, + "oooooooo": 43408, + "oooooooooooooooo": 48645, + "oop": 7326, + "ooper": 39906, + "oops": 9116, + "oor": 35239, + "oos": 9896, + "oosa": 30834, + "oose": 38941, + "oot": 17667, + "ootball": 28914, + "ootd": 16547, + "ooth": 12682, + "oott": 34316, + "ooza": 22809, + "op": 676, + "op": 3691, + "opa": 28949, + "opal": 28982, + "opar": 18167, + "opath": 33079, + "opathic": 37521, + "opathy": 28466, + "opau": 27239, + "opd": 38288, + "ope": 31694, + "ope": 11440, + "opec": 33138, + "opel": 36952, + "open": 3647, + "open": 1488, + "openaccess": 26591, + "opend": 28069, + "opendata": 35709, + "openday": 46991, + "opened": 5303, + "opener": 8998, + "openhouse": 36091, + "opening": 33728, + "opening": 2516, + "openingday": 36359, + "openings": 27643, + "openly": 23005, + "opens": 4801, + "opensource": 29930, + "oper": 2796, + "oper": 37533, + "opera": 8056, + "operate": 19306, + "operated": 23031, + "operates": 38675, + "operating": 12218, + "operation": 27173, + "operation": 7639, + "operational": 18237, + "operations": 8106, + "operative": 28380, + "operator": 15972, + "operators": 19267, + "opers": 48728, + "opes": 37258, + "oph": 6796, + "opha": 38634, + "ophel": 45017, + "ophelia": 49118, + "ophi": 44547, + "ophile": 35915, + "opho": 12900, + "ophobia": 21111, + "ophobic": 29934, + "ophon": 25120, + "ophone": 26345, + "ophthal": 33135, + "ophy": 28539, + "opi": 40056, + "opi": 48994, + "opin": 7636, + "opini": 14825, + "opinion": 7843, + "opinions": 16192, + "opio": 17371, + "opioid": 22833, + "opioids": 47578, + "opla": 36270, + "ople": 25663, + "opol": 15173, + "opoly": 23729, + "opor": 39650, + "opoulos": 42020, + "opp": 2020, + "opp": 21024, + "oppa": 23637, + "oppo": 7399, + "oppo": 41770, + "opponent": 17002, + "opponents": 19664, + "oppor": 2914, + "opportun": 2939, + "opportunities": 5978, + "opportunity": 4004, + "oppos": 10091, + "oppose": 23617, + "opposed": 22509, + "opposes": 47471, + "opposing": 24376, + "opposite": 12872, + "opposition": 11062, + "oppre": 17341, + "oppressed": 41492, + "oppression": 30650, + "opra": 28291, + "oprah": 22562, + "opry": 35340, + "ops": 3054, + "opt": 45103, + "opt": 27188, + "opted": 42035, + "opti": 
6580, + "optic": 25190, + "optic": 24755, + "optical": 16822, + "optics": 27165, + "optim": 22331, + "optimal": 25235, + "optimi": 9737, + "optimis": 39459, + "optimism": 25226, + "optimist": 44581, + "optimistic": 23104, + "optimization": 25125, + "optimize": 30456, + "optimized": 43939, + "optimizing": 49157, + "optimum": 35974, + "optimus": 43453, + "option": 8464, + "optional": 25411, + "options": 7063, + "optome": 35533, + "opul": 39858, + "opus": 33295, + "opy": 21835, + "or": 523, + "or": 541, + "ora": 4301, + "orac": 24673, + "oracle": 37308, + "oracle": 15966, + "orah": 40820, + "orail": 45120, + "oral": 32490, + "oral": 6007, + "orama": 33619, + "oran": 32209, + "oran": 28395, + "orang": 22116, + "orange": 13957, + "orange": 4287, + "oranges": 32417, + "orangu": 36112, + "orb": 28894, + "orb": 36958, + "orbit": 19713, + "orbital": 40312, + "orc": 44305, + "orca": 18631, + "orcas": 47676, + "orch": 11893, + "orchar": 40226, + "orchard": 19530, + "orche": 8004, + "orchestr": 42937, + "orchestra": 9573, + "orchestral": 40285, + "orchi": 23696, + "orchid": 18678, + "orchids": 28376, + "ord": 26903, + "ord": 11502, + "orda": 33462, + "ordained": 38302, + "order": 24613, + "order": 2191, + "ordered": 8335, + "ordering": 19588, + "orderly": 43457, + "orders": 6187, + "ordin": 4378, + "ordinance": 38583, + "ordinary": 8012, + "ore": 3580, + "ore": 1423, + "orean": 36696, + "ored": 5133, + "oregon": 21759, + "oregon": 8035, + "oren": 21645, + "oreo": 21873, + "oreos": 41688, + "ores": 17328, + "org": 3401, + "org": 5593, + "organ": 3338, + "organ": 13213, + "organi": 3636, + "organic": 24080, + "organic": 5980, + "organics": 44199, + "organis": 13204, + "organisation": 15868, + "organisations": 20651, + "organise": 36073, + "organised": 13191, + "organiser": 49141, + "organisers": 35778, + "organising": 22787, + "organisms": 37041, + "organiz": 11107, + "organization": 8064, + "organizational": 29510, + "organizations": 13453, + "organize": 19973, + "organized": 10681, + "organizer": 23905, + "organizers": 27191, + "organizing": 15779, + "organs": 29872, + "orgs": 29500, + "ori": 1540, + "ori": 8693, + "oria": 11474, + "orial": 8648, + "orian": 21193, + "oric": 43810, + "orice": 41341, + "orie": 18815, + "orient": 13149, + "orient": 30770, + "oriental": 23056, + "orientation": 16873, + "oriente": 40390, + "oriented": 24596, + "orienteering": 42985, + "ories": 5934, + "orig": 2273, + "orig": 38463, + "origami": 31832, + "origin": 2555, + "origin": 12372, + "original": 18496, + "original": 3117, + "originally": 12849, + "originals": 16953, + "originated": 41823, + "origins": 16291, + "orin": 39863, + "oring": 3006, + "orio": 24308, + "orioles": 21430, + "orion": 21765, + "oris": 37064, + "orities": 7903, + "ority": 5556, + "orium": 12015, + "ork": 22202, + "ork": 37235, + "orkney": 34254, + "orl": 39465, + "orlando": 32247, + "orlando": 7827, + "orleans": 11127, + "orm": 38464, + "orn": 25412, + "orn": 8130, + "ornam": 36122, + "ornament": 23409, + "ornamental": 46270, + "ornaments": 28968, + "ornate": 46865, + "orni": 27713, + "ornithology": 38275, + "orns": 19340, + "oro": 9848, + "oro": 14573, + "orous": 19286, + "orph": 17318, + "orphan": 22718, + "orphan": 28994, + "orphanage": 45196, + "orphaned": 46792, + "orphans": 36588, + "orphe": 39186, + "orr": 32977, + "ors": 1127, + "orship": 20846, + "ort": 1019, + "ortega": 39727, + "orth": 22584, + "orth": 24461, + "ortho": 11366, + "orthodon": 37730, + "orthodox": 19008, + "orthop": 42123, + "orthopedic": 49341, + "ortiz": 23544, + 
"orton": 37238, + "oru": 44629, + "oru": 31281, + "orum": 42724, + "orwell": 41218, + "ory": 16983, + "ory": 1985, + "os": 2211, + "os": 1299, + "osa": 16340, + "osa": 17237, + "osaka": 21347, + "osborne": 22402, + "osbourne": 43376, + "osc": 5092, + "oscar": 21157, + "oscar": 8191, + "oscars": 11098, + "osce": 37303, + "oscill": 38272, + "ose": 46942, + "ose": 22541, + "osh": 30717, + "osh": 35011, + "osha": 33907, + "oshi": 34770, + "osi": 25247, + "osi": 17636, + "osis": 13903, + "osity": 12730, + "oslo": 20547, + "osm": 31626, + "osman": 46539, + "oso": 42793, + "oso": 21285, + "osp": 24387, + "ospre": 49001, + "osprey": 37893, + "oss": 29362, + "oss": 34640, + "ost": 23701, + "ost": 18749, + "oste": 20632, + "osteo": 43163, + "oster": 31781, + "ostr": 33673, + "ostrich": 47640, + "osu": 29480, + "osu": 19818, + "oswald": 38471, + "ot": 1863, + "ot": 2062, + "ota": 17509, + "ota": 8741, + "otago": 45919, + "otaku": 40743, + "otas": 47616, + "otc": 37934, + "otd": 5683, + "ote": 28511, + "ote": 19744, + "otes": 27280, + "oth": 33262, + "oth": 33519, + "other": 9758, + "other": 1010, + "others": 3326, + "otherwise": 12376, + "oti": 19567, + "oti": 45564, + "otic": 9671, + "otis": 28246, + "otive": 10877, + "oto": 23946, + "oto": 23399, + "otp": 29822, + "otr": 38685, + "ots": 5769, + "ott": 10167, + "ott": 7936, + "otta": 7623, + "otta": 20941, + "ottawa": 49027, + "ottawa": 9019, + "otte": 35214, + "otter": 34710, + "otter": 22456, + "otters": 38883, + "otti": 36721, + "ottnews": 33995, + "otto": 17730, + "ottoman": 27503, + "otw": 35259, + "otwol": 46868, + "ou": 520, + "ou": 6544, + "ouat": 32954, + "ouch": 13493, + "oud": 1359, + "oue": 48838, + "ouf": 34618, + "ough": 4204, + "ough": 991, + "ought": 2253, + "oughton": 36860, + "oui": 39421, + "ouk": 21796, + "oul": 20253, + "oul": 8081, + "ould": 859, + "oulos": 32808, + "oun": 636, + "oun": 20960, + "ounce": 15027, + "ounces": 30299, + "ound": 2013, + "ound": 853, + "oundation": 40132, + "ounded": 9634, + "ounding": 11944, + "ounds": 2753, + "oung": 35875, + "oung": 25341, + "ounge": 29427, + "ount": 43801, + "ount": 4172, + "ounts": 10963, + "oup": 32815, + "our": 727, + "our": 581, + "oura": 29806, + "oura": 36352, + "ourable": 24126, + "ourage": 34525, + "oural": 45840, + "oured": 6956, + "ouri": 12696, + "ouring": 12000, + "ourism": 25496, + "ourke": 26480, + "ourlives": 37541, + "ouro": 41224, + "ours": 1491, + "ourse": 15415, + "ourselves": 10124, + "ourt": 22960, + "oury": 29484, + "ous": 1987, + "ous": 879, + "ouse": 32048, + "ouse": 7603, + "ouses": 33666, + "ously": 2501, + "ousness": 10689, + "ousy": 28302, + "out": 1130, + "out": 620, + "outa": 35187, + "outage": 27320, + "outages": 40353, + "outback": 28532, + "outbound": 41256, + "outbreak": 20103, + "outcome": 16552, + "outcomes": 14016, + "outdated": 38313, + "outdoor": 19184, + "outdoor": 6368, + "outdoors": 10469, + "oute": 44180, + "outed": 34435, + "outer": 30499, + "outer": 14188, + "outes": 39600, + "outfield": 41826, + "outfit": 6525, + "outfits": 16366, + "outfitters": 37725, + "outfy": 34920, + "outgoing": 27302, + "outh": 16933, + "outh": 8111, + "outine": 35452, + "outing": 11251, + "outlander": 45820, + "outlander": 17095, + "outlaw": 37498, + "outlaw": 27340, + "outlaws": 30935, + "outlet": 16855, + "outlets": 20822, + "outline": 26894, + "outlines": 29159, + "outlining": 45960, + "outlook": 12983, + "outof": 43958, + "outpatient": 46603, + "outpost": 44622, + "output": 17255, + "outra": 14262, + "outrage": 23577, + "outraged": 43402, + "outrageous": 
29342, + "outre": 14373, + "outreach": 15297, + "outright": 38200, + "outs": 5790, + "outsi": 22515, + "outside": 47693, + "outside": 2782, + "outsider": 41196, + "outsiders": 41742, + "outskirts": 42088, + "outsourcing": 34543, + "outstanding": 6387, + "outta": 15807, + "outtuesday": 48692, + "outw": 34650, + "oux": 40960, + "oux": 14228, + "ov": 6420, + "ov": 8479, + "ova": 12762, + "oval": 15039, + "ovarian": 42913, + "ovation": 24333, + "ove": 8649, + "ove": 15456, + "oven": 44620, + "oven": 12579, + "over": 1658, + "over": 962, + "overall": 6914, + "overboard": 42982, + "overcame": 47235, + "overcast": 36942, + "overcome": 14365, + "overcoming": 29348, + "overdose": 27017, + "overdrive": 40088, + "overdue": 30240, + "overflow": 32885, + "overflowing": 45370, + "overhaul": 31531, + "overhead": 20321, + "overland": 38808, + "overlay": 44827, + "overload": 24327, + "overlook": 35767, + "overlooked": 27632, + "overlooking": 17319, + "overly": 28820, + "overnight": 9913, + "overpass": 44310, + "overrated": 38214, + "overs": 45774, + "overs": 17329, + "overseas": 15100, + "oversight": 32494, + "oversized": 31557, + "overtime": 19347, + "overturned": 31048, + "overview": 14789, + "overwatch": 18124, + "overweight": 43465, + "overwhel": 12204, + "overwhelmed": 23459, + "overwhelming": 20306, + "overwhelmingly": 43549, + "ovi": 32508, + "ovic": 22417, + "ovich": 27623, + "ovie": 47677, + "ovo": 41920, + "ovo": 18065, + "ovski": 26167, + "ow": 2032, + "ow": 2250, + "owa": 32770, + "owe": 19073, + "owed": 37641, + "owen": 24838, + "owen": 12056, + "owens": 20664, + "owes": 35069, + "owing": 48582, + "owl": 34332, + "owl": 9899, + "owls": 18247, + "own": 3845, + "own": 1758, + "owned": 8536, + "owner": 5019, + "owners": 7712, + "ownership": 16583, + "owning": 24661, + "owns": 17533, + "owo": 46142, + "ows": 27423, + "owski": 22573, + "ox": 3282, + "ox": 12071, + "oxfam": 45466, + "oxford": 28588, + "oxford": 8824, + "oxfordshire": 37855, + "oxi": 33731, + "oxi": 48147, + "oxid": 17701, + "oxide": 28235, + "oxo": 37088, + "oxy": 12432, + "oxygen": 16214, + "oy": 6638, + "oy": 12437, + "oya": 38894, + "oye": 48677, + "oyster": 40545, + "oyster": 17253, + "oysters": 22672, + "oz": 10584, + "oz": 6044, + "ozar": 31848, + "ozil": 41365, + "ozone": 37052, + "ozzy": 39549, + "p": 79, + "p": 335, + "pa": 765, + "pa": 2217, + "paa": 32812, + "pab": 9354, + "pablo": 42172, + "pablo": 14473, + "pac": 2332, + "pac": 7608, + "pace": 40600, + "pace": 9450, + "paced": 32611, + "pacers": 23976, + "paces": 43001, + "paci": 5699, + "pacific": 19723, + "pacific": 6654, + "pacing": 45202, + "pack": 2711, + "pack": 3420, + "package": 7053, + "packaged": 29656, + "packages": 14305, + "packaging": 11658, + "packard": 46421, + "packed": 5883, + "packer": 28209, + "packers": 14294, + "packet": 25022, + "packets": 40448, + "packing": 9829, + "packs": 11086, + "paco": 41364, + "pacqui": 28456, + "pacquiao": 30485, + "pact": 27182, + "pad": 3798, + "pad": 7601, + "padded": 42253, + "paddington": 33162, + "paddle": 38276, + "paddle": 20811, + "paddling": 40645, + "paddock": 29590, + "paddy": 33103, + "paddy": 19855, + "padi": 47037, + "padilla": 22380, + "padma": 44595, + "padma": 46457, + "padre": 38343, + "padres": 22829, + "pads": 17353, + "paedi": 41488, + "paella": 46924, + "paf": 47185, + "pafc": 49259, + "pag": 4151, + "pag": 30525, + "pagan": 27854, + "page": 14996, + "page": 2504, + "pageant": 22139, + "pages": 8082, + "pagoda": 44309, + "pah": 41054, + "pah": 26884, + "pai": 20624, + "pai": 21198, + "paid": 5057, + 
"paige": 33659, + "paige": 16022, + "paign": 31796, + "pain": 2141, + "pain": 4495, + "paine": 38069, + "painful": 16361, + "pains": 25639, + "paint": 7948, + "paint": 5185, + "paintball": 39730, + "painted": 6433, + "painter": 10888, + "painters": 35703, + "painting": 49164, + "painting": 3086, + "paintings": 9956, + "paints": 21672, + "pair": 19848, + "pair": 4038, + "paired": 12433, + "pairing": 16313, + "pairings": 41152, + "pairs": 9950, + "pais": 16878, + "paisley": 22954, + "pajam": 24110, + "pajama": 40244, + "pajamas": 37231, + "pak": 13186, + "pak": 9094, + "paki": 3438, + "pakistan": 10713, + "pakistan": 3994, + "pakistani": 14050, + "pakistanis": 45707, + "pakv": 38196, + "pal": 1850, + "pal": 3611, + "pala": 17895, + "palace": 6381, + "palaces": 45625, + "palad": 28371, + "palae": 43379, + "palais": 35673, + "palate": 34666, + "palawan": 48202, + "palazzo": 36006, + "pale": 4768, + "pale": 12518, + "paleo": 36741, + "paleo": 22198, + "paler": 38028, + "palermo": 40635, + "palestin": 9449, + "palestine": 11682, + "palestinian": 11764, + "palestinians": 21874, + "palette": 13901, + "pali": 48063, + "palin": 40153, + "palis": 44256, + "pality": 27296, + "pall": 35817, + "palla": 21208, + "palladium": 37888, + "pallet": 39057, + "palli": 28954, + "palliative": 46014, + "pally": 46073, + "palm": 19651, + "palm": 8612, + "palma": 29888, + "palmer": 40112, + "palmer": 13633, + "palms": 27059, + "palo": 31562, + "palom": 47698, + "palooza": 25861, + "pals": 11043, + "palsy": 46651, + "pam": 8228, + "pam": 18513, + "pamela": 26991, + "pamp": 37653, + "pamper": 44345, + "pamph": 41332, + "pan": 1072, + "pan": 7437, + "panam": 24606, + "panama": 15522, + "panas": 26207, + "panasonic": 29750, + "pancake": 18723, + "pancakes": 15308, + "panch": 27251, + "pancra": 42472, + "pancre": 27708, + "pancreatic": 49337, + "pancy": 41625, + "pand": 5631, + "panda": 12952, + "pandas": 35119, + "pande": 38419, + "pandey": 34895, + "pandit": 41191, + "pandor": 30250, + "pandora": 17727, + "pandoramusic": 42344, + "pane": 27470, + "panel": 3724, + "paneli": 19410, + "panelist": 39719, + "panelists": 24619, + "panels": 12735, + "panera": 48471, + "pang": 16756, + "pang": 23672, + "panhandle": 40919, + "pani": 36092, + "panic": 46671, + "panic": 14124, + "panini": 30410, + "pann": 42302, + "panna": 49065, + "pano": 36165, + "panor": 12962, + "panorama": 19763, + "panoramic": 22563, + "pans": 35204, + "pant": 22550, + "panther": 22825, + "panther": 13262, + "panthers": 10494, + "panties": 32515, + "panto": 28776, + "pantry": 25608, + "pants": 5003, + "panty": 44217, + "pany": 45567, + "panzer": 41159, + "pao": 33790, + "paola": 44689, + "paolo": 48488, + "paolo": 21133, + "pap": 1884, + "pap": 30756, + "papa": 12211, + "papar": 32782, + "paparazzi": 37842, + "papaya": 44098, + "paper": 8680, + "paper": 2802, + "paperback": 17928, + "papers": 8204, + "paperwork": 35785, + "papi": 35177, + "papp": 26361, + "paprika": 44793, + "papua": 32629, + "par": 699, + "par": 9163, + "para": 18355, + "para": 8976, + "parach": 23147, + "parachute": 30122, + "parad": 37143, + "parade": 5809, + "parades": 46479, + "paradi": 6658, + "paradig": 27786, + "paradigm": 33485, + "paradise": 45869, + "paradise": 7247, + "paradox": 33109, + "parag": 11866, + "paragon": 48099, + "paragra": 24903, + "paragraph": 28499, + "paragu": 38021, + "paraguay": 43579, + "paral": 15143, + "paralle": 13184, + "parallel": 18201, + "paralleled": 42520, + "parallels": 46101, + "paraly": 30255, + "paralym": 18727, + "paralympic": 30806, + 
"paralympics": 37162, + "paralysis": 45702, + "param": 12250, + "parame": 27106, + "paramedic": 34630, + "paramedics": 35991, + "parameters": 44890, + "paramore": 34401, + "paramount": 26642, + "parano": 30283, + "paranoid": 43029, + "paranor": 16940, + "paranormal": 19047, + "parap": 41091, + "paras": 15198, + "parasite": 42460, + "parasites": 46175, + "parc": 30914, + "parcel": 30367, + "parcels": 45589, + "pard": 18773, + "pardon": 47606, + "pardon": 26565, + "pare": 18202, + "pared": 5498, + "paren": 3106, + "parent": 47848, + "parent": 10183, + "parental": 28339, + "parenthood": 23887, + "parenting": 14529, + "parents": 3731, + "pares": 12420, + "parfait": 46140, + "pari": 17961, + "pari": 27979, + "paris": 13982, + "paris": 3445, + "parisagreement": 47405, + "parish": 47328, + "parish": 13020, + "parisi": 45081, + "parisian": 38512, + "parity": 42734, + "park": 4985, + "park": 1452, + "parked": 16487, + "parker": 31119, + "parker": 8365, + "parkin": 34868, + "parking": 5984, + "parkinson": 28129, + "parkland": 31287, + "parkrun": 25747, + "parks": 6873, + "parkway": 19882, + "parl": 30373, + "parl": 29897, + "parliam": 5941, + "parliament": 41599, + "parliament": 7151, + "parliamentary": 17912, + "parlor": 38253, + "parlour": 37829, + "parma": 36077, + "parme": 26295, + "parmesan": 27274, + "paro": 17429, + "parody": 24318, + "parole": 32158, + "parr": 44113, + "parrish": 43043, + "parrot": 23565, + "parry": 40604, + "parsley": 30077, + "parsons": 22505, + "part": 1872, + "part": 1551, + "parte": 48508, + "parth": 34790, + "parti": 10509, + "partial": 18957, + "partially": 21269, + "partic": 2871, + "partici": 9540, + "particip": 4400, + "participant": 27674, + "participants": 10237, + "participate": 9433, + "participated": 14252, + "participates": 46414, + "participating": 11535, + "participation": 13529, + "particle": 27716, + "particles": 27012, + "particul": 11408, + "particular": 14098, + "particularly": 12170, + "parties": 9032, + "parting": 32844, + "partisan": 20772, + "partist": 44713, + "partition": 42219, + "partly": 21459, + "partner": 5210, + "partner": 4568, + "partnered": 21402, + "partnering": 21182, + "partners": 5568, + "partnership": 6123, + "partnerships": 17418, + "parton": 43245, + "partridge": 34872, + "parts": 5149, + "party": 12877, + "party": 1580, + "partying": 25702, + "pas": 1341, + "pas": 9525, + "pasadena": 25892, + "pascal": 28626, + "pasco": 49220, + "pascu": 42692, + "pash": 23936, + "pasha": 46986, + "paso": 18542, + "pasqu": 44941, + "pass": 5016, + "pass": 3511, + "passage": 16477, + "passages": 48937, + "passed": 4957, + "passenger": 12311, + "passengers": 12781, + "passer": 48544, + "passes": 7633, + "passi": 32471, + "passing": 6589, + "passion": 8822, + "passion": 5332, + "passionate": 10947, + "passionately": 44028, + "passions": 38441, + "passive": 23171, + "passover": 38426, + "passport": 14739, + "passports": 46368, + "password": 20258, + "passwords": 43095, + "past": 7315, + "past": 2729, + "pasta": 10441, + "paste": 34765, + "paste": 17038, + "pastel": 19457, + "pastels": 45699, + "pastor": 19792, + "pastor": 9664, + "pastoral": 37191, + "pastors": 30959, + "pastr": 45478, + "pastries": 39409, + "pastry": 18582, + "pasture": 34764, + "pastures": 47793, + "pat": 1300, + "pat": 7036, + "patag": 29862, + "patagonia": 32786, + "patch": 29284, + "patch": 8721, + "patches": 22104, + "patchwork": 44675, + "patchy": 47488, + "pate": 42122, + "pate": 42098, + "patel": 14168, + "patent": 14692, + "patented": 37277, + "patents": 33911, + 
"paterson": 36560, + "path": 7408, + "path": 5035, + "pathetic": 18222, + "pathfinder": 35415, + "pathi": 34976, + "pathi": 27347, + "pathic": 49025, + "patho": 18534, + "pathology": 23290, + "paths": 16333, + "pathway": 23488, + "pathways": 24690, + "pathy": 13330, + "pati": 2799, + "pati": 26708, + "patience": 13575, + "patient": 30139, + "patient": 6262, + "patiently": 22980, + "patients": 5543, + "patil": 49187, + "patio": 14304, + "pational": 30627, + "patna": 45025, + "patory": 41859, + "patreon": 17165, + "patri": 4771, + "patriarch": 49054, + "patriarchy": 48806, + "patric": 12569, + "patrice": 40731, + "patricia": 18143, + "patrick": 12078, + "patrick": 5286, + "patricks": 46783, + "patriot": 28896, + "patriot": 15692, + "patrioti": 35520, + "patriotic": 20217, + "patriotism": 35807, + "patriots": 8707, + "patro": 31650, + "patrol": 10073, + "patrolling": 39344, + "patrols": 35978, + "patron": 26658, + "patron": 17683, + "patrons": 28308, + "pats": 24874, + "patsy": 46093, + "patt": 12637, + "patter": 4982, + "pattern": 7447, + "patterned": 47212, + "patterns": 11637, + "patterson": 21384, + "patti": 44927, + "patti": 26123, + "pattinson": 32474, + "patton": 29026, + "patty": 48741, + "patty": 18321, + "pau": 1834, + "pau": 35970, + "paul": 6035, + "paul": 2597, + "paula": 37363, + "paula": 16777, + "pauline": 30438, + "paulo": 48002, + "paulo": 21628, + "pauls": 41413, + "pauls": 40010, + "paulson": 48201, + "pause": 19439, + "paused": 46782, + "pav": 6661, + "pave": 37107, + "paved": 27898, + "pavel": 43152, + "pavement": 27669, + "pavilion": 13374, + "paving": 28651, + "paw": 14009, + "paw": 16016, + "pawan": 29754, + "pawankalyan": 33702, + "pawn": 43195, + "paws": 16714, + "pax": 20007, + "pax": 19033, + "paxton": 38347, + "pay": 2642, + "pay": 3345, + "payback": 36413, + "paycheck": 45078, + "payday": 26957, + "payee": 46985, + "payer": 41503, + "paying": 8341, + "payment": 10596, + "payments": 11832, + "payne": 12775, + "paypal": 21442, + "payroll": 31610, + "pays": 10845, + "paysoff": 48174, + "paytm": 45352, + "payton": 27348, + "paz": 22267, + "pb": 20112, + "pb": 10981, + "pba": 28205, + "pbb": 48567, + "pbb": 40589, + "pbc": 49191, + "pbl": 35166, + "pbr": 32998, + "pbs": 17908, + "pc": 6782, + "pc": 3808, + "pca": 35705, + "pcb": 26235, + "pcc": 36059, + "pci": 38957, + "pcm": 47436, + "pcr": 35704, + "pcs": 11917, + "pcso": 31963, + "pct": 22168, + "pd": 4387, + "pd": 4675, + "pdates": 16842, + "pdc": 40498, + "pdf": 15181, + "pdp": 24601, + "pdt": 21743, + "pdx": 25470, + "pdx": 16153, + "pe": 661, + "pe": 956, + "pea": 13915, + "peabo": 34083, + "peabody": 41244, + "peac": 34615, + "peace": 6249, + "peace": 3021, + "peaceful": 9461, + "peacefully": 30530, + "peacekeeping": 43630, + "peach": 10522, + "peach": 11538, + "peaches": 27216, + "peak": 18572, + "peak": 6026, + "peakdistrict": 41289, + "peake": 24810, + "peaked": 36391, + "peaks": 14067, + "pean": 11563, + "peanu": 25843, + "peanut": 12491, + "peanuts": 26503, + "pear": 4910, + "pear": 18820, + "pearce": 25996, + "pearl": 21806, + "pearl": 8560, + "pearljam": 46739, + "pearls": 19581, + "pears": 39565, + "pearson": 20461, + "peas": 15937, + "peasant": 40621, + "peasants": 48788, + "peat": 26914, + "pebble": 28056, + "pebbles": 40155, + "pec": 32447, + "pec": 17611, + "pecan": 32177, + "peck": 25186, + "peck": 29234, + "pecker": 30169, + "peckham": 45863, + "pecu": 34200, + "peculiar": 42808, + "ped": 13197, + "ped": 2966, + "pedago": 34590, + "pedagogy": 48072, + "pedal": 32943, + "pedal": 19621, + 
"pedals": 38535, + "pede": 12862, + "pede": 19560, + "pedestri": 30027, + "pedestrian": 18256, + "pedestrians": 33895, + "pedi": 12967, + "pedia": 11733, + "pediatric": 48431, + "pediatric": 22071, + "pedic": 35319, + "pedic": 44528, + "pedro": 29963, + "pedro": 15114, + "peds": 45377, + "pee": 12988, + "pee": 11196, + "peed": 47369, + "peek": 46323, + "peek": 7569, + "peeking": 48771, + "peel": 34386, + "peel": 17158, + "peeled": 33533, + "peeling": 48649, + "peep": 25425, + "peep": 16857, + "peeps": 11681, + "peer": 32416, + "peer": 14432, + "peers": 21626, + "pees": 31830, + "peg": 32182, + "peg": 11207, + "pegas": 30018, + "pegasus": 37822, + "peggy": 24271, + "pei": 48166, + "pei": 12917, + "pel": 4286, + "pel": 7006, + "pele": 44105, + "pelican": 34131, + "pelicans": 29363, + "pell": 46981, + "pelle": 31267, + "pelled": 32506, + "pellegr": 38529, + "pellets": 48240, + "pelo": 40192, + "pelo": 40238, + "pelosi": 22169, + "pelvic": 45646, + "pemb": 19880, + "pembro": 24084, + "pembroke": 36702, + "pembroke": 40044, + "pembrokeshire": 40695, + "pen": 1501, + "pen": 5356, + "pena": 35788, + "penalties": 25417, + "penalty": 11491, + "penang": 29545, + "penc": 20065, + "pence": 18002, + "pencil": 41303, + "pencil": 11200, + "pencils": 21909, + "pend": 3052, + "pendant": 12415, + "pendants": 44117, + "pending": 12770, + "pendleton": 44272, + "pendu": 45336, + "penelope": 36703, + "penetr": 26058, + "peng": 42955, + "peng": 39200, + "pengu": 8854, + "penguin": 28249, + "penguin": 14952, + "penguins": 16557, + "peninsu": 13464, + "peninsula": 14070, + "penn": 7760, + "penn": 11128, + "pennant": 43971, + "penned": 45077, + "penney": 47856, + "pennies": 43094, + "pennsylvania": 13673, + "penny": 20400, + "penny": 11388, + "pens": 13307, + "pens": 13310, + "pensac": 30925, + "pensacola": 33573, + "pension": 32840, + "pension": 17764, + "pensions": 29773, + "penske": 47154, + "pent": 10699, + "pent": 22725, + "pentagon": 23133, + "pente": 33165, + "penthouse": 32673, + "penultimate": 36553, + "peop": 1030, + "people": 10573, + "people": 1047, + "peoples": 28241, + "peoples": 14627, + "peopleschoice": 32418, + "peoplesvote": 45830, + "peoria": 36985, + "pep": 12761, + "pep": 14898, + "pepe": 24778, + "pepp": 34425, + "pepper": 14861, + "pepper": 8253, + "peppermint": 30321, + "pepperoni": 47307, + "peppers": 14650, + "pepsi": 21307, + "per": 703, + "per": 1284, + "pera": 26294, + "perce": 24135, + "perceived": 38436, + "percent": 16328, + "percent": 9017, + "percentage": 19477, + "percep": 28017, + "perception": 20591, + "perceptions": 38138, + "perch": 34281, + "perched": 40071, + "percu": 41722, + "percussion": 23980, + "percy": 23940, + "pere": 8665, + "pere": 36300, + "pered": 24509, + "peregr": 37479, + "peregrine": 44546, + "pereira": 43927, + "peren": 24564, + "perenni": 26996, + "perennial": 34038, + "perez": 15107, + "perf": 22816, + "perfe": 1624, + "perfec": 6599, + "perfect": 17261, + "perfect": 1878, + "perfection": 9646, + "perfectly": 8037, + "perfecto": 42898, + "perfor": 2311, + "perform": 3866, + "perform": 5940, + "performan": 8973, + "performance": 2714, + "performances": 9553, + "performed": 9997, + "performer": 17061, + "performers": 18476, + "performing": 5170, + "performs": 13839, + "perfu": 14214, + "perfume": 17525, + "perhaps": 9297, + "peri": 12618, + "peri": 44068, + "perience": 19302, + "peril": 40119, + "peril": 48301, + "perimeter": 38499, + "pering": 29746, + "perio": 5101, + "period": 6131, + "periodic": 36476, + "periods": 24401, + "periph": 35308, + 
"peripheral": 43901, + "peris": 19461, + "periscope": 21668, + "perk": 33424, + "perkins": 20057, + "perks": 17660, + "perl": 44018, + "perm": 47847, + "perman": 9018, + "permanent": 11144, + "permanently": 25584, + "perme": 42456, + "permission": 15822, + "permit": 21950, + "permits": 33267, + "permitted": 44380, + "pero": 23551, + "perpe": 15749, + "perpetr": 33376, + "perpetu": 30132, + "perpetual": 32018, + "perrie": 32691, + "perry": 28478, + "perry": 7899, + "pers": 3688, + "pers": 10710, + "perse": 27498, + "persecu": 22878, + "persecution": 32009, + "perseverance": 29820, + "persi": 11509, + "persian": 19859, + "persist": 19412, + "persist": 40938, + "persistence": 34588, + "persistent": 29028, + "person": 3510, + "person": 2533, + "persona": 18401, + "personal": 10114, + "personal": 4121, + "personalised": 24186, + "personalities": 27888, + "personality": 10386, + "personalized": 17845, + "personally": 13885, + "personnel": 14546, + "persons": 14592, + "perspec": 17997, + "perspective": 8996, + "perspectives": 18777, + "persu": 20972, + "pert": 36970, + "pert": 16306, + "perth": 19067, + "perth": 11011, + "peru": 20612, + "peru": 12964, + "peruvian": 30822, + "pes": 38368, + "pes": 2598, + "pesa": 47409, + "pesc": 44044, + "pesh": 33184, + "peshaw": 28524, + "peshawar": 29230, + "pesky": 42512, + "pesos": 47872, + "pessi": 43902, + "pest": 20130, + "pest": 9425, + "pesticide": 48481, + "pesticides": 37868, + "pesto": 26186, + "pests": 41919, + "pet": 2167, + "pet": 3703, + "peta": 28785, + "petal": 38430, + "petal": 40469, + "petals": 26064, + "petday": 45314, + "pete": 14479, + "pete": 8571, + "peter": 5093, + "peter": 3696, + "peterborough": 26012, + "peters": 16336, + "petersburg": 21052, + "petersen": 39794, + "peterson": 16877, + "peth": 48920, + "petit": 36437, + "petit": 21276, + "petite": 27213, + "petition": 10975, + "petitions": 43536, + "petr": 29808, + "petra": 31300, + "petre": 47179, + "petri": 31831, + "petro": 8716, + "petrol": 18149, + "petroleum": 22063, + "petron": 42875, + "pets": 7663, + "pett": 27051, + "petti": 48001, + "petting": 44334, + "petty": 17324, + "peu": 21411, + "peuge": 22893, + "peugeot": 24129, + "pew": 21608, + "pew": 30783, + "pewdie": 41882, + "pewdiepie": 42563, + "pex": 43765, + "pey": 14966, + "pey": 30933, + "peyton": 49254, + "peyton": 20307, + "pez": 45798, + "pez": 10482, + "pf": 16680, + "pf": 12572, + "pfa": 47839, + "pfc": 35007, + "pff": 44121, + "pfi": 29810, + "pfw": 31229, + "pg": 12476, + "pg": 5211, + "pga": 13351, + "pgat": 36514, + "pgatour": 40094, + "pgh": 44862, + "pgh": 30031, + "pgs": 49204, + "ph": 745, + "ph": 2042, + "pha": 4443, + "pha": 26255, + "phal": 19962, + "phan": 8731, + "phan": 40126, + "phant": 36998, + "phantom": 37688, + "phantom": 14490, + "phar": 5570, + "phara": 35792, + "pharaoh": 40437, + "pharm": 45761, + "pharma": 17831, + "pharmac": 8193, + "pharmaceu": 19490, + "pharmaceutical": 25217, + "pharmaceuticals": 44623, + "pharmacist": 41024, + "pharmacists": 44337, + "pharmacy": 15293, + "pharo": 42308, + "pharoah": 49287, + "pharrell": 31316, + "phase": 8304, + "phases": 35337, + "phat": 42492, + "phc": 41102, + "phd": 20875, + "phd": 8472, + "phdchat": 39564, + "phdlife": 39638, + "phe": 4787, + "phe": 19853, + "pheasant": 41983, + "phee": 41292, + "phel": 23711, + "phelps": 27128, + "phen": 7718, + "pheno": 47336, + "phenom": 31673, + "phenom": 39618, + "phenomen": 11304, + "phenomena": 41538, + "phenomenal": 15035, + "phenomenon": 24464, + "pher": 9194, + "pher": 19828, + "phers": 29531, + 
"pherson": 36421, + "phew": 10295, + "phi": 2239, + "phi": 12220, + "phia": 9228, + "phic": 3977, + "phie": 30237, + "phies": 17062, + "phil": 2821, + "phil": 6199, + "phila": 47443, + "philadel": 9428, + "philadelphia": 9749, + "philanthro": 16587, + "philanthropist": 44153, + "philanthropy": 25047, + "philately": 33695, + "phile": 36543, + "philharmon": 25228, + "philharmonic": 31699, + "phili": 4277, + "philia": 46654, + "philip": 20748, + "philip": 11074, + "philipp": 5623, + "philipp": 47591, + "philippe": 20942, + "philippine": 17629, + "philippines": 8149, + "philips": 25175, + "phill": 42346, + "phill": 48272, + "philli": 6456, + "phillies": 18748, + "phillip": 48832, + "phillip": 19323, + "phillips": 11041, + "philly": 19545, + "philly": 7785, + "philos": 8395, + "philosop": 20349, + "philosoph": 10187, + "philosopher": 25220, + "philosophical": 32628, + "philosophy": 12213, + "phils": 38573, + "phin": 33816, + "phine": 40985, + "phins": 40210, + "phish": 36897, + "phishing": 36546, + "phl": 25603, + "pho": 816, + "pho": 22707, + "phobia": 28749, + "phoe": 22673, + "phoebe": 27582, + "phoeni": 6778, + "phoenix": 20615, + "phoenix": 7793, + "phol": 48140, + "phon": 19602, + "phon": 31115, + "phone": 15486, + "phone": 1951, + "phones": 6351, + "phony": 31925, + "phora": 31363, + "phosp": 22638, + "photo": 1153, + "photo": 1125, + "photobomb": 37075, + "photobook": 41894, + "photog": 28115, + "photogenic": 36108, + "photogra": 36754, + "photograph": 1688, + "photograph": 8853, + "photographed": 11573, + "photographer": 5748, + "photographers": 17141, + "photographic": 22053, + "photographing": 30074, + "photographs": 15759, + "photography": 33183, + "photography": 2108, + "photom": 32223, + "photoo": 11106, + "photooftheday": 11933, + "photos": 2479, + "photoshoot": 11121, + "photoshop": 12419, + "photoshopped": 35738, + "phouse": 27848, + "php": 17370, + "phra": 12777, + "phrase": 18809, + "phrases": 35264, + "phs": 16495, + "phu": 21274, + "phuket": 34028, + "phx": 35466, + "phx": 29507, + "phy": 6484, + "phy": 4292, + "phyl": 35600, + "phyllis": 37844, + "phys": 3734, + "phys": 37894, + "physi": 13782, + "physic": 46641, + "physical": 44127, + "physical": 6671, + "physically": 18105, + "physician": 21055, + "physicians": 26702, + "physicist": 29052, + "physics": 9369, + "physio": 29574, + "physio": 29177, + "physiology": 32349, + "physique": 42884, + "phyto": 42197, + "pi": 741, + "pi": 5357, + "pia": 8918, + "pian": 24637, + "pianist": 21048, + "piano": 49278, + "piano": 7894, + "pianos": 47904, + "piazza": 28496, + "pic": 901, + "pic": 1282, + "pical": 5482, + "picard": 48507, + "picasso": 21481, + "piccad": 33876, + "piccadilly": 37287, + "piccollage": 43621, + "pick": 6379, + "pick": 3142, + "picked": 6018, + "picker": 43105, + "pickering": 47605, + "picket": 33559, + "picking": 9545, + "pickle": 24570, + "pickled": 21705, + "pickles": 25001, + "picks": 8551, + "pickup": 15382, + "pickups": 33383, + "picnic": 12007, + "pico": 23363, + "picoftheday": 18319, + "pics": 2559, + "pict": 18778, + "pictorial": 40640, + "picture": 11663, + "picture": 1674, + "pictured": 7647, + "pictures": 3646, + "picturesque": 24894, + "pid": 5225, + "piday": 48056, + "pie": 12065, + "pie": 5319, + "piece": 39632, + "piece": 2754, + "pieces": 6194, + "pied": 24686, + "pied": 12713, + "piedmont": 39691, + "pier": 5641, + "pier": 11348, + "pierc": 49216, + "pierce": 48462, + "pierce": 16782, + "pierced": 32799, + "piercing": 22557, + "piero": 43125, + "pierre": 34670, + "pierre": 11985, + "piers": 
29030, + "pies": 6898, + "pieter": 44801, + "pietro": 42169, + "piff": 40719, + "pig": 12009, + "pig": 9619, + "pigeon": 18008, + "pigeons": 32910, + "piggy": 28245, + "pigment": 40284, + "pigs": 16228, + "pik": 48539, + "pika": 47372, + "pikach": 27268, + "pikachu": 28107, + "pike": 33457, + "pike": 14011, + "pil": 2893, + "pil": 20645, + "pilates": 29518, + "pile": 44403, + "pile": 13930, + "piled": 26873, + "piles": 31968, + "pilgri": 13966, + "pilgrim": 32662, + "pilgrimage": 24335, + "pilgrims": 31370, + "piling": 43050, + "pilip": 27234, + "pilipinas": 32392, + "pill": 14830, + "pill": 19226, + "pillar": 17322, + "pillars": 22054, + "pillow": 42237, + "pillow": 12182, + "pillows": 26499, + "pills": 23964, + "pilo": 37526, + "pilot": 31619, + "pilot": 6687, + "pilots": 15586, + "pilsner": 47153, + "pim": 15285, + "pim": 35472, + "pimp": 35789, + "pin": 2629, + "pin": 5164, + "pinball": 31679, + "pinch": 26114, + "pine": 9398, + "pine": 7374, + "pineapple": 14831, + "pines": 20338, + "ping": 23720, + "ping": 2089, + "pinion": 40557, + "pink": 11151, + "pink": 3360, + "pinkfloyd": 48520, + "pinky": 29803, + "pinn": 31448, + "pinnacle": 32754, + "pinned": 12165, + "pinning": 44515, + "pino": 36633, + "pinot": 41399, + "pinot": 21146, + "pinoy": 43578, + "pinoy": 35258, + "pins": 14619, + "pinst": 41173, + "pint": 42537, + "pint": 13584, + "pinterest": 15379, + "pinto": 35992, + "pints": 27935, + "pinup": 37349, + "pio": 22108, + "pion": 36728, + "pion": 29190, + "pione": 7975, + "pioneer": 34892, + "pioneer": 12459, + "pioneering": 25933, + "pioneers": 22383, + "pious": 42441, + "pip": 30854, + "pipe": 29333, + "pipe": 10459, + "pipel": 12387, + "pipeline": 14151, + "pipelines": 39683, + "piper": 47052, + "piper": 16293, + "pipes": 16991, + "piping": 40744, + "pippa": 47672, + "pir": 4351, + "pir": 38899, + "piracy": 39452, + "piran": 49034, + "pirate": 38680, + "pirate": 13592, + "pirates": 10442, + "pire": 16613, + "pires": 14988, + "pis": 9230, + "pis": 44441, + "pisa": 43632, + "pisces": 45982, + "piss": 20818, + "pissed": 17989, + "pist": 15556, + "pist": 32826, + "pistachi": 29760, + "pistachio": 36320, + "pistol": 20480, + "piston": 48236, + "pistons": 27242, + "pistor": 48162, + "pit": 2946, + "pit": 7476, + "pita": 27070, + "pitbull": 25295, + "pitch": 8992, + "pitch": 5872, + "pitched": 28447, + "pitcher": 13445, + "pitchers": 27835, + "pitches": 21005, + "pitching": 16455, + "piti": 47568, + "pits": 24144, + "pitt": 7607, + "pitt": 15599, + "pitts": 9531, + "pittsburgh": 10453, + "pity": 24380, + "pius": 39988, + "pivo": 18009, + "pivot": 31805, + "pivotal": 31432, + "pix": 6185, + "pix": 13088, + "pixar": 27493, + "pixel": 14384, + "pixel": 13241, + "pixelart": 18516, + "pixels": 34099, + "pixie": 35573, + "piyu": 30772, + "piyush": 36191, + "piyushgoyal": 45318, + "pizz": 3897, + "pizza": 4474, + "pizzas": 30647, + "pizzeria": 44174, + "pj": 12524, + "pj": 17179, + "pjnet": 22011, + "pjs": 36009, + "pk": 10149, + "pk": 10991, + "pkg": 49011, + "pkk": 47480, + "pknot": 41779, + "pkwy": 36827, + "pl": 712, + "pl": 5678, + "pla": 841, + "pla": 19945, + "plac": 2331, + "place": 14884, + "place": 1445, + "placed": 9729, + "placement": 16724, + "placements": 43885, + "placer": 49170, + "places": 4448, + "placing": 18531, + "plague": 25360, + "plaid": 23291, + "plain": 22776, + "plain": 10709, + "plains": 16345, + "plan": 1740, + "plan": 2970, + "pland": 24801, + "plane": 22728, + "plane": 5363, + "planes": 12581, + "planet": 16833, + "planet": 5172, + "planetary": 28361, + 
"planets": 22315, + "plank": 30991, + "plankton": 48249, + "plann": 6409, + "planned": 8169, + "planner": 18083, + "planners": 33664, + "planning": 4446, + "plano": 34063, + "plans": 4181, + "plant": 8521, + "plant": 3912, + "plantation": 20014, + "plantbased": 33720, + "planted": 14286, + "planter": 34453, + "planters": 43661, + "planting": 13922, + "plants": 5829, + "plaque": 16097, + "plaques": 45610, + "plar": 26754, + "plas": 45673, + "plasma": 24999, + "plaster": 31980, + "plastic": 15645, + "plastic": 6102, + "plasticpollution": 47129, + "plastics": 20999, + "plasticsurgery": 48555, + "plat": 3172, + "plata": 46456, + "plate": 28744, + "plate": 5135, + "plateau": 29301, + "plated": 21161, + "plates": 11485, + "platform": 5549, + "platforms": 13551, + "platin": 10267, + "plating": 44564, + "platinum": 10979, + "plato": 41101, + "platoon": 41254, + "platt": 44459, + "platt": 40097, + "platte": 46785, + "platter": 29071, + "platz": 40878, + "plau": 39139, + "play": 1222, + "play": 1453, + "playa": 23756, + "playable": 33885, + "playback": 39194, + "playbook": 34856, + "playboy": 24383, + "played": 3432, + "player": 24503, + "player": 2477, + "players": 3030, + "playful": 23871, + "playground": 15861, + "playhouse": 23254, + "playin": 24674, + "playing": 47368, + "playing": 1629, + "playlist": 9180, + "playlists": 47183, + "playo": 5804, + "playoff": 9655, + "playoffs": 9548, + "plays": 5134, + "playstation": 11332, + "playtime": 43037, + "playwright": 32070, + "plaza": 8943, + "plc": 16827, + "ple": 926, + "ple": 1619, + "plea": 21956, + "plead": 47539, + "pleads": 31425, + "plear": 21362, + "pleas": 8481, + "pleas": 48740, + "pleasant": 12271, + "please": 41074, + "please": 1474, + "pleased": 6107, + "pleasing": 32893, + "pleasure": 5854, + "pleasures": 29513, + "pledge": 11507, + "pledged": 36799, + "pledges": 26746, + "pledis": 41202, + "plein": 43429, + "plenary": 19891, + "plenty": 7524, + "pler": 17677, + "ples": 6248, + "pless": 39821, + "pless": 17059, + "plets": 43230, + "plex": 23765, + "plex": 15241, + "pley": 19543, + "pli": 30001, + "pli": 45797, + "plic": 5806, + "plicity": 19823, + "plight": 40317, + "plin": 44531, + "plin": 32335, + "pline": 25376, + "pling": 12899, + "plings": 31184, + "pll": 47629, + "pll": 25266, + "pln": 48755, + "plo": 1778, + "plo": 43523, + "plor": 34695, + "plot": 9918, + "plots": 25672, + "plotting": 30751, + "plough": 33811, + "plow": 38363, + "pls": 5572, + "plu": 2052, + "plug": 12628, + "plugged": 23261, + "plugin": 31278, + "plugins": 48797, + "plugs": 28083, + "plum": 26267, + "plum": 16202, + "plumb": 21769, + "plumber": 43478, + "plumbing": 24647, + "plume": 39495, + "plun": 15122, + "plunge": 26506, + "plur": 44664, + "plus": 3097, + "plush": 18926, + "pluto": 26380, + "ply": 17249, + "ply": 28705, + "plying": 36071, + "plym": 11907, + "plymouth": 13786, + "plz": 10538, + "pm": 13699, + "pm": 990, + "pmi": 41206, + "pmln": 23208, + "pmo": 18782, + "pmoindia": 20374, + "pms": 44223, + "pn": 14431, + "pn": 13774, + "pnc": 37148, + "pne": 30966, + "pneu": 28714, + "pneumonia": 42906, + "png": 20992, + "pnp": 25972, + "pnpp": 42175, + "pnw": 31521, + "po": 628, + "po": 3057, + "poa": 43912, + "poached": 27665, + "poaching": 35140, + "poc": 13232, + "poc": 27780, + "pocaly": 37987, + "pocalypse": 42307, + "poche": 38336, + "poche": 39022, + "pocket": 29147, + "pocket": 8504, + "pockets": 19566, + "pocon": 41850, + "pod": 3583, + "pod": 7446, + "podcast": 39654, + "podcast": 4294, + "podcasting": 40106, + "podcasts": 19392, + "pode": 33368, 
+ "poder": 24960, + "podernfamily": 26620, + "podi": 32853, + "podium": 14093, + "pods": 18776, + "poe": 4746, + "poe": 19254, + "poem": 9436, + "poems": 15577, + "poet": 41019, + "poet": 9872, + "poetic": 26365, + "poetry": 20192, + "poetry": 6038, + "poetryday": 39255, + "poets": 19804, + "pof": 40850, + "poff": 28236, + "pogba": 25998, + "poign": 29682, + "poignant": 32138, + "poin": 9074, + "point": 13280, + "point": 2301, + "pointe": 24631, + "pointed": 20703, + "pointer": 29883, + "pointers": 36760, + "pointing": 19233, + "pointless": 33586, + "points": 3396, + "pois": 17008, + "poise": 45087, + "poised": 27354, + "poison": 30722, + "poison": 17074, + "poisoned": 43624, + "poisoning": 25750, + "poisonous": 37131, + "pok": 15387, + "poke": 6892, + "poke": 23186, + "pokemon": 16239, + "pokemon": 9528, + "pokemongo": 23985, + "poker": 30735, + "poker": 11865, + "pokes": 40221, + "poking": 49169, + "poké": 20656, + "pokémon": 22066, + "pol": 977, + "pol": 7649, + "pola": 43876, + "poland": 9834, + "polar": 21432, + "polar": 12214, + "polari": 27919, + "polaris": 37965, + "polarized": 48437, + "polaro": 25237, + "polaroid": 30427, + "poldark": 41322, + "pole": 26682, + "pole": 8170, + "poles": 22585, + "poli": 9675, + "poli": 5414, + "polic": 16126, + "police": 15535, + "police": 2120, + "policeman": 37713, + "policemen": 47946, + "polici": 10819, + "policies": 10993, + "policing": 20969, + "policy": 30173, + "policy": 4660, + "polio": 30533, + "polis": 16133, + "polish": 46941, + "polish": 9632, + "polished": 21478, + "polishing": 43629, + "polit": 2247, + "politan": 15337, + "polite": 31497, + "politi": 40597, + "politic": 33333, + "political": 37744, + "political": 4197, + "politically": 24323, + "politician": 15960, + "politicians": 12914, + "politico": 39403, + "politics": 4929, + "polk": 33317, + "polka": 29476, + "poll": 7032, + "pollen": 27651, + "pollin": 19152, + "pollinators": 36599, + "polling": 18024, + "pollo": 42755, + "pollock": 37614, + "polls": 11813, + "pollu": 8370, + "polluted": 43346, + "pollution": 10384, + "polly": 31204, + "polo": 35928, + "polo": 10229, + "poly": 6833, + "poly": 18367, + "polye": 31730, + "polyester": 38514, + "polym": 23626, + "polymer": 29993, + "polyne": 38892, + "polyvore": 24771, + "pom": 7548, + "pom": 24280, + "pome": 27963, + "pomegran": 29326, + "pomegranate": 32415, + "pomer": 35156, + "pomona": 41690, + "pompe": 18352, + "pompeii": 47775, + "pompeo": 34351, + "pompey": 35079, + "pon": 3809, + "pon": 22391, + "ponce": 43637, + "pond": 10750, + "ponder": 36863, + "pondering": 47395, + "ponds": 31033, + "pone": 32183, + "pong": 40546, + "pong": 17710, + "ponies": 34157, + "pons": 41255, + "pont": 47563, + "pont": 22997, + "ponte": 40892, + "ponti": 15527, + "pontiac": 25373, + "pontifex": 33566, + "ponty": 45152, + "pony": 24438, + "pony": 12678, + "ponytail": 43265, + "poo": 6601, + "poo": 14389, + "pooch": 37037, + "poodle": 34961, + "pooh": 27103, + "pooja": 35676, + "pool": 12484, + "pool": 2831, + "poole": 26290, + "pools": 18736, + "poolside": 35509, + "poon": 33799, + "poon": 36178, + "poop": 23310, + "poor": 14528, + "poor": 3665, + "poorest": 40771, + "poorly": 21101, + "pop": 6530, + "pop": 2852, + "popart": 47425, + "popcorn": 15034, + "pope": 16994, + "pope": 9283, + "popefrancis": 37254, + "poplar": 38726, + "popo": 38835, + "popo": 35572, + "popp": 13156, + "popped": 14934, + "poppies": 30385, + "poppin": 28536, + "popping": 18152, + "poppins": 41216, + "poppy": 32194, + "poppy": 15447, + "pops": 11705, + "popsic": 38481, + 
"popu": 3785, + "popul": 6593, + "popular": 15854, + "popular": 4368, + "popularity": 19235, + "populated": 38420, + "population": 8423, + "populations": 23797, + "populism": 48998, + "populist": 49376, + "popup": 33053, + "por": 817, + "por": 7697, + "pora": 23537, + "porcel": 19409, + "porcelain": 20451, + "porch": 17154, + "pore": 28267, + "pork": 40379, + "pork": 7897, + "poro": 48110, + "porridge": 34924, + "porsch": 48009, + "porsche": 44049, + "porsche": 8783, + "port": 1641, + "port": 1418, + "porta": 45037, + "portable": 11949, + "portage": 32087, + "portal": 14982, + "porte": 28654, + "ported": 16879, + "porter": 28319, + "porter": 10318, + "porters": 15670, + "portfoli": 45766, + "portfolio": 11938, + "porth": 37425, + "porti": 45760, + "porting": 26052, + "portion": 13739, + "portions": 22914, + "portland": 38366, + "portland": 8880, + "portman": 34755, + "porto": 24853, + "porto": 18947, + "portobello": 48025, + "portra": 4175, + "portrait": 39312, + "portrait": 5352, + "portraits": 14203, + "portray": 46282, + "portrayal": 39238, + "portrayed": 36093, + "ports": 7734, + "portsm": 17063, + "portsmouth": 19074, + "portu": 7159, + "portugal": 9503, + "portugue": 17498, + "portuguese": 18019, + "pos": 1780, + "pos": 11839, + "pose": 25478, + "pose": 4230, + "posed": 5206, + "posei": 47270, + "poser": 46899, + "poses": 9773, + "posey": 34852, + "posh": 26748, + "posing": 10518, + "posit": 28793, + "positi": 7895, + "position": 4657, + "positioned": 34482, + "positioning": 30657, + "positions": 12188, + "positive": 21811, + "positive": 4844, + "positively": 24688, + "positivity": 19966, + "poss": 39745, + "posse": 17414, + "posse": 28413, + "possess": 36810, + "possessed": 36220, + "possession": 16154, + "possessions": 40588, + "possi": 2521, + "possibilities": 17932, + "possibility": 18517, + "possible": 3134, + "possibly": 8601, + "possum": 38575, + "post": 3489, + "post": 1549, + "postage": 27570, + "postal": 21687, + "postcard": 14785, + "postcards": 23922, + "postdoc": 41013, + "posted": 4752, + "poster": 22881, + "poster": 3574, + "posters": 9673, + "postgame": 34873, + "postgraduate": 31997, + "posthum": 42410, + "posting": 7559, + "postman": 38285, + "postpon": 23247, + "postponed": 25097, + "posts": 7824, + "postseason": 24521, + "posture": 29681, + "posure": 35539, + "pot": 3547, + "pot": 5168, + "potam": 45825, + "potassi": 36889, + "potassium": 37147, + "potat": 5975, + "potato": 8527, + "potatoes": 11567, + "potd": 28765, + "pote": 41869, + "poten": 4454, + "potent": 26082, + "potenti": 44104, + "potential": 5100, + "potentially": 16508, + "potholes": 47506, + "potion": 46055, + "potom": 38848, + "potomac": 43372, + "pots": 19234, + "pott": 28698, + "potted": 48581, + "potter": 24975, + "potter": 9026, + "pottery": 18396, + "potts": 39839, + "potty": 43569, + "potus": 8740, + "pou": 9423, + "pouch": 26811, + "poul": 22485, + "poultry": 31005, + "poun": 33719, + "pound": 33809, + "pound": 10674, + "pounding": 46544, + "pounds": 10752, + "pour": 33112, + "pour": 8180, + "poured": 26621, + "pouring": 16098, + "pours": 26005, + "pout": 39621, + "poutine": 43768, + "pov": 25731, + "pover": 8432, + "pover": 29464, + "poverty": 9095, + "pow": 1317, + "pow": 17745, + "powder": 32427, + "powder": 9674, + "powe": 36955, + "powell": 13305, + "power": 2789, + "power": 1807, + "powerball": 47803, + "powered": 45442, + "powered": 7332, + "powerful": 4875, + "powerhouse": 22858, + "powering": 16231, + "powerof": 31961, + "powerpoint": 38940, + "powerrangers": 40620, + "powers": 9422, 
+ "pox": 43649, + "poy": 34737, + "poyn": 47655, + "poz": 39953, + "pp": 604, + "pp": 4186, + "ppa": 10416, + "ppard": 23391, + "ppc": 27778, + "ppe": 24573, + "ppe": 11867, + "pped": 1873, + "ppel": 46523, + "ppen": 30663, + "pper": 6719, + "pper": 2440, + "ppers": 5232, + "ppery": 27833, + "ppet": 20744, + "ppets": 25849, + "ppg": 27433, + "ppi": 9594, + "ppie": 33795, + "ppin": 8076, + "pping": 22214, + "pping": 1682, + "ppings": 35687, + "ppl": 6758, + "pple": 12302, + "ppm": 42053, + "ppo": 10215, + "ppor": 37613, + "ppp": 14017, + "pps": 10683, + "ppv": 38864, + "ppy": 30360, + "ppy": 3860, + "pr": 766, + "pr": 4150, + "pra": 1865, + "pra": 19285, + "prab": 17901, + "prabhas": 29959, + "prabhu": 31529, + "prac": 2243, + "practi": 29995, + "practic": 5495, + "practical": 10792, + "practically": 25588, + "practice": 3349, + "practiced": 36749, + "practices": 9040, + "practicing": 12750, + "practise": 38938, + "practising": 36478, + "practiti": 19909, + "practitioner": 32591, + "practitioners": 29045, + "prada": 29456, + "pradesh": 15384, + "prado": 44141, + "prag": 31025, + "prague": 14940, + "prairi": 12629, + "prairie": 14753, + "praise": 10013, + "praised": 27649, + "praises": 23049, + "praising": 36961, + "prakash": 43708, + "prakash": 25366, + "pram": 47774, + "pran": 20048, + "prank": 23654, + "pras": 41562, + "prasad": 29562, + "prat": 23069, + "prati": 45773, + "pratt": 37863, + "pratt": 23396, + "prawn": 33102, + "prawns": 34903, + "pray": 12671, + "pray": 6041, + "prayed": 34665, + "prayer": 41452, + "prayer": 6583, + "prayers": 8393, + "prayfor": 18443, + "praying": 11550, + "prays": 46602, + "prc": 28781, + "pre": 679, + "pre": 2900, + "preach": 22545, + "preacher": 29357, + "preaching": 23642, + "precau": 36532, + "precautions": 47845, + "prece": 15361, + "preci": 5470, + "precin": 27908, + "precinct": 32587, + "precious": 8226, + "precipit": 27463, + "precipitation": 33399, + "precise": 24457, + "precisely": 34954, + "precision": 44021, + "precision": 15621, + "pred": 40370, + "predat": 13364, + "predator": 20653, + "predators": 25569, + "prede": 38454, + "predecess": 38963, + "predic": 4876, + "predict": 16900, + "predictable": 25344, + "predicted": 18702, + "predicting": 30414, + "prediction": 16296, + "predictions": 15125, + "predictive": 29798, + "predicts": 25960, + "preds": 40125, + "pree": 47026, + "preet": 30131, + "prefe": 14542, + "prefecture": 32890, + "prefer": 33426, + "prefer": 11450, + "preference": 35057, + "preferences": 38118, + "preferred": 18772, + "prefers": 38528, + "pregame": 18575, + "pregn": 7190, + "pregnancy": 12769, + "pregnant": 11195, + "prehistoric": 32750, + "prejudice": 28337, + "preli": 15523, + "prelimin": 19990, + "preliminary": 20997, + "prelims": 43223, + "prelude": 42966, + "prem": 32090, + "prem": 21724, + "premature": 39253, + "premi": 2413, + "premier": 16996, + "premier": 5539, + "premiere": 5367, + "premiered": 27652, + "premieres": 19907, + "premiering": 32615, + "premierleague": 22608, + "premiers": 44883, + "premiership": 23665, + "premiosm": 38460, + "premiosmtvmiaw": 38630, + "premise": 45952, + "premises": 27266, + "premium": 8011, + "pren": 20801, + "preneur": 46288, + "preorder": 16703, + "preorders": 45985, + "prep": 6430, + "prep": 7277, + "prepa": 26270, + "prepaid": 42934, + "prepar": 4968, + "preparation": 11651, + "preparations": 19135, + "prepare": 7014, + "prepared": 7677, + "preparedness": 29492, + "prepares": 16375, + "preparing": 7365, + "prepped": 34379, + "prepping": 16459, + "preps": 14765, + "prequel": 
40461, + "pres": 1385, + "pres": 8529, + "presale": 27135, + "presby": 30447, + "presbyter": 33959, + "presbyterian": 35370, + "preschool": 24354, + "prescott": 29392, + "prescri": 14851, + "prescribed": 36968, + "prescription": 23061, + "preseason": 13813, + "presen": 16742, + "presence": 8848, + "present": 2344, + "present": 2881, + "presentation": 4594, + "presentations": 16998, + "presented": 4587, + "presenter": 18587, + "presenters": 32759, + "presenting": 5339, + "presents": 4215, + "preserv": 17616, + "preservation": 21074, + "preserve": 15570, + "preserved": 23161, + "preserves": 44881, + "preserving": 32315, + "presi": 1697, + "presiden": 43374, + "presidency": 18077, + "president": 19900, + "president": 1940, + "presidente": 47363, + "presidenti": 48297, + "presidential": 8503, + "presidents": 16726, + "presiding": 45298, + "presley": 30013, + "press": 4124, + "press": 2124, + "pressed": 20080, + "presser": 27826, + "presses": 33748, + "pressing": 20893, + "pressure": 6083, + "pressures": 38487, + "prest": 41840, + "presti": 12245, + "prestige": 29328, + "prestigious": 15888, + "presto": 42211, + "preston": 37335, + "preston": 15179, + "presu": 21667, + "presumably": 42562, + "pret": 9652, + "preten": 15871, + "pretend": 18111, + "pretending": 21306, + "pretoria": 36080, + "prett": 46667, + "prettier": 31745, + "prettiest": 22866, + "pretty": 18286, + "pretty": 2111, + "pretz": 24890, + "pretzel": 36707, + "pretzels": 45468, + "prev": 20274, + "prevail": 31637, + "prevalence": 41729, + "prevalent": 46260, + "preven": 29382, + "prevent": 26436, + "prevent": 7968, + "preventable": 44250, + "prevented": 35356, + "preventing": 21756, + "prevention": 9500, + "preventive": 40949, + "prevents": 31746, + "preview": 4449, + "previews": 20279, + "previous": 9252, + "previously": 13359, + "prey": 17131, + "prez": 17956, + "pri": 955, + "pri": 23400, + "pric": 24275, + "price": 13254, + "price": 2827, + "priced": 16934, + "priceless": 15743, + "prices": 5954, + "pricing": 14800, + "prick": 43921, + "prick": 46516, + "pride": 15323, + "pride": 3436, + "pridemonth": 41410, + "prie": 22477, + "priest": 38756, + "priest": 14222, + "priests": 30005, + "prim": 22004, + "prima": 35611, + "prima": 33277, + "primal": 36604, + "primar": 21579, + "primaries": 46126, + "primarily": 29465, + "primark": 48329, + "primary": 35024, + "primary": 5814, + "primavera": 44899, + "prime": 14162, + "prime": 5183, + "primed": 45694, + "primer": 22388, + "primetime": 29763, + "primitive": 37467, + "primo": 43215, + "primrose": 45891, + "prin": 1588, + "prince": 9457, + "prince": 4735, + "princes": 45329, + "princes": 30136, + "princess": 24123, + "princess": 5079, + "princesses": 34161, + "princeton": 22433, + "princi": 5129, + "principal": 33599, + "principal": 8860, + "principals": 27524, + "principle": 19595, + "principles": 13755, + "print": 17851, + "print": 3557, + "printable": 29648, + "printed": 7978, + "printer": 14521, + "printers": 27881, + "printing": 7369, + "printmaking": 38669, + "prints": 7704, + "prior": 20328, + "prior": 10572, + "priorit": 47773, + "prioriti": 28822, + "priorities": 15232, + "prioritize": 46715, + "priority": 12451, + "priory": 38665, + "prisc": 32468, + "priscilla": 42396, + "prise": 23343, + "prism": 49311, + "prism": 34356, + "prison": 9281, + "prison": 6622, + "prisoner": 21427, + "prisoners": 17460, + "prisons": 26607, + "pristine": 30618, + "prit": 41668, + "prit": 37523, + "prith": 39173, + "prius": 43561, + "priv": 3270, + "privacy": 10437, + "private": 20362, + 
"private": 4439, + "privately": 32970, + "privati": 27379, + "privi": 8367, + "privileg": 18015, + "privilege": 11537, + "privileged": 18166, + "prix": 10875, + "priya": 31275, + "priyan": 16488, + "priyanka": 31959, + "priyankach": 30030, + "priyankachopra": 30264, + "prize": 48222, + "prize": 4521, + "prized": 38769, + "prizes": 9268, + "prk": 37094, + "pro": 644, + "pro": 2630, + "proactive": 33364, + "prob": 17706, + "prob": 24007, + "probab": 3907, + "probability": 32637, + "probable": 42444, + "probably": 4047, + "probation": 36531, + "probe": 14359, + "probes": 48564, + "probiotics": 49395, + "proble": 2719, + "problem": 4324, + "problematic": 33767, + "problems": 4671, + "probs": 16330, + "probz": 34243, + "proc": 38417, + "proce": 4076, + "procedu": 18204, + "procedural": 48177, + "procedure": 20163, + "procedures": 21109, + "proceed": 26664, + "proceed": 33894, + "proceedings": 26953, + "proceeds": 11882, + "process": 17291, + "process": 4078, + "processed": 23816, + "processes": 15169, + "processing": 11737, + "procession": 26288, + "processor": 22838, + "processors": 43634, + "proclaimed": 34489, + "proclamation": 32065, + "procra": 25361, + "procrastin": 25586, + "procrastination": 42825, + "procreate": 39336, + "proctor": 47204, + "procu": 21001, + "procurement": 23733, + "prod": 44349, + "prod": 11991, + "prodi": 27759, + "prodigy": 31973, + "produ": 27852, + "produc": 1471, + "produce": 7529, + "produced": 7479, + "producer": 7064, + "producers": 13883, + "produces": 19940, + "producing": 13579, + "product": 32602, + "product": 4306, + "production": 4146, + "productions": 14166, + "productive": 9697, + "productivity": 12800, + "products": 3964, + "prof": 15043, + "prof": 5488, + "profe": 2611, + "profess": 5486, + "professi": 3705, + "profession": 8104, + "profession": 19671, + "professional": 46007, + "professional": 4774, + "professionalism": 41252, + "professionally": 33892, + "professionals": 10165, + "professor": 47302, + "professor": 6092, + "professors": 27758, + "profici": 34685, + "profile": 14291, + "profile": 6444, + "profiles": 22070, + "profiling": 37123, + "profit": 16941, + "profit": 7909, + "profitable": 25465, + "profits": 13410, + "profound": 48245, + "profound": 22998, + "profs": 19260, + "prog": 22219, + "progno": 46070, + "program": 4162, + "program": 2737, + "programme": 6322, + "programmer": 37001, + "programmes": 20468, + "programming": 10831, + "programs": 7345, + "progre": 7069, + "progress": 4421, + "progressi": 23297, + "progressing": 32346, + "progression": 24772, + "progressive": 12208, + "progressives": 41709, + "prohi": 41124, + "prohib": 45040, + "prohibition": 34440, + "proj": 39156, + "proje": 48345, + "projec": 1610, + "project": 15911, + "project": 1965, + "projected": 22873, + "projection": 22384, + "projections": 34638, + "projector": 27816, + "projects": 5090, + "proli": 19710, + "prolife": 32126, + "prolifer": 39018, + "prolific": 27839, + "prolly": 45968, + "prolon": 35379, + "prolonged": 41972, + "prom": 40363, + "prom": 7944, + "prome": 34355, + "promen": 33578, + "promenade": 35522, + "promethe": 44183, + "promin": 35217, + "prominent": 19172, + "promis": 3963, + "promise": 6745, + "promised": 11516, + "promises": 12064, + "promising": 14183, + "promo": 3037, + "promo": 6755, + "promos": 35044, + "promote": 47384, + "promote": 8003, + "promoted": 16395, + "promoter": 33081, + "promotes": 20169, + "promoting": 9695, + "promotion": 9259, + "promotional": 17619, + "promotions": 19142, + "promp": 11671, + "prompt": 20198, + 
"prompted": 45746, + "prompts": 33490, + "proms": 37759, + "pron": 13285, + "prone": 30964, + "pronoun": 23022, + "pronounce": 40489, + "pronounced": 34109, + "pronto": 44296, + "proof": 17020, + "proof": 5248, + "proofing": 35679, + "proofs": 41023, + "prop": 19123, + "prop": 16254, + "propag": 12151, + "propaganda": 14718, + "propane": 45546, + "propel": 48439, + "propeller": 47404, + "proper": 3577, + "proper": 8205, + "properly": 12560, + "properties": 10922, + "property": 26486, + "property": 5043, + "prophe": 9662, + "prophecy": 32501, + "prophet": 15549, + "prophetic": 47476, + "prophets": 39441, + "propor": 35016, + "proportion": 35775, + "proportions": 39391, + "propos": 9455, + "proposal": 12139, + "proposals": 20568, + "propose": 28471, + "proposed": 10615, + "proposes": 27133, + "proposing": 42631, + "proposition": 44780, + "propri": 28243, + "props": 15249, + "propulsion": 49380, + "pros": 33925, + "pros": 14147, + "prosciutto": 46565, + "prose": 47063, + "prose": 28675, + "prosecco": 28839, + "prosecu": 12136, + "prosecution": 30902, + "prosecutor": 23736, + "prosecutors": 31656, + "prosp": 24242, + "prospec": 12693, + "prospect": 11211, + "prospective": 28034, + "prospects": 15372, + "prosper": 16121, + "prosper": 33526, + "prosperity": 17203, + "prosperous": 28252, + "prost": 47923, + "prostate": 28808, + "prostatec": 49064, + "prosthetic": 44602, + "prostitu": 37333, + "protag": 28950, + "protagonist": 38183, + "prote": 1845, + "protec": 5640, + "protect": 25563, + "protect": 4817, + "protected": 12266, + "protecting": 11710, + "protection": 6238, + "protections": 33772, + "protective": 17028, + "protector": 20441, + "protectors": 45039, + "protects": 21889, + "protein": 8088, + "proteins": 28661, + "protest": 6279, + "protestant": 46945, + "protested": 48089, + "protester": 42073, + "protesters": 12660, + "protesting": 18788, + "protestors": 27822, + "protests": 12450, + "proto": 8672, + "proto": 44958, + "protocol": 19938, + "protocols": 39631, + "proton": 40009, + "prototype": 16675, + "prototyping": 42081, + "prou": 5739, + "proud": 11080, + "proud": 1679, + "prouder": 39585, + "proudest": 46806, + "proudly": 11203, + "proudof": 48184, + "proudtobe": 35043, + "prov": 23772, + "prov": 35021, + "prove": 10107, + "proved": 16473, + "proven": 35405, + "proven": 14569, + "provence": 28067, + "prover": 18312, + "proverb": 34419, + "proverbs": 27016, + "proves": 16119, + "provi": 2289, + "provide": 4832, + "provided": 9046, + "providence": 19331, + "provider": 14409, + "providers": 17120, + "provides": 7161, + "providing": 7250, + "provin": 12074, + "province": 8978, + "provinces": 35050, + "provincial": 16002, + "proving": 18055, + "provision": 30148, + "provisional": 36008, + "provisions": 39269, + "provo": 15367, + "provoc": 31618, + "provocative": 43809, + "provoking": 25510, + "provost": 36627, + "prow": 38737, + "prowrestling": 39825, + "prox": 41616, + "proxim": 31436, + "proximity": 38298, + "proxy": 31680, + "prs": 23879, + "pru": 12961, + "pruitt": 39453, + "prun": 29029, + "pruning": 48133, + "pry": 31965, + "pryor": 43375, + "ps": 3982, + "ps": 814, + "psa": 14031, + "psal": 13859, + "psalm": 17995, + "psalms": 35003, + "psb": 37017, + "psc": 43118, + "psd": 28810, + "pse": 19737, + "pse": 5423, + "pseu": 24919, + "pseudo": 46618, + "psg": 17123, + "psi": 45848, + "psi": 24533, + "psic": 29299, + "psis": 33041, + "psl": 21373, + "psn": 36781, + "pso": 27045, + "pson": 7487, + "psori": 44688, + "psp": 32769, + "pss": 35718, + "pss": 42535, + "psst": 47814, + "pst": 
12692, + "psu": 41286, + "psu": 28338, + "psv": 44530, + "psy": 3576, + "psy": 11056, + "psych": 31041, + "psych": 20509, + "psyched": 19932, + "psyched": 35199, + "psychedelic": 23292, + "psychi": 18147, + "psychiatric": 30578, + "psychiatry": 39706, + "psychic": 24916, + "psycho": 6472, + "psycho": 22154, + "psychological": 18153, + "psychologist": 32827, + "psychology": 12352, + "psychop": 30112, + "psychotic": 48774, + "pt": 11139, + "pt": 1459, + "pta": 11586, + "ptbo": 40481, + "ptc": 44646, + "pte": 47804, + "pter": 49323, + "pti": 29375, + "pti": 10491, + "ptic": 20670, + "ption": 3479, + "ptions": 24963, + "pto": 31372, + "pto": 34092, + "pton": 19780, + "pts": 5886, + "ptsd": 23973, + "ptv": 42402, + "pu": 755, + "pu": 11780, + "pub": 20720, + "pub": 6301, + "puberty": 44122, + "pubg": 31496, + "publ": 3434, + "publi": 1617, + "public": 3592, + "public": 2122, + "publica": 49007, + "publication": 13538, + "publications": 27334, + "publichealth": 35872, + "publicity": 20831, + "publicly": 18554, + "publish": 19032, + "published": 4311, + "publisher": 20455, + "publishers": 25222, + "publishes": 35633, + "publishing": 10994, + "publix": 47985, + "pubs": 21099, + "puc": 48779, + "puck": 17550, + "pud": 39234, + "pudding": 14025, + "puddle": 33545, + "pue": 20161, + "pueblo": 33076, + "puer": 8968, + "puerto": 12289, + "puertor": 22757, + "puertorico": 26356, + "puff": 44477, + "puff": 17184, + "puffin": 47632, + "puffs": 47453, + "puffy": 49245, + "pug": 20950, + "pug": 17739, + "pugchat": 42266, + "pugh": 41302, + "puglia": 38345, + "pugs": 39425, + "puj": 46163, + "puja": 33753, + "puk": 31811, + "pul": 2469, + "pul": 40512, + "pula": 45856, + "puli": 47293, + "pulit": 27745, + "pulitzer": 31419, + "pull": 20155, + "pull": 6857, + "pulled": 8525, + "pulling": 12897, + "pullman": 40203, + "pullover": 44020, + "pulls": 16041, + "pulmon": 32613, + "pulmonary": 39132, + "pulp": 25410, + "pulse": 40091, + "pulse": 12485, + "pulses": 42177, + "pulsion": 35398, + "pum": 37497, + "puma": 20858, + "pump": 5179, + "pump": 9173, + "pumped": 12796, + "pumping": 25150, + "pumpkin": 36386, + "pumpkin": 8842, + "pumpkins": 23787, + "pumps": 18540, + "pun": 2707, + "pun": 19929, + "punc": 43907, + "punch": 29332, + "punch": 10730, + "punched": 31689, + "punches": 35279, + "punching": 33468, + "punctu": 31565, + "punctuation": 47051, + "pundit": 41466, + "pune": 32593, + "pune": 14488, + "pung": 45420, + "puni": 11479, + "punish": 34569, + "punished": 31598, + "punisher": 38509, + "punishment": 19099, + "punjab": 19405, + "punjab": 12883, + "punjabi": 25430, + "punk": 28933, + "punk": 7246, + "punks": 47171, + "puns": 35231, + "punt": 32699, + "punta": 34112, + "punter": 47092, + "pup": 11926, + "pup": 11302, + "pupil": 27265, + "pupils": 13628, + "pupp": 7116, + "puppet": 18439, + "puppets": 28475, + "puppies": 14820, + "puppy": 25431, + "puppy": 6829, + "puppylove": 40849, + "pups": 20778, + "pur": 1727, + "pur": 6265, + "pura": 25596, + "puram": 46174, + "purcell": 46065, + "purch": 8384, + "purchase": 5481, + "purchased": 13399, + "purchases": 21887, + "purchasing": 20718, + "purdu": 40691, + "purdue": 22280, + "pure": 14202, + "pure": 5979, + "puree": 45474, + "purely": 32459, + "puremichigan": 39783, + "purest": 45497, + "purge": 33514, + "puri": 16910, + "puri": 21974, + "purification": 47724, + "purity": 29780, + "purple": 17837, + "purple": 5496, + "purpose": 33492, + "purpose": 7391, + "purposes": 22020, + "purr": 49262, + "purr": 46343, + "purse": 16480, + "pursue": 19463, + "pursuing": 
26424, + "pursuit": 16469, + "purée": 40981, + "pus": 13841, + "pusa": 40825, + "push": 16028, + "push": 6831, + "pushaw": 35407, + "pushaward": 35448, + "pushawards": 47184, + "pushed": 16155, + "pushes": 23828, + "pushing": 11549, + "put": 29535, + "put": 1983, + "putin": 10693, + "putnam": 40235, + "puts": 7898, + "putt": 30279, + "putter": 44723, + "putting": 5154, + "puzz": 19760, + "puzzle": 12875, + "puzzles": 27986, + "pv": 14517, + "pv": 13495, + "pvc": 26959, + "pvp": 44172, + "pvt": 29898, + "pw": 19419, + "pw": 16067, + "pwc": 22965, + "px": 24790, + "px": 10262, + "pxrtg": 36262, + "py": 4005, + "py": 7504, + "pye": 31099, + "pyeongchang": 36066, + "pyg": 41450, + "pyram": 14405, + "pyramid": 18725, + "pyramids": 36877, + "pyrene": 36740, + "pyrenees": 39744, + "pyro": 39762, + "python": 13370, + "pz": 48361, + "pé": 43167, + "q": 80, + "q": 336, + "qa": 24944, + "qa": 16360, + "qad": 27844, + "qadri": 35672, + "qaeda": 31246, + "qanda": 48672, + "qanon": 19182, + "qant": 35404, + "qantas": 43250, + "qatar": 32804, + "qatar": 10872, + "qb": 8073, + "qbs": 38188, + "qc": 17406, + "qe": 30974, + "qf": 27215, + "qi": 25054, + "qi": 11256, + "qing": 46522, + "qing": 34339, + "ql": 28366, + "qld": 23039, + "qld": 13765, + "qldpol": 42296, + "qm": 42148, + "qotd": 24504, + "qpr": 24788, + "qq": 31960, + "qr": 18193, + "qs": 14364, + "qt": 15013, + "qtr": 44803, + "qu": 666, + "qu": 28646, + "qua": 20363, + "quack": 45575, + "quad": 11656, + "quad": 13419, + "quadcopter": 39792, + "quadru": 35831, + "quaid": 34265, + "quail": 34392, + "quaint": 45976, + "quake": 8421, + "quaker": 43395, + "quakes": 24572, + "qual": 9979, + "qual": 32405, + "qualcomm": 38683, + "quali": 4574, + "qualification": 21508, + "qualifications": 35225, + "qualified": 11927, + "qualifier": 18733, + "qualifiers": 21388, + "qualifies": 35820, + "qualify": 17019, + "qualifying": 11895, + "qualitative": 45847, + "qualities": 20488, + "quality": 28545, + "quality": 3027, + "quan": 11669, + "quan": 27490, + "quand": 28198, + "quant": 15050, + "quanti": 31540, + "quantitative": 40583, + "quantities": 33917, + "quantity": 26920, + "quantum": 15320, + "quar": 3856, + "quare": 42549, + "quarry": 27601, + "quart": 7851, + "quarter": 8816, + "quarter": 6632, + "quarterback": 16545, + "quarterfinal": 37992, + "quarterfinals": 28971, + "quarterly": 23350, + "quarters": 10146, + "quartet": 18056, + "quartz": 17752, + "quat": 25715, + "quattro": 40300, + "quay": 40276, + "quay": 17304, + "que": 1147, + "que": 2319, + "quebec": 15373, + "queen": 6407, + "queen": 2997, + "queenof": 44398, + "queens": 22943, + "queens": 9330, + "queensland": 15168, + "queer": 38874, + "queer": 18161, + "quel": 39774, + "quel": 21879, + "quen": 23876, + "quen": 38324, + "quent": 23808, + "quentin": 27530, + "quer": 17378, + "quer": 26859, + "quered": 23210, + "queries": 32958, + "querque": 30338, + "query": 27464, + "ques": 25328, + "ques": 7715, + "queso": 40110, + "quest": 31653, + "quest": 4846, + "questi": 2391, + "question": 18961, + "question": 4382, + "questionable": 30733, + "questioned": 31847, + "questioning": 24887, + "questions": 3883, + "quests": 44611, + "quet": 8513, + "quets": 39055, + "quetta": 38326, + "quette": 18993, + "queu": 32705, + "queue": 18549, + "queues": 40649, + "queuing": 44082, + "quez": 18677, + "quezon": 41117, + "qui": 1912, + "qui": 18046, + "quic": 26474, + "quiche": 47723, + "quick": 5969, + "quick": 3712, + "quicker": 29211, + "quickest": 37734, + "quickly": 7787, + "quid": 30732, + "quie": 43875, + "quien": 
43482, + "quiere": 42723, + "quiero": 32567, + "quiet": 17853, + "quiet": 7557, + "quietly": 22208, + "quig": 44690, + "quil": 12305, + "quill": 48951, + "quilt": 23977, + "quilted": 46052, + "quin": 8607, + "quin": 17167, + "quincy": 27640, + "quind": 32339, + "quinn": 12306, + "quinoa": 26703, + "quins": 39701, + "quint": 26898, + "quinta": 47446, + "quinte": 22098, + "quintess": 37538, + "quintet": 35125, + "quipment": 42813, + "quir": 15943, + "quirky": 25044, + "quis": 15064, + "quist": 25128, + "quit": 19358, + "quit": 11140, + "quite": 4135, + "quito": 35828, + "quits": 32505, + "quitting": 33871, + "quity": 33133, + "quiz": 31197, + "quiz": 8344, + "quizz": 35041, + "quo": 3046, + "quo": 28127, + "quoi": 45549, + "quot": 5452, + "quot": 47587, + "quota": 42097, + "quotation": 49195, + "quote": 15446, + "quote": 4020, + "quoted": 27706, + "quoteoftheday": 19975, + "quotes": 5808, + "quoting": 31651, + "qur": 37782, + "quran": 19690, + "qureshi": 46307, + "qvist": 42322, + "qx": 45038, + "r": 81, + "r": 337, + "ra": 559, + "ra": 1735, + "raa": 44344, + "rab": 14816, + "rab": 33224, + "rabb": 6875, + "rabbi": 20959, + "rabbit": 10274, + "rabbits": 27028, + "rabhu": 25806, + "rable": 10182, + "rac": 1773, + "rac": 30462, + "raccoon": 29516, + "race": 10978, + "race": 2471, + "racec": 18814, + "racecourse": 25036, + "raced": 36021, + "racer": 16798, + "racers": 33603, + "races": 8605, + "raceway": 24650, + "rach": 6876, + "rach": 33429, + "racha": 21952, + "racha": 35022, + "rachael": 29095, + "rachel": 13511, + "rachel": 8029, + "raci": 33381, + "racial": 13801, + "racially": 43577, + "racing": 23306, + "racing": 3699, + "racism": 11276, + "racist": 9684, + "racists": 41777, + "rack": 24600, + "rack": 12034, + "racket": 37691, + "racks": 21191, + "rad": 4473, + "rad": 8238, + "rada": 30437, + "radar": 9672, + "radcliffe": 33096, + "rade": 44494, + "rade": 17911, + "rader": 45002, + "radford": 45800, + "radha": 43122, + "radi": 5772, + "radial": 42028, + "radiance": 45670, + "radiant": 25614, + "radiation": 18210, + "radiator": 39372, + "radic": 18082, + "radical": 13712, + "radicals": 45903, + "radio": 7176, + "radio": 2638, + "radioactive": 34704, + "radiodisney": 36483, + "radiohead": 39472, + "radiology": 29684, + "radios": 43669, + "radish": 37789, + "radius": 37570, + "rado": 29784, + "rae": 21646, + "rae": 15051, + "rael": 45390, + "raer": 44561, + "raf": 11495, + "raf": 11490, + "rafa": 14352, + "rafa": 24850, + "rafael": 38221, + "rafael": 19216, + "rafaelnadal": 49219, + "raff": 34900, + "raffic": 32928, + "raffle": 13752, + "raffles": 43489, + "rafi": 35304, + "raft": 9233, + "rafting": 36309, + "rag": 13958, + "rag": 20687, + "rage": 8593, + "rages": 34253, + "ragh": 35642, + "ragha": 40972, + "raging": 25015, + "ragn": 24125, + "ragnar": 34385, + "ragnarok": 41856, + "ragon": 34768, + "rags": 47838, + "rah": 12277, + "rah": 8766, + "raheem": 43317, + "rahim": 24152, + "rahman": 19680, + "rahu": 13129, + "rahul": 37239, + "rahul": 17440, + "rahulg": 27510, + "rahulgandhi": 28293, + "rai": 9165, + "rai": 9638, + "raid": 6877, + "raided": 43417, + "raider": 27368, + "raider": 21455, + "raidernation": 47901, + "raiders": 11817, + "raids": 26655, + "rail": 4573, + "rail": 6879, + "raila": 47273, + "railminindia": 35557, + "railroad": 17080, + "rails": 23427, + "railway": 27614, + "railway": 7856, + "railwayana": 46750, + "railways": 20765, + "raim": 45785, + "rain": 3128, + "rain": 2443, + "raina": 30564, + "rainbow": 24562, + "rainbow": 6286, + "rainbows": 30483, + "raine": 
49038, + "raine": 6871, + "rained": 32310, + "rainf": 15024, + "rainfall": 15350, + "rainforest": 22823, + "rainier": 37850, + "raining": 13964, + "rains": 14272, + "rainy": 10222, + "rais": 14729, + "raise": 24249, + "raise": 5078, + "raised": 6027, + "raiser": 33555, + "raises": 13297, + "raisethe": 47109, + "raisin": 36864, + "raising": 6883, + "raj": 5958, + "raj": 10813, + "raja": 46069, + "raja": 19150, + "rajan": 46595, + "rajas": 16185, + "rajasthan": 18017, + "raje": 21899, + "rajesh": 43602, + "raji": 27569, + "rajini": 29600, + "rajini": 40622, + "rajinikanth": 32922, + "rajiv": 40197, + "rajkumar": 49304, + "rajput": 47572, + "raju": 47029, + "rak": 13523, + "rak": 26287, + "rake": 26825, + "rake": 32712, + "rakesh": 41083, + "ral": 8062, + "ral": 1406, + "rale": 14192, + "raleigh": 18207, + "rall": 23249, + "rallies": 25230, + "rally": 18882, + "rally": 5041, + "rallying": 36836, + "ralph": 25290, + "ralph": 12234, + "ram": 1976, + "ram": 2007, + "rama": 22112, + "ramad": 12736, + "ramadan": 15547, + "ramadhan": 47415, + "raman": 39816, + "ramapho": 43963, + "ramaphosa": 44993, + "ramatta": 49112, + "rambo": 41855, + "ramcharan": 45275, + "rame": 47745, + "ramen": 18892, + "ramesh": 48640, + "ramesh": 40186, + "rami": 43016, + "ramirez": 23877, + "ramon": 27958, + "ramone": 47201, + "ramos": 21046, + "ramp": 14271, + "rampage": 32077, + "rampant": 41985, + "ramps": 35257, + "rams": 10292, + "ramsay": 26259, + "ramsey": 19215, + "ran": 1433, + "ran": 4031, + "rana": 22143, + "ranbir": 40881, + "rance": 29034, + "ranch": 43955, + "ranch": 10659, + "rancho": 26258, + "rand": 5628, + "rand": 18718, + "randall": 23639, + "rande": 21469, + "randolph": 29899, + "random": 11396, + "random": 6160, + "randomly": 17272, + "rands": 39153, + "randy": 29479, + "randy": 13279, + "rane": 28852, + "rang": 4043, + "rang": 24377, + "range": 13627, + "range": 3818, + "ranger": 31472, + "ranger": 13593, + "rangers": 7664, + "ranges": 25685, + "ranging": 25946, + "rani": 29264, + "rani": 22631, + "rank": 11501, + "ranked": 8307, + "rankin": 37539, + "ranking": 12347, + "rankings": 12596, + "ranks": 14469, + "rano": 18608, + "rans": 46259, + "ransom": 28523, + "ransom": 34646, + "ransomware": 33815, + "rant": 46467, + "rant": 9819, + "rants": 34014, + "ranveer": 32402, + "ranveer": 41482, + "ranveerofficial": 42116, + "rao": 16913, + "rap": 7773, + "rap": 7348, + "rape": 46099, + "rape": 10070, + "raped": 23700, + "rapha": 22754, + "raphael": 30091, + "rapi": 8610, + "rapid": 47697, + "rapid": 12205, + "rapidly": 16710, + "rapids": 18848, + "raping": 44926, + "rapist": 33360, + "rapp": 19283, + "rapper": 11860, + "rappers": 30315, + "rapping": 42864, + "raps": 37887, + "raptor": 26762, + "raptors": 17035, + "raq": 39787, + "raq": 43312, + "raqqa": 47074, + "raquel": 44338, + "rar": 26819, + "rar": 24605, + "rard": 21012, + "rare": 18992, + "rare": 3865, + "rarely": 17315, + "rarest": 43237, + "rarity": 45862, + "ras": 23492, + "ras": 8224, + "rasc": 30085, + "rascal": 43481, + "rash": 14917, + "rash": 30608, + "rashad": 46527, + "rasheed": 41638, + "rashi": 19426, + "rashid": 26757, + "rasp": 10487, + "raspberries": 37742, + "raspberry": 40162, + "raspberry": 13615, + "raspberrypi": 43934, + "rass": 45654, + "rasta": 47002, + "rat": 3806, + "rat": 8985, + "rata": 28568, + "ratchet": 25078, + "rate": 5068, + "rated": 8183, + "rates": 6864, + "rath": 18268, + "rath": 39772, + "rather": 5252, + "rati": 11486, + "rating": 10567, + "ratings": 14176, + "ratio": 15893, + "ration": 27002, + "ration": 35662, 
+ "rational": 33086, + "ratna": 49078, + "ratri": 32288, + "rats": 19043, + "ratt": 20737, + "ratt": 34785, + "rattle": 40824, + "rattle": 41839, + "rau": 27744, + "raul": 30218, + "raun": 41169, + "rav": 14367, + "rav": 23606, + "rave": 38784, + "rave": 17601, + "ravel": 27927, + "raven": 10269, + "raven": 16803, + "ravens": 17946, + "ravi": 22947, + "ravi": 19538, + "ravin": 39099, + "raving": 45807, + "raviol": 41104, + "ravioli": 43460, + "raw": 10166, + "raw": 6323, + "rawlings": 40662, + "rax": 38520, + "ray": 5312, + "ray": 3077, + "raya": 29991, + "raymond": 16683, + "rayn": 47852, + "rayon": 47900, + "rays": 11064, + "raz": 9700, + "raz": 19087, + "raza": 37724, + "razer": 33832, + "razor": 24934, + "razor": 21300, + "razz": 43769, + "rb": 12740, + "rb": 7477, + "rbc": 37500, + "rbi": 15687, + "rbs": 29102, + "rc": 7575, + "rc": 7457, + "rca": 33942, + "rcb": 45240, + "rcmp": 31489, + "rcn": 49370, + "rctid": 49223, + "rd": 13501, + "rd": 1973, + "rda": 45755, + "rdr": 44364, + "rds": 32378, + "re": 515, + "re": 810, + "rea": 11521, + "reach": 4483, + "reach": 4279, + "reached": 6878, + "reaches": 14462, + "reaching": 11358, + "react": 36566, + "react": 15065, + "reacted": 42515, + "reacting": 40595, + "reaction": 7189, + "reactions": 18438, + "reactive": 42072, + "reactjs": 46173, + "reactor": 32037, + "reacts": 23115, + "read": 933, + "read": 1199, + "reader": 9884, + "readers": 10335, + "readiness": 28131, + "reading": 17556, + "reading": 2337, + "readingfc": 47428, + "readings": 23361, + "reads": 6597, + "ready": 17351, + "ready": 1112, + "reagan": 17767, + "real": 2017, + "real": 1532, + "realdonaldtrump": 7025, + "reale": 5930, + "realest": 45855, + "realestate": 32937, + "realestate": 6569, + "reali": 4185, + "realis": 38114, + "realise": 14773, + "realised": 17945, + "realising": 39537, + "realism": 20024, + "realist": 30248, + "realistic": 16157, + "realities": 32443, + "reality": 46802, + "reality": 5004, + "realization": 40402, + "realize": 7538, + "realized": 10489, + "realizes": 42918, + "realizing": 23284, + "reall": 39686, + "really": 43249, + "really": 1414, + "realm": 23083, + "realmadrid": 27866, + "realms": 43033, + "realness": 46761, + "realtime": 44002, + "realtime": 38203, + "realtor": 18038, + "realtors": 31759, + "realty": 20471, + "ream": 37242, + "ream": 15219, + "rean": 48477, + "reap": 31334, + "reaper": 29922, + "rear": 39652, + "rear": 10223, + "reas": 9121, + "reason": 12882, + "reason": 3893, + "reasonable": 18558, + "reasonably": 38589, + "reasoning": 30341, + "reasons": 5686, + "reau": 32398, + "reb": 12370, + "reb": 18796, + "reba": 48543, + "rebate": 43817, + "rebe": 25227, + "rebec": 10774, + "rebecca": 12892, + "rebel": 8185, + "rebel": 12248, + "rebellion": 22170, + "rebels": 13623, + "rebirth": 33303, + "reboot": 22385, + "reborn": 30229, + "reboun": 43381, + "rebound": 31280, + "rebounds": 19190, + "rebs": 28164, + "rebu": 43162, + "rebuild": 20022, + "rebuilding": 30880, + "rebuilt": 33137, + "rec": 1020, + "rec": 11243, + "recall": 15151, + "recalled": 32142, + "recalling": 47855, + "recalls": 24740, + "recap": 29816, + "recap": 8337, + "recaps": 47997, + "recard": 35536, + "rece": 1890, + "recei": 2148, + "receip": 38503, + "receipt": 30479, + "receipts": 41181, + "receive": 4800, + "received": 4178, + "receiver": 17659, + "receivers": 45294, + "receives": 10027, + "receiving": 7252, + "recent": 3969, + "recently": 4482, + "recep": 17450, + "reception": 8364, + "receptions": 46881, + "receptor": 41835, + "recess": 38182, + "recession": 
27176, + "recharge": 29396, + "rechargeable": 37516, + "reci": 2037, + "recipe": 28923, + "recipe": 4614, + "recipeoftheday": 38727, + "recipes": 9243, + "recipi": 10136, + "recipient": 13703, + "recipients": 18940, + "recipro": 41789, + "recital": 23457, + "recite": 48824, + "reck": 11715, + "reckless": 26284, + "reckon": 23854, + "recl": 42277, + "reclaim": 35969, + "reclaimed": 32648, + "reco": 2535, + "reco": 46038, + "recogn": 6343, + "recogni": 5329, + "recognise": 19824, + "recognised": 20986, + "recognising": 48423, + "recognition": 9415, + "recognizable": 47240, + "recognize": 10905, + "recognized": 9929, + "recognizes": 26909, + "recognizing": 19666, + "recomm": 4540, + "recommend": 11628, + "recommend": 8942, + "recommendation": 20118, + "recommendations": 16516, + "recommended": 11100, + "recommending": 44301, + "recommends": 22940, + "recon": 15371, + "recon": 28996, + "reconciliation": 26451, + "reconstruction": 24955, + "recor": 1723, + "record": 21328, + "record": 2717, + "recorded": 9392, + "recorder": 26747, + "recording": 48237, + "recording": 6942, + "recordings": 19715, + "records": 4529, + "recover": 16785, + "recovered": 16444, + "recovering": 19005, + "recovers": 47935, + "recovery": 6591, + "recre": 22148, + "recreate": 29775, + "recreated": 40888, + "recreating": 48224, + "recreation": 17331, + "recreational": 24329, + "recru": 4745, + "recruit": 9011, + "recruit": 15585, + "recruited": 36518, + "recruiter": 43120, + "recruiters": 46542, + "recruiting": 10533, + "recruitment": 10541, + "recruits": 22647, + "recs": 33069, + "rectan": 43041, + "rectangular": 43321, + "rector": 41585, + "recu": 26798, + "recur": 19983, + "recurring": 35912, + "recy": 6790, + "recycla": 40659, + "recyclable": 48907, + "recycle": 19366, + "recycled": 16829, + "recycling": 12566, + "red": 1893, + "red": 736, + "redbubble": 46137, + "redbull": 29483, + "redbull": 29219, + "redcarpet": 32259, + "redcross": 30659, + "redd": 22149, + "redd": 40618, + "redding": 41061, + "reddish": 43383, + "reddit": 15226, + "reddy": 23028, + "rede": 10913, + "redeem": 37449, + "redefining": 46352, + "redemption": 20233, + "redesign": 24188, + "redesigned": 33111, + "redevelopment": 30322, + "redhead": 36267, + "redi": 7976, + "redman": 44753, + "redmond": 39627, + "rednation": 28180, + "rednationrising": 28262, + "redneck": 39105, + "redness": 22626, + "redo": 42524, + "redon": 48506, + "redro": 37722, + "reds": 11221, + "redskins": 19023, + "redsox": 19144, + "reduc": 5015, + "reduce": 6604, + "reduced": 10821, + "reduces": 20539, + "reducing": 13836, + "reduction": 12219, + "reductions": 48263, + "redux": 43014, + "redvelvet": 41845, + "redwings": 31058, + "redwood": 31748, + "ree": 9282, + "ree": 5813, + "reebok": 26734, + "reece": 30457, + "reed": 26209, + "reed": 10435, + "reedus": 32865, + "reef": 46557, + "reef": 15624, + "reefs": 34459, + "reel": 34467, + "reel": 17166, + "reels": 48127, + "reem": 48891, + "reen": 21638, + "reen": 23679, + "rees": 18314, + "reese": 20929, + "reeves": 23060, + "ref": 4067, + "ref": 9591, + "refe": 5624, + "refer": 18425, + "refer": 22325, + "referee": 20398, + "referees": 45583, + "referen": 13535, + "reference": 10214, + "references": 24009, + "referendum": 16732, + "referr": 47784, + "referral": 30219, + "referred": 22969, + "referring": 29797, + "refers": 30069, + "refill": 37859, + "refin": 13455, + "refined": 26098, + "refinery": 31393, + "refining": 48406, + "reflec": 4608, + "reflect": 13373, + "reflected": 28732, + "reflecting": 19700, + "reflection": 11884, 
+ "reflections": 16647, + "reflective": 27008, + "reflects": 15821, + "reflex": 45756, + "reflex": 36050, + "reform": 45678, + "reform": 8875, + "reformation": 45119, + "reformed": 40880, + "reforms": 19274, + "refr": 34850, + "refre": 11995, + "refresh": 17836, + "refresh": 23288, + "refreshed": 35925, + "refresher": 41481, + "refreshing": 14159, + "refreshments": 31127, + "refriger": 21076, + "refrigerator": 36662, + "refs": 35595, + "refu": 3545, + "refuge": 5638, + "refuge": 17432, + "refugee": 11556, + "refugees": 42687, + "refugees": 8316, + "refund": 28899, + "refur": 15519, + "refurbi": 18259, + "refurbished": 26190, + "refurbishment": 35803, + "refusal": 46547, + "refuse": 16412, + "refused": 17190, + "refuses": 20085, + "refusing": 26704, + "reg": 5472, + "reg": 12353, + "regain": 37510, + "regal": 31512, + "regal": 25028, + "regan": 34062, + "regar": 5881, + "regard": 21801, + "regarded": 32017, + "regarding": 8493, + "regardless": 17220, + "regards": 23079, + "regatta": 26316, + "regen": 46545, + "regency": 29341, + "regeneration": 29257, + "regent": 30455, + "regents": 46710, + "regg": 12757, + "reggae": 37821, + "reggae": 15214, + "reggie": 21872, + "regi": 1608, + "regime": 11378, + "regiment": 18603, + "regin": 23287, + "regina": 16841, + "region": 16542, + "region": 4341, + "regional": 5552, + "regionals": 26043, + "regions": 14530, + "regis": 28094, + "register": 3967, + "registered": 10254, + "registering": 33510, + "registr": 29193, + "registration": 7302, + "registrations": 38423, + "registry": 30020, + "rego": 47351, + "regram": 30329, + "regrann": 48802, + "regre": 8627, + "regression": 43733, + "regret": 14374, + "regrets": 23231, + "regu": 3411, + "regui": 46722, + "regul": 11847, + "regular": 14882, + "regular": 6307, + "regularly": 17263, + "regulat": 14575, + "regulate": 33494, + "regulated": 31384, + "regulating": 48156, + "regulation": 14267, + "regulations": 16654, + "regulator": 30364, + "regulators": 35837, + "regulatory": 17717, + "reh": 21492, + "reha": 10193, + "rehab": 16973, + "rehabil": 17930, + "rehabilitation": 21042, + "rehear": 7273, + "rehearsal": 11482, + "rehearsals": 17977, + "rehearsing": 23125, + "rehman": 39206, + "rei": 15343, + "rei": 26033, + "reic": 41230, + "reich": 48589, + "reich": 28929, + "reid": 45125, + "reid": 11744, + "reig": 13092, + "reign": 41419, + "reign": 14827, + "reigning": 28409, + "reigns": 21217, + "reiki": 46960, + "reilly": 28120, + "reim": 35421, + "reimagined": 46799, + "reimbur": 39857, + "rein": 9240, + "rein": 45009, + "reina": 43847, + "reinde": 23810, + "reindeer": 25072, + "reinfor": 48161, + "reinforced": 41909, + "reinst": 33969, + "reinvent": 38171, + "reissue": 34042, + "reiter": 35394, + "rejec": 9958, + "reject": 22435, + "rejected": 17505, + "rejection": 32264, + "rejects": 23155, + "rejo": 20150, + "rejoice": 24712, + "rejuven": 26332, + "rek": 47542, + "rek": 19201, + "rel": 1825, + "rel": 5233, + "rela": 4362, + "reland": 15220, + "relat": 27192, + "relatable": 31010, + "relate": 17520, + "related": 5880, + "relates": 36064, + "relating": 27373, + "relation": 4561, + "relation": 16207, + "relations": 10100, + "relationship": 47239, + "relationship": 5837, + "relationships": 10610, + "relative": 17265, + "relatively": 18351, + "relatives": 21981, + "relax": 6777, + "relax": 9035, + "relaxation": 22194, + "relaxed": 18999, + "relaxing": 10256, + "relay": 12403, + "relays": 28404, + "rele": 1602, + "release": 29100, + "release": 2706, + "released": 3410, + "releases": 7393, + "releasethe": 44008, + 
"releasing": 10321, + "releg": 23378, + "relegated": 45884, + "relegation": 35040, + "relent": 22213, + "relentless": 27207, + "relessly": 33927, + "relev": 9349, + "relevance": 31400, + "relevant": 10568, + "reli": 2674, + "reliability": 27220, + "reliable": 13714, + "reliance": 27727, + "relic": 27802, + "relics": 43208, + "relief": 7518, + "relies": 41579, + "relieve": 28623, + "relieved": 36597, + "religi": 4940, + "religion": 8803, + "religions": 31189, + "religious": 8289, + "relish": 35550, + "relive": 23939, + "reliving": 47558, + "rell": 28802, + "rell": 7127, + "rella": 9952, + "relle": 31390, + "reloaded": 38908, + "relocated": 46791, + "relocation": 39198, + "rels": 23320, + "relu": 32058, + "reluct": 32549, + "reluctant": 45552, + "rely": 4158, + "relying": 42168, + "rem": 15098, + "rem": 21637, + "rema": 4569, + "remain": 29144, + "remain": 6415, + "remainder": 41672, + "remained": 23714, + "remaining": 11392, + "remains": 6807, + "remake": 16234, + "remark": 11136, + "remarkable": 12404, + "remarkably": 39087, + "remarks": 15001, + "remastered": 24932, + "rematch": 26473, + "rembrandt": 45972, + "reme": 20071, + "remedi": 18442, + "remedies": 25581, + "remedy": 25794, + "remem": 7966, + "rememb": 7062, + "remember": 22045, + "remember": 2195, + "remembered": 11763, + "remembering": 8135, + "remembers": 12551, + "remembrance": 40321, + "remembrance": 15860, + "remembranceday": 48333, + "rement": 7173, + "rements": 12667, + "remi": 41693, + "remin": 3216, + "remind": 9868, + "reminded": 12309, + "reminder": 5565, + "reminders": 34121, + "reminding": 19976, + "reminds": 8303, + "remington": 43527, + "reminis": 17723, + "reminiscent": 41704, + "reminiscing": 32552, + "remix": 8519, + "remixes": 31011, + "remn": 29127, + "remnants": 39032, + "remo": 4064, + "remo": 33259, + "remodel": 34159, + "remodel": 37495, + "remodeling": 41432, + "remote": 47163, + "remote": 9687, + "remotely": 32375, + "removable": 44095, + "removal": 13679, + "remove": 9709, + "removed": 10289, + "remover": 44267, + "removes": 29018, + "removing": 18504, + "remy": 30434, + "ren": 737, + "ren": 2596, + "rena": 12591, + "renais": 15409, + "renaissance": 16007, + "renal": 36096, + "renamed": 31535, + "renault": 17600, + "rence": 19245, + "rence": 1553, + "rences": 8545, + "rend": 33932, + "rend": 22851, + "render": 39752, + "render": 13024, + "rendered": 23652, + "rendering": 21339, + "renders": 39419, + "rendez": 43293, + "rendezvous": 45644, + "rendition": 28891, + "rendon": 46272, + "rendous": 49403, + "rends": 38842, + "rene": 15438, + "rene": 12597, + "renee": 23480, + "reneg": 29909, + "renegade": 41229, + "renergy": 37151, + "renew": 6645, + "renew": 22015, + "renewable": 31269, + "renewable": 15941, + "renewableenergy": 33357, + "renewables": 21619, + "renewal": 21270, + "renewed": 20524, + "renfre": 45043, + "reng": 36795, + "reno": 11520, + "reno": 12831, + "renov": 9984, + "renovated": 23839, + "renovation": 17121, + "renovations": 31311, + "renowned": 14727, + "rens": 18183, + "renshaw": 44445, + "rent": 17377, + "rent": 1609, + "rental": 12193, + "rentals": 24105, + "rented": 35932, + "rential": 31692, + "renting": 37662, + "rently": 2615, + "rents": 31109, + "reo": 15963, + "reo": 26854, + "reon": 15761, + "reopen": 26883, + "reopened": 32868, + "reopening": 36663, + "reopens": 40644, + "rep": 4229, + "rep": 6487, + "repair": 8419, + "repaired": 32953, + "repairing": 38534, + "repairs": 16297, + "repar": 34065, + "repe": 5785, + "repeal": 42622, + "repeal": 23938, + "repeat": 10192, + 
"repeated": 27904, + "repeatedly": 26630, + "repeating": 33834, + "repeats": 39158, + "repell": 46235, + "repent": 47261, + "reper": 29085, + "repet": 38533, + "repl": 13047, + "replac": 6069, + "replace": 9466, + "replaceable": 47762, + "replaced": 13200, + "replacement": 10835, + "replaces": 27781, + "replacing": 18647, + "replay": 16875, + "repleni": 44839, + "replic": 21651, + "replica": 18125, + "replied": 24238, + "replies": 18808, + "reply": 8965, + "replying": 47599, + "repor": 2628, + "report": 2417, + "reported": 7598, + "reportedly": 10953, + "reporter": 11019, + "reporters": 18454, + "reporting": 9218, + "reports": 4908, + "reposit": 41276, + "repository": 46977, + "repost": 33147, + "repost": 7217, + "repostapp": 38388, + "reposting": 20223, + "reppin": 19163, + "repping": 22574, + "repre": 3397, + "represent": 8293, + "represent": 8406, + "representation": 13520, + "representative": 13175, + "representatives": 15591, + "represented": 12299, + "representing": 7561, + "represents": 14433, + "repri": 31854, + "reproduction": 35714, + "reproductive": 25522, + "reps": 14265, + "reptile": 36938, + "reptiles": 38679, + "republic": 6376, + "republic": 7185, + "republican": 9842, + "republicans": 12384, + "repur": 41852, + "req": 42411, + "requ": 10664, + "reque": 9539, + "request": 7813, + "requested": 16199, + "requesting": 33245, + "requests": 17087, + "requi": 4863, + "requiem": 40316, + "require": 14437, + "required": 8500, + "requirement": 27146, + "requirements": 12860, + "requires": 13396, + "requiring": 33425, + "requis": 42602, + "rer": 41295, + "rer": 3407, + "rera": 14301, + "rero": 21860, + "rers": 18869, + "res": 4466, + "res": 934, + "resc": 3956, + "rescheduled": 43553, + "rescu": 8618, + "rescue": 28567, + "rescue": 5718, + "rescued": 11919, + "rescues": 32439, + "rescuing": 43770, + "rese": 13000, + "resear": 6090, + "research": 25694, + "research": 2379, + "researched": 42733, + "researcher": 18334, + "researchers": 9522, + "researching": 24544, + "reseller": 35391, + "resemb": 16916, + "resemblance": 26856, + "resemble": 37230, + "resembles": 35417, + "reser": 16420, + "reserv": 11906, + "reservation": 20289, + "reservations": 19307, + "reserve": 6911, + "reserved": 19796, + "reserves": 19705, + "reservoir": 20574, + "reset": 26250, + "resh": 47432, + "reshi": 39435, + "resi": 2152, + "residen": 22311, + "residence": 11672, + "residences": 38855, + "residency": 18545, + "resident": 9016, + "residente": 44637, + "residentevil": 48393, + "residential": 11002, + "residents": 6008, + "resign": 23584, + "resignation": 24779, + "resigned": 31014, + "resigns": 29738, + "resil": 10932, + "resili": 39212, + "resilience": 15271, + "resilient": 24694, + "resin": 24156, + "resist": 37345, + "resist": 9587, + "resistance": 7392, + "resistant": 17542, + "resisting": 43679, + "resolution": 9977, + "resolutions": 26816, + "resolve": 20787, + "resolved": 28807, + "reson": 18092, + "resonance": 42310, + "resort": 6594, + "resorts": 18839, + "resource": 43729, + "resource": 9760, + "resources": 6723, + "respec": 7466, + "respect": 31411, + "respect": 4916, + "respected": 19126, + "respectful": 24379, + "respecting": 36172, + "respective": 25817, + "respectively": 28794, + "respects": 23553, + "respir": 20771, + "respiratory": 24483, + "respon": 2421, + "respond": 12355, + "responded": 21121, + "respondents": 49253, + "responders": 25155, + "responding": 18037, + "responds": 17436, + "response": 5399, + "responses": 19006, + "responsi": 5490, + "responsibilities": 30375, + 
"responsibility": 11272, + "responsible": 8936, + "responsibly": 33675, + "responsive": 21544, + "ress": 34651, + "ress": 13629, + "resso": 15133, + "rest": 10974, + "rest": 2539, + "restart": 37378, + "restaur": 3775, + "restaurant": 41930, + "restaurant": 4489, + "restaurants": 11714, + "rested": 46020, + "resting": 18044, + "restless": 36724, + "restling": 30076, + "resto": 11118, + "resto": 41666, + "restock": 34060, + "restocked": 36966, + "restor": 8984, + "restoration": 11989, + "restorative": 46509, + "restore": 14008, + "restored": 14238, + "restoring": 24406, + "restra": 25424, + "restric": 11036, + "restricted": 27197, + "restriction": 44282, + "restrictions": 19884, + "restroom": 43423, + "restructuring": 43260, + "rests": 33775, + "resu": 10095, + "resul": 2655, + "result": 5659, + "resulted": 26449, + "resulting": 24581, + "results": 3790, + "resume": 15077, + "resumes": 30268, + "resur": 14865, + "resurg": 45962, + "resurgence": 47692, + "resurrec": 18487, + "resurrection": 25811, + "resusc": 47523, + "ret": 20500, + "ret": 10048, + "reta": 20153, + "retail": 14910, + "retail": 6455, + "retailer": 22549, + "retailers": 19418, + "retain": 24430, + "retained": 42737, + "retaining": 35571, + "retains": 42583, + "retali": 33101, + "retar": 29964, + "retarded": 44111, + "retention": 26247, + "rethink": 29078, + "rethinking": 42951, + "reti": 4721, + "retin": 31270, + "retina": 36919, + "retire": 18846, + "retired": 11477, + "retirement": 9205, + "retires": 29060, + "retiring": 21200, + "retrac": 32735, + "retreat": 11210, + "retri": 16918, + "retriever": 28394, + "retro": 6535, + "retro": 7755, + "retrogamer": 47220, + "retrogaming": 11316, + "retrospective": 27105, + "rett": 41082, + "rett": 8425, + "rette": 33066, + "return": 43042, + "return": 3458, + "returned": 10476, + "returning": 9290, + "returns": 5020, + "retwee": 48190, + "retweet": 3195, + "retweeted": 12705, + "retweeting": 32345, + "retweets": 10160, + "rety": 41550, + "reu": 20255, + "reu": 40371, + "reuben": 40450, + "reunion": 10247, + "reunite": 26179, + "reunited": 13516, + "reusable": 30395, + "reuse": 26535, + "reut": 15210, + "reuters": 15569, + "rev": 8424, + "rev": 11789, + "revamp": 29819, + "revamped": 36420, + "revan": 45277, + "reve": 3115, + "reveal": 8052, + "revealed": 7171, + "revealing": 21321, + "reveals": 6621, + "revel": 14133, + "revelation": 24053, + "revelations": 36163, + "reven": 10171, + "revenge": 12717, + "revenue": 10637, + "revenues": 33348, + "rever": 14829, + "rever": 41913, + "revere": 44187, + "reverend": 34407, + "revers": 20726, + "reversal": 33367, + "reverse": 12812, + "reversed": 42485, + "reversi": 31601, + "reversible": 34212, + "revi": 8317, + "review": 2268, + "reviewed": 16678, + "reviewer": 36409, + "reviewers": 48195, + "reviewing": 20458, + "reviews": 7227, + "revise": 46801, + "revised": 22806, + "revising": 46882, + "revision": 20335, + "revisit": 26568, + "revisited": 34302, + "revisiting": 33144, + "revit": 26367, + "revitalization": 46923, + "revival": 14142, + "revive": 26450, + "revived": 42912, + "revo": 28660, + "revol": 13447, + "revolt": 31697, + "revolu": 4900, + "revolution": 17699, + "revolution": 6644, + "revolutionary": 14734, + "revolver": 38747, + "revolving": 47230, + "revs": 49286, + "revue": 43428, + "rew": 37564, + "rewar": 15857, + "reward": 11223, + "rewarded": 27163, + "rewarding": 23351, + "rewards": 15235, + "rewatch": 35610, + "rewatching": 41287, + "rewind": 26867, + "rewrite": 45218, + "rex": 13002, + "rex": 10904, + "rexperience": 33924, 
+ "rey": 9681, + "rey": 4517, + "reyes": 18255, + "reykja": 47571, + "reyn": 11998, + "reynolds": 14309, + "reys": 48284, + "rez": 27597, + "rez": 15192, + "reza": 35888, + "rf": 35529, + "rf": 16368, + "rfc": 19003, + "rfid": 40204, + "rg": 33055, + "rg": 14897, + "rgb": 36128, + "rgv": 33685, + "rh": 8745, + "rh": 22404, + "rha": 19473, + "rhapso": 32532, + "rhapsody": 35774, + "rhe": 9186, + "rhea": 28612, + "rhetor": 24359, + "rhetoric": 29985, + "rhett": 42984, + "rheu": 42953, + "rhi": 21212, + "rhin": 12269, + "rhine": 22863, + "rhine": 44833, + "rhinestone": 30450, + "rhino": 41744, + "rhino": 20056, + "rhinos": 30671, + "rho": 7637, + "rhode": 39302, + "rhode": 27907, + "rhodes": 17785, + "rhon": 25882, + "rhonda": 46100, + "rhp": 27199, + "rhs": 24551, + "rhu": 23897, + "rhubarb": 30213, + "rhy": 7740, + "rhyme": 37356, + "rhymes": 33143, + "rhys": 28647, + "rhyth": 27069, + "rhythm": 16172, + "rhythmic": 46386, + "rhythms": 40872, + "ri": 553, + "ri": 2574, + "ria": 3650, + "rial": 15200, + "rian": 7788, + "rib": 44634, + "rib": 18298, + "riba": 44992, + "ribb": 10081, + "ribbon": 12114, + "ribbons": 35271, + "ribe": 46115, + "ribs": 17519, + "ric": 920, + "ric": 4798, + "rica": 14230, + "rical": 18109, + "rican": 30958, + "ricardo": 23140, + "ricci": 35783, + "ricciardo": 49282, + "rice": 36362, + "rice": 4741, + "rich": 5223, + "rich": 4021, + "richar": 9350, + "richard": 9080, + "richard": 4470, + "richards": 11372, + "richardson": 15984, + "riche": 23286, + "richer": 34138, + "riches": 37093, + "richest": 25572, + "richi": 38934, + "richie": 19797, + "richland": 43079, + "richmond": 34143, + "richmond": 11292, + "richter": 37591, + "rick": 6237, + "rick": 3064, + "ricket": 46161, + "ricket": 23671, + "ricks": 23111, + "ricky": 19188, + "ricky": 12814, + "rico": 37962, + "rico": 11362, + "ricotta": 38473, + "rics": 7353, + "ricul": 6980, + "rid": 18103, + "rid": 9874, + "ridd": 21990, + "ridden": 32025, + "riddle": 31839, + "ride": 15816, + "ride": 2994, + "rider": 31056, + "rider": 9707, + "riders": 10826, + "rides": 11308, + "ridg": 42646, + "ridge": 16580, + "ridge": 6352, + "ridic": 9624, + "ridiculous": 12659, + "ridiculously": 25661, + "ridin": 47869, + "riding": 6765, + "ridley": 27883, + "rie": 14824, + "rie": 5322, + "ried": 7552, + "riel": 26696, + "rien": 35237, + "rier": 40714, + "rier": 13336, + "ries": 28179, + "ries": 3059, + "riesling": 36372, + "rif": 7044, + "riff": 30359, + "rifle": 15354, + "rifles": 25678, + "rift": 26681, + "rig": 18462, + "rig": 13871, + "riga": 36626, + "rigged": 35897, + "rigging": 38160, + "riggs": 40328, + "righ": 15391, + "right": 13341, + "right": 1155, + "righte": 20762, + "righteous": 28169, + "righteousness": 42481, + "rightful": 42601, + "rightly": 42669, + "rights": 3336, + "rigid": 43138, + "rigor": 36788, + "rigorous": 41654, + "rigs": 42893, + "rihanna": 13744, + "rij": 41097, + "rik": 31136, + "rik": 27832, + "rika": 28580, + "ril": 12270, + "ril": 2388, + "riley": 35056, + "riley": 12260, + "rill": 23705, + "rilla": 43956, + "rilla": 18685, + "rim": 28147, + "rim": 12199, + "rime": 27064, + "rimin": 11527, + "rimo": 47817, + "rims": 34327, + "rin": 5859, + "rin": 11739, + "rina": 12869, + "rine": 24952, + "ring": 8318, + "ring": 2540, + "ringed": 44712, + "ringer": 35761, + "ringing": 26035, + "ringo": 38845, + "rings": 5751, + "rington": 12455, + "rink": 21497, + "rinka": 47316, + "rino": 47188, + "rinse": 48320, + "rio": 15681, + "rio": 5782, + "rion": 31623, + "rion": 34046, + "rios": 32814, + "riot": 32636, + "riot": 
14218, + "riots": 24844, + "rious": 6340, + "rip": 10353, + "rip": 4243, + "ripe": 22832, + "ripley": 41589, + "ripp": 25276, + "ripped": 17815, + "ripper": 35347, + "ripping": 29126, + "ripple": 24825, + "rips": 30182, + "rir": 36792, + "ris": 6108, + "ris": 1999, + "rise": 13641, + "rise": 3151, + "risen": 23653, + "risers": 44983, + "rises": 13362, + "riseup": 35760, + "rish": 18378, + "rish": 18927, + "rishi": 48434, + "rising": 30452, + "rising": 5448, + "risis": 37998, + "risk": 27967, + "risk": 4213, + "risking": 48155, + "risks": 12474, + "risky": 27630, + "risotto": 31471, + "rist": 40610, + "rit": 5156, + "rit": 17333, + "rita": 16178, + "ritchie": 30997, + "rite": 39318, + "rite": 18429, + "rites": 36160, + "rith": 48169, + "rith": 48850, + "riti": 32904, + "rito": 19379, + "ritos": 33507, + "ritt": 26092, + "ritter": 34854, + "ritu": 13391, + "ritual": 19712, + "rituals": 31145, + "ritz": 39151, + "ritz": 25627, + "rium": 33884, + "riv": 25113, + "rival": 13412, + "rival": 15629, + "rivalry": 19511, + "rivals": 15135, + "rive": 27588, + "rive": 34917, + "river": 5239, + "river": 2473, + "rivera": 18275, + "riverdale": 28304, + "riverfront": 44439, + "rivers": 10723, + "riverside": 15809, + "riveting": 44024, + "riviera": 25851, + "rix": 43407, + "rix": 9483, + "riya": 36908, + "riyad": 31564, + "riyadh": 33577, + "riz": 18426, + "riz": 35411, + "rizal": 41555, + "rizio": 40191, + "rizz": 34826, + "rizzo": 49076, + "rj": 26016, + "rj": 20949, + "rk": 38725, + "rk": 21422, + "rl": 18041, + "rl": 14590, + "rlly": 43222, + "rly": 25954, + "rm": 20202, + "rm": 8431, + "rmb": 49097, + "rms": 40529, + "rn": 13206, + "rn": 7666, + "rna": 24566, + "rnb": 31556, + "rnc": 35309, + "rnli": 29748, + "ro": 532, + "ro": 2795, + "roa": 8313, + "roach": 31073, + "road": 4370, + "road": 1759, + "roadhouse": 47891, + "roadmap": 30111, + "roads": 6189, + "roadsafety": 39992, + "roadshow": 21168, + "roadside": 26928, + "roadster": 28920, + "roadto": 24681, + "roadtrip": 15094, + "roadway": 42744, + "roam": 34045, + "roaming": 29240, + "roano": 34184, + "roanoke": 36587, + "roar": 34193, + "roar": 18483, + "roaring": 26428, + "roast": 11404, + "roasted": 10479, + "roasting": 32228, + "rob": 2668, + "rob": 6442, + "robb": 14059, + "robb": 39673, + "robbed": 24163, + "robber": 35545, + "robbers": 40852, + "robbery": 16393, + "robbi": 44898, + "robbie": 37200, + "robbie": 15970, + "robbing": 47569, + "robbins": 23461, + "robby": 44128, + "robe": 23116, + "rober": 4532, + "robert": 8811, + "robert": 3929, + "roberta": 43373, + "roberto": 42645, + "roberto": 16227, + "roberts": 10366, + "robertson": 17643, + "robes": 29304, + "robi": 16743, + "robin": 6681, + "robin": 7988, + "robins": 35502, + "robinson": 8523, + "robles": 47646, + "roblo": 27481, + "roblox": 37798, + "robo": 4672, + "robo": 36057, + "robot": 46089, + "robot": 8797, + "robotic": 23975, + "robotics": 13546, + "robots": 13473, + "robson": 31113, + "robust": 22780, + "robyn": 34533, + "roc": 3268, + "roc": 13776, + "rocco": 30009, + "roch": 23788, + "rochdale": 41880, + "roche": 31776, + "rochelle": 40161, + "rochester": 18057, + "rock": 2640, + "rock": 2172, + "rockab": 39353, + "rockabilly": 45019, + "rocke": 19914, + "rocked": 16116, + "rockefeller": 35476, + "rocker": 29008, + "rockers": 32338, + "rocket": 25435, + "rocket": 8383, + "rockets": 13292, + "rockford": 41039, + "rockies": 20621, + "rockin": 12073, + "rocking": 7081, + "rockn": 24442, + "rocknroll": 27840, + "rocks": 6135, + "rockstar": 23603, + "rockstar": 18000, + 
"rockstargames": 27516, + "rockstars": 46639, + "rockthe": 49363, + "rockwell": 34747, + "rocky": 33481, + "rocky": 9648, + "rod": 9712, + "rod": 8291, + "roddy": 42332, + "rode": 18449, + "rodeo": 18250, + "rodgers": 17612, + "rodi": 49100, + "rodney": 21753, + "rodri": 11053, + "rodrigo": 33944, + "rodriguez": 14057, + "rods": 28618, + "roe": 27671, + "roe": 9996, + "rof": 33029, + "rofl": 48228, + "roft": 45212, + "rog": 34269, + "rog": 34017, + "rogen": 23380, + "roger": 13929, + "roger": 7735, + "rogerfederer": 40182, + "rogers": 10661, + "rogue": 32575, + "rogue": 15162, + "roh": 14933, + "roh": 29840, + "rohan": 39848, + "rohing": 23600, + "rohingya": 26146, + "rohit": 44649, + "rohit": 24299, + "roi": 21877, + "rok": 36807, + "rol": 3393, + "rol": 7818, + "roland": 33713, + "roland": 19569, + "role": 18485, + "role": 3414, + "roles": 11871, + "rolex": 21093, + "rolf": 48606, + "roll": 4711, + "roll": 3341, + "rolled": 11982, + "roller": 21034, + "roller": 12342, + "rollercoaster": 38248, + "rollers": 36941, + "rollin": 27545, + "rolling": 24250, + "rolling": 6347, + "rollingstones": 41309, + "rollins": 27724, + "rollout": 47710, + "rollover": 39214, + "rolls": 8614, + "rolltide": 28101, + "rom": 11377, + "rom": 19205, + "roma": 44134, + "roma": 11631, + "romain": 48897, + "roman": 4416, + "roman": 7370, + "romance": 7215, + "romania": 15884, + "romanian": 30866, + "romano": 38409, + "romans": 23066, + "romantic": 41457, + "romantic": 8821, + "rome": 9406, + "rome": 5243, + "romeo": 14429, + "romero": 23694, + "romney": 19287, + "romo": 32248, + "romper": 43699, + "ron": 2393, + "ron": 3372, + "rona": 42385, + "ronal": 46194, + "ronald": 15683, + "ronaldo": 13463, + "ronan": 34971, + "rond": 31935, + "ronda": 37436, + "rondo": 43756, + "rone": 48082, + "rone": 32763, + "roni": 47234, + "ronnie": 45257, + "ronnie": 16421, + "rons": 19536, + "ront": 48881, + "roo": 1249, + "roo": 31227, + "rood": 38007, + "roof": 9120, + "roof": 6449, + "roofing": 24415, + "roofs": 34635, + "rooftop": 16319, + "rook": 35918, + "rookie": 9771, + "rookies": 31917, + "room": 8845, + "room": 1530, + "roomie": 36851, + "roommate": 19825, + "roommates": 37323, + "rooms": 6328, + "rooney": 17712, + "roos": 32938, + "roosevel": 17644, + "roosevelt": 18488, + "rooster": 46263, + "rooster": 30926, + "roosters": 43693, + "root": 25930, + "root": 9728, + "rooted": 30428, + "rooting": 25523, + "roots": 8084, + "rop": 43401, + "rope": 9953, + "ropes": 30506, + "ror": 8668, + "ror": 2843, + "rors": 12072, + "rory": 42804, + "rory": 17813, + "ros": 5288, + "ros": 6930, + "rosa": 14393, + "rosal": 30397, + "rosario": 33640, + "rosary": 33098, + "rosberg": 46037, + "rose": 6146, + "rose": 3568, + "roseanne": 47528, + "rosel": 33616, + "rosemary": 19472, + "rosen": 13214, + "rosen": 36424, + "rosenberg": 43558, + "rosenthal": 46990, + "roses": 9061, + "rosetta": 43800, + "rosewood": 38686, + "rosie": 43049, + "rosie": 16888, + "ross": 8801, + "ross": 2158, + "rosse": 11602, + "rossi": 24817, + "rosso": 33023, + "roster": 12487, + "roswell": 45116, + "rosy": 46705, + "rosé": 28006, + "rot": 10055, + "rot": 9643, + "rotar": 45959, + "rotary": 14654, + "rotating": 32265, + "rotation": 18089, + "rotc": 32252, + "roth": 17741, + "roth": 19139, + "rother": 23174, + "rotherham": 37687, + "rothschild": 45089, + "roti": 46940, + "roto": 34698, + "rotor": 42991, + "rots": 16642, + "rott": 34806, + "rotten": 24324, + "rotter": 22614, + "rotterdam": 23422, + "rotun": 42970, + "rou": 2964, + "rou": 34783, + "roud": 28375, + "rouge": 
16209, + "rough": 11699, + "rough": 8511, + "roughly": 21910, + "roughs": 37598, + "rouhani": 39912, + "roulette": 39930, + "roun": 5602, + "round": 9403, + "round": 2522, + "roundabout": 29953, + "rounded": 26973, + "rounder": 37024, + "rounding": 40208, + "rounds": 11242, + "roundtable": 19386, + "roundup": 17503, + "roup": 29220, + "rourke": 38753, + "rous": 33645, + "rous": 34531, + "rousey": 46267, + "rout": 7502, + "rout": 41778, + "route": 5261, + "router": 29962, + "routes": 14923, + "routine": 12319, + "routines": 44074, + "routing": 44086, + "roux": 43416, + "rov": 23971, + "rove": 30130, + "rover": 12776, + "rovers": 16373, + "row": 5275, + "row": 1044, + "rowan": 26240, + "rowdy": 32141, + "rowe": 28323, + "rowed": 22615, + "rower": 43345, + "rowers": 41806, + "rowing": 12807, + "rowland": 33037, + "rowley": 48793, + "rowling": 29371, + "rown": 22287, + "rown": 25060, + "rows": 9409, + "rox": 14111, + "rox": 41033, + "roxy": 28093, + "roy": 2128, + "roy": 6354, + "royal": 6691, + "royal": 3853, + "royale": 20630, + "royalnavy": 41545, + "royals": 13335, + "royalties": 48660, + "royalty": 18296, + "royalwedding": 27461, + "royce": 18444, + "royd": 41476, + "royo": 39357, + "roz": 28989, + "roz": 37250, + "rp": 17305, + "rp": 8174, + "rpa": 41872, + "rpg": 12445, + "rpm": 23715, + "rps": 49215, + "rr": 5311, + "rr": 9126, + "rrp": 36967, + "rrr": 18267, + "rrrr": 25561, + "rrrr": 34444, + "rs": 6978, + "rs": 1724, + "rsa": 29437, + "rsc": 48524, + "rsd": 34426, + "rsi": 39046, + "rsl": 44752, + "rsp": 16381, + "rspb": 38508, + "rspb": 36727, + "rspca": 45643, + "rss": 46466, + "rss": 22350, + "rstats": 38700, + "rsvp": 9774, + "rt": 8959, + "rt": 8991, + "rtc": 31648, + "rte": 33822, + "rte": 23322, + "rtg": 22028, + "rti": 47549, + "rtr": 43999, + "rts": 8496, + "rtw": 34673, + "ru": 681, + "ru": 13735, + "rub": 15862, + "rub": 22586, + "rubb": 19597, + "rubbed": 45239, + "rubber": 31131, + "rubber": 11331, + "rubbing": 41262, + "rubbish": 21108, + "rubble": 42230, + "ruben": 44058, + "ruben": 29722, + "rubi": 27856, + "rubin": 34128, + "rubio": 24244, + "rubs": 43422, + "ruby": 24552, + "ruby": 11493, + "ruck": 27449, + "rucker": 45402, + "rud": 35256, + "rudd": 31836, + "rude": 16548, + "rudi": 48360, + "rudol": 40927, + "rudolf": 46835, + "rudolph": 30119, + "rudy": 38226, + "rudy": 22131, + "rue": 38024, + "rue": 19276, + "rufc": 45084, + "ruff": 28177, + "ruff": 30304, + "rufus": 39322, + "rug": 4217, + "rug": 19220, + "rugby": 15091, + "rugby": 4964, + "rugbyleague": 44419, + "ruger": 48655, + "rugged": 25225, + "rugs": 29946, + "rui": 46974, + "ruin": 16256, + "ruined": 17231, + "ruining": 29952, + "ruins": 16094, + "ruiz": 27873, + "ruk": 46628, + "rukh": 43075, + "rukh": 27631, + "rule": 31643, + "rule": 6175, + "ruled": 16324, + "ruler": 26286, + "rulers": 45328, + "rules": 5272, + "ruling": 14690, + "rum": 9223, + "rum": 11233, + "rumb": 42432, + "rumble": 18900, + "rumi": 31428, + "rumor": 22254, + "rumored": 36694, + "rumors": 16160, + "rumour": 34296, + "rumours": 20716, + "rump": 29366, + "run": 1639, + "run": 1934, + "runaway": 28851, + "runchat": 25838, + "rundown": 41100, + "rune": 33882, + "rune": 49244, + "runner": 37370, + "runner": 7913, + "runners": 10571, + "runnin": 43130, + "running": 24451, + "running": 2761, + "runoff": 38564, + "runs": 5586, + "runway": 13927, + "rup": 7996, + "rup": 14980, + "rupaul": 44211, + "rupee": 43916, + "rupees": 44110, + "rupert": 25625, + "rupt": 23055, + "ruption": 35403, + "rural": 28801, + "rural": 8737, + "rus": 35811, 
+ "rus": 5998, + "rush": 12148, + "rush": 6973, + "rushed": 28104, + "rusher": 48745, + "rushes": 47217, + "rushing": 20284, + "russ": 6285, + "russ": 20764, + "russell": 26122, + "russell": 8150, + "russi": 2600, + "russia": 4018, + "russian": 30731, + "russian": 4868, + "russians": 25413, + "russo": 30679, + "rust": 28682, + "rust": 14212, + "rustic": 19822, + "rusty": 43966, + "rusty": 22646, + "rut": 14973, + "rut": 39102, + "rutger": 49029, + "rutgers": 28934, + "ruth": 15798, + "ruth": 12029, + "ruther": 26676, + "rutherford": 31070, + "ruthless": 36063, + "rutland": 46024, + "ruto": 43702, + "ruz": 23275, + "rv": 17135, + "rv": 17951, + "rva": 24278, + "rw": 9085, + "rw": 22926, + "rwa": 47452, + "rwand": 31758, + "rwanda": 15427, + "rwby": 39698, + "rwc": 32321, + "rx": 41188, + "rx": 15945, + "ry": 1511, + "ry": 913, + "ryan": 8682, + "ryan": 4053, + "ryanair": 43526, + "ryder": 43564, + "ryder": 21805, + "rye": 24015, + "rye": 17409, + "rying": 7838, + "ryn": 37728, + "ryo": 24460, + "rys": 21654, + "ryu": 46656, + "ryu": 34604, + "ré": 29106, + "s": 82, + "s": 338, + "sa": 774, + "sa": 1344, + "saa": 13429, + "saab": 27158, + "saad": 36530, + "saas": 25761, + "saat": 33151, + "sab": 3233, + "sab": 23213, + "saba": 38344, + "sabah": 32854, + "saban": 41620, + "sabar": 47102, + "sabbath": 26008, + "sabc": 30010, + "sabcnews": 41093, + "saber": 46822, + "saber": 25624, + "sabha": 23431, + "sabi": 47073, + "sabine": 44062, + "sable": 19224, + "sabot": 30700, + "sabotage": 40496, + "sabre": 35110, + "sabres": 29620, + "sabrin": 37029, + "sabrina": 24994, + "sac": 3632, + "sac": 12905, + "sach": 30168, + "sacha": 49010, + "sachin": 47527, + "sachin": 30297, + "sachs": 31451, + "sack": 28964, + "sack": 14979, + "sacked": 27519, + "sacks": 26441, + "sacram": 13334, + "sacramento": 16065, + "sacred": 40612, + "sacred": 12477, + "sacri": 15283, + "sacrif": 12117, + "sacrific": 16919, + "sacrifice": 12556, + "sacrificed": 31116, + "sacrifices": 28858, + "sacrificing": 48146, + "sad": 2810, + "sad": 3719, + "saddened": 27720, + "saddest": 34925, + "saddle": 30469, + "saddle": 20283, + "sade": 27429, + "sadh": 40955, + "sadi": 22207, + "sadie": 30333, + "sadiq": 44107, + "sadler": 45600, + "sadly": 11603, + "sadness": 20399, + "sae": 38633, + "sae": 34883, + "saeed": 29745, + "saf": 2125, + "saf": 25760, + "safar": 23443, + "safari": 14091, + "safarilive": 34816, + "safc": 27998, + "safe": 2901, + "safe": 2996, + "safeguard": 42249, + "safeguarding": 47451, + "safely": 11513, + "safer": 40124, + "safer": 15504, + "safest": 38973, + "safety": 19050, + "safety": 3406, + "safetyfirst": 43608, + "saffron": 27529, + "sag": 6609, + "sag": 30048, + "saga": 15758, + "sagan": 37193, + "sagar": 42518, + "sage": 25800, + "sage": 7509, + "sages": 25979, + "sagin": 47097, + "sagitt": 44685, + "sagu": 44708, + "sah": 30943, + "sah": 26342, + "saha": 36062, + "sahara": 24599, + "saharan": 44255, + "sahi": 24608, + "sahib": 34150, + "sai": 16048, + "sai": 10886, + "said": 40319, + "said": 1946, + "saif": 44164, + "saig": 36328, + "saigon": 41081, + "sail": 7528, + "sail": 12156, + "sailed": 43047, + "sailing": 11003, + "sailor": 28002, + "sailor": 16076, + "sailormoon": 40673, + "sailors": 25355, + "sails": 27526, + "sain": 21226, + "sain": 40378, + "sains": 24860, + "sainsbury": 45879, + "sainsburys": 36934, + "saint": 11274, + "saint": 5599, + "saints": 8769, + "saintsfc": 31102, + "sair": 46600, + "sair": 30971, + "saire": 28087, + "saison": 33256, + "sait": 48008, + "saj": 33580, + "sak": 11511, + 
"sak": 35900, + "saka": 33609, + "sake": 12874, + "sakh": 43945, + "saki": 40514, + "saku": 37550, + "sakura": 24162, + "sal": 980, + "sal": 6126, + "sala": 17300, + "salaam": 46773, + "salad": 6188, + "salads": 30948, + "salah": 22516, + "salam": 19007, + "salam": 33963, + "salamat": 44696, + "salami": 46885, + "salaries": 33132, + "salary": 16312, + "salazar": 45988, + "sale": 17786, + "sale": 1690, + "saleh": 38353, + "salem": 48194, + "salem": 16884, + "sales": 13347, + "sales": 3765, + "salesforce": 22680, + "salesman": 37633, + "salford": 25629, + "sali": 15411, + "salim": 42760, + "salinas": 41990, + "saline": 46918, + "salis": 20667, + "salis": 39378, + "salisbury": 24763, + "sall": 27122, + "sall": 20883, + "salle": 23738, + "sally": 29542, + "sally": 13349, + "salman": 13754, + "salman": 16219, + "salmankhan": 15177, + "salmon": 37040, + "salmon": 9137, + "salom": 38268, + "salon": 33916, + "salon": 11105, + "saloon": 26038, + "sals": 16307, + "salsa": 16442, + "salt": 12763, + "salt": 6611, + "salted": 26313, + "saltlife": 47809, + "salts": 40559, + "saltwater": 43616, + "salty": 20678, + "salu": 31711, + "salud": 46867, + "salut": 44998, + "salute": 44908, + "salute": 9747, + "salutes": 32762, + "salv": 8299, + "salvador": 20874, + "salvage": 33131, + "salvation": 19534, + "salvatore": 38772, + "salz": 33594, + "salzburg": 43396, + "sam": 1644, + "sam": 3730, + "sama": 19272, + "samanth": 11465, + "samantha": 15466, + "samanthap": 38266, + "samanthaprabhu": 38643, + "samar": 21820, + "samaritan": 45495, + "samba": 37190, + "same": 23062, + "same": 2208, + "samheughan": 36255, + "sami": 48400, + "sami": 24322, + "sammy": 31091, + "sammy": 16758, + "samo": 30006, + "samoa": 34932, + "samp": 31225, + "sample": 9542, + "sampler": 40629, + "samples": 13387, + "sampling": 19522, + "sampson": 39983, + "sams": 44667, + "samson": 34659, + "samsun": 47875, + "samsung": 35369, + "samsung": 8115, + "samu": 7646, + "samuel": 30612, + "samuel": 12787, + "samurai": 21739, + "san": 1489, + "san": 2223, + "sana": 19434, + "sanantonio": 34714, + "sanat": 29091, + "sanatomy": 36052, + "sanc": 7398, + "sance": 15930, + "sanchez": 13971, + "sanctioned": 43032, + "sanctions": 17790, + "sanctu": 12712, + "sanctuary": 14044, + "sand": 2147, + "sand": 5094, + "sandal": 36445, + "sandal": 42185, + "sandals": 20731, + "sandalwood": 47502, + "sandeep": 46973, + "sander": 34111, + "sanders": 10429, + "sanderson": 36198, + "sandi": 44249, + "sandiego": 45997, + "sandiego": 15793, + "sandman": 45730, + "sando": 35921, + "sandoval": 44157, + "sandra": 33733, + "sandra": 13415, + "sandro": 42389, + "sands": 5936, + "sandstone": 36796, + "sandwich": 17050, + "sandwich": 8687, + "sandwiches": 19667, + "sandy": 29679, + "sandy": 10355, + "sane": 23419, + "sanford": 32330, + "sanfrancisco": 20254, + "sang": 13235, + "sang": 11684, + "sange": 12466, + "sangria": 42665, + "sani": 39137, + "sani": 34492, + "sanitary": 33842, + "sanitation": 25414, + "saniti": 43987, + "sanity": 30517, + "sanjay": 31712, + "sanjay": 25796, + "sanje": 40405, + "sanjose": 45971, + "sank": 43692, + "sano": 34053, + "sans": 16982, + "sansk": 39689, + "sanskrit": 48083, + "sant": 8356, + "sant": 23120, + "santa": 22175, + "santa": 4555, + "santac": 28876, + "santam": 45627, + "santana": 27033, + "santander": 46476, + "santi": 13856, + "santiago": 16568, + "santo": 29631, + "santo": 18400, + "santor": 28448, + "santorini": 39573, + "santos": 16582, + "sany": 47679, + "sao": 28026, + "sap": 8089, + "sap": 11591, + "sapi": 40016, + "sapp": 
13427, + "sapp": 40729, + "sapphire": 22044, + "sar": 1808, + "sar": 9424, + "sara": 37196, + "sara": 10063, + "sarab": 40716, + "sarac": 35722, + "sarah": 9086, + "sarah": 5327, + "saraj": 42592, + "sarajevo": 48211, + "saras": 20373, + "sarasota": 31990, + "sarato": 24845, + "saratoga": 29496, + "sarawak": 47331, + "sarcasm": 37246, + "sarcastic": 48639, + "sardar": 41786, + "sarde": 43925, + "sardin": 27383, + "sardinia": 41025, + "sare": 13051, + "saree": 30860, + "sargent": 34864, + "sari": 42327, + "sari": 20261, + "saries": 47586, + "sarkar": 30673, + "sarko": 33658, + "sarkodie": 42848, + "sarmy": 20954, + "sart": 33006, + "sary": 15398, + "sas": 3960, + "sas": 5235, + "sash": 35656, + "sasha": 46078, + "sasha": 20894, + "sasia": 44751, + "sask": 47091, + "sask": 30416, + "saskat": 17102, + "saskatchewan": 23899, + "saskatoon": 31128, + "sass": 31351, + "sassy": 20827, + "sat": 1382, + "sat": 3279, + "sata": 41520, + "satan": 19446, + "satanic": 38224, + "satchel": 45908, + "sate": 35749, + "satell": 9031, + "satellite": 10316, + "satellites": 28483, + "sath": 29675, + "sathletics": 30154, + "sati": 7038, + "satin": 21803, + "sation": 23674, + "sations": 31232, + "satire": 29875, + "satis": 9906, + "satisf": 22941, + "satisfaction": 19925, + "satisfied": 18101, + "satisfy": 29444, + "satisfying": 23755, + "sato": 34376, + "satu": 45283, + "satur": 1634, + "saturated": 32466, + "saturday": 12537, + "saturday": 1748, + "saturdaymorning": 29053, + "saturdaymotivation": 40843, + "saturdays": 18930, + "saturn": 17312, + "saty": 39426, + "sau": 2096, + "sau": 19455, + "sauce": 5520, + "saucer": 42272, + "sauces": 40367, + "saucy": 46684, + "saudi": 24511, + "saudi": 8548, + "saudiarabia": 28680, + "sauer": 46333, + "saul": 47623, + "saul": 23252, + "sault": 40361, + "sauna": 35460, + "saunders": 23794, + "saur": 13227, + "saura": 46532, + "saurus": 22118, + "saus": 36121, + "sausage": 11855, + "sausages": 31593, + "sauté": 36290, + "sautéed": 38517, + "sauvi": 30116, + "sauvignon": 32745, + "sav": 2248, + "sav": 26533, + "sava": 40198, + "savag": 43039, + "savage": 11859, + "savannah": 18662, + "save": 5895, + "save": 2673, + "saved": 7137, + "saveour": 33390, + "saver": 20987, + "savers": 31416, + "saves": 12907, + "savethe": 18031, + "savi": 14721, + "saving": 28498, + "saving": 6979, + "savings": 10651, + "savior": 24762, + "saviour": 35800, + "savor": 48071, + "savory": 32992, + "savoury": 49071, + "savoy": 39552, + "savvy": 29278, + "saw": 12429, + "saw": 2425, + "sawa": 39613, + "sawards": 29012, + "sawyer": 27726, + "sax": 14169, + "sax": 23766, + "saxon": 31856, + "saxophon": 43760, + "saxophone": 32296, + "say": 3047, + "say": 1451, + "saya": 35170, + "sayang": 46322, + "sayers": 44116, + "sayin": 23662, + "saying": 4455, + "says": 1563, + "saz": 35577, + "sb": 5576, + "sb": 4977, + "sba": 44970, + "sback": 43840, + "sband": 27539, + "sbaseball": 46491, + "sbball": 39190, + "sbc": 31404, + "sberg": 20358, + "sbi": 41369, + "sbk": 39211, + "sboro": 18909, + "sbridge": 49228, + "sbs": 18883, + "sbu": 48075, + "sbu": 46281, + "sburg": 7390, + "sburgh": 48205, + "sbury": 14081, + "sby": 26519, + "sby": 10287, + "sc": 663, + "sc": 3219, + "sca": 11001, + "scab": 31716, + "scaf": 28981, + "scafe": 45574, + "scaffolding": 41687, + "scal": 10859, + "scala": 37997, + "scalable": 44084, + "scale": 37817, + "scale": 5879, + "scaled": 41923, + "scales": 22891, + "scaling": 29116, + "scallo": 19936, + "scallop": 39544, + "scallops": 31430, + "scalp": 38898, + "scam": 17620, + "scam": 13215, 
+ "scamp": 28451, + "scams": 34395, + "scan": 10650, + "scan": 11261, + "scanada": 27121, + "scand": 8110, + "scandal": 35420, + "scandal": 11622, + "scandals": 45490, + "scandin": 32014, + "scandinavian": 35661, + "scanned": 43719, + "scanner": 24185, + "scanning": 24092, + "scans": 31251, + "scap": 35883, + "scape": 36005, + "scape": 12314, + "scapes": 31933, + "scar": 4171, + "scar": 18088, + "scarborough": 24254, + "scarce": 38572, + "scarcity": 45812, + "scare": 33536, + "scare": 15920, + "scarec": 38814, + "scarecrow": 46504, + "scared": 9870, + "scares": 34096, + "scarf": 13365, + "scari": 27050, + "scariest": 37213, + "scarlet": 20389, + "scarlett": 28325, + "scars": 20747, + "scarves": 29249, + "scary": 9250, + "scat": 13899, + "scattered": 22090, + "scavenger": 36778, + "scc": 19458, + "scd": 48422, + "scen": 2204, + "scenario": 20456, + "scenarios": 31346, + "scence": 33418, + "scene": 3562, + "scenery": 16025, + "scenes": 5415, + "scenic": 15394, + "scent": 36277, + "scent": 7683, + "scented": 27190, + "scenter": 23059, + "scentre": 39371, + "scents": 26336, + "scep": 24439, + "scfc": 38578, + "sch": 844, + "sch": 7542, + "scha": 42809, + "schaf": 45588, + "schaft": 41010, + "schal": 35568, + "schalke": 41029, + "schallenge": 43665, + "schan": 31328, + "schar": 15085, + "schat": 31842, + "schau": 35830, + "sche": 3038, + "sche": 7289, + "schedu": 4207, + "schedule": 5521, + "scheduled": 10986, + "schedules": 28986, + "scheduling": 32216, + "scheer": 26776, + "schel": 39881, + "schel": 38569, + "schem": 17720, + "scheme": 9024, + "schemes": 22958, + "schen": 22738, + "scher": 21925, + "scher": 21299, + "schi": 13731, + "schi": 24984, + "schicago": 46230, + "schiff": 39431, + "schild": 32148, + "schiz": 33230, + "schizoph": 40004, + "schizophre": 41163, + "schle": 32022, + "schmid": 17375, + "schmidt": 18463, + "schnau": 45745, + "schnei": 19941, + "schneider": 22972, + "schnit": 40903, + "scho": 2493, + "schoice": 23860, + "schol": 4498, + "scholar": 7192, + "scholar": 12830, + "scholarly": 41065, + "scholars": 13818, + "scholarship": 9070, + "scholarships": 17866, + "scholastic": 35743, + "schoo": 20721, + "school": 6063, + "school": 1228, + "schooled": 44722, + "schoolers": 31455, + "schooling": 28608, + "schools": 3513, + "schre": 47685, + "schri": 25453, + "schro": 32381, + "schu": 11318, + "schubert": 46939, + "schul": 14945, + "schultz": 30308, + "schulz": 39572, + "schumacher": 39208, + "schumer": 25313, + "schur": 42475, + "schwab": 47602, + "schwar": 13985, + "schwartz": 30617, + "schwarz": 27074, + "schwarzenegger": 33860, + "schwe": 25324, + "sci": 2267, + "sci": 8309, + "sciart": 31704, + "scicom": 28606, + "scicomm": 29573, + "scien": 39261, + "science": 10201, + "science": 2497, + "sciencefiction": 39170, + "sciences": 11481, + "scienti": 4338, + "scientific": 9750, + "scientist": 11083, + "scientists": 8045, + "sciento": 36193, + "scientology": 44694, + "scifi": 41862, + "scifi": 12230, + "scion": 47208, + "sciss": 25667, + "scissors": 30867, + "sciutto": 44392, + "sclerosis": 39446, + "sclub": 20017, + "sco": 1065, + "sco": 4763, + "scoe": 31164, + "scol": 13599, + "scoll": 44895, + "scollege": 39536, + "scom": 26407, + "scon": 17163, + "scon": 29272, + "scones": 36443, + "sconf": 39704, + "scoo": 14199, + "scooby": 34469, + "scoop": 13829, + "scoops": 41360, + "scope": 7979, + "scopes": 30328, + "scopic": 23869, + "scopy": 20018, + "scor": 8442, + "score": 12067, + "score": 4431, + "scoreboard": 30104, + "scorecard": 38128, + "scored": 6143, + "scoreless": 
33469, + "scorer": 16572, + "scorers": 26699, + "scores": 7039, + "scoring": 9198, + "scorpi": 15445, + "scorpio": 34331, + "scorpion": 28461, + "scorpions": 45401, + "scorsese": 45975, + "scot": 2496, + "scot": 9271, + "scotch": 16687, + "scoti": 46446, + "scotia": 27859, + "scotland": 29174, + "scotland": 4203, + "scots": 17260, + "scotsman": 39612, + "scott": 7775, + "scott": 3664, + "scotti": 6227, + "scottish": 18039, + "scottish": 7442, + "scottsdale": 27817, + "scotty": 39697, + "scotty": 26836, + "scotus": 21720, + "scou": 44909, + "scoun": 16110, + "scouncil": 48787, + "scountry": 40432, + "scour": 46172, + "scout": 32213, + "scout": 10786, + "scouting": 19072, + "scouts": 14837, + "scow": 27929, + "scowboys": 31386, + "scp": 45030, + "scr": 36131, + "scra": 11187, + "scrabble": 39488, + "scram": 17289, + "scramble": 32688, + "scrambled": 39026, + "scran": 41774, + "scranton": 45274, + "scrap": 27950, + "scrap": 21695, + "scrapbook": 48733, + "scrapped": 43325, + "scraps": 40809, + "scrat": 9572, + "scratch": 13258, + "scratched": 48831, + "scratches": 46556, + "scratching": 44617, + "scre": 1795, + "scream": 31645, + "scream": 13239, + "screamed": 35427, + "screaming": 12891, + "screams": 23989, + "screen": 5351, + "screen": 3750, + "screened": 31450, + "screening": 6688, + "screenings": 27655, + "screenplay": 30058, + "screens": 12689, + "screenshot": 20637, + "screenshot": 12646, + "screenshots": 26783, + "screenshotsaturday": 21406, + "screenwriter": 37293, + "screenwriting": 35465, + "screw": 25529, + "screw": 14225, + "screwdriver": 48748, + "screwed": 30592, + "screws": 38292, + "scri": 2139, + "scrib": 34259, + "scribe": 36228, + "scribed": 38334, + "scricket": 45947, + "scrim": 21978, + "scrimmage": 25216, + "scrip": 11955, + "script": 8374, + "scripted": 40513, + "scription": 26604, + "scriptions": 39512, + "scripts": 20109, + "scripture": 27186, + "scro": 30768, + "scroll": 24160, + "scrolling": 28889, + "scrolls": 38113, + "scroo": 42263, + "scru": 7589, + "scrub": 23432, + "scrubs": 37919, + "scrum": 29047, + "scrump": 39791, + "scrumptious": 40987, + "scrutiny": 34305, + "scs": 26853, + "sct": 39284, + "scu": 8181, + "scu": 32135, + "scuba": 39053, + "scuba": 20559, + "scubadiving": 49046, + "scue": 25955, + "scul": 4948, + "scully": 36598, + "sculp": 6093, + "sculpt": 45044, + "sculpted": 41296, + "sculpting": 44389, + "sculptor": 29409, + "sculpture": 8757, + "sculptures": 20378, + "scum": 29655, + "scumb": 44525, + "scup": 21506, + "scur": 32742, + "scwx": 41966, + "scy": 27471, + "sd": 3080, + "sd": 4159, + "sda": 25548, + "sdale": 12327, + "sday": 5902, + "sday": 1376, + "sdays": 14491, + "sdc": 40992, + "sdcc": 13246, + "sden": 17241, + "sdf": 34681, + "sdg": 20177, + "sdgs": 16261, + "sdk": 40015, + "sdlive": 34561, + "sdn": 41925, + "sdsu": 41284, + "se": 567, + "se": 611, + "sea": 5970, + "sea": 2102, + "seab": 15728, + "seabir": 42558, + "seac": 35626, + "seaf": 9336, + "seafood": 12472, + "seag": 15730, + "seagu": 38076, + "seagull": 38858, + "seagulls": 42215, + "seahawks": 15341, + "seal": 21381, + "seal": 10159, + "sealed": 13358, + "sealing": 42992, + "seals": 18179, + "seam": 13710, + "seam": 44201, + "seaman": 47513, + "seamless": 29373, + "seamus": 40175, + "sean": 11406, + "sean": 6077, + "seanhannity": 43316, + "seap": 29983, + "seaport": 46418, + "sear": 1612, + "search": 23129, + "search": 1920, + "searched": 28961, + "searches": 26378, + "searching": 10626, + "seared": 29727, + "sears": 26693, + "seas": 7329, + "seas": 9556, + "seascape": 
42593, + "seaside": 18867, + "season": 19288, + "season": 1367, + "seasonal": 14215, + "seasoned": 28399, + "seasoning": 43439, + "seasons": 8635, + "seat": 19670, + "seat": 4922, + "seated": 23953, + "seater": 37543, + "seating": 16240, + "seats": 6944, + "seattle": 24388, + "seattle": 6274, + "seau": 32263, + "seaw": 32658, + "seaweed": 30204, + "seaworld": 27422, + "seb": 35766, + "seb": 25171, + "sebasti": 10324, + "sebastian": 43792, + "sebastian": 13181, + "sebring": 41086, + "sec": 2875, + "sec": 5338, + "seca": 37847, + "secco": 27394, + "sece": 46297, + "seclu": 42392, + "secon": 1846, + "second": 9329, + "second": 2241, + "secondary": 13107, + "seconds": 6541, + "secre": 2460, + "secret": 20710, + "secret": 4145, + "secretari": 29515, + "secretariat": 31767, + "secretary": 6552, + "secretly": 21400, + "secrets": 9735, + "secs": 28665, + "sect": 15772, + "section": 34986, + "section": 4853, + "sectional": 21876, + "sections": 20061, + "sector": 6579, + "sectors": 22173, + "secu": 4894, + "secular": 47483, + "secular": 27560, + "secur": 2557, + "secure": 44763, + "secure": 7515, + "secured": 16848, + "secures": 31567, + "securing": 24759, + "securities": 25080, + "security": 31245, + "security": 2741, + "sed": 14034, + "sed": 1252, + "sedan": 24237, + "sedg": 46926, + "sedge": 45288, + "sedi": 29269, + "sedly": 31771, + "sedona": 46862, + "seduc": 19933, + "seductive": 43721, + "see": 1751, + "see": 862, + "seed": 14064, + "seed": 6488, + "seeded": 33688, + "seeding": 40050, + "seedlings": 47933, + "seeds": 9128, + "seeing": 3214, + "seek": 8839, + "seeker": 28011, + "seekers": 20732, + "seeking": 8592, + "seeks": 12594, + "seem": 20043, + "seem": 7523, + "seemed": 17240, + "seemingly": 25917, + "seems": 4453, + "seen": 36273, + "seen": 2041, + "seer": 32486, + "sees": 7594, + "seeyou": 41279, + "sef": 27453, + "seg": 10551, + "sega": 16122, + "segment": 15615, + "segments": 43053, + "segreg": 49117, + "segregation": 39086, + "segu": 33156, + "segun": 43087, + "seh": 27536, + "seh": 41430, + "sehun": 17705, + "sei": 13130, + "sei": 15907, + "sein": 24669, + "seine": 41378, + "seinfeld": 33706, + "seis": 25559, + "seismic": 38459, + "seiz": 22171, + "seize": 26624, + "seized": 15826, + "seizure": 36804, + "seizures": 47199, + "sek": 45515, + "sek": 25880, + "sel": 1000, + "sel": 4098, + "sela": 47006, + "selamat": 37692, + "selangor": 44402, + "selby": 43546, + "selca": 38606, + "selcaday": 35924, + "seldom": 48322, + "sele": 29137, + "selec": 3014, + "select": 8690, + "selected": 6881, + "selecting": 32696, + "selection": 6724, + "selections": 24099, + "selective": 28686, + "selects": 32902, + "selen": 19970, + "selena": 14677, + "selenagomez": 27653, + "seley": 30556, + "self": 10139, + "self": 1322, + "selfcare": 39560, + "selfi": 3007, + "selfie": 26735, + "selfie": 3666, + "selfies": 46058, + "selfies": 10050, + "selfish": 26907, + "selfless": 34236, + "sell": 10279, + "sell": 5119, + "seller": 11779, + "sellers": 16562, + "selling": 4396, + "sells": 14306, + "selma": 36652, + "sels": 42070, + "selves": 4505, + "sely": 8402, + "sem": 8645, + "sem": 17106, + "sema": 31816, + "seman": 29119, + "seman": 28378, + "semana": 41780, + "semb": 36054, + "seme": 10855, + "sement": 10714, + "sements": 31449, + "semester": 11905, + "semi": 11023, + "semi": 6684, + "semic": 26967, + "semicon": 34315, + "semiconduc": 35646, + "semiconductor": 43551, + "semifinal": 22935, + "semifinals": 21863, + "semin": 5595, + "seminar": 7269, + "seminars": 34870, + "seminary": 31655, + "seminole": 42956, 
+ "semis": 24013, + "semit": 22628, + "semite": 23721, + "semitic": 34894, + "semitism": 25911, + "semper": 47391, + "sen": 1057, + "sen": 2249, + "sena": 21584, + "senate": 30703, + "senate": 6843, + "senator": 20871, + "senator": 8495, + "senators": 16889, + "send": 27684, + "send": 3625, + "sending": 6985, + "sends": 10817, + "sene": 25269, + "seneca": 33419, + "senegal": 28255, + "senew": 49313, + "seng": 43022, + "seng": 29971, + "senior": 19865, + "senior": 3415, + "seniors": 8138, + "senna": 36195, + "senpai": 46562, + "sens": 5218, + "sens": 22837, + "sensation": 19383, + "sensational": 23051, + "sense": 29162, + "sense": 4747, + "sensei": 36158, + "senses": 21809, + "sensi": 38802, + "sensible": 30635, + "sensing": 29236, + "sensiti": 20531, + "sensitive": 13734, + "sensitivity": 27788, + "sensor": 15330, + "sensors": 20356, + "sensory": 21831, + "sensu": 28157, + "sensual": 40860, + "sent": 6200, + "sent": 3676, + "sentence": 12737, + "sentenced": 17773, + "sentences": 25858, + "sentencing": 34394, + "senti": 19042, + "sentim": 25102, + "sentiment": 25949, + "sentimental": 40070, + "sentiments": 47450, + "sentin": 20042, + "sentinel": 23123, + "senting": 3924, + "seo": 24743, + "seo": 8622, + "seok": 34697, + "seok": 22482, + "seokjin": 45584, + "seoul": 13253, + "sep": 3212, + "sep": 10434, + "separ": 6859, + "separate": 13886, + "separated": 22163, + "separately": 41904, + "separates": 45365, + "separati": 39377, + "separating": 43480, + "separation": 22007, + "sephora": 38414, + "sepsis": 40205, + "sept": 5380, + "septe": 3672, + "september": 3707, + "septic": 34690, + "sepul": 47360, + "seq": 44379, + "sequ": 5491, + "seque": 44662, + "sequel": 15701, + "sequence": 18833, + "sequences": 47306, + "sequencing": 33484, + "sequo": 32781, + "sequoia": 42404, + "ser": 803, + "ser": 2771, + "sera": 28250, + "serbia": 19038, + "serbian": 33687, + "sere": 35770, + "seren": 7880, + "serena": 19519, + "serenawilliams": 48316, + "serendip": 45805, + "serendipity": 49386, + "serene": 28269, + "serenity": 24187, + "serge": 13477, + "serge": 35700, + "sergeant": 22049, + "sergei": 39870, + "sergey": 35390, + "sergi": 47675, + "sergio": 18359, + "seri": 2763, + "seri": 37509, + "serial": 14216, + "serie": 19752, + "seriea": 32660, + "series": 1857, + "serious": 47421, + "serious": 4770, + "seriously": 4885, + "sermon": 24884, + "sero": 48883, + "serpent": 37084, + "serpent": 35364, + "serra": 39851, + "serrano": 44236, + "sers": 13509, + "serum": 25385, + "serv": 1297, + "serv": 24571, + "servant": 20810, + "servants": 29652, + "serve": 39202, + "serve": 2838, + "served": 4740, + "server": 36458, + "server": 8398, + "serverless": 49243, + "servers": 22262, + "serves": 9915, + "servic": 27115, + "service": 21496, + "service": 2086, + "serviced": 44687, + "services": 3100, + "servicing": 41300, + "serving": 5722, + "sery": 14279, + "ses": 23708, + "ses": 1386, + "sesame": 21706, + "sese": 37128, + "sesh": 24274, + "session": 2550, + "sessions": 6327, + "set": 7965, + "set": 1167, + "setback": 43605, + "seth": 20005, + "seth": 11870, + "sethu": 38933, + "setlist": 33141, + "seton": 43799, + "sets": 4650, + "sett": 4984, + "sett": 17567, + "sette": 14613, + "setter": 23153, + "settes": 44145, + "setti": 45170, + "setting": 5264, + "settings": 18628, + "settle": 15075, + "settled": 18310, + "settlement": 16494, + "settlements": 36605, + "settlers": 35671, + "settles": 41498, + "settling": 22036, + "setup": 11092, + "seu": 31539, + "seul": 48975, + "seum": 18838, + "seun": 24209, + "seung": 32393, 
+ "seung": 33711, + "seungri": 41627, + "seuss": 34441, + "sev": 26585, + "sev": 37600, + "seva": 42604, + "seve": 21458, + "seve": 22468, + "sevel": 17439, + "seven": 7874, + "seven": 5757, + "sevens": 29911, + "sevent": 43048, + "seventeen": 19337, + "seventh": 17568, + "seventy": 47170, + "sever": 3250, + "sever": 45557, + "several": 5560, + "severance": 26194, + "severe": 6215, + "severely": 24417, + "severn": 34626, + "severy": 34207, + "sevilla": 24947, + "seville": 34988, + "sew": 28640, + "sewage": 32777, + "sewer": 28294, + "sewing": 15974, + "sewn": 42118, + "sex": 3548, + "sex": 5937, + "sexi": 20562, + "sexiest": 25426, + "sexism": 32059, + "sexist": 33047, + "sexu": 14741, + "sexual": 6749, + "sexuality": 21244, + "sexually": 23032, + "sexy": 21019, + "sexy": 38127, + "sey": 6317, + "sey": 2258, + "seychel": 36809, + "seychelles": 38519, + "seye": 35604, + "seym": 22657, + "seymour": 25850, + "seys": 15081, + "sez": 42377, + "señ": 43368, + "sf": 4435, + "sf": 4915, + "sfa": 32675, + "sfam": 37649, + "sfb": 27930, + "sfc": 14129, + "sfest": 49024, + "sff": 42056, + "sfgiants": 20923, + "sfield": 11801, + "sfo": 39182, + "sfootball": 45259, + "sfor": 9115, + "sford": 28917, + "sforsale": 28888, + "sfw": 18073, + "sfx": 37995, + "sg": 9599, + "sg": 7611, + "sga": 33049, + "sgate": 27558, + "sgh": 47590, + "sgo": 5393, + "sgo": 21044, + "sgt": 13748, + "sh": 552, + "sh": 849, + "sha": 1514, + "sha": 3337, + "shaa": 44221, + "shab": 8323, + "shabbat": 38042, + "shabby": 28838, + "shack": 23866, + "shack": 18785, + "shad": 3182, + "shad": 23874, + "shade": 34554, + "shade": 10097, + "shaded": 43506, + "shades": 46608, + "shades": 9270, + "shadesof": 45180, + "shading": 37348, + "shado": 9325, + "shadow": 15243, + "shadow": 7068, + "shadowhun": 19931, + "shadowhunters": 24834, + "shadowing": 46092, + "shadows": 12971, + "shady": 22158, + "shaf": 12032, + "shaft": 21545, + "shag": 22439, + "shaggy": 42662, + "shah": 13203, + "shah": 8439, + "shahe": 23643, + "shaheed": 30060, + "shaheer": 43969, + "shahi": 46972, + "shahid": 25696, + "shahid": 27138, + "shahidkapoor": 29892, + "shahzad": 45915, + "shai": 47941, + "shaikh": 45712, + "shail": 37603, + "shair": 43135, + "shak": 8385, + "shake": 8206, + "shake": 8251, + "shaken": 38237, + "shaker": 26210, + "shakers": 38411, + "shakes": 19668, + "shakespe": 9890, + "shakespeare": 22499, + "shakespeare": 12488, + "shakespearesunday": 32320, + "shaking": 19101, + "shakira": 40795, + "shakti": 48593, + "shakti": 32458, + "shakur": 48915, + "shal": 15056, + "shal": 28175, + "shale": 32864, + "shall": 4742, + "shallow": 23730, + "shalom": 31339, + "sham": 6453, + "sham": 9005, + "shaman": 48727, + "shambles": 40799, + "shame": 14776, + "shame": 7593, + "shameful": 28283, + "shameless": 25380, + "shaming": 40553, + "shampoo": 23944, + "shamrock": 34199, + "shan": 5171, + "shan": 8834, + "shana": 44835, + "shand": 29101, + "shane": 26863, + "shane": 11572, + "shang": 11141, + "shanghai": 12742, + "shani": 46665, + "shank": 24685, + "shankar": 24108, + "shann": 9932, + "shannon": 22842, + "shannon": 13581, + "shant": 36610, + "shap": 5581, + "shape": 26925, + "shape": 6448, + "shaped": 10127, + "shapes": 15377, + "shaping": 18632, + "shapiro": 32110, + "shaq": 46402, + "shaq": 26843, + "shar": 1669, + "shar": 36542, + "shara": 48849, + "sharapo": 36489, + "sharapova": 36671, + "shard": 42207, + "share": 7585, + "share": 1978, + "shared": 5368, + "shareholder": 38241, + "shareholders": 34778, + "sharepoint": 39213, + "shares": 4974, + "sharethe": 
49277, + "shareyour": 45890, + "shari": 27738, + "shari": 47390, + "sharia": 37244, + "sharif": 15501, + "sharing": 3567, + "sharjah": 33420, + "shark": 15836, + "shark": 7980, + "sharks": 10047, + "sharkweek": 39571, + "sharma": 10105, + "sharon": 28722, + "sharon": 14138, + "sharp": 17126, + "sharp": 8157, + "sharpe": 34374, + "sharpen": 41465, + "sharpie": 46858, + "sharply": 37185, + "shasta": 46727, + "shat": 12169, + "shat": 44388, + "shatter": 45008, + "shattered": 26820, + "shau": 13750, + "shaun": 23446, + "shaun": 16669, + "shav": 11410, + "shave": 17735, + "shaved": 25571, + "shaving": 24261, + "shaw": 6122, + "shaw": 6805, + "shawa": 46413, + "shawl": 35132, + "shawn": 16677, + "shawn": 10970, + "shawnee": 48060, + "shawnmendes": 27277, + "shawty": 38026, + "shay": 10778, + "shay": 18361, + "shaykh": 47223, + "shaz": 18618, + "shazam": 29063, + "shc": 43419, + "shd": 37729, + "she": 1729, + "she": 1043, + "shea": 20407, + "shead": 44287, + "shead": 20434, + "shealth": 41743, + "shealth": 22197, + "shear": 27974, + "shear": 32108, + "shearer": 40505, + "sheath": 45637, + "shed": 16586, + "shed": 1492, + "shedding": 33608, + "sheds": 25921, + "shee": 23450, + "shee": 34321, + "sheed": 26105, + "sheehan": 41809, + "sheen": 25025, + "sheep": 23604, + "sheep": 9629, + "sheer": 17577, + "sheeran": 18561, + "sheet": 7298, + "sheets": 12744, + "shef": 8237, + "sheff": 38844, + "sheff": 43821, + "sheffiel": 26940, + "sheffield": 41763, + "sheffield": 10420, + "sheffieldissuper": 33628, + "sheh": 31667, + "sheikh": 15031, + "sheil": 42765, + "sheila": 25734, + "shek": 33285, + "shel": 3159, + "shelby": 36906, + "shelby": 16885, + "sheldon": 25079, + "shelf": 10955, + "shell": 23374, + "shell": 6648, + "shelley": 22497, + "shelling": 43166, + "shells": 19265, + "shelly": 37461, + "shelter": 8599, + "sheltered": 48070, + "shelters": 24312, + "shelton": 24471, + "shelves": 16225, + "shem": 40299, + "shen": 10154, + "shen": 31098, + "shenan": 20965, + "shenando": 44666, + "shenanigans": 26590, + "shenko": 39751, + "shenmue": 48279, + "shenzhen": 38970, + "shep": 33757, + "shep": 44857, + "shepard": 26810, + "shepher": 11008, + "shepherd": 13242, + "shepherds": 42792, + "sheppard": 37304, + "sher": 3570, + "sher": 4510, + "sheraton": 39400, + "shere": 21507, + "sheri": 9235, + "sheridan": 27085, + "sheriff": 10309, + "sherlock": 17294, + "sherman": 17822, + "sherry": 44348, + "sherry": 24689, + "shers": 14141, + "sherwood": 24527, + "sheryl": 39773, + "shes": 45514, + "shes": 2502, + "shet": 15850, + "shetland": 29595, + "shetty": 25533, + "shev": 45182, + "sheva": 45132, + "shh": 35025, + "shhh": 36932, + "shi": 823, + "shi": 3533, + "shia": 23791, + "shibu": 36177, + "shibuya": 41623, + "shie": 26638, + "shiel": 33413, + "shield": 8670, + "shields": 19085, + "shies": 35312, + "shif": 35317, + "shift": 43767, + "shift": 6905, + "shifted": 34429, + "shifter": 48944, + "shifting": 21992, + "shifts": 23957, + "shik": 36980, + "shil": 14370, + "shill": 32121, + "shill": 30090, + "shilpa": 47062, + "shilpa": 40690, + "shim": 11986, + "shim": 32780, + "shima": 14382, + "shimano": 48904, + "shimi": 40517, + "shimmer": 38792, + "shin": 5664, + "shin": 11784, + "shinde": 41516, + "shine": 17582, + "shine": 3780, + "shinee": 19660, + "shines": 16015, + "shing": 38641, + "shing": 1743, + "shining": 10485, + "shino": 43074, + "shiny": 12190, + "ship": 7645, + "ship": 1158, + "shipment": 28553, + "shipp": 34709, + "shipped": 15279, + "shippers": 44789, + "shipping": 5721, + "ships": 3262, + "shipwreck": 
48878, + "shipy": 26828, + "shipyard": 31273, + "shir": 1956, + "shiraz": 35618, + "shire": 11975, + "shire": 2968, + "shirehour": 32456, + "shirley": 18189, + "shiro": 26048, + "shirt": 27576, + "shirt": 2523, + "shirtless": 28959, + "shirts": 5803, + "shistory": 34979, + "shiv": 18042, + "shiv": 37121, + "shiva": 33881, + "shiva": 21174, + "shka": 38944, + "shld": 49359, + "shma": 48074, + "shment": 8802, + "shments": 18822, + "sho": 719, + "sho": 13756, + "shock": 19617, + "shock": 8736, + "shocked": 15787, + "shocker": 37971, + "shockey": 22258, + "shocking": 13394, + "shocks": 31886, + "shoe": 16308, + "shoe": 7342, + "shoes": 49391, + "shoes": 4079, + "shol": 21472, + "sholm": 44139, + "shome": 42701, + "shon": 19526, + "shon": 37621, + "shone": 47173, + "shoo": 1975, + "shook": 20730, + "shoops": 29956, + "shoot": 12531, + "shoot": 3704, + "shooter": 13645, + "shooters": 31902, + "shooting": 3992, + "shootings": 26753, + "shootout": 20666, + "shoots": 14144, + "shop": 5738, + "shop": 1557, + "shopify": 47949, + "shoplocal": 21775, + "shopp": 38486, + "shoppe": 38236, + "shopped": 28088, + "shopper": 24346, + "shoppers": 22316, + "shopping": 42101, + "shopping": 4266, + "shops": 6467, + "shopsmall": 35942, + "shor": 3209, + "shore": 14717, + "shore": 5928, + "shored": 33140, + "shoreditch": 35042, + "shoreline": 34807, + "shores": 18102, + "short": 6803, + "short": 3005, + "shortage": 19910, + "shortages": 38730, + "shortcuts": 45793, + "shorten": 41711, + "shorter": 20350, + "shortest": 33717, + "shortfilm": 37204, + "shorth": 37397, + "shortlist": 28163, + "shortlisted": 20631, + "shortly": 11967, + "shorts": 9680, + "shorty": 33502, + "shot": 9805, + "shot": 2000, + "shotel": 42365, + "shotgun": 21643, + "shots": 5342, + "shou": 3890, + "shoul": 29847, + "should": 14947, + "should": 1535, + "shoulder": 8476, + "shoulders": 18738, + "shouldn": 9416, + "shour": 20025, + "shouse": 28671, + "shout": 7335, + "shout": 5214, + "shouted": 44397, + "shouting": 26464, + "shoutout": 8274, + "shouts": 26709, + "shovel": 31778, + "show": 2133, + "show": 1080, + "showbiz": 34156, + "showcas": 14290, + "showcase": 7265, + "showcased": 35786, + "showcases": 26266, + "showcasing": 17036, + "showdown": 15576, + "showed": 7150, + "shower": 7777, + "showers": 9893, + "showing": 3649, + "shown": 8506, + "showroom": 16821, + "shows": 2665, + "showtime": 40576, + "showtime": 15442, + "showyour": 46733, + "shp": 38341, + "shq": 21145, + "shr": 10118, + "shra": 21360, + "shradd": 28172, + "shraddha": 35208, + "shraddhakapoor": 40385, + "shre": 12101, + "shred": 19756, + "shred": 33017, + "shredded": 31772, + "shredding": 45534, + "shree": 37410, + "shrek": 35009, + "shrews": 26411, + "shrewsbury": 30921, + "shri": 8838, + "shri": 11424, + "shrimp": 12727, + "shrin": 24865, + "shrine": 16156, + "shrink": 34957, + "shrinking": 41243, + "shrm": 44163, + "shro": 15259, + "shroff": 32081, + "shrop": 22630, + "shropshire": 26344, + "shru": 14911, + "shrub": 41464, + "shrubs": 47975, + "shrun": 46767, + "shs": 16184, + "sht": 44210, + "shti": 38927, + "shu": 2872, + "shu": 17651, + "shua": 33771, + "shub": 40552, + "shud": 45782, + "shuff": 42641, + "shuffle": 21681, + "shui": 45473, + "shuk": 29927, + "shukla": 46829, + "shul": 30721, + "shum": 37383, + "shun": 24479, + "shun": 39594, + "shur": 41032, + "shut": 8702, + "shut": 8282, + "shutdown": 16051, + "shutout": 24385, + "shuts": 28313, + "shutt": 31866, + "shutter": 36235, + "shutter": 33902, + "shutters": 46894, + "shutting": 31383, + "shuttle": 15842, + 
"shwar": 41640, + "shy": 22678, + "shy": 9682, + "si": 564, + "si": 2990, + "sia": 2357, + "siam": 29686, + "siam": 48248, + "siamese": 43161, + "sian": 28510, + "sian": 6221, + "sians": 26583, + "sias": 28645, + "siber": 22206, + "siberia": 39969, + "siberian": 34058, + "sibl": 14338, + "sible": 14507, + "sibling": 43060, + "sibling": 23779, + "siblings": 17156, + "sic": 8278, + "sic": 1118, + "sica": 34125, + "sical": 33875, + "sichuan": 48950, + "sicilian": 45292, + "sicily": 23179, + "sick": 11143, + "sick": 5359, + "sickest": 47972, + "sickle": 41459, + "sickness": 28898, + "sics": 26297, + "sid": 10117, + "sid": 15119, + "sidd": 19842, + "siddi": 35227, + "side": 5869, + "side": 1145, + "sided": 21061, + "sidekick": 44683, + "sidel": 43557, + "sideline": 32056, + "sidelines": 31046, + "sider": 30581, + "siders": 41249, + "sides": 7578, + "sideshow": 46789, + "sidewalk": 23278, + "sidewalks": 43583, + "sideways": 35593, + "siding": 38758, + "sidney": 22598, + "sie": 8533, + "sie": 5685, + "sieg": 49203, + "siege": 18460, + "siegel": 48559, + "siem": 18434, + "siemens": 30147, + "siempre": 44030, + "siena": 33336, + "sienna": 40373, + "sier": 10028, + "sier": 7444, + "sierra": 13552, + "siers": 35923, + "sies": 16367, + "siest": 18323, + "sif": 29300, + "sig": 872, + "sig": 19145, + "sigh": 36303, + "sigh": 15505, + "sighs": 44579, + "sight": 16897, + "sight": 6329, + "sighted": 33034, + "sighting": 17507, + "sightings": 30004, + "sights": 17364, + "sightseeing": 34210, + "sigma": 45075, + "sigma": 15697, + "sign": 5538, + "sign": 2292, + "signage": 21156, + "signal": 10781, + "signaling": 38492, + "signalling": 48426, + "signals": 17150, + "signation": 24347, + "signature": 9189, + "signatures": 21865, + "signed": 3163, + "signee": 39778, + "signi": 34023, + "signific": 6374, + "significance": 23769, + "significant": 8735, + "significantly": 16187, + "signing": 4401, + "signingday": 40282, + "signings": 27731, + "signs": 4659, + "signup": 40791, + "sigue": 49401, + "sii": 36672, + "sik": 19974, + "sik": 22413, + "sika": 31144, + "sikh": 21829, + "sikhs": 45426, + "sil": 1556, + "sil": 8315, + "sila": 41754, + "sile": 37620, + "silen": 39048, + "silence": 8462, + "silenced": 45415, + "silent": 30352, + "silent": 8487, + "silently": 42640, + "silhou": 20589, + "silhouette": 26149, + "silic": 23830, + "silicon": 32412, + "silicon": 17888, + "silicone": 28221, + "silk": 25891, + "silk": 9743, + "silky": 29554, + "sill": 42468, + "sill": 48024, + "silly": 11883, + "silon": 31841, + "sils": 39708, + "silva": 16489, + "silve": 37697, + "silver": 7525, + "silver": 3467, + "silverado": 46160, + "silverstone": 29666, + "silvia": 37289, + "sim": 5026, + "sim": 10740, + "sima": 35871, + "simba": 39492, + "simcoe": 47148, + "sime": 28329, + "simi": 38073, + "simil": 7202, + "similar": 8547, + "similarities": 34716, + "simm": 13001, + "simmons": 14699, + "simo": 37171, + "simon": 8796, + "simon": 6668, + "simona": 46277, + "simone": 19062, + "simons": 33097, + "simp": 2542, + "simple": 19018, + "simple": 4129, + "simpler": 35489, + "simplest": 39588, + "simpli": 16868, + "simplicity": 21262, + "simplified": 36647, + "simplify": 35479, + "simply": 25637, + "simply": 6151, + "simpson": 41805, + "simpson": 11750, + "simpsons": 21092, + "sims": 14021, + "simul": 9845, + "simulated": 46395, + "simulation": 18610, + "simulator": 20821, + "simultaneous": 48816, + "simultaneously": 28575, + "sin": 1303, + "sin": 3421, + "sina": 19541, + "sinai": 33226, + "sinatra": 27262, + "sinc": 30464, + "since": 1855, 
+ "sincere": 24513, + "sincere": 24886, + "sincerely": 25673, + "sinclair": 23100, + "sind": 39598, + "sind": 30877, + "sindh": 20754, + "sindia": 48038, + "sine": 22741, + "sine": 33793, + "sinfo": 47178, + "sing": 1387, + "sing": 1197, + "singapo": 27861, + "singapore": 28879, + "singapore": 6754, + "singer": 33880, + "singer": 5108, + "singers": 15613, + "singersongwriter": 44585, + "singh": 19445, + "singh": 5715, + "singing": 5864, + "single": 19524, + "single": 2688, + "singles": 12025, + "singleton": 46247, + "singly": 16619, + "sings": 13635, + "singul": 34003, + "singular": 44009, + "singularity": 48410, + "sinha": 29416, + "sini": 41781, + "sini": 26319, + "sinister": 31313, + "sink": 37232, + "sink": 14551, + "sinking": 27949, + "sinks": 32710, + "sinn": 36315, + "sinner": 45380, + "sinners": 43436, + "sino": 29759, + "sins": 9345, + "sinthe": 30737, + "sinu": 37351, + "sinus": 47535, + "sio": 10807, + "siob": 40954, + "siology": 46315, + "sion": 5676, + "sion": 1015, + "sional": 14533, + "sionally": 30754, + "sions": 4060, + "sioux": 44695, + "sioux": 24954, + "sip": 16096, + "sipping": 28527, + "sir": 10708, + "sir": 3846, + "sire": 28450, + "siren": 33026, + "sirens": 35907, + "siri": 13986, + "siri": 18394, + "sirius": 23574, + "sirius": 34999, + "siriusxm": 29833, + "sirloin": 46828, + "sis": 18132, + "sis": 2580, + "sisd": 27132, + "sisi": 37892, + "siss": 42929, + "sissy": 27564, + "sist": 20520, + "sista": 37448, + "sister": 17417, + "sister": 3677, + "sisterhood": 37313, + "sisters": 6404, + "sit": 7387, + "sit": 4037, + "sitcom": 30426, + "site": 26792, + "site": 1988, + "sites": 7236, + "sith": 41499, + "sito": 42613, + "sits": 12726, + "sitt": 42988, + "sitter": 40777, + "sittin": 40887, + "sitting": 4919, + "situ": 5562, + "situ": 42536, + "situated": 22030, + "situation": 7144, + "situations": 19096, + "sity": 38177, + "sity": 5477, + "siu": 40174, + "sium": 8090, + "sius": 27595, + "siva": 20991, + "sivan": 36931, + "sive": 23572, + "sive": 1875, + "sively": 10343, + "siveness": 39667, + "sives": 23896, + "sivity": 42738, + "siwon": 29055, + "six": 5968, + "six": 4093, + "sixers": 25941, + "sixteen": 28677, + "sixth": 12909, + "sixties": 44948, + "sixty": 32588, + "siya": 44440, + "size": 38377, + "size": 3235, + "sized": 9832, + "sizes": 10253, + "sizing": 28330, + "sizz": 23778, + "sizzle": 47890, + "sizzling": 35799, + "sj": 7536, + "sj": 16010, + "sjo": 42012, + "sk": 909, + "sk": 2058, + "ska": 7495, + "skag": 31948, + "skan": 46772, + "skar": 27587, + "skar": 26835, + "skate": 13740, + "skate": 12745, + "skateboard": 31777, + "skateboarding": 31352, + "skater": 30337, + "skaters": 39824, + "skates": 31479, + "skc": 44551, + "ske": 6261, + "ske": 25516, + "skel": 36564, + "skelet": 27075, + "skeletal": 37369, + "skeleton": 20062, + "skeletons": 48874, + "skell": 40801, + "skep": 27772, + "skeptical": 44934, + "sker": 37640, + "sker": 33600, + "sket": 3744, + "sketch": 11767, + "sketch": 5269, + "sketchbook": 18899, + "sketched": 38581, + "sketches": 17622, + "sketching": 23228, + "sketchy": 41582, + "skey": 37453, + "ski": 3327, + "ski": 3428, + "skid": 36574, + "skid": 32099, + "skier": 42585, + "skies": 7244, + "skiing": 14400, + "skil": 24543, + "skill": 15598, + "skill": 10604, + "skilled": 17535, + "skillet": 40568, + "skills": 4113, + "skim": 33191, + "skin": 5821, + "skin": 3575, + "skincare": 12648, + "skine": 37300, + "sking": 46215, + "skinned": 42199, + "skinner": 30261, + "skinny": 42729, + "skinny": 15457, + "skins": 11594, + "skip": 39793, + 
"skip": 14296, + "skipped": 40639, + "skipper": 22226, + "skipping": 34867, + "skir": 8919, + "skirt": 12386, + "skirts": 24840, + "skis": 32843, + "skit": 43573, + "skitchen": 42820, + "skittles": 43213, + "sko": 15141, + "sko": 23493, + "skoda": 38668, + "skool": 26743, + "skril": 43149, + "skrillex": 43651, + "sks": 48136, + "sku": 10836, + "skul": 17561, + "skull": 34068, + "skull": 12092, + "skulls": 31804, + "skunk": 42194, + "sky": 3075, + "sky": 2390, + "skybet": 45540, + "skye": 21475, + "skyl": 43554, + "skylar": 45411, + "skyline": 14606, + "skymap": 41734, + "skynews": 40977, + "skype": 17069, + "skyrim": 33693, + "skysports": 39845, + "skysports": 46725, + "skywalker": 32936, + "sl": 2621, + "sl": 7489, + "sla": 2725, + "sla": 26707, + "slab": 24241, + "slabs": 42818, + "slack": 37108, + "slack": 30142, + "slade": 33546, + "slain": 35972, + "slalom": 43540, + "slam": 14891, + "slam": 10131, + "slammed": 29772, + "slams": 18907, + "slan": 44663, + "slan": 47193, + "sland": 11294, + "slang": 33655, + "slap": 48830, + "slap": 21751, + "slapped": 38861, + "slaps": 46796, + "slash": 19749, + "slat": 38966, + "slate": 17919, + "slated": 36094, + "slater": 25968, + "slaugh": 26782, + "slaughter": 19815, + "slaughtered": 46615, + "slav": 47292, + "slava": 41797, + "slave": 14029, + "slavery": 15754, + "slaves": 23833, + "slaw": 28178, + "slay": 48319, + "slay": 19380, + "slayed": 44870, + "slayer": 21605, + "slaying": 27812, + "slays": 45648, + "slc": 21972, + "sle": 1709, + "sleague": 23336, + "sled": 28438, + "sledge": 48750, + "slee": 17642, + "slee": 38977, + "sleek": 23187, + "sleep": 4656, + "sleep": 3840, + "sleeper": 28709, + "sleeping": 6982, + "sleepless": 39779, + "sleepover": 39415, + "sleeps": 16610, + "sleepy": 32572, + "sleepy": 14497, + "sleet": 36948, + "sleeve": 35270, + "sleeve": 10536, + "sleeveless": 38049, + "sleeves": 19691, + "sleg": 47650, + "sleigh": 30865, + "slender": 40331, + "slept": 20388, + "sler": 14066, + "sley": 17198, + "sley": 6496, + "sli": 1811, + "sli": 44824, + "slic": 19692, + "slice": 13431, + "sliced": 28121, + "slices": 28424, + "slick": 18341, + "slide": 27828, + "slide": 8837, + "slider": 37861, + "sliders": 40700, + "slides": 15939, + "slideshow": 42817, + "sliding": 21468, + "slife": 15448, + "sliga": 21080, + "slight": 14297, + "slightly": 8456, + "sligo": 30424, + "slike": 38744, + "slim": 35226, + "slim": 12364, + "slime": 29107, + "sling": 28021, + "sling": 32607, + "slinger": 47269, + "slions": 43363, + "slip": 39785, + "slip": 12105, + "slipknot": 41816, + "slipped": 30344, + "slipper": 39644, + "slippers": 26509, + "slippery": 30814, + "slipping": 36301, + "slips": 30632, + "slist": 33749, + "slit": 47011, + "slive": 31652, + "slo": 4303, + "slo": 36083, + "sloan": 29110, + "sloane": 41553, + "slogan": 23398, + "slogans": 42795, + "slope": 22769, + "slopes": 24066, + "sloppy": 36154, + "slot": 14500, + "sloth": 30007, + "slots": 19238, + "slou": 48493, + "slovak": 23315, + "slovakia": 25994, + "sloven": 17018, + "slovenia": 21037, + "slow": 6674, + "slow": 5444, + "slowdown": 38421, + "slowed": 43793, + "slower": 29181, + "slowing": 29839, + "slowly": 9568, + "slows": 46855, + "slp": 45599, + "slr": 21325, + "sls": 33651, + "slt": 39283, + "sltd": 36388, + "slu": 7224, + "slu": 47456, + "slug": 34190, + "slugger": 48671, + "slum": 46754, + "slumber": 44295, + "slump": 35588, + "slur": 30476, + "slush": 39815, + "slv": 45526, + "sly": 28145, + "sly": 21062, + "sm": 978, + "sm": 2764, + "sma": 4357, + "sma": 11854, + "smack": 
21280, + "smack": 30026, + "smackdown": 26138, + "smafia": 47686, + "smag": 32212, + "smal": 48379, + "small": 5244, + "small": 2442, + "smallbiz": 41724, + "smallbiz": 18987, + "smallbusiness": 21316, + "smalle": 18490, + "smaller": 12431, + "smallest": 18686, + "smalls": 41696, + "sman": 9612, + "smar": 3201, + "smart": 5383, + "smart": 4115, + "smartcities": 34822, + "smartcity": 33973, + "smarter": 18990, + "smartest": 37092, + "smarthome": 47726, + "smartphone": 11290, + "smartphones": 22212, + "smartwatch": 35798, + "smash": 17258, + "smash": 10332, + "smashbros": 44897, + "smashed": 18410, + "smashes": 45657, + "smashing": 19632, + "smatter": 16537, + "smb": 30446, + "smc": 31375, + "smc": 28312, + "smd": 34582, + "sme": 11758, + "sme": 15650, + "smear": 37546, + "smel": 28476, + "smell": 9688, + "smelling": 32493, + "smells": 14668, + "smelly": 46145, + "smen": 15961, + "smer": 48526, + "smere": 39629, + "smes": 26141, + "smg": 46876, + "smh": 9623, + "smi": 5655, + "smi": 40049, + "smil": 33937, + "smile": 27641, + "smile": 3490, + "smiled": 34362, + "smiles": 8726, + "smiley": 22925, + "smiling": 9200, + "smir": 24667, + "smith": 10527, + "smith": 2915, + "smiths": 27872, + "smithson": 25372, + "smithsonian": 31209, + "smm": 19510, + "smma": 42370, + "smo": 2513, + "smo": 13437, + "smobile": 38923, + "smog": 44425, + "smoke": 20381, + "smoke": 6664, + "smoked": 11161, + "smoker": 32348, + "smokers": 29571, + "smokes": 40336, + "smokey": 23670, + "smokin": 32825, + "smoking": 9038, + "smoky": 25549, + "smol": 29939, + "smol": 40403, + "smoo": 5430, + "smooth": 10958, + "smooth": 8990, + "smoother": 44271, + "smoothie": 16668, + "smoothies": 34458, + "smoothly": 32380, + "smore": 48323, + "smp": 32260, + "smriti": 49227, + "sms": 10409, + "smt": 26672, + "smtown": 26072, + "smu": 10878, + "smu": 30458, + "smug": 41021, + "smugg": 28130, + "smuggling": 34146, + "smur": 24708, + "smusic": 19191, + "smw": 44929, + "smx": 46699, + "smy": 14381, + "smyth": 44822, + "sn": 1672, + "sn": 5844, + "sna": 4032, + "snack": 47548, + "snack": 10039, + "snacking": 46474, + "snacks": 12349, + "snag": 34789, + "snag": 28043, + "snagged": 48534, + "snail": 23132, + "snails": 34928, + "snake": 30133, + "snake": 8798, + "snakes": 19605, + "snap": 4578, + "snap": 7404, + "snapback": 31234, + "snapchat": 7799, + "snapmatic": 45907, + "snapp": 10185, + "snapped": 15543, + "snapper": 31677, + "snapping": 31581, + "snaps": 16890, + "snapshot": 18243, + "snar": 30810, + "snare": 40651, + "snat": 18457, + "snatch": 35302, + "snatched": 44821, + "snation": 14362, + "snazzy": 48963, + "snc": 39918, + "sne": 3791, + "sne": 46503, + "sneak": 27871, + "sneak": 6917, + "sneaker": 31698, + "sneaker": 24781, + "sneakers": 17397, + "sneaking": 34633, + "sneakpeek": 47831, + "sneaks": 40926, + "sneaky": 21293, + "snee": 42095, + "snell": 46410, + "sner": 31424, + "snes": 26667, + "snews": 18623, + "snf": 47651, + "sng": 41549, + "snhl": 43093, + "sni": 7186, + "sni": 35570, + "snickers": 49127, + "sniff": 37841, + "snip": 42954, + "sniper": 22157, + "snippet": 37531, + "snippets": 44001, + "snl": 16011, + "sno": 8567, + "sno": 17802, + "snoo": 11352, + "snooker": 25657, + "snoop": 44503, + "snoop": 27754, + "snoopdogg": 48388, + "snoopy": 41967, + "snooze": 40718, + "snor": 16590, + "snoring": 44560, + "snorkel": 44285, + "snorkeling": 48103, + "snow": 3880, + "snow": 2583, + "snowball": 39254, + "snowboard": 33403, + "snowboarding": 32397, + "snowday": 37982, + "snowden": 32154, + "snowdon": 47107, + "snowdonia": 
36088, + "snowed": 45073, + "snowfall": 21714, + "snowflake": 33447, + "snowflakes": 38618, + "snowing": 21443, + "snowman": 22668, + "snowstorm": 38777, + "snowy": 14191, + "snp": 15301, + "sns": 36343, + "snsd": 27961, + "snt": 34834, + "snu": 9694, + "snuck": 36522, + "snug": 45169, + "snuggle": 31327, + "snuggles": 48165, + "sny": 17526, + "snyder": 22106, + "snz": 37678, + "so": 759, + "so": 706, + "soa": 39584, + "soak": 24839, + "soaked": 26592, + "soaking": 26750, + "soap": 26086, + "soap": 11088, + "soaps": 40958, + "soar": 48997, + "soar": 22241, + "soaring": 27968, + "soars": 41348, + "sob": 24900, + "sob": 35507, + "sobbing": 36691, + "sober": 30969, + "sober": 24487, + "sobre": 42768, + "sobri": 49308, + "sobs": 43636, + "soc": 3253, + "soc": 7741, + "soca": 49239, + "socal": 46470, + "socal": 20450, + "soccer": 16268, + "soccer": 4233, + "socceroos": 41997, + "socent": 30831, + "sochi": 21014, + "soci": 1720, + "social": 4803, + "social": 2346, + "socialism": 23372, + "socialist": 18450, + "socialists": 43839, + "socially": 24555, + "socialmedi": 23813, + "socialmedia": 9600, + "socialmediamarketing": 31790, + "societal": 40058, + "societies": 25855, + "society": 3757, + "socio": 44319, + "socio": 42790, + "sociology": 32373, + "sock": 29801, + "sock": 18277, + "socket": 28657, + "socks": 8774, + "socorro": 46409, + "socute": 45086, + "sod": 31435, + "soda": 13533, + "sodium": 29070, + "soe": 44136, + "soe": 25498, + "soever": 34024, + "sof": 1571, + "sof": 41187, + "sofa": 15723, + "soff": 35290, + "soff": 30684, + "sofficial": 20563, + "sofi": 41537, + "sofia": 18914, + "sofinstagram": 17301, + "soft": 12778, + "soft": 3773, + "softball": 8369, + "softer": 44462, + "softhe": 23127, + "softly": 34958, + "software": 35941, + "software": 5847, + "softwitter": 11311, + "sog": 44775, + "soggy": 41168, + "sohn": 49267, + "soho": 47749, + "soho": 17592, + "soi": 40495, + "soil": 33417, + "soil": 9216, + "soils": 34891, + "soir": 43427, + "sok": 43456, + "sol": 1175, + "sol": 9941, + "sola": 40086, + "solace": 42567, + "solar": 16990, + "solar": 5199, + "solareclipse": 44727, + "sold": 33116, + "sold": 3939, + "soldi": 5098, + "soldier": 9355, + "soldiers": 7547, + "sole": 10519, + "sole": 8576, + "soleil": 33148, + "solely": 27913, + "solent": 47783, + "soles": 22682, + "soli": 3911, + "solic": 19369, + "solicitor": 45647, + "solicitors": 46000, + "solid": 30626, + "solid": 6148, + "solidar": 10415, + "solidarity": 10983, + "solidi": 46136, + "solids": 49070, + "solihull": 45293, + "solit": 37039, + "solitaire": 47257, + "solitary": 33094, + "solitude": 33199, + "solo": 17626, + "solo": 5797, + "soloist": 46391, + "solom": 15768, + "solomon": 19785, + "solos": 44868, + "solst": 20298, + "solstice": 21359, + "solu": 2487, + "solution": 4575, + "solutions": 5140, + "solve": 8917, + "solved": 13451, + "solves": 42740, + "solving": 15581, + "som": 734, + "som": 10672, + "soma": 36170, + "somal": 40281, + "somali": 26231, + "somalia": 17051, + "somaliland": 43315, + "some": 1132, + "some": 836, + "somebody": 8305, + "someday": 17127, + "somehow": 11735, + "someone": 2100, + "somer": 9656, + "somerhalder": 33990, + "somerset": 14926, + "somerville": 41409, + "somes": 38124, + "somethin": 33541, + "something": 28316, + "something": 2006, + "sometime": 21464, + "sometimes": 4237, + "somewhat": 17864, + "somewhere": 8119, + "somm": 42726, + "somme": 30625, + "sommer": 44954, + "somos": 24951, + "son": 1176, + "son": 825, + "sona": 21249, + "sonam": 40096, + "sonar": 48235, + "sonata": 
37009, + "sone": 29599, + "song": 6868, + "song": 2295, + "songs": 4641, + "songwriter": 13034, + "songwriters": 39583, + "songwriting": 33567, + "songz": 49302, + "soni": 34899, + "soni": 35911, + "sonia": 20409, + "sonic": 23785, + "sonic": 9132, + "sonics": 48511, + "sonja": 46102, + "sonline": 23412, + "sonny": 43000, + "sonny": 20880, + "sono": 44109, + "sonom": 48596, + "sonoma": 26269, + "sons": 5502, + "sonsof": 46676, + "sont": 31063, + "sonthe": 40923, + "sony": 16042, + "sony": 8748, + "sonya": 39172, + "soo": 5517, + "soo": 8602, + "soom": 39771, + "soon": 27559, + "soon": 1745, + "sooner": 18968, + "sooners": 30449, + "sooo": 11526, + "soooo": 13658, + "sooooo": 21199, + "soooooo": 34859, + "soor": 46698, + "soothe": 44424, + "soothing": 27730, + "sop": 3974, + "sop": 19194, + "soph": 34963, + "sophi": 6192, + "sophia": 16790, + "sophie": 38648, + "sophie": 12357, + "sophistic": 17646, + "sophisticated": 20833, + "sophom": 13696, + "sophomore": 15242, + "sophomores": 47645, + "soprano": 28880, + "soproud": 44479, + "sor": 1852, + "sor": 16872, + "sora": 38719, + "sorbet": 39994, + "sore": 43330, + "sore": 15454, + "sored": 6731, + "soren": 38907, + "sorg": 28152, + "sori": 38588, + "sorority": 30059, + "soros": 33248, + "sorren": 44012, + "sorrow": 28020, + "sorrows": 47924, + "sorry": 25745, + "sorry": 3675, + "sorrynotsorry": 37105, + "sort": 8450, + "sorta": 34700, + "sorted": 13221, + "sorting": 19198, + "sorts": 12577, + "sory": 16257, + "sos": 25145, + "sos": 5792, + "sosa": 45433, + "sosfam": 47709, + "sot": 41542, + "sot": 34116, + "sothe": 32145, + "sotho": 45496, + "soto": 27947, + "sotto": 26047, + "sotu": 32286, + "sou": 1101, + "sou": 24293, + "sought": 18874, + "soul": 8701, + "soul": 3755, + "soulful": 30196, + "soulmate": 38130, + "souls": 10951, + "soun": 19474, + "sound": 5236, + "sound": 3608, + "soundcheck": 31394, + "soundcloud": 15190, + "sounded": 28287, + "sounders": 44933, + "sounding": 21351, + "sounds": 5694, + "soundtrack": 11389, + "soup": 7077, + "soups": 45052, + "sour": 2235, + "sour": 12049, + "source": 23698, + "source": 3634, + "sourced": 23340, + "sources": 5124, + "sourcing": 19574, + "sourdough": 29921, + "souri": 11674, + "sous": 32093, + "sousa": 46296, + "sout": 38156, + "sout": 32732, + "south": 2938, + "south": 2045, + "southafrica": 15184, + "southampton": 15767, + "southbank": 44173, + "southbound": 22932, + "southeast": 13942, + "southeastern": 26813, + "southend": 25583, + "souther": 33330, + "southern": 17704, + "southern": 5036, + "southgate": 47262, + "southkorea": 43552, + "southport": 37446, + "southside": 36436, + "southsudan": 30419, + "southwark": 39098, + "southwe": 46443, + "southwest": 13320, + "southwestern": 30157, + "souven": 20210, + "souvenir": 24811, + "souvenirs": 48460, + "souza": 29424, + "sov": 29737, + "sover": 31876, + "sovere": 17736, + "sovereign": 29418, + "sovereign": 26337, + "sovereignty": 31701, + "soviet": 14274, + "sow": 33089, + "sowe": 36130, + "soweto": 47070, + "sown": 49369, + "sox": 39556, + "sox": 8657, + "soy": 16524, + "soy": 15010, + "soybean": 34606, + "soybeans": 40840, + "soyu": 39578, + "soyuz": 43842, + "sp": 588, + "sp": 4393, + "spa": 7852, + "spa": 6692, + "spac": 10336, + "space": 7857, + "space": 2138, + "spacecraft": 25940, + "spaces": 9006, + "spaceship": 34317, + "spacex": 22511, + "spacey": 48770, + "spacious": 24769, + "spad": 45362, + "spade": 32562, + "spades": 48368, + "spaghetti": 18440, + "spain": 5083, + "spal": 26018, + "spam": 29712, + "spam": 14624, + "span": 4270, 
+ "span": 14537, + "spandex": 41686, + "spani": 16721, + "spaniel": 35435, + "spanish": 29966, + "spanish": 6013, + "spann": 25323, + "spanning": 38638, + "spans": 45407, + "spaper": 34548, + "spar": 3378, + "spar": 34576, + "spare": 12615, + "spares": 39505, + "spark": 9555, + "spark": 11047, + "sparked": 32647, + "sparkle": 18287, + "sparkles": 36410, + "sparkling": 17893, + "sparkly": 30542, + "sparks": 15046, + "sparky": 47198, + "sparring": 42161, + "sparrow": 22888, + "spart": 10143, + "sparta": 38401, + "spartan": 26582, + "spartan": 24225, + "spartans": 20457, + "sparty": 36477, + "spas": 31714, + "spati": 19200, + "spatial": 22022, + "spaw": 31605, + "spawn": 29166, + "spay": 40634, + "spc": 20492, + "spca": 37018, + "spd": 37717, + "spd": 28307, + "spdwy": 45981, + "spe": 876, + "spe": 36676, + "speak": 20599, + "speak": 4208, + "speake": 46077, + "speaker": 25764, + "speaker": 4914, + "speakers": 7675, + "speaking": 3714, + "speaks": 5661, + "spear": 23277, + "spear": 30420, + "speare": 43859, + "spears": 20242, + "spec": 1711, + "spec": 18596, + "speci": 1969, + "special": 11422, + "special": 1689, + "specialist": 10630, + "specialists": 21719, + "speciality": 46904, + "specialized": 23265, + "specializes": 48533, + "specially": 4513, + "specials": 11983, + "specialty": 18262, + "species": 6330, + "specific": 10528, + "specifically": 17174, + "specification": 46394, + "specifications": 39705, + "specified": 48114, + "specimen": 30263, + "specimens": 42715, + "specs": 24093, + "spect": 3416, + "spectac": 7242, + "spectacle": 34342, + "spectacular": 8404, + "spectator": 32372, + "spectators": 39306, + "spective": 6633, + "spector": 48676, + "spectral": 45441, + "spectre": 35998, + "spectro": 27646, + "spectrum": 13532, + "specul": 19209, + "speculation": 30898, + "sped": 38813, + "spee": 4050, + "speech": 19556, + "speech": 4902, + "speeches": 25208, + "speechless": 23152, + "speed": 6860, + "speed": 4163, + "speeding": 27264, + "speeds": 22017, + "speedway": 11480, + "speedy": 21603, + "spel": 41887, + "spell": 22784, + "spell": 11230, + "spelled": 24339, + "spelling": 15614, + "spells": 25335, + "spelt": 38316, + "spen": 5087, + "spence": 33324, + "spencer": 27509, + "spencer": 10678, + "spend": 4664, + "spending": 5961, + "spends": 22508, + "spent": 4429, + "speople": 33035, + "sper": 8213, + "sper": 15313, + "sperm": 35781, + "sperson": 22687, + "spf": 34973, + "spg": 34623, + "sph": 28909, + "sph": 24684, + "sphe": 33691, + "spher": 18349, + "sphere": 6987, + "spheres": 37478, + "spheric": 21744, + "sphin": 39237, + "sphinx": 46487, + "spho": 20442, + "sphoto": 38594, + "sphy": 43808, + "spi": 3174, + "spi": 37080, + "spic": 17264, + "spice": 29761, + "spice": 10141, + "spiced": 24267, + "spicer": 37627, + "spices": 21194, + "spicy": 10915, + "spide": 36801, + "spider": 11963, + "spider": 7622, + "spiderman": 39808, + "spiderman": 18427, + "spiders": 23141, + "spidey": 41706, + "spie": 28573, + "spie": 28746, + "spied": 43998, + "spiegel": 45351, + "spiel": 28435, + "spiel": 37690, + "spielberg": 37569, + "spies": 25374, + "spieth": 43254, + "spike": 35306, + "spike": 15310, + "spiked": 47014, + "spikes": 29582, + "spil": 47765, + "spill": 43933, + "spill": 18006, + "spilled": 33206, + "spilling": 49006, + "spills": 35796, + "spin": 6288, + "spin": 9226, + "spinach": 14747, + "spinal": 23925, + "spine": 48221, + "spine": 19646, + "sping": 47113, + "spinner": 29924, + "spinning": 13987, + "spino": 40848, + "spinoff": 42513, + "spinrilla": 46064, + "spins": 27243, + "spion": 
39604, + "spionage": 41838, + "spir": 3745, + "spiral": 19873, + "spiration": 38126, + "spire": 27439, + "spired": 40650, + "spires": 46938, + "spiri": 4024, + "spirit": 18224, + "spirit": 4071, + "spirited": 34701, + "spirits": 13192, + "spiritu": 7237, + "spiritual": 46076, + "spiritual": 9473, + "spirituality": 22165, + "spiro": 40085, + "spit": 18115, + "spit": 23177, + "spite": 26060, + "spitfire": 31126, + "spitting": 40721, + "spl": 2470, + "spl": 33052, + "spla": 4809, + "splac": 16059, + "splace": 38743, + "splash": 43641, + "splash": 11879, + "splat": 15733, + "splatoon": 22565, + "splay": 3169, + "splen": 18552, + "splend": 29861, + "splendid": 21016, + "splendor": 46262, + "splin": 38090, + "split": 25443, + "split": 9109, + "splits": 34897, + "splitting": 37210, + "splus": 40866, + "spn": 35467, + "spn": 19414, + "spnfamily": 38566, + "spo": 1261, + "spo": 21085, + "spock": 43918, + "spoil": 25600, + "spoiled": 21399, + "spoiler": 16512, + "spoilers": 18326, + "spoils": 42436, + "spoilt": 35358, + "spokane": 24528, + "spoke": 13890, + "spoke": 6518, + "spoken": 12979, + "spokesman": 31632, + "spokesperson": 26234, + "spol": 22476, + "spol": 8132, + "spoli": 34301, + "spolice": 37406, + "spon": 1715, + "spon": 48216, + "sponge": 22861, + "sponge": 24345, + "spongebob": 25089, + "spons": 5597, + "sponsor": 10424, + "sponsor": 7574, + "sponsored": 7197, + "sponsoring": 16181, + "sponsors": 11005, + "sponsorship": 17632, + "spontaneous": 32465, + "spoo": 11248, + "spooky": 15369, + "spool": 49152, + "spoon": 27001, + "spoon": 14024, + "spoons": 29661, + "spor": 1475, + "spor": 33746, + "sport": 4379, + "sport": 2364, + "sporting": 32620, + "sporting": 8944, + "sports": 6436, + "sports": 2054, + "sportsc": 40114, + "sportscar": 46931, + "sportscenter": 39157, + "sportsman": 39020, + "sportsmanship": 34858, + "sportsnet": 34144, + "sportswear": 39747, + "sporty": 33346, + "spot": 3223, + "spot": 3049, + "spotify": 7193, + "spotlight": 7901, + "spots": 7670, + "spotted": 4533, + "spotter": 30742, + "spotting": 15885, + "spouse": 24724, + "spout": 48993, + "spp": 47567, + "spr": 1536, + "spr": 19417, + "spra": 12966, + "spraw": 46590, + "spray": 37885, + "spray": 10449, + "sprayed": 40022, + "spraying": 39224, + "spre": 18740, + "spread": 20620, + "spread": 5284, + "spreading": 11821, + "spreads": 27579, + "spree": 21851, + "spri": 35498, + "spride": 26685, + "spring": 5166, + "spring": 2420, + "springbreak": 37753, + "springer": 30117, + "springfield": 16599, + "springs": 7308, + "springst": 32132, + "springsteen": 28367, + "springtime": 28285, + "springtraining": 49364, + "springwatch": 29239, + "sprink": 15817, + "sprinkle": 42897, + "sprinkler": 48754, + "sprinkles": 37326, + "sprint": 29248, + "sprint": 10751, + "sprinter": 36947, + "sprints": 36404, + "sprite": 32544, + "spro": 13902, + "spro": 37403, + "sproject": 37802, + "sproud": 37686, + "sprout": 35863, + "sprouts": 25756, + "spru": 17041, + "spruce": 23812, + "sprung": 32968, + "sps": 13869, + "spu": 23566, + "spun": 47922, + "spun": 32852, + "spur": 15206, + "spur": 20361, + "spurs": 10916, + "spursofficial": 45290, + "sput": 47521, + "spx": 20584, + "spy": 13861, + "spy": 6656, + "spyder": 39952, + "spying": 36227, + "sq": 9370, + "sq": 11590, + "sqft": 41912, + "sql": 42759, + "sql": 18938, + "sqm": 47978, + "sqn": 41209, + "squ": 1653, + "squad": 13892, + "squad": 4234, + "squadron": 18579, + "squads": 36590, + "square": 19314, + "square": 3999, + "squared": 32967, + "squares": 26972, + "squash": 13312, + "squat": 
44628, + "squat": 30680, + "squats": 40213, + "sque": 9721, + "sque": 8097, + "squee": 14420, + "squeeze": 21684, + "squeezed": 40413, + "squid": 42057, + "squid": 22553, + "squir": 9683, + "squire": 48090, + "squirrel": 14004, + "squirrels": 26623, + "squish": 42607, + "squishy": 47001, + "sr": 3437, + "sr": 5428, + "srbachchan": 32353, + "src": 23445, + "sre": 17748, + "sri": 11051, + "sri": 9276, + "sridevi": 46301, + "srilan": 15559, + "srilanka": 16922, + "srin": 26818, + "srinagar": 33671, + "srini": 41899, + "sriracha": 42743, + "sris": 27851, + "srisri": 32966, + "srk": 44982, + "srk": 11216, + "srl": 33808, + "srp": 43004, + "srs": 41764, + "srsly": 44179, + "srt": 28139, + "sru": 44152, + "srugby": 40526, + "ss": 690, + "ss": 632, + "ssa": 6088, + "ssal": 31330, + "ssal": 35936, + "ssb": 37511, + "ssc": 21692, + "ssc": 20364, + "ssd": 23107, + "sse": 9030, + "sse": 8938, + "ssed": 38755, + "ssed": 1804, + "ssel": 17402, + "ssel": 19373, + "sseldorf": 47792, + "ssell": 42388, + "ssels": 8355, + "ssen": 39408, + "ssen": 22645, + "sser": 20445, + "sses": 1802, + "ssett": 44103, + "ssf": 33239, + "ssg": 40707, + "ssh": 48866, + "ssi": 834, + "ssi": 14953, + "ssia": 22238, + "ssian": 31218, + "ssible": 47099, + "ssic": 27774, + "ssic": 17077, + "ssie": 7572, + "ssier": 26422, + "ssil": 15026, + "ssin": 42660, + "ssing": 2112, + "ssion": 16050, + "ssion": 1627, + "ssional": 13727, + "ssionism": 24787, + "ssionist": 27682, + "ssions": 4137, + "ssive": 2734, + "ssively": 28060, + "ssl": 32195, + "ssler": 30287, + "ssly": 24904, + "ssn": 39116, + "ssnhq": 47998, + "sso": 25900, + "sso": 7914, + "ssoccer": 32546, + "sson": 36124, + "sson": 7271, + "ssor": 35152, + "ssp": 31101, + "ssr": 39880, + "sss": 11176, + "ssss": 30676, + "ssss": 15880, + "sssss": 24298, + "sst": 40396, + "ssu": 35351, + "ssummit": 49301, + "ssus": 31286, + "ssw": 36937, + "ssy": 22519, + "ssy": 8661, + "st": 522, + "st": 545, + "sta": 1363, + "sta": 2745, + "stab": 7726, + "stab": 29974, + "stabbed": 24534, + "stabbing": 25474, + "stabil": 42576, + "stabili": 23903, + "stability": 16716, + "stable": 44427, + "stable": 10492, + "stables": 34218, + "stac": 10175, + "stacey": 41653, + "stacey": 24262, + "stache": 23616, + "stack": 24723, + "stack": 11257, + "stacked": 24990, + "stacking": 39836, + "stacks": 24734, + "stacy": 26628, + "stad": 15832, + "stad": 16485, + "stade": 38198, + "stadi": 26587, + "stadion": 48815, + "stadium": 3390, + "stadiums": 38852, + "stadt": 22713, + "staf": 2367, + "staff": 31188, + "staff": 2813, + "staffer": 38494, + "staffers": 44994, + "staffing": 32932, + "stafford": 25006, + "staffordshire": 29198, + "staffs": 36098, + "stag": 12088, + "stag": 20277, + "stage": 23182, + "stage": 2170, + "staged": 19906, + "stages": 12297, + "staggering": 37315, + "staging": 27026, + "stagram": 19503, + "stags": 45936, + "stain": 3933, + "stain": 14603, + "stained": 13751, + "staining": 32523, + "stainless": 12320, + "stains": 32008, + "stair": 7240, + "stair": 17662, + "staircase": 22777, + "stairs": 9577, + "stairway": 45559, + "stak": 39144, + "stake": 15955, + "stake": 7937, + "stakeholder": 39122, + "stakeholders": 22968, + "stakes": 7519, + "staking": 47082, + "stal": 3861, + "stal": 5535, + "stale": 42471, + "stalert": 25450, + "stalin": 28346, + "stalk": 40826, + "stalk": 14878, + "stalker": 26777, + "stalking": 24721, + "stalks": 45886, + "stall": 24636, + "stall": 12058, + "stalled": 40362, + "stallion": 28273, + "stallions": 44787, + "stallone": 40969, + "stalls": 25427, + "stam": 4663, + 
"stamatic": 30904, + "stamford": 27843, + "stamina": 48753, + "stamp": 28694, + "stamp": 12771, + "stampcollecting": 42852, + "stamped": 38356, + "stampede": 25384, + "stamps": 13827, + "stan": 2203, + "stan": 2434, + "stana": 33311, + "stanbul": 11231, + "stance": 48900, + "stance": 3542, + "stances": 15054, + "stand": 1819, + "stand": 2087, + "standalone": 44887, + "standard": 35780, + "standard": 5807, + "standardi": 30247, + "standards": 9022, + "standby": 36184, + "standing": 39934, + "standing": 2862, + "standings": 19835, + "standoff": 31821, + "standout": 23131, + "standre": 48309, + "stands": 6446, + "standup": 35108, + "standup": 24964, + "standwith": 19540, + "stanford": 36219, + "stanford": 15087, + "stang": 12536, + "stani": 38228, + "stanis": 37711, + "stanley": 19048, + "stanley": 10079, + "stanleycup": 28662, + "stans": 26564, + "stant": 41576, + "stant": 4906, + "stanton": 25400, + "stap": 10438, + "staple": 22695, + "staples": 23646, + "stapleton": 45228, + "star": 993, + "star": 1565, + "starbuck": 48519, + "starbucks": 9499, + "starch": 47837, + "starcraft": 48871, + "stardom": 44616, + "stardust": 34337, + "stare": 18094, + "stared": 47772, + "stares": 37916, + "starfish": 44283, + "stargate": 41099, + "stargazing": 49328, + "staring": 13800, + "stark": 40446, + "stark": 15353, + "starlight": 32197, + "starling": 46205, + "starmagic": 48023, + "starplus": 37815, + "starr": 19186, + "starred": 24180, + "starrer": 41311, + "starring": 6660, + "starry": 30963, + "stars": 2895, + "starship": 37166, + "start": 17466, + "start": 1572, + "started": 2760, + "starter": 7800, + "starters": 22222, + "starting": 2530, + "startrek": 30642, + "startrek": 15349, + "starts": 3105, + "startu": 6996, + "startup": 18049, + "startup": 5882, + "startups": 9056, + "starve": 46957, + "starving": 30473, + "starwar": 17287, + "starwars": 26239, + "starwars": 7887, + "starz": 25928, + "stas": 19866, + "stash": 27711, + "stasy": 45942, + "stat": 3004, + "stat": 15216, + "state": 3492, + "state": 1295, + "statec": 33931, + "stated": 19629, + "statedept": 41458, + "statefair": 40305, + "statement": 5401, + "statements": 19513, + "staten": 38263, + "stateof": 35195, + "states": 22125, + "states": 4218, + "statesman": 35301, + "stateu": 44248, + "statewide": 29561, + "stati": 9622, + "static": 16363, + "stating": 35147, + "station": 13498, + "station": 2631, + "stationary": 29493, + "stationed": 47618, + "stationery": 33851, + "stations": 10051, + "statistical": 29349, + "statistics": 14165, + "stats": 7294, + "statu": 32481, + "statue": 8222, + "statues": 24363, + "status": 6414, + "stau": 28550, + "staur": 3709, + "stav": 20285, + "stax": 32235, + "stay": 4714, + "stay": 2277, + "stayed": 13805, + "staying": 8993, + "stays": 13311, + "staytuned": 39285, + "stc": 29859, + "std": 30477, + "ste": 795, + "ste": 2686, + "stea": 46614, + "stead": 16101, + "stead": 11031, + "steadily": 35049, + "steady": 12937, + "steak": 26955, + "steak": 8913, + "steakhouse": 35031, + "steaks": 30655, + "steal": 37070, + "steal": 10181, + "stealing": 14242, + "steals": 20224, + "stealth": 25327, + "steam": 10962, + "steam": 6972, + "steamboat": 41121, + "steamed": 29007, + "steamer": 49075, + "steaming": 43746, + "steampunk": 24130, + "steamy": 43104, + "stec": 46713, + "stech": 48949, + "stech": 32455, + "sted": 20426, + "sted": 1356, + "stee": 31793, + "steed": 48293, + "steel": 6938, + "steel": 4726, + "steele": 19460, + "steelers": 14430, + "steen": 42851, + "steen": 18625, + "steep": 28648, + "steep": 20714, + 
"steer": 27612, + "steering": 19833, + "stef": 29158, + "stefan": 15004, + "stefan": 18829, + "stefani": 38319, + "stefano": 30719, + "steff": 30075, + "stein": 13653, + "stein": 5818, + "steiner": 36314, + "stel": 9102, + "stel": 10798, + "stell": 22355, + "stella": 46178, + "stella": 17869, + "stellar": 13810, + "stellen": 42754, + "stem": 24342, + "stem": 6761, + "stemc": 40486, + "stems": 31503, + "sten": 7652, + "sten": 7877, + "stencil": 47854, + "stennis": 45636, + "step": 15572, + "step": 3348, + "steph": 3522, + "steph": 16251, + "stephan": 37312, + "stephani": 48121, + "stephanie": 14361, + "stephen": 10421, + "stephen": 6078, + "stephenking": 46361, + "stephens": 22256, + "stephenson": 37280, + "stepped": 18384, + "stepping": 15906, + "steps": 5408, + "ster": 1022, + "ster": 881, + "stere": 9229, + "stered": 6935, + "stereo": 15992, + "stereo": 17400, + "stereotypes": 27890, + "steria": 38804, + "stering": 14175, + "sterling": 45790, + "sterling": 9378, + "stern": 36254, + "stern": 2945, + "steroids": 37670, + "sterone": 39418, + "sters": 2132, + "stery": 24232, + "stest": 8556, + "stev": 11640, + "steve": 7412, + "steve": 3803, + "steven": 10973, + "steven": 8016, + "stevens": 13877, + "stevenson": 25091, + "stevie": 42104, + "stevie": 18969, + "stew": 17906, + "stewar": 28453, + "steward": 34980, + "steward": 43355, + "stewards": 49294, + "stewardship": 36720, + "stewart": 8120, + "stfu": 47000, + "stg": 48387, + "stgeorge": 43698, + "sth": 13456, + "sth": 34004, + "sthe": 16491, + "sthel": 42863, + "sti": 860, + "sti": 12439, + "stia": 26492, + "stible": 25835, + "stic": 5868, + "stic": 1561, + "stical": 16660, + "stically": 19041, + "stick": 5483, + "stick": 4987, + "sticker": 11270, + "stickers": 11613, + "sticking": 21021, + "sticks": 10016, + "sticky": 18887, + "stics": 5449, + "stie": 38164, + "stie": 11000, + "stier": 42069, + "sties": 16428, + "stiff": 43471, + "stiff": 21441, + "stig": 4088, + "stig": 42551, + "stigate": 15390, + "stigma": 20619, + "stik": 42247, + "stil": 21790, + "stil": 37519, + "stiles": 33028, + "still": 13209, + "still": 1170, + "stills": 20259, + "stim": 18269, + "stime": 24711, + "stimul": 16434, + "stimulate": 42380, + "stimulating": 41237, + "stimulation": 39530, + "stimulus": 47283, + "stin": 2588, + "stin": 4025, + "stina": 22359, + "stine": 7098, + "sting": 19868, + "sting": 1271, + "stingly": 49332, + "stingray": 43229, + "stink": 38213, + "stinky": 44957, + "stino": 40658, + "stint": 33531, + "stion": 10812, + "stip": 39869, + "stips": 44756, + "stique": 43305, + "stir": 12416, + "stir": 19564, + "stirling": 23128, + "stirring": 39205, + "stis": 45224, + "stit": 14110, + "stitch": 30003, + "stitch": 14771, + "stitched": 36540, + "stitcher": 48204, + "stitches": 32360, + "stitching": 45208, + "stitu": 14585, + "stitutes": 40479, + "stive": 22426, + "stix": 48829, + "stjohn": 36153, + "stl": 14179, + "stl": 12527, + "stlblues": 44138, + "stlcards": 28644, + "stle": 7698, + "stles": 48638, + "stlouis": 40358, + "stlouis": 39516, + "stm": 28333, + "stn": 27175, + "sto": 928, + "sto": 5723, + "stock": 5899, + "stock": 3206, + "stocked": 23552, + "stockholm": 16024, + "stocki": 42944, + "stocking": 17335, + "stockings": 28040, + "stockmarket": 40359, + "stockport": 35569, + "stocks": 9321, + "stockton": 26130, + "stoday": 22392, + "stok": 43782, + "stoke": 31338, + "stoke": 13550, + "stoked": 13160, + "stokes": 27512, + "stol": 11401, + "stol": 6700, + "stole": 10995, + "stolen": 8704, + "stolic": 45020, + "stom": 2343, + "stom": 38068, + 
"stoma": 43545, + "stomach": 14722, + "stomp": 40165, + "stomping": 46144, + "ston": 4101, + "ston": 1839, + "stone": 7694, + "stone": 2441, + "stoned": 36248, + "stonehenge": 42417, + "stoner": 35131, + "stoner": 29115, + "stones": 42659, + "stones": 6885, + "stonewall": 39688, + "stoney": 44198, + "stony": 41717, + "stony": 35691, + "stoo": 24505, + "stood": 9151, + "stool": 34413, + "stool": 22314, + "stop": 6005, + "stop": 1691, + "stopbrexit": 48680, + "stopp": 15738, + "stopped": 6015, + "stopper": 32147, + "stoppers": 34457, + "stopping": 10735, + "stops": 9822, + "stopthe": 26463, + "stor": 809, + "stor": 17740, + "storage": 6824, + "store": 17769, + "store": 2183, + "stored": 28257, + "stores": 6370, + "storey": 24025, + "storians": 34628, + "stories": 3784, + "storing": 40087, + "stork": 46452, + "storm": 7434, + "storm": 2819, + "stormed": 45939, + "stormhour": 12161, + "storming": 24842, + "storms": 6464, + "stormtrooper": 49218, + "stormy": 20075, + "stors": 7178, + "story": 6512, + "story": 1134, + "storyline": 37079, + "storymonth": 23717, + "storyteller": 35882, + "storytelling": 14457, + "storytime": 44197, + "stos": 19281, + "stou": 37168, + "stour": 37361, + "stour": 21928, + "stout": 16550, + "stove": 21423, + "stow": 44284, + "stow": 17046, + "stowe": 34196, + "stown": 28071, + "stown": 7939, + "stp": 30576, + "stpatrick": 21343, + "stpatricksday": 22747, + "str": 807, + "str": 15913, + "stra": 1894, + "stra": 6253, + "strack": 46861, + "strada": 31134, + "strade": 48968, + "straigh": 31016, + "straight": 22114, + "straight": 4241, + "strain": 16887, + "strains": 38067, + "strait": 22946, + "straits": 41984, + "stral": 23289, + "stralia": 42510, + "stran": 18411, + "strand": 18214, + "strand": 17826, + "stranded": 22975, + "strang": 11138, + "strange": 33380, + "strange": 7288, + "strangely": 37566, + "stranger": 35541, + "stranger": 14149, + "strangers": 20684, + "strangerthings": 43271, + "strangest": 46740, + "strap": 13946, + "strapped": 40922, + "straps": 31213, + "stras": 36814, + "stras": 42125, + "strasbourg": 39576, + "strat": 11345, + "strat": 32925, + "strata": 47278, + "strate": 3532, + "strate": 28758, + "strategi": 49102, + "strategic": 10246, + "strategically": 45706, + "strategies": 9942, + "strategist": 37180, + "strategy": 5637, + "strates": 45724, + "stratford": 23955, + "strath": 21997, + "stration": 3156, + "strato": 28878, + "strauss": 32033, + "strava": 34625, + "stravel": 43494, + "straw": 7430, + "straw": 16438, + "strawberries": 17796, + "strawberry": 10233, + "straws": 33048, + "stray": 30784, + "stray": 15712, + "stre": 1079, + "stre": 19652, + "stread": 27797, + "streak": 11749, + "streaks": 42092, + "stream": 8659, + "stream": 3322, + "streamed": 26280, + "streamer": 25178, + "streamers": 19937, + "streaming": 6278, + "streamline": 44917, + "streams": 13545, + "stree": 35082, + "stree": 32438, + "streep": 38701, + "street": 4839, + "street": 2012, + "streetart": 12948, + "streetcar": 34268, + "streetfood": 44486, + "streetphotography": 20786, + "streets": 6058, + "streetstyle": 39118, + "streetwear": 37298, + "strel": 39685, + "stren": 4349, + "streng": 4472, + "strength": 15475, + "strength": 5959, + "strengthen": 16318, + "strengthened": 47131, + "strengthening": 23475, + "strengthens": 40280, + "strengths": 29268, + "stress": 17297, + "stress": 5843, + "stressed": 16497, + "stresses": 32112, + "stressful": 24268, + "stressing": 35917, + "stret": 12265, + "stretch": 10064, + "stretched": 29393, + "stretches": 32231, + "stretching": 
24423, + "stri": 1493, + "stri": 27795, + "stria": 39620, + "strial": 30217, + "strian": 12924, + "stric": 2607, + "strick": 25181, + "strickland": 48939, + "strict": 21585, + "strictly": 16475, + "stride": 36024, + "strides": 37355, + "stries": 18171, + "strife": 46473, + "strike": 20774, + "strike": 5767, + "striker": 12448, + "strikers": 33465, + "strikes": 9280, + "striking": 13392, + "string": 25512, + "string": 9696, + "strings": 15699, + "strip": 9317, + "stripe": 19368, + "striped": 22192, + "stripes": 14239, + "stripped": 26602, + "stripper": 45759, + "stripping": 48588, + "strips": 19000, + "strive": 22140, + "striving": 37671, + "stro": 3121, + "stro": 6186, + "stroke": 44621, + "stroke": 10403, + "strokes": 26595, + "strol": 30123, + "stroll": 15924, + "stroller": 47076, + "strolling": 40911, + "strom": 14707, + "stron": 4165, + "strong": 10436, + "strong": 2389, + "stronger": 27760, + "stronger": 9245, + "strongertogether": 38532, + "strongest": 16171, + "strongh": 38678, + "strongly": 15507, + "strophy": 47912, + "strou": 48425, + "stroud": 39895, + "strous": 23752, + "stru": 1666, + "struc": 3311, + "struck": 10861, + "struction": 12497, + "structural": 16899, + "structure": 5285, + "structured": 27147, + "structures": 14171, + "structuring": 37496, + "strugg": 5176, + "struggle": 8443, + "struggled": 32921, + "struggles": 17446, + "struggling": 12135, + "struly": 34118, + "strum": 37632, + "strung": 46033, + "strust": 23920, + "strut": 48375, + "stry": 17325, + "stry": 2245, + "sts": 1088, + "stu": 858, + "stu": 23531, + "stuart": 32054, + "stuart": 11723, + "stub": 27066, + "stubborn": 38955, + "stuck": 6596, + "stud": 22368, + "stud": 13319, + "studded": 29153, + "studen": 44156, + "student": 14681, + "student": 2556, + "students": 1712, + "studi": 5691, + "studied": 21369, + "studies": 6426, + "studio": 17798, + "studio": 3155, + "studios": 6231, + "studs": 27571, + "study": 21051, + "study": 3123, + "studyabroad": 45425, + "studying": 8826, + "stuff": 46072, + "stuff": 3487, + "stuffed": 11781, + "stuffing": 31612, + "stuffs": 43455, + "stuk": 32424, + "stumb": 16784, + "stumble": 39045, + "stumbled": 21776, + "stump": 32064, + "stun": 3088, + "stun": 37959, + "stunned": 34034, + "stunner": 29965, + "stunning": 3769, + "stunningly": 47515, + "stuns": 43796, + "stunt": 19905, + "stunts": 40118, + "stupi": 18975, + "stupid": 42600, + "stupid": 8085, + "stupidity": 33766, + "stur": 10676, + "sturdy": 43780, + "stures": 27223, + "sturgeon": 31580, + "sturi": 21747, + "sturridge": 45331, + "stutt": 30444, + "stuttgart": 32219, + "stv": 27060, + "stv": 9708, + "stweet": 46832, + "stweets": 39174, + "stx": 42548, + "sty": 1421, + "sty": 2920, + "style": 12356, + "style": 1844, + "styled": 17974, + "styles": 6948, + "styli": 38577, + "styling": 14597, + "stylish": 10378, + "stylist": 15928, + "styn": 41394, + "su": 605, + "su": 2937, + "sua": 42448, + "suarez": 21437, + "suave": 47305, + "sub": 1783, + "sub": 7765, + "subaru": 21319, + "subjec": 16090, + "subject": 10300, + "subjects": 22099, + "subli": 16350, + "sublime": 22367, + "submarine": 19968, + "submer": 27156, + "submerged": 43171, + "submission": 16571, + "submissions": 21566, + "submit": 10423, + "submitted": 15189, + "submitting": 38788, + "subram": 49207, + "subs": 16398, + "subscri": 5838, + "subscribe": 9839, + "subscribed": 44867, + "subscriber": 36292, + "subscribers": 17337, + "subscription": 17979, + "subscriptions": 47162, + "subsequ": 33598, + "subsequent": 44323, + "subsi": 14856, + "subsidi": 45029, + 
"subsidiary": 45506, + "subsidies": 37685, + "subsidy": 47462, + "substan": 17487, + "substance": 19309, + "substances": 36834, + "substantial": 27171, + "substantially": 47577, + "substitu": 18529, + "substitute": 25340, + "subtitles": 39479, + "subtle": 16536, + "subur": 12517, + "suburb": 37664, + "suburban": 23570, + "suburbs": 25317, + "subway": 12196, + "suc": 1869, + "succe": 7981, + "succeed": 13556, + "succeeded": 41077, + "succes": 39019, + "success": 3695, + "success": 3034, + "successes": 29436, + "successful": 4670, + "successfully": 9934, + "succession": 38491, + "successive": 41319, + "successor": 34774, + "succu": 45253, + "succul": 25671, + "succulent": 35236, + "such": 2046, + "suction": 42786, + "sud": 8067, + "sud": 33714, + "sudan": 31149, + "sudan": 13474, + "sudanese": 42837, + "sudbury": 32488, + "sudden": 10833, + "sudden": 15433, + "suddenly": 11076, + "sue": 14045, + "sue": 6641, + "sued": 22225, + "suede": 21036, + "sues": 17105, + "suf": 21204, + "suf": 22579, + "sufc": 37091, + "suff": 4866, + "suffe": 13510, + "suffer": 13557, + "suffered": 14766, + "suffering": 10140, + "suffers": 22389, + "sufficient": 28410, + "suffol": 13775, + "suffolk": 46408, + "suffolk": 15685, + "suffra": 34596, + "suffrage": 39567, + "sufi": 39756, + "sug": 3189, + "suga": 28757, + "sugar": 12418, + "sugar": 5574, + "sugge": 6345, + "suggest": 13356, + "suggested": 18790, + "suggesti": 15033, + "suggesting": 29792, + "suggestion": 23741, + "suggestions": 16052, + "suggests": 13333, + "suho": 32744, + "sui": 24972, + "suici": 16372, + "suicidal": 37165, + "suicide": 31310, + "suicide": 8247, + "suing": 18309, + "suisse": 35964, + "suit": 11887, + "suit": 3940, + "suitable": 17476, + "suitcase": 27792, + "suite": 9346, + "suited": 25919, + "suites": 21523, + "suits": 9949, + "suk": 24820, + "suk": 6886, + "suka": 44017, + "suke": 25590, + "sukh": 46961, + "suki": 32704, + "sul": 1767, + "sul": 19879, + "sula": 34713, + "sula": 26143, + "sullivan": 14477, + "sully": 37752, + "sulph": 37234, + "sulphur": 47659, + "sultan": 35650, + "sultan": 17049, + "sum": 7054, + "sum": 8257, + "suma": 47938, + "sumat": 32640, + "sumatra": 47346, + "sume": 45457, + "sumi": 41248, + "summ": 1309, + "summar": 34657, + "summari": 31993, + "summary": 13435, + "summed": 34912, + "summer": 5500, + "summer": 1673, + "summers": 18254, + "summerslam": 40264, + "summertime": 19025, + "summit": 30011, + "summit": 3768, + "summon": 27622, + "summon": 39782, + "sumner": 46813, + "sumo": 33734, + "sump": 34252, + "sumptuous": 47354, + "sums": 13325, + "sun": 968, + "sun": 2176, + "sunbathing": 46994, + "sunburn": 45767, + "sund": 40735, + "sundae": 38078, + "sundance": 24128, + "sundar": 44936, + "sunday": 6649, + "sunday": 1706, + "sundayfunday": 21565, + "sundaymorning": 24809, + "sundaymotivation": 46227, + "sundays": 15827, + "sundaywith": 26469, + "sundaywithmarsha": 26662, + "sunder": 15097, + "sunderland": 45727, + "sunderland": 18851, + "sundown": 44438, + "sune": 41096, + "sunflower": 21559, + "sunflowers": 39809, + "sung": 16903, + "sung": 6047, + "sunglasses": 12906, + "suni": 17663, + "suni": 47010, + "sunil": 32861, + "sunite": 21382, + "sunited": 35276, + "sunk": 37534, + "sunken": 43473, + "sunlight": 17996, + "sunni": 44315, + "sunny": 15632, + "sunny": 5438, + "sunrise": 5610, + "suns": 18322, + "sunscreen": 29355, + "sunset": 37880, + "sunset": 3424, + "sunsets": 17721, + "sunshine": 32761, + "sunshine": 5385, + "suny": 41308, + "sup": 19078, + "sup": 8249, + "supdates": 24177, + "super": 1642, + 
"super": 1994, + "superb": 8930, + "superbike": 45709, + "superbowl": 47461, + "superbowl": 16467, + "supercar": 27021, + "supercars": 32185, + "supercell": 43227, + "supercharged": 47479, + "supere": 46831, + "superfood": 41715, + "supergirl": 25771, + "superhero": 14049, + "superheroes": 23334, + "superint": 17615, + "superintendent": 19020, + "superior": 13205, + "superjunior": 40475, + "superleague": 45539, + "superman": 11237, + "supermarket": 19897, + "supermarkets": 45106, + "supermodel": 41963, + "supermoon": 36571, + "supernatural": 15484, + "supernova": 39843, + "superrugby": 48717, + "supersonic": 42019, + "supersport": 46319, + "superst": 38202, + "superstar": 32551, + "superstar": 10472, + "superstars": 25797, + "supervis": 12709, + "supervised": 41316, + "supervision": 36234, + "supervisor": 20366, + "supervisors": 37958, + "superyacht": 42714, + "supp": 1023, + "supper": 15727, + "supple": 31431, + "supplement": 19924, + "supplements": 21265, + "supplied": 24106, + "supplier": 18043, + "suppliers": 24196, + "supplies": 9384, + "supply": 25074, + "supply": 6389, + "supplychain": 31224, + "supplying": 32739, + "suppo": 6941, + "suppor": 2104, + "support": 12062, + "support": 1425, + "supported": 8038, + "supporter": 12992, + "supporters": 7403, + "supportindiefilm": 43976, + "supporting": 3976, + "supportive": 18313, + "supportlocal": 43852, + "supports": 8336, + "supportsmall": 30941, + "supportsmallstreamers": 36097, + "suppose": 18924, + "supposed": 9119, + "supposedly": 32302, + "suppre": 20542, + "suppression": 36508, + "supra": 48485, + "supre": 5875, + "supremac": 28643, + "supremacist": 39005, + "supremacy": 28913, + "supreme": 35222, + "supreme": 7468, + "supt": 23625, + "sur": 1090, + "sur": 7123, + "sura": 33412, + "sura": 49125, + "surabaya": 45227, + "surance": 22184, + "surat": 30201, + "sure": 14320, + "sure": 1650, + "sured": 36869, + "surely": 11409, + "sures": 12725, + "suresh": 32118, + "suresh": 31464, + "sureshpp": 41924, + "sureshpprabhu": 42050, + "surf": 10176, + "surf": 10322, + "surface": 7744, + "surfaces": 20746, + "surfer": 24925, + "surfers": 34842, + "surfing": 15762, + "surg": 13045, + "surge": 17457, + "surgeon": 16039, + "surgeons": 26000, + "surger": 5122, + "surgeries": 34940, + "surgery": 5344, + "surgical": 16386, + "suri": 14130, + "suri": 33952, + "suring": 16817, + "suriya": 17832, + "surpass": 45494, + "surpassed": 25648, + "surplus": 29413, + "surpri": 3244, + "surprise": 5099, + "surprised": 8949, + "surprises": 16920, + "surprising": 14964, + "surprisingly": 17367, + "surreal": 18408, + "surrealism": 41773, + "surrender": 20964, + "surrendered": 44601, + "surrey": 26489, + "surrey": 14315, + "surro": 47499, + "surroun": 8250, + "surround": 26543, + "surround": 22999, + "surrounded": 13589, + "surrounding": 12544, + "surroundings": 26915, + "surrounds": 39012, + "suru": 49240, + "surve": 8952, + "surveill": 15408, + "surveillance": 15578, + "survey": 45914, + "survey": 6809, + "surveying": 33085, + "surveys": 25096, + "survi": 3440, + "surviv": 12922, + "survival": 10172, + "survive": 10431, + "survived": 13483, + "survives": 30927, + "surviving": 18609, + "survivor": 31934, + "survivor": 10944, + "survivors": 13711, + "surya": 37767, + "sus": 8091, + "sus": 3036, + "susa": 20546, + "susan": 19922, + "susan": 10168, + "suscep": 44270, + "sush": 22298, + "sushi": 11729, + "sushmaswar": 48200, + "susie": 32284, + "susp": 7971, + "suspec": 10298, + "suspect": 9065, + "suspected": 15579, + "suspects": 18265, + "suspen": 10578, + 
"suspend": 41007, + "suspended": 13126, + "suspends": 39535, + "suspense": 21556, + "suspension": 15417, + "suspici": 25714, + "suspicion": 34910, + "suspicious": 19862, + "sussex": 31244, + "sussex": 13266, + "sustain": 4644, + "sustain": 28156, + "sustainability": 9635, + "sustainable": 23645, + "sustainable": 7078, + "sustained": 22699, + "sustaining": 44418, + "sut": 23984, + "sut": 28956, + "sutherland": 27592, + "sutton": 39359, + "sutton": 18564, + "suv": 15985, + "suz": 9957, + "suzanne": 24617, + "suzu": 36289, + "suzuki": 16892, + "suzy": 26552, + "sv": 6508, + "sv": 17083, + "svc": 45065, + "sve": 47637, + "sven": 37786, + "sven": 45183, + "sver": 45923, + "sville": 44580, + "sville": 6741, + "svp": 28465, + "svt": 42014, + "svu": 32123, + "sw": 1220, + "sw": 4457, + "swa": 4707, + "swa": 31916, + "swach": 20862, + "swachhb": 31898, + "swachhbharat": 36927, + "swag": 8852, + "swag": 8177, + "swagg": 47702, + "swagger": 35797, + "swain": 43226, + "swal": 13433, + "swallow": 28979, + "swallowed": 46956, + "swallows": 45124, + "swam": 42539, + "swami": 25021, + "swamp": 41953, + "swamp": 16595, + "swamy": 28445, + "swan": 8215, + "swan": 12530, + "swana": 24699, + "swans": 19516, + "swansea": 16567, + "swanson": 34797, + "swap": 15234, + "swapped": 39077, + "swapping": 44702, + "swaps": 49242, + "swar": 11680, + "swarm": 31577, + "swarovski": 28515, + "swat": 32547, + "swat": 26482, + "swatch": 48053, + "sway": 26443, + "sway": 26617, + "swc": 42231, + "swe": 2350, + "swe": 38070, + "swear": 7406, + "swearing": 32627, + "sweat": 10282, + "sweat": 12663, + "sweater": 11455, + "sweaters": 31303, + "sweating": 33215, + "sweats": 39321, + "sweatshirt": 22442, + "sweaty": 28419, + "sweden": 8760, + "swedish": 11585, + "swee": 1812, + "sweek": 30017, + "sweeney": 27286, + "sweep": 23220, + "sweep": 13669, + "sweeping": 25719, + "sweeps": 26887, + "sweepstakes": 25992, + "sweet": 10957, + "sweet": 2418, + "sweetened": 45577, + "sweeter": 32873, + "sweetest": 15180, + "sweethe": 16316, + "sweetheart": 18079, + "sweetie": 24450, + "sweetness": 29713, + "sweets": 18045, + "swel": 48470, + "swell": 35538, + "swell": 21490, + "swelling": 46578, + "swept": 23311, + "swer": 30514, + "swfc": 30227, + "swfl": 46607, + "swi": 3881, + "swi": 45223, + "swick": 17159, + "swif": 28548, + "swift": 34843, + "swift": 8229, + "swild": 33909, + "swild": 38696, + "swildlife": 46818, + "swim": 4928, + "swim": 7681, + "swimmer": 25475, + "swimmers": 27776, + "swimming": 7411, + "swims": 46798, + "swimsuit": 25504, + "swimwear": 31889, + "swin": 14554, + "swin": 40798, + "swindon": 29540, + "swine": 31166, + "swing": 25292, + "swing": 7429, + "swinging": 26760, + "swings": 29141, + "swipe": 31828, + "swire": 42753, + "swirl": 35795, + "swis": 23611, + "swish": 38571, + "swiss": 37917, + "swiss": 9287, + "swit": 3726, + "switch": 22480, + "switch": 5893, + "switched": 22869, + "switches": 33569, + "switching": 21155, + "swith": 17299, + "switzer": 9835, + "switzerland": 9912, + "swivel": 48256, + "swo": 38673, + "swol": 29575, + "swollen": 36129, + "swoo": 29744, + "swood": 24158, + "swoon": 37028, + "swoop": 45661, + "sword": 33294, + "sword": 11356, + "swords": 27181, + "swork": 42722, + "sworld": 33305, + "sworn": 21130, + "sworth": 13322, + "swt": 38878, + "swx": 20597, + "sx": 9402, + "sx": 17806, + "sxsw": 13369, + "sy": 974, + "sy": 2126, + "sya": 35017, + "sycam": 34911, + "sycamore": 43086, + "syd": 4525, + "syd": 22504, + "sydney": 15878, + "sydney": 5278, + "syed": 27624, + "syfy": 32047, + "sykes": 
27287, + "syl": 6452, + "sylla": 41708, + "sylvania": 12011, + "sylve": 28369, + "sylvester": 37214, + "sylvia": 25670, + "sym": 3645, + "sym": 40327, + "symb": 22987, + "symbol": 13085, + "symboli": 22019, + "symbolic": 33177, + "symbolism": 44679, + "symbols": 25476, + "symmetry": 31427, + "symp": 11468, + "sympathi": 47493, + "sympathy": 32477, + "symph": 9544, + "symphonic": 42639, + "symphony": 11180, + "sympo": 9730, + "symposium": 9971, + "symptom": 47799, + "symptoms": 12956, + "syn": 3758, + "syn": 36090, + "synago": 30945, + "synagogue": 33518, + "sync": 20081, + "synchron": 23943, + "syndic": 21098, + "syndicate": 28779, + "syndrome": 10927, + "syner": 22283, + "synergy": 32012, + "syno": 31533, + "synod": 47712, + "synopsis": 47018, + "synth": 33841, + "synth": 24462, + "synthe": 22604, + "synthesi": 33565, + "synthesis": 21602, + "synthesizer": 44077, + "synthetic": 19917, + "syou": 26742, + "syour": 21718, + "syrac": 17279, + "syracuse": 19640, + "syrah": 45364, + "syri": 18917, + "syria": 5563, + "syrian": 47562, + "syrian": 10041, + "syrians": 41392, + "syrup": 16611, + "sys": 26726, + "syste": 1933, + "system": 47813, + "system": 2422, + "systematic": 28586, + "systemic": 33807, + "systems": 4828, + "sz": 13438, + "sz": 15879, + "sze": 44507, + "szn": 48092, + "são": 45911, + "sé": 37879, + "t": 83, + "t": 339, + "ta": 648, + "ta": 1397, + "taa": 43874, + "tab": 2648, + "tab": 14724, + "tabby": 36145, + "tabern": 48991, + "tability": 15770, + "table": 12108, + "table": 2175, + "tableau": 39723, + "tables": 7822, + "tablet": 12494, + "tabletop": 46843, + "tabletop": 25773, + "tablets": 20436, + "tably": 24440, + "taboo": 38400, + "tabs": 29163, + "tac": 3145, + "tac": 22653, + "tache": 39239, + "tack": 6339, + "tack": 34446, + "tackle": 10294, + "tackled": 47218, + "tackles": 18021, + "tackling": 19628, + "taco": 31924, + "taco": 12436, + "tacoma": 25397, + "tacos": 14090, + "tactic": 40377, + "tactical": 17137, + "tactics": 16410, + "tacular": 48985, + "tad": 15890, + "tad": 19860, + "tado": 40846, + "tae": 15257, + "tae": 15580, + "taehyung": 24642, + "taek": 30753, + "taekwondo": 39963, + "taemin": 30600, + "taeyang": 45802, + "taeyeon": 27389, + "taf": 29660, + "taft": 42141, + "tag": 3456, + "tag": 3640, + "tage": 2669, + "tages": 39902, + "tagged": 12969, + "tagging": 25138, + "tagne": 47467, + "tags": 11606, + "tah": 14822, + "tah": 7090, + "tahit": 45385, + "tahoe": 26140, + "tai": 6511, + "tai": 13040, + "taiji": 30185, + "tail": 7156, + "tail": 4132, + "tailed": 20626, + "tailgate": 23168, + "tailgating": 42625, + "tailo": 27230, + "tailor": 29870, + "tailored": 28275, + "tailoring": 46357, + "tails": 16066, + "tain": 2841, + "tain": 1908, + "taine": 21214, + "taine": 32299, + "tained": 10212, + "taining": 7565, + "tainment": 30063, + "tains": 3952, + "tainted": 47211, + "taipei": 24356, + "tair": 29143, + "tairp": 43707, + "tait": 45325, + "taiwan": 36319, + "taiwan": 12626, + "taiwanese": 41416, + "taj": 28937, + "taj": 24805, + "taji": 46358, + "tak": 15070, + "tak": 14458, + "taka": 24070, + "taka": 40968, + "take": 5052, + "take": 1172, + "takeaway": 25737, + "takeaways": 32080, + "takeme": 41748, + "taken": 2807, + "takeoff": 32789, + "takeover": 11863, + "taker": 17939, + "takers": 30775, + "takes": 2633, + "takin": 30890, + "taking": 2019, + "taku": 48168, + "tal": 976, + "tal": 2066, + "tala": 29845, + "talaga": 35349, + "talbot": 30585, + "tale": 33971, + "tale": 7798, + "talent": 30435, + "talent": 5114, + "talented": 5331, + "talents": 16136, + 
"tales": 9469, + "tali": 12122, + "tali": 45406, + "taliban": 20788, + "talis": 36480, + "tality": 15631, + "talk": 12462, + "talk": 1841, + "talked": 10153, + "talkin": 26040, + "talking": 31463, + "talking": 2578, + "talks": 3237, + "tall": 11664, + "tall": 7771, + "talla": 21528, + "tallade": 44220, + "tallahassee": 37832, + "taller": 23470, + "tallest": 19774, + "tallinn": 45079, + "tally": 16323, + "talon": 47897, + "tam": 2661, + "tam": 12246, + "tama": 45424, + "tamanna": 48055, + "tamar": 22901, + "tamara": 35697, + "tame": 38557, + "tame": 32778, + "tamed": 40575, + "tami": 39429, + "tamil": 23046, + "tamil": 14033, + "tamilnadu": 32371, + "tamine": 42566, + "tammy": 28396, + "tampa": 10906, + "tampab": 37852, + "tamu": 34105, + "tan": 2123, + "tan": 5039, + "tana": 21396, + "tand": 20244, + "tandem": 33756, + "tane": 13344, + "tane": 24923, + "taneous": 22275, + "taneously": 24422, + "tang": 10425, + "tang": 20794, + "tanger": 31844, + "tangerine": 42045, + "tangible": 44823, + "tangle": 36568, + "tangled": 33587, + "tango": 24089, + "tani": 31374, + "tani": 32985, + "tania": 45369, + "tank": 29858, + "tank": 6172, + "tanker": 25020, + "tanks": 14223, + "tann": 19174, + "tanner": 22001, + "tanning": 27985, + "tans": 27332, + "tant": 41383, + "tant": 41695, + "tante": 48262, + "tanto": 45685, + "tany": 34410, + "tanya": 26800, + "tanz": 47399, + "tanzania": 15711, + "tao": 29084, + "tao": 18923, + "tap": 17923, + "tap": 7888, + "tapas": 27361, + "tape": 18332, + "tape": 5749, + "taped": 33219, + "tapes": 17903, + "tapestry": 33525, + "taping": 24355, + "tapp": 27644, + "tapp": 27764, + "tapped": 26649, + "tapping": 27882, + "tapro": 34415, + "taproom": 40266, + "taps": 23267, + "tar": 2002, + "tar": 6977, + "tara": 15264, + "tarak": 37813, + "taran": 32370, + "tarantino": 41180, + "tarde": 48670, + "tardis": 35410, + "tares": 34587, + "targe": 9620, + "target": 38556, + "target": 5400, + "targeted": 14968, + "targeting": 15818, + "targets": 12468, + "tari": 4238, + "tari": 38012, + "tarian": 11762, + "tarians": 42789, + "taries": 47291, + "tariff": 40220, + "tariffs": 28335, + "tariq": 42526, + "tarmac": 44294, + "taro": 26264, + "tarot": 23702, + "tart": 16707, + "tart": 14120, + "tartan": 35064, + "tarts": 29799, + "tary": 31729, + "tary": 5065, + "tarzan": 45463, + "tas": 6538, + "tas": 10163, + "tash": 35272, + "tasha": 44967, + "task": 39189, + "task": 10549, + "tasks": 19453, + "tasmania": 22429, + "tasmanian": 45102, + "tassel": 49276, + "tast": 10839, + "taste": 14314, + "taste": 5219, + "tasted": 22827, + "tasteof": 38097, + "taster": 29743, + "tastes": 13736, + "tastic": 21337, + "tasting": 7656, + "tastings": 49273, + "tasty": 43390, + "tasty": 8568, + "tat": 2652, + "tat": 21592, + "tata": 19300, + "tate": 44476, + "tate": 13295, + "tath": 27566, + "tati": 31433, + "tatiana": 48837, + "tation": 5280, + "tations": 32324, + "tator": 18791, + "tators": 37206, + "tats": 44557, + "tatt": 9232, + "tatted": 41605, + "tattoo": 15980, + "tattoo": 6325, + "tattooed": 28541, + "tattoos": 14900, + "tatum": 26103, + "tau": 6620, + "tau": 20510, + "taught": 9306, + "taun": 23910, + "taunton": 40681, + "taurus": 32881, + "taver": 37776, + "tavern": 18644, + "taw": 33868, + "taw": 40289, + "tawa": 29035, + "tawards": 14351, + "tax": 4581, + "tax": 3879, + "taxation": 36847, + "taxes": 11462, + "taxi": 25160, + "taxi": 11380, + "taxider": 47420, + "taxis": 34009, + "taxpay": 17986, + "taxpayer": 30978, + "taxpayers": 25503, + "tay": 6542, + "tay": 15073, + "taya": 38484, + "tayl": 
3913, + "taylor": 9044, + "taylor": 3961, + "taylorswift": 18936, + "tayo": 33941, + "taz": 41475, + "taz": 31870, + "tb": 1990, + "tb": 7490, + "tba": 34363, + "tball": 8390, + "tball": 1467, + "tbc": 31807, + "tbd": 45548, + "tbh": 13238, + "tbi": 45868, + "tbl": 42962, + "tbli": 43664, + "tblightning": 44178, + "tbo": 34255, + "tbr": 46643, + "tbs": 37368, + "tbt": 2950, + "tc": 6820, + "tc": 5454, + "tca": 35116, + "tch": 10744, + "tch": 4048, + "tches": 42001, + "tcm": 21501, + "tcm": 26588, + "tcmparty": 24338, + "tcot": 8995, + "tcs": 39107, + "tcu": 26791, + "td": 20578, + "td": 3192, + "tdf": 21844, + "tdi": 45621, + "tdp": 47009, + "tds": 20238, + "tdsb": 29836, + "te": 600, + "te": 756, + "tea": 41053, + "tea": 3274, + "teach": 2043, + "teach": 6865, + "teacher": 18051, + "teacher": 4008, + "teachers": 5069, + "teaches": 17110, + "teaching": 5141, + "teachings": 32119, + "teal": 22821, + "team": 2085, + "team": 1027, + "teamcanada": 46636, + "teamed": 20590, + "teamgb": 40971, + "teaming": 24392, + "teammate": 17900, + "teammates": 13921, + "teams": 3891, + "teamsisd": 34703, + "teamusa": 28625, + "teamwork": 14657, + "teaparty": 33065, + "teapo": 35745, + "teapot": 40749, + "tear": 15802, + "tear": 11862, + "tearful": 46873, + "tearing": 24785, + "tears": 7688, + "teas": 23003, + "teas": 29314, + "tease": 25163, + "teased": 49122, + "teaser": 8982, + "teasers": 48990, + "teases": 28509, + "teasing": 36507, + "teat": 26376, + "teatime": 48948, + "teatro": 35756, + "teau": 24931, + "tebow": 37797, + "tec": 17381, + "tec": 11612, + "tech": 1782, + "tech": 2061, + "techcrunch": 42110, + "techn": 6252, + "technews": 31787, + "technic": 16639, + "technic": 37666, + "technical": 49231, + "technical": 7582, + "technically": 23180, + "technician": 22540, + "technicians": 35513, + "techno": 2599, + "techno": 17564, + "technological": 23068, + "technologies": 10040, + "technology": 3089, + "techs": 41353, + "ted": 4841, + "ted": 775, + "tedcruz": 27517, + "teddy": 25758, + "teddy": 11798, + "tedly": 8539, + "tedu": 42517, + "tedx": 17950, + "tedx": 41504, + "tee": 12676, + "tee": 3385, + "teed": 13692, + "teen": 5398, + "teen": 4697, + "teenage": 14069, + "teenager": 19338, + "teenagers": 25989, + "teenchoice": 28203, + "teens": 12375, + "teenth": 20249, + "teenwolf": 40067, + "teeny": 41622, + "teer": 48648, + "tees": 9641, + "teessi": 43295, + "teeth": 8225, + "tega": 29508, + "tegr": 39801, + "teh": 18720, + "teh": 29601, + "tehran": 26399, + "tein": 33223, + "tej": 46724, + "tek": 17489, + "tek": 18294, + "tekken": 29843, + "tel": 4978, + "tel": 2226, + "telang": 23469, + "telangana": 26386, + "tele": 3103, + "tele": 32851, + "telecom": 21057, + "telecommunications": 39900, + "telegram": 26780, + "telegraph": 14713, + "telephone": 17243, + "telescope": 19037, + "telethon": 49266, + "televised": 39470, + "television": 8608, + "telford": 38323, + "tell": 16069, + "tell": 2330, + "teller": 20415, + "tellers": 42707, + "telling": 5507, + "tells": 5217, + "tellu": 42511, + "telly": 31475, + "tels": 43607, + "telugu": 22927, + "tely": 5630, + "tem": 2404, + "tem": 17536, + "tema": 45881, + "teme": 43378, + "temp": 2684, + "temp": 11097, + "tempe": 36723, + "temper": 5981, + "temper": 35521, + "temperature": 9543, + "temperatures": 11575, + "tempered": 40521, + "tempest": 36053, + "templ": 16679, + "template": 18591, + "templates": 30498, + "temple": 21841, + "temple": 5620, + "temples": 24024, + "tempo": 19625, + "tempor": 4858, + "temporal": 43656, + "temporarily": 23189, + "temporary": 
6513, + "temps": 11668, + "tempt": 28460, + "temptation": 30118, + "tempted": 26226, + "tempting": 34876, + "ten": 1149, + "ten": 2581, + "tenant": 16954, + "tenants": 26023, + "tenay": 45384, + "tenberg": 31329, + "tend": 17630, + "tend": 21252, + "tendency": 47277, + "tender": 23020, + "tender": 9838, + "tenderloin": 42750, + "tenders": 44741, + "tending": 35084, + "tendon": 48459, + "tends": 39962, + "tene": 24868, + "tened": 13682, + "tener": 29054, + "teneri": 28000, + "tenerife": 29401, + "teners": 41307, + "teness": 18018, + "teng": 34016, + "teng": 28474, + "tennant": 29310, + "tennes": 9514, + "tennessee": 10053, + "tennis": 31504, + "tennis": 5298, + "tenor": 30521, + "tens": 14062, + "tense": 23518, + "tension": 15221, + "tensions": 24224, + "tenstein": 49139, + "tent": 18505, + "tent": 10782, + "tentative": 48238, + "tenth": 27483, + "tention": 12191, + "tents": 30730, + "tenure": 30739, + "teo": 18665, + "tep": 31806, + "tequ": 17502, + "tequila": 18510, + "ter": 704, + "ter": 652, + "tera": 15155, + "teras": 44830, + "tere": 11329, + "tered": 49272, + "tered": 4389, + "terence": 33806, + "teresa": 19081, + "teri": 30917, + "teria": 22685, + "terie": 42276, + "tering": 7929, + "term": 40991, + "term": 4780, + "termin": 4766, + "terminal": 11816, + "terminals": 44091, + "terminator": 29609, + "terminology": 48896, + "terms": 8663, + "tern": 41572, + "tern": 12959, + "terns": 25251, + "tero": 20727, + "tero": 24697, + "terps": 41471, + "terr": 3921, + "terra": 22366, + "terra": 18816, + "terrac": 28549, + "terrace": 13820, + "terraces": 47508, + "terracotta": 45123, + "terrain": 20184, + "terran": 43726, + "terre": 33888, + "terre": 27537, + "terrell": 39494, + "terrence": 38746, + "terrestrial": 46299, + "terri": 4504, + "terri": 36722, + "terrible": 9741, + "terribly": 34558, + "terrier": 14455, + "terriers": 47047, + "terrific": 13837, + "terrified": 28204, + "terrifying": 18526, + "territ": 10720, + "territorial": 39163, + "territories": 32846, + "territory": 13936, + "terror": 9596, + "terror": 9327, + "terrori": 6836, + "terrorism": 10583, + "terrorist": 10575, + "terrorists": 12835, + "terry": 19378, + "terry": 8561, + "ters": 24102, + "ters": 1737, + "terti": 48386, + "tery": 4184, + "tes": 8019, + "tes": 3609, + "tesco": 15434, + "tese": 33320, + "tesla": 12254, + "tess": 21807, + "tess": 20840, + "tessa": 32063, + "test": 7738, + "test": 1628, + "testam": 23477, + "testament": 24609, + "tested": 10576, + "tester": 32707, + "testi": 18373, + "testic": 42364, + "testify": 33088, + "testifying": 46347, + "testim": 12553, + "testimonial": 28834, + "testimony": 18672, + "testing": 4967, + "testo": 42428, + "testosterone": 45168, + "tests": 8715, + "tet": 40468, + "tet": 13275, + "tetra": 40902, + "tetris": 45934, + "teu": 47152, + "teuk": 39979, + "teur": 27120, + "tex": 2056, + "tex": 11728, + "texan": 35287, + "texan": 38386, + "texans": 17580, + "texanscheer": 43717, + "texas": 15713, + "texas": 3403, + "texaste": 46469, + "text": 18169, + "text": 4160, + "textbook": 25952, + "textbooks": 44041, + "texted": 29004, + "textile": 19789, + "textiles": 24326, + "texting": 18600, + "texts": 12767, + "texture": 16505, + "textured": 32168, + "textures": 28063, + "tey": 32395, + "tez": 22664, + "tf": 18828, + "tf": 5001, + "tfc": 30186, + "tfl": 29918, + "tford": 22493, + "tful": 17108, + "tfw": 16741, + "tg": 7665, + "tg": 11981, + "tgif": 14483, + "th": 513, + "th": 640, + "tha": 18470, + "tha": 4715, + "thab": 38219, + "thad": 48339, + "thai": 28054, + "thai": 8825, + "thail": 
7258, + "thailand": 7469, + "thak": 22801, + "thakur": 38427, + "thal": 7967, + "thal": 12323, + "thala": 17784, + "thalai": 25206, + "thalaivar": 44918, + "thalap": 39789, + "thalapathy": 45405, + "thalapathy": 23324, + "thall": 36007, + "tham": 11761, + "tham": 8896, + "thames": 43472, + "thames": 15321, + "than": 792, + "than": 1126, + "thand": 44465, + "thane": 21463, + "thang": 24870, + "thani": 31322, + "thank": 2790, + "thank": 1144, + "thanked": 32079, + "thankful": 38839, + "thankful": 6217, + "thankfully": 22089, + "thanking": 21989, + "thanks": 5672, + "thanks": 1085, + "thanksgiving": 45732, + "thanksgiving": 6167, + "thanku": 45710, + "thankyou": 18050, + "thankyou": 9911, + "thanniversary": 35564, + "thanos": 36709, + "thanx": 25095, + "thar": 14396, + "thar": 38843, + "thard": 43474, + "that": 6303, + "that": 682, + "thatcher": 32496, + "thats": 44636, + "thats": 9254, + "thaw": 26081, + "thaw": 47229, + "thbewithyou": 41067, + "thc": 20091, + "thcentury": 49111, + "thd": 28219, + "thday": 37801, + "the": 599, + "the": 518, + "thea": 15935, + "thea": 25429, + "thead": 25259, + "theal": 45728, + "thealth": 31398, + "thear": 43283, + "theart": 44678, + "theast": 8378, + "theastern": 17877, + "theat": 2263, + "theater": 39438, + "theater": 6128, + "theaters": 14689, + "theatre": 19857, + "theatre": 3292, + "theatres": 21680, + "theatrical": 26833, + "theband": 27695, + "thebeatles": 35645, + "thebest": 40883, + "thebest": 25856, + "thebig": 24732, + "theblack": 47718, + "thec": 48659, + "thed": 31405, + "thedaily": 33550, + "theday": 4408, + "thedream": 39417, + "thee": 44475, + "thee": 15108, + "theeconomist": 44518, + "theellenshow": 35342, + "thefilm": 31665, + "theflash": 25434, + "theforce": 40002, + "theforceawakens": 48033, + "theft": 13286, + "thefuture": 34287, + "thegame": 24428, + "thegood": 28594, + "thegreat": 28721, + "thei": 44522, + "their": 911, + "theirs": 29297, + "thel": 5403, + "thelast": 23495, + "thelastjedi": 47992, + "theless": 27712, + "theli": 15277, + "thelittle": 46872, + "thelo": 47036, + "thelove": 40668, + "thelove": 43200, + "them": 5435, + "them": 1180, + "themasters": 48378, + "theme": 38524, + "theme": 5849, + "themed": 10126, + "themes": 17849, + "themet": 48183, + "themovie": 27062, + "themselves": 6503, + "then": 5929, + "then": 1594, + "thenburg": 45209, + "thene": 17012, + "thenew": 24212, + "thenext": 47881, + "thenight": 43336, + "theno": 37172, + "thenorth": 34338, + "theo": 17043, + "theo": 18084, + "theod": 26653, + "theodore": 30743, + "theological": 41162, + "theology": 24095, + "theon": 34653, + "theone": 46231, + "theopen": 41438, + "theore": 22690, + "theoretical": 35585, + "theori": 34804, + "theories": 23937, + "theory": 7143, + "thepeople": 33597, + "thepersonal": 29981, + "thepersonalnetwork": 30016, + "thephoto": 18303, + "thephotohour": 18607, + "ther": 1160, + "ther": 743, + "therap": 4499, + "therapeu": 19332, + "therapeutic": 23240, + "therapeutics": 49101, + "therapies": 30179, + "therapist": 20608, + "therapists": 34763, + "therapper": 49340, + "therapy": 5257, + "there": 5283, + "there": 997, + "thereal": 8074, + "thereal": 41140, + "thereby": 43308, + "thered": 10208, + "therefore": 16865, + "theres": 18494, + "theresa": 14126, + "therese": 47996, + "theresistance": 22845, + "theri": 28967, + "theri": 45297, + "therine": 26807, + "therine": 9239, + "thering": 7891, + "therland": 25351, + "thermal": 13689, + "thermo": 22303, + "thermom": 31138, + "thermometer": 38172, + "thermost": 42391, + "thern": 10919, + 
"thern": 3137, + "thero": 13165, + "theroad": 29807, + "therock": 30036, + "theroy": 38146, + "thers": 1959, + "thes": 40556, + "thes": 6460, + "thescript": 47061, + "these": 40366, + "these": 1071, + "theses": 39388, + "thesimpsons": 45513, + "thesims": 34192, + "thesis": 10673, + "thessal": 41491, + "thessaloni": 41753, + "thest": 35343, + "thesun": 45617, + "theta": 27694, + "thetic": 7954, + "thetimes": 36039, + "thevamp": 33701, + "thevoice": 47206, + "thevoice": 30258, + "thewalkingdead": 18087, + "thewanted": 43008, + "theworld": 44988, + "theworld": 17475, + "thex": 35990, + "they": 15174, + "they": 889, + "theyre": 28266, + "thfc": 17729, + "thi": 2362, + "thi": 9111, + "thia": 17943, + "thiago": 44537, + "thian": 23214, + "thians": 28187, + "thibau": 48351, + "thic": 26107, + "thic": 11794, + "thick": 18417, + "thick": 11006, + "thicker": 43302, + "thickness": 40754, + "thief": 18508, + "thier": 25595, + "thierry": 32929, + "thieves": 17899, + "thigh": 47124, + "thigh": 22877, + "thighs": 30847, + "thik": 20512, + "thika": 44619, + "thill": 31266, + "thim": 42331, + "thin": 2178, + "thin": 7847, + "thine": 47192, + "thing": 7499, + "thing": 946, + "things": 30670, + "things": 1739, + "thingsto": 43924, + "thingy": 36888, + "think": 9820, + "think": 1331, + "thinkbig": 26015, + "thinkbigsundaywithmarsha": 26666, + "thinker": 34577, + "thinkers": 32779, + "thinkin": 34443, + "thinking": 3291, + "thinks": 6109, + "thinner": 47247, + "thir": 6030, + "third": 32102, + "third": 3981, + "thirds": 42582, + "thirst": 23563, + "thirsty": 39731, + "thirsty": 17521, + "thirteen": 34209, + "thirty": 20813, + "thiru": 43292, + "this": 4340, + "this": 589, + "thisday": 6532, + "thisdayin": 33641, + "thisdayinhistory": 46913, + "thisi": 7299, + "thisis": 14887, + "thismorning": 36245, + "thistle": 29039, + "thistory": 28904, + "thium": 21804, + "thletics": 17765, + "thm": 10407, + "thman": 30079, + "thms": 19874, + "thn": 44155, + "thn": 45587, + "thnx": 25480, + "tho": 1325, + "tho": 5025, + "thof": 18943, + "thofjuly": 21613, + "thol": 29319, + "thole": 31029, + "tholes": 42465, + "thology": 9881, + "thom": 2585, + "thom": 24094, + "thomas": 12574, + "thomas": 3888, + "thome": 21289, + "thomp": 37274, + "thompson": 42181, + "thompson": 8535, + "thomson": 24151, + "thon": 38776, + "thon": 8924, + "thong": 37058, + "thood": 15623, + "thor": 4130, + "thor": 13691, + "thora": 46866, + "thorn": 12957, + "thorn": 18466, + "thorne": 18025, + "thorns": 33650, + "thornton": 23592, + "thorough": 15294, + "thorough": 34788, + "thoroughbred": 43248, + "thoroughly": 19750, + "thorpe": 18099, + "thos": 41965, + "those": 1753, + "thot": 33736, + "thou": 1513, + "thou": 17781, + "though": 2846, + "thought": 23948, + "thought": 2449, + "thoughtful": 19592, + "thoughts": 3618, + "thour": 27125, + "thousand": 9344, + "thousands": 7089, + "thouse": 40318, + "thouse": 7819, + "thoven": 23078, + "thr": 1111, + "thr": 19138, + "thra": 17761, + "thra": 32797, + "thrash": 38262, + "thre": 1607, + "thread": 31108, + "thread": 8815, + "threads": 24957, + "threat": 7527, + "threat": 7212, + "threaten": 26097, + "threatened": 16391, + "threatening": 16400, + "threatens": 20555, + "threats": 12766, + "three": 21615, + "three": 2097, + "thren": 41776, + "thresh": 29779, + "threshold": 33791, + "threw": 12746, + "thri": 8713, + "thrift": 27779, + "thrill": 21023, + "thrilled": 7879, + "thriller": 9653, + "thrilling": 20101, + "thrills": 39829, + "thrive": 17669, + "thriving": 22677, + "thro": 2101, + "thro": 28624, + 
"throat": 16371, + "thrombo": 47585, + "throne": 15999, + "thrones": 8072, + "throp": 34939, + "throttle": 37139, + "through": 6091, + "through": 1417, + "throughout": 6721, + "throughs": 48278, + "throw": 3315, + "throw": 6293, + "throwback": 6001, + "throwback": 5058, + "throwbackthursday": 6326, + "thrower": 40199, + "throwing": 9734, + "thrown": 15079, + "throws": 14723, + "thru": 23856, + "thru": 6162, + "thrush": 46133, + "thrust": 40202, + "ths": 2079, + "tht": 23554, + "thu": 3837, + "thu": 14153, + "thub": 25660, + "thug": 37212, + "thug": 18137, + "thugs": 27686, + "thul": 28368, + "thulhu": 37560, + "thum": 14679, + "thumb": 19514, + "thumb": 18674, + "thumbnail": 32365, + "thumbs": 17599, + "thun": 32267, + "thunder": 6161, + "thunder": 8951, + "thunderbird": 45131, + "thunderbirds": 44286, + "thunderbolt": 43596, + "thunderstorm": 12005, + "thunderstorms": 19525, + "thunt": 46763, + "thur": 1837, + "thur": 21704, + "thurman": 41291, + "thurs": 9908, + "thursday": 11218, + "thursday": 2221, + "thursdaymotivation": 39375, + "thursdays": 21444, + "thursdaythoughts": 14866, + "thurst": 33970, + "thus": 12457, + "thusi": 9488, + "thwaite": 48469, + "thweeksary": 30871, + "thx": 5913, + "thy": 7804, + "thy": 3362, + "thyme": 29805, + "thyro": 25174, + "thyroid": 32558, + "ti": 555, + "ti": 2605, + "tia": 6709, + "tial": 2826, + "tially": 14503, + "tian": 23011, + "tian": 8125, + "tians": 35182, + "tiara": 38322, + "tib": 47868, + "tibet": 19927, + "tibet": 22234, + "tibetan": 24057, + "tible": 11453, + "tic": 890, + "tic": 1550, + "tica": 9669, + "tical": 34191, + "tical": 4342, + "tically": 13375, + "ticals": 30861, + "tice": 3122, + "tich": 48769, + "tician": 43358, + "ticism": 26491, + "tick": 24640, + "tick": 15617, + "ticket": 25740, + "ticket": 4500, + "ticketing": 44432, + "tickets": 2015, + "ticking": 35842, + "tickle": 42999, + "ticks": 40269, + "tico": 17670, + "ticon": 45996, + "tics": 2419, + "ticul": 15538, + "ticus": 44277, + "tid": 26002, + "tid": 23727, + "tidal": 21949, + "tide": 15698, + "tide": 9105, + "tides": 25524, + "tidy": 23858, + "tie": 14072, + "tie": 3422, + "tied": 9889, + "tiem": 34762, + "tien": 47538, + "tiene": 43438, + "tier": 14390, + "tier": 6598, + "tierney": 45693, + "tiers": 24604, + "ties": 25556, + "ties": 2499, + "tiest": 18300, + "tiesto": 46367, + "tif": 23216, + "tiff": 11112, + "tiff": 20699, + "tiffany": 30467, + "tiffany": 14446, + "tification": 43923, + "tified": 40854, + "tiful": 29123, + "tify": 6677, + "tig": 31999, + "tiger": 11954, + "tiger": 6531, + "tigers": 6934, + "tigh": 31365, + "tight": 25763, + "tight": 9123, + "tighten": 46653, + "tighter": 48193, + "tightly": 37568, + "tights": 29581, + "tijuana": 45273, + "tik": 24986, + "tik": 32403, + "tiki": 30107, + "til": 6124, + "til": 1763, + "tile": 26217, + "tile": 8227, + "tiles": 10607, + "tility": 38180, + "till": 17462, + "till": 4267, + "tilla": 26063, + "tillerson": 47738, + "tilly": 41199, + "tilt": 23601, + "tim": 1292, + "tim": 3863, + "timate": 4754, + "timb": 26627, + "timber": 14441, + "timber": 16246, + "timberlake": 28274, + "timbers": 39911, + "timberwolves": 41190, + "time": 3764, + "time": 788, + "timed": 32727, + "timehop": 19944, + "timel": 23549, + "timelapse": 48154, + "timeless": 15558, + "timeline": 11492, + "timely": 19250, + "timeout": 41536, + "timer": 19725, + "timers": 44574, + "times": 26445, + "times": 1661, + "timesnow": 45487, + "timesof": 32522, + "timesofindia": 44182, + "timetable": 31971, + "timeto": 29187, + "timing": 13624, + "timm": 
22444, + "timmy": 33252, + "timo": 13390, + "timo": 33777, + "timothy": 42087, + "timothy": 18560, + "timp": 42166, + "tin": 1310, + "tin": 5420, + "tina": 9257, + "tinder": 24287, + "tine": 22341, + "ting": 7451, + "ting": 694, + "tinged": 44829, + "tings": 35332, + "tini": 26839, + "tink": 39278, + "tinker": 45272, + "tinker": 40910, + "tino": 20538, + "tins": 37359, + "tint": 40497, + "tinted": 42618, + "tiny": 21716, + "tiny": 5591, + "tio": 27562, + "tion": 2274, + "tion": 740, + "tional": 22460, + "tional": 2986, + "tionality": 24514, + "tionally": 12409, + "tionary": 8381, + "tione": 44318, + "tioned": 9083, + "tioning": 15528, + "tionist": 25732, + "tions": 1371, + "tious": 14255, + "tip": 15383, + "tip": 4623, + "tipoff": 44521, + "tipp": 32294, + "tipped": 31878, + "tipper": 38095, + "tipperary": 45612, + "tipping": 27827, + "tips": 3173, + "tipton": 48809, + "tiptuesday": 42112, + "tique": 37772, + "tir": 25467, + "tir": 38462, + "tire": 29128, + "tire": 9362, + "tired": 6533, + "tireless": 39835, + "tirelessly": 41548, + "tires": 15533, + "tiring": 42630, + "tiru": 36033, + "tis": 7839, + "tis": 7394, + "tise": 13745, + "tisgarh": 40538, + "tish": 45148, + "tish": 28784, + "tism": 27113, + "tiss": 28155, + "tissue": 15368, + "tissues": 32172, + "tist": 7902, + "tista": 25580, + "tists": 25944, + "tit": 1991, + "tit": 13202, + "tita": 40936, + "titan": 13496, + "titan": 15516, + "titanic": 20729, + "titanium": 24409, + "titans": 13066, + "titi": 17434, + "titi": 48504, + "title": 28033, + "title": 3644, + "titled": 9939, + "titles": 9780, + "tito": 26838, + "titus": 36102, + "tium": 21975, + "tiv": 1835, + "tiva": 41886, + "tive": 14640, + "tive": 1420, + "tively": 9883, + "tiveness": 20955, + "tives": 7570, + "tivity": 9859, + "tivo": 32162, + "tix": 5835, + "tiz": 19376, + "tj": 18890, + "tj": 18988, + "tk": 22344, + "tk": 20676, + "tko": 37347, + "tks": 38739, + "tl": 14325, + "tl": 8190, + "tland": 30697, + "tlap": 41976, + "tlc": 22047, + "tle": 39141, + "tle": 5825, + "tles": 39363, + "tless": 17427, + "tlot": 41080, + "tls": 47367, + "tly": 37483, + "tly": 1646, + "tm": 9430, + "tm": 7789, + "tman": 20796, + "tmc": 35263, + "tment": 26485, + "tml": 39445, + "tmltalk": 42260, + "tmnt": 32444, + "tmobile": 34901, + "tmr": 35906, + "tmrw": 16496, + "tms": 44496, + "tmund": 23801, + "tmw": 45827, + "tmz": 37248, + "tn": 3827, + "tn": 7248, + "tna": 21150, + "tnam": 8079, + "tner": 34922, + "tness": 35212, + "tney": 9523, + "tng": 35898, + "tnt": 20659, + "tnx": 38220, + "to": 580, + "to": 531, + "toa": 17916, + "toad": 26096, + "toast": 24654, + "toast": 10920, + "toasted": 23533, + "toaster": 39061, + "toasty": 44726, + "tob": 24260, + "tobac": 12611, + "tobacco": 13905, + "tobago": 39482, + "tobe": 17534, + "tobe": 28740, + "tober": 18162, + "tober": 2925, + "toberfest": 26249, + "tobi": 40335, + "tobi": 48374, + "tobias": 32464, + "tobin": 42466, + "toby": 29659, + "toby": 18333, + "toc": 41907, + "toc": 30643, + "tock": 25274, + "tod": 38239, + "tod": 33568, + "toda": 47141, + "todas": 36150, + "today": 11800, + "today": 721, + "todayin": 32957, + "todays": 13513, + "todayshow": 29739, + "todd": 10398, + "todd": 9951, + "toddler": 17772, + "toddlers": 36719, + "toddy": 38926, + "todo": 48857, + "todo": 23087, + "todos": 33355, + "toe": 47756, + "toe": 11344, + "toes": 16511, + "tof": 6659, + "toff": 27319, + "toffee": 34880, + "tofficial": 47953, + "tofthe": 23678, + "toftheday": 20566, + "tofu": 24692, + "tog": 45715, + "toge": 1903, + "together": 17858, + "together": 
1952, + "togo": 26729, + "tography": 33968, + "toh": 26851, + "toi": 7472, + "toi": 26941, + "toid": 49124, + "toile": 43148, + "toilet": 11071, + "toilets": 24027, + "toire": 39534, + "tok": 16690, + "tok": 27010, + "token": 32634, + "token": 17134, + "tokens": 23562, + "tokyo": 35038, + "tokyo": 6667, + "tol": 4678, + "tol": 32962, + "told": 3527, + "tole": 15677, + "toledo": 19812, + "toler": 12150, + "tolerance": 20377, + "tolerant": 38536, + "tolerate": 35556, + "tolkien": 32989, + "toll": 44090, + "toll": 14155, + "tollywood": 42016, + "tology": 34799, + "tom": 999, + "tom": 2435, + "toma": 42360, + "toma": 44710, + "tomas": 35944, + "tomas": 27178, + "tomat": 12041, + "tomato": 9867, + "tomatoes": 13004, + "tomb": 37187, + "tomb": 15582, + "tombs": 48613, + "tombstone": 45729, + "tome": 24137, + "tome": 24283, + "tomi": 46290, + "tomlin": 46649, + "tomlinson": 17484, + "tommorow": 42871, + "tommy": 16573, + "tommy": 8876, + "tomo": 31223, + "tomo": 34434, + "tomor": 1277, + "tomorrow": 19728, + "tomorrow": 1293, + "tomorrowland": 34951, + "tomorrows": 32258, + "tomorrowspaper": 35005, + "tomorrowspaperstoday": 35190, + "tomp": 43544, + "tompkins": 49068, + "toms": 10545, + "tomy": 18730, + "ton": 838, + "ton": 917, + "tona": 13459, + "tone": 32366, + "tone": 8408, + "toned": 29426, + "toner": 40614, + "tones": 14744, + "tong": 21510, + "tonga": 37882, + "tongue": 44820, + "tongue": 13626, + "tongues": 39837, + "toni": 17766, + "toni": 17171, + "tonic": 17808, + "tonics": 34647, + "tonight": 1009, + "tonights": 23312, + "tonite": 13449, + "tonka": 42781, + "tonline": 45867, + "tonne": 42450, + "tonnes": 24813, + "tons": 7555, + "tony": 9150, + "tony": 4767, + "tonyawards": 46068, + "too": 1843, + "too": 1256, + "took": 2280, + "tool": 13718, + "tool": 5999, + "toolbox": 46599, + "toolkit": 29849, + "tools": 5771, + "toom": 27550, + "toon": 24664, + "toon": 19701, + "toonami": 48336, + "toons": 35345, + "toor": 42590, + "tooth": 15316, + "tooth": 12030, + "toothbrush": 36841, + "toothpaste": 37322, + "tooting": 42969, + "top": 5534, + "top": 1253, + "topaz": 46125, + "tope": 32149, + "tope": 42239, + "topeka": 46884, + "topia": 29618, + "topic": 8720, + "topical": 37464, + "topics": 11916, + "topless": 37415, + "topo": 23008, + "topoli": 30152, + "topp": 19529, + "topped": 12588, + "topper": 31780, + "toppers": 41651, + "topping": 21071, + "toppings": 47554, + "topps": 20201, + "tops": 8154, + "topshop": 40953, + "topus": 21495, + "tor": 937, + "tor": 1208, + "tora": 45147, + "torah": 37945, + "toral": 45282, + "torch": 31921, + "torch": 15820, + "tore": 38066, + "tore": 19385, + "tored": 38046, + "torg": 33214, + "tori": 17689, + "tori": 17539, + "toria": 23732, + "torial": 28029, + "torian": 48399, + "tories": 14193, + "torino": 29178, + "torio": 34235, + "torn": 8572, + "torn": 18023, + "tornad": 24676, + "tornado": 9062, + "tornadoes": 28254, + "toro": 17892, + "toron": 37407, + "toronto": 16866, + "toronto": 4514, + "torpe": 34093, + "torpedo": 46582, + "torquay": 45738, + "torque": 31940, + "torre": 39563, + "torre": 38009, + "torrent": 42317, + "torrential": 41158, + "torres": 16049, + "tors": 2546, + "tortilla": 32683, + "torto": 24170, + "tortoise": 30178, + "torture": 16013, + "tortured": 29900, + "tory": 29390, + "tory": 4214, + "tos": 6094, + "tosc": 37719, + "tose": 38154, + "tosh": 17109, + "toshi": 31744, + "toss": 19656, + "tossed": 31296, + "tot": 4618, + "tot": 23659, + "total": 13507, + "total": 4445, + "totally": 5440, + "totals": 25772, + "tote": 48145, + "tote": 
19031, + "totem": 45376, + "totes": 37199, + "tothe": 12222, + "toto": 39823, + "tots": 24978, + "totten": 14360, + "tottenham": 14889, + "tou": 1879, + "tou": 29261, + "touch": 9480, + "touch": 4526, + "touchdown": 18664, + "touchdowns": 37905, + "touched": 13190, + "touches": 14832, + "touching": 14088, + "touchscreen": 39095, + "tough": 12063, + "tough": 5499, + "tougher": 33722, + "toughest": 23773, + "toughness": 45522, + "toulou": 27145, + "toulouse": 30267, + "tour": 2710, + "tour": 1760, + "tourde": 39247, + "toured": 27654, + "touri": 4224, + "touring": 11853, + "tourism": 23661, + "tourism": 6556, + "tourist": 12123, + "tourists": 15546, + "tournament": 4097, + "tournaments": 23058, + "tourney": 12603, + "tours": 8948, + "tous": 37424, + "tout": 22300, + "touts": 41274, + "tov": 28970, + "tow": 11557, + "tow": 18653, + "toward": 8508, + "towards": 4447, + "towed": 45419, + "towel": 15953, + "towels": 26578, + "tower": 26669, + "tower": 4730, + "towering": 39444, + "towers": 12701, + "towie": 44613, + "towin": 45819, + "towing": 36963, + "town": 4068, + "town": 1605, + "townfc": 33981, + "townhall": 33408, + "townhouse": 40178, + "towns": 14173, + "townsend": 26826, + "township": 14622, + "townsville": 47330, + "towork": 48233, + "tox": 7742, + "tox": 16145, + "toxic": 27436, + "toxic": 12348, + "toxicity": 41234, + "toxin": 48899, + "toxins": 36618, + "toy": 14387, + "toy": 5988, + "toya": 37602, + "toyo": 7644, + "toyota": 8908, + "toys": 39508, + "toys": 7162, + "tp": 23760, + "tp": 15188, + "tpp": 29411, + "tps": 35246, + "tq": 43066, + "tr": 635, + "tr": 6337, + "tra": 752, + "tra": 2483, + "trac": 2266, + "trace": 48611, + "trace": 14767, + "traced": 47956, + "traces": 30913, + "tracey": 25558, + "tracing": 27897, + "track": 10887, + "track": 2700, + "tracked": 27049, + "tracker": 18123, + "tracking": 10428, + "tracklist": 39777, + "tracks": 7579, + "tract": 4690, + "traction": 10644, + "tractor": 14607, + "tractors": 37854, + "tracy": 32984, + "tracy": 15508, + "trad": 48716, + "trad": 38037, + "trade": 10457, + "trade": 3629, + "traded": 18860, + "trademark": 25011, + "trader": 17700, + "traders": 19112, + "trades": 18519, + "trading": 40083, + "trading": 6520, + "tradio": 20689, + "tradition": 20838, + "tradition": 8784, + "traditional": 41113, + "traditional": 5604, + "traditionally": 35532, + "traditions": 18016, + "traf": 3227, + "trafal": 32461, + "trafalgar": 36969, + "traff": 31571, + "traffic": 12080, + "traffic": 3399, + "trafficking": 15983, + "trafford": 22912, + "trage": 12430, + "tragedy": 14082, + "tragic": 14828, + "tragically": 39599, + "trail": 11523, + "trail": 4921, + "trailblazer": 41015, + "trailblazers": 35954, + "trailer": 4700, + "trailers": 24862, + "trailing": 37427, + "trails": 10633, + "train": 9122, + "train": 3231, + "trained": 10874, + "trainee": 25795, + "trainees": 30382, + "trainer": 9767, + "trainers": 18871, + "training": 34508, + "training": 2199, + "trains": 9541, + "trait": 35160, + "traitor": 31760, + "traitors": 42633, + "traits": 25748, + "trajec": 42042, + "trak": 24065, + "tral": 14609, + "tram": 9800, + "tram": 17500, + "tramp": 46289, + "trampol": 32905, + "trampoline": 42800, + "tramrahim": 35220, + "tran": 1357, + "tran": 22031, + "trance": 30584, + "trance": 18671, + "trancefamily": 39630, + "trane": 35779, + "tranqu": 18912, + "tranquil": 35764, + "tranquility": 36688, + "trans": 1826, + "trans": 8126, + "transaction": 24881, + "transactions": 21653, + "transat": 37872, + "transatlantic": 40703, + "transc": 21073, + 
"transcend": 47087, + "transcript": 39008, + "transcription": 48765, + "transfer": 22659, + "transfer": 7134, + "transferred": 29700, + "transferring": 40924, + "transfers": 21621, + "transform": 8142, + "transform": 12288, + "transformation": 34204, + "transformation": 7832, + "transformational": 47135, + "transformationtuesday": 36511, + "transformative": 38106, + "transformed": 17453, + "transformer": 38235, + "transformers": 17843, + "transforming": 44470, + "transforming": 19251, + "transforms": 30312, + "transgender": 17732, + "transi": 32236, + "transit": 10174, + "transiti": 22939, + "transition": 11391, + "transitional": 41519, + "transitioning": 43586, + "transitions": 39374, + "transl": 12243, + "translate": 22655, + "translated": 20752, + "translates": 36334, + "translating": 42156, + "translation": 12153, + "translations": 41367, + "translator": 36230, + "translucent": 49052, + "transm": 18861, + "transmission": 16103, + "transmitted": 48605, + "transmitter": 40457, + "transp": 11726, + "transpa": 18524, + "transparen": 16108, + "transparency": 16828, + "transparent": 19017, + "transpl": 16038, + "transplant": 41871, + "transplant": 18771, + "transplantation": 45207, + "transpor": 19406, + "transport": 10231, + "transport": 7362, + "transportation": 10911, + "transported": 29089, + "transporter": 43568, + "transporting": 42259, + "trap": 36224, + "trap": 9677, + "trape": 42435, + "trapped": 15592, + "traps": 28517, + "tras": 30638, + "trash": 39215, + "trash": 9798, + "traum": 22263, + "trauma": 13846, + "traumati": 46613, + "traumatic": 29958, + "trav": 7586, + "trav": 46955, + "trave": 35357, + "travel": 2824, + "travel": 1949, + "travelblog": 35957, + "travelblogger": 25494, + "travelchat": 46455, + "traveled": 20384, + "traveler": 17794, + "travelers": 20644, + "travelgram": 40069, + "traveling": 9365, + "travelled": 23428, + "traveller": 22546, + "travellers": 29583, + "travelling": 11190, + "travelphotography": 22808, + "travelpics": 32293, + "travels": 11472, + "traveltips": 36260, + "traveltuesday": 16713, + "traverse": 35058, + "travi": 46971, + "travis": 27441, + "travis": 12287, + "traw": 42288, + "trax": 34421, + "tray": 38470, + "tray": 14621, + "trays": 39798, + "trc": 41803, + "tre": 975, + "tre": 6033, + "treach": 46005, + "tread": 26182, + "tread": 35658, + "treadmill": 37780, + "treas": 8591, + "treason": 28103, + "treasure": 9922, + "treasured": 48068, + "treasurer": 26985, + "treasures": 16500, + "treasury": 20956, + "treat": 3968, + "treat": 3901, + "treated": 9772, + "treating": 13842, + "treatment": 4869, + "treatments": 15839, + "treats": 8878, + "treaty": 19967, + "treble": 33194, + "trecht": 33812, + "tree": 13354, + "tree": 2677, + "treehouse": 42387, + "trees": 4682, + "trek": 13236, + "trek": 8136, + "trekking": 25293, + "trell": 35159, + "tremb": 44043, + "tremend": 14659, + "tremendous": 15988, + "tren": 2579, + "trench": 23846, + "trenches": 38723, + "trend": 19986, + "trend": 6643, + "trending": 6087, + "trends": 7015, + "trendsetter": 46666, + "trendy": 23072, + "trent": 45885, + "trent": 15548, + "trenton": 37470, + "tres": 23569, + "tress": 4733, + "tresses": 24273, + "trevor": 23437, + "trevor": 13219, + "trex": 42114, + "trey": 36670, + "trey": 16939, + "tri": 924, + "tri": 9618, + "triad": 45602, + "trial": 5991, + "trials": 10992, + "triangle": 14615, + "triathlon": 18080, + "trib": 45151, + "tribal": 16629, + "tribe": 19943, + "tribe": 11365, + "tribeca": 35184, + "tribes": 26546, + "tribu": 3028, + "tribun": 14311, + "tribunal": 32911, 
+ "tribune": 18556, + "tribute": 5493, + "tributes": 15537, + "tric": 9511, + "tric": 4081, + "trich": 39519, + "trick": 17177, + "trick": 8172, + "tricks": 13177, + "tricky": 22319, + "trics": 31437, + "trident": 35491, + "tridge": 18722, + "tried": 4554, + "tries": 4315, + "trife": 48962, + "trigge": 30509, + "trigger": 16158, + "triggered": 30924, + "triggers": 37319, + "tright": 29915, + "tril": 40626, + "trill": 39297, + "trilli": 39350, + "trillion": 20160, + "trilo": 15183, + "trilogy": 16862, + "trim": 14182, + "trimmed": 40657, + "trin": 6628, + "trinidad": 26244, + "trinity": 30744, + "trinity": 12267, + "trio": 10263, + "trip": 23421, + "trip": 2529, + "tripad": 37189, + "tripadvisor": 38708, + "triple": 16519, + "triple": 7673, + "triplets": 48601, + "tripod": 36141, + "tripoli": 40095, + "trippin": 43073, + "tripping": 35229, + "trippy": 35137, + "trips": 12292, + "tris": 29690, + "trish": 40511, + "trish": 37179, + "trisha": 39152, + "tristan": 25497, + "trit": 37087, + "triton": 45437, + "triu": 14782, + "trium": 21065, + "triumph": 26507, + "triumph": 15307, + "triumphant": 41918, + "trivi": 21228, + "trivia": 10642, + "triviatuesday": 45499, + "trix": 41017, + "tro": 1046, + "tro": 3332, + "trock": 44368, + "trojan": 30653, + "trojans": 25310, + "trol": 10306, + "troll": 39737, + "troll": 17103, + "trolley": 25124, + "trolling": 28552, + "trolls": 20890, + "tromb": 32390, + "trombone": 44423, + "tron": 19057, + "tron": 10684, + "tronic": 34258, + "tronics": 34397, + "troom": 23691, + "troop": 12492, + "troop": 24054, + "trooper": 18327, + "troopers": 23576, + "troops": 10109, + "trop": 31585, + "trope": 41150, + "trophies": 20998, + "trophy": 42676, + "trophy": 6502, + "tropic": 21794, + "tropic": 36736, + "tropical": 41699, + "tropical": 8686, + "tropics": 36940, + "tros": 40456, + "trose": 36022, + "trot": 30453, + "trotter": 38287, + "trou": 5181, + "troubad": 49037, + "trouble": 25669, + "trouble": 7848, + "troubled": 25568, + "troubles": 27254, + "trough": 39761, + "troupe": 34803, + "trous": 19727, + "trousers": 23172, + "trout": 14853, + "trove": 45350, + "trow": 46914, + "troy": 26283, + "troy": 12819, + "trs": 24770, + "tru": 931, + "tru": 25326, + "truck": 14781, + "truck": 4629, + "trucker": 45918, + "truckers": 43404, + "trucking": 26208, + "trucks": 9569, + "trude": 39017, + "trudeau": 15752, + "true": 13096, + "true": 2328, + "truec": 37583, + "truelove": 45711, + "truffle": 23064, + "truffles": 37057, + "truly": 4545, + "trum": 11766, + "trum": 11399, + "truman": 29414, + "trump": 9124, + "trump": 1797, + "trumpet": 23681, + "trumpp": 45550, + "trumprussia": 39135, + "trumps": 29793, + "trumptrain": 43595, + "trun": 16163, + "trun": 46661, + "trunk": 18347, + "trunks": 38531, + "truro": 43507, + "truss": 46080, + "trust": 17691, + "trust": 3876, + "truste": 17356, + "trusted": 16538, + "trustee": 30803, + "trustees": 28853, + "trusting": 33221, + "trusts": 27507, + "trustworthy": 46840, + "trusty": 37955, + "truth": 21335, + "truth": 4319, + "truths": 27179, + "trx": 31620, + "try": 4487, + "try": 1209, + "tryin": 31085, + "trying": 2551, + "tryna": 15702, + "tryout": 43832, + "tryouts": 28053, + "ts": 2290, + "ts": 590, + "tsa": 25977, + "tsal": 20438, + "tsb": 45015, + "tsc": 37437, + "tsch": 38778, + "tsd": 20611, + "tse": 49144, + "tsfor": 42654, + "tsford": 32823, + "tsh": 42872, + "tshirt": 14907, + "tshirts": 29377, + "tsi": 40048, + "tsi": 37867, + "tsk": 43600, + "tsla": 35681, + "tsm": 43452, + "tsman": 20046, + "tsn": 44921, + "tsn": 26896, + 
"tson": 42353, + "tson": 47140, + "tsp": 34230, + "tsu": 13950, + "tsu": 20175, + "tsun": 19155, + "tsunami": 24286, + "tsville": 29080, + "tt": 971, + "tt": 1402, + "tta": 2646, + "ttc": 27668, + "tte": 23105, + "tte": 3070, + "tted": 15163, + "tten": 11351, + "tten": 17479, + "tter": 18691, + "tter": 5165, + "tters": 6318, + "ttes": 9293, + "tti": 5237, + "ttin": 36589, + "tting": 1188, + "ttino": 47389, + "ttip": 46993, + "ttle": 9253, + "ttm": 46838, + "tto": 8759, + "tto": 8105, + "tton": 10562, + "ttot": 12480, + "ttp": 30828, + "ttr": 47589, + "tts": 11570, + "ttt": 17256, + "tttt": 33119, + "ttu": 44006, + "ttv": 24281, + "tty": 11457, + "tty": 1856, + "tu": 764, + "tu": 5760, + "tua": 41344, + "tual": 4799, + "tuan": 37297, + "tub": 34907, + "tub": 15450, + "tube": 38229, + "tube": 3308, + "tuber": 30371, + "tuberculo": 42606, + "tuberculosis": 43129, + "tubes": 22870, + "tubing": 40794, + "tubs": 41705, + "tubular": 48786, + "tuc": 14456, + "tuc": 43871, + "tuck": 22398, + "tucked": 26923, + "tucker": 39703, + "tucker": 15726, + "tucket": 32677, + "tucson": 17250, + "tudor": 24547, + "tue": 17515, + "tues": 2283, + "tues": 12113, + "tuesday": 10209, + "tuesday": 2519, + "tuesdaymotivation": 25432, + "tuesdays": 23195, + "tuesdaythoughts": 17988, + "tuf": 44510, + "tuff": 38868, + "tug": 47032, + "tug": 27902, + "tuition": 21129, + "tuk": 39271, + "tuk": 14993, + "tul": 9069, + "tul": 40837, + "tula": 36332, + "tulane": 44893, + "tulip": 28389, + "tulips": 30886, + "tulsa": 18850, + "tum": 12932, + "tum": 8843, + "tumb": 8831, + "tumble": 38284, + "tumbler": 48790, + "tumbling": 46226, + "tumblr": 11841, + "tummy": 26053, + "tumor": 22616, + "tumors": 39894, + "tumour": 45129, + "tun": 1415, + "tun": 21349, + "tuna": 15037, + "tundra": 39899, + "tune": 11427, + "tune": 3300, + "tuned": 5898, + "tunein": 16809, + "tuner": 42905, + "tunes": 31688, + "tunes": 10810, + "tunesapp": 32550, + "tung": 47940, + "tung": 31092, + "tuni": 16270, + "tunic": 43495, + "tuning": 19585, + "tunisia": 23346, + "tunnel": 11096, + "tunnels": 29814, + "tuous": 28738, + "tup": 37956, + "tup": 4507, + "tupac": 31506, + "tups": 44855, + "tur": 985, + "tur": 17182, + "tura": 16127, + "tural": 45143, + "tural": 4261, + "turb": 18973, + "turban": 48515, + "turbine": 26880, + "turbines": 38863, + "turbo": 23578, + "turbo": 13668, + "turbul": 31100, + "turbulent": 47871, + "ture": 4321, + "ture": 941, + "tured": 3987, + "turer": 11993, + "turers": 16956, + "tures": 2400, + "turf": 36762, + "turf": 12510, + "turi": 11896, + "turin": 36251, + "turing": 5812, + "turismo": 30202, + "turk": 8254, + "turk": 32507, + "turkey": 35977, + "turkey": 4790, + "turkeys": 37991, + "turkish": 48199, + "turkish": 9278, + "turks": 34344, + "turmeric": 34044, + "turmoil": 37751, + "turn": 5522, + "turn": 2105, + "turnaround": 32719, + "turnbull": 27863, + "turned": 3771, + "turner": 42867, + "turner": 8777, + "turning": 4976, + "turno": 21377, + "turnout": 11654, + "turnover": 30794, + "turnpike": 38301, + "turns": 3185, + "turnt": 28887, + "turntable": 37953, + "turnup": 30591, + "turo": 29224, + "turquo": 19390, + "turquoise": 19899, + "turt": 13716, + "turtle": 35943, + "turtle": 10912, + "turtles": 17862, + "tus": 24828, + "tus": 7079, + "tusc": 17909, + "tuscal": 42638, + "tuscaloosa": 44375, + "tuscan": 42865, + "tuscany": 20885, + "tuss": 31741, + "tut": 35121, + "tutor": 10054, + "tutor": 27858, + "tutorial": 12857, + "tutorials": 30973, + "tutoring": 37532, + "tutti": 46880, + "tutu": 35845, + "tux": 28720, + "tux": 
49186, + "tuxedo": 40173, + "tv": 3197, + "tv": 1583, + "tvc": 49190, + "tvd": 25889, + "tvmiaw": 38554, + "tvn": 44232, + "tvs": 27114, + "tvtime": 19947, + "tvxq": 43968, + "tw": 966, + "tw": 12842, + "twa": 46954, + "twain": 30689, + "twal": 48126, + "tware": 5707, + "twc": 41217, + "twd": 29440, + "twd": 19343, + "twdfamily": 38218, + "twe": 18365, + "tweak": 48870, + "tweaks": 42661, + "twee": 1330, + "tweed": 26904, + "tweeps": 14928, + "tweet": 11826, + "tweet": 1842, + "tweeta": 32024, + "tweetapicture": 40596, + "tweeted": 7841, + "tweeter": 32876, + "tweeters": 31713, + "tweeting": 8901, + "tweets": 3560, + "tweetyour": 45033, + "twel": 14476, + "twelf": 39443, + "twelfth": 44072, + "twell": 38722, + "twell": 30162, + "twelve": 19694, + "twent": 27027, + "twenti": 35167, + "twenty": 13016, + "twentyon": 39609, + "twentyonepilots": 40007, + "twer": 13923, + "twerk": 28506, + "twi": 5537, + "twice": 6970, + "twick": 34326, + "twickenham": 39619, + "twil": 12804, + "twili": 35754, + "twilight": 46366, + "twilight": 14512, + "twill": 43703, + "twin": 9342, + "twin": 6769, + "twine": 42775, + "twinkle": 36545, + "twinning": 30156, + "twinpeaks": 32042, + "twins": 8040, + "twist": 10589, + "twisted": 18233, + "twister": 45933, + "twists": 34149, + "twit": 1643, + "twit": 18704, + "twitart": 27709, + "twitch": 13251, + "twitch": 9153, + "twitter": 7546, + "twitter": 1989, + "twitterkurds": 32722, + "twitterstorians": 35389, + "two": 17211, + "two": 1237, + "twol": 31964, + "twood": 40404, + "twood": 13245, + "twp": 33283, + "twright": 46778, + "twt": 6825, + "twx": 26830, + "twy": 45861, + "tx": 6636, + "tx": 5200, + "txhsfb": 34757, + "txlege": 26995, + "txst": 40761, + "txt": 24595, + "txwx": 22995, + "ty": 1260, + "ty": 744, + "tya": 41273, + "tycoon": 36803, + "tye": 43097, + "tyfree": 41215, + "tyga": 41952, + "tying": 22559, + "tyl": 47537, + "tyler": 14787, + "tyler": 7058, + "tym": 45772, + "tyne": 27000, + "tyne": 29729, + "tyour": 16823, + "type": 15673, + "type": 3877, + "typed": 40753, + "typeface": 44969, + "types": 7543, + "typewriter": 42180, + "typho": 17486, + "typhoon": 21110, + "typic": 21648, + "typical": 9854, + "typically": 23175, + "typing": 20102, + "typo": 18831, + "typo": 29076, + "typography": 24332, + "tyr": 15590, + "tyran": 46921, + "tyranny": 35402, + "tyre": 38330, + "tyre": 16864, + "tyres": 21376, + "tyrone": 30226, + "tyson": 16616, + "tz": 7710, + "tz": 4983, + "tzer": 45267, + "tzky": 47127, + "tzman": 46032, + "tzu": 34354, + "té": 27208, + "té": 39694, + "u": 84, + "u": 340, + "ua": 34075, + "ua": 8441, + "uaap": 46753, + "uaap": 43774, + "uab": 35587, + "uae": 9752, + "ual": 1921, + "ually": 10767, + "uan": 33062, + "uas": 38339, + "uav": 30303, + "ub": 18430, + "ub": 13494, + "uba": 29768, + "ubc": 42479, + "ubc": 29455, + "ube": 30892, + "uber": 25896, + "uber": 10668, + "ubi": 26758, + "ubio": 32867, + "ubiquit": 48129, + "ubis": 28248, + "ubisoft": 32051, + "ubs": 43851, + "ubun": 28184, + "ubuntu": 30791, + "uc": 4903, + "uc": 12438, + "uca": 30942, + "ucc": 44844, + "ucc": 29138, + "ucci": 30746, + "uccino": 30409, + "ucd": 44746, + "ucd": 43514, + "ucf": 24414, + "uch": 19465, + "uch": 22394, + "uchi": 37473, + "uci": 46354, + "uci": 28925, + "uck": 34189, + "ucl": 12013, + "ucl": 13647, + "ucla": 37667, + "ucla": 17259, + "ucn": 49036, + "uconn": 30549, + "ud": 6560, + "ud": 5765, + "uda": 22800, + "udaipur": 49385, + "uddin": 43035, + "ude": 37016, + "ude": 35194, + "ue": 16696, + "ue": 1190, + "uefa": 19189, + "uel": 24231, + "uer": 
45951, + "ues": 2526, + "uf": 17777, + "uf": 19230, + "ufc": 20396, + "ufc": 6490, + "uff": 45701, + "ufo": 19443, + "ufos": 48234, + "ug": 3754, + "ug": 16061, + "uga": 16056, + "ugand": 25965, + "uganda": 11125, + "ugandan": 44206, + "ugby": 30658, + "ugh": 39736, + "ugh": 12755, + "ugliest": 43543, + "ugly": 36070, + "ugly": 8159, + "ugu": 18144, + "uh": 17661, + "uh": 9219, + "uhc": 44974, + "uhh": 35938, + "uhhh": 45270, + "uhm": 35614, + "uhur": 29434, + "uhuru": 35690, + "ui": 17326, + "ui": 11458, + "uil": 29395, + "uit": 30696, + "uit": 47584, + "uj": 33266, + "uji": 39672, + "uk": 2294, + "uk": 1432, + "uka": 23294, + "uke": 48836, + "uke": 28577, + "uked": 48987, + "uki": 37435, + "uki": 9009, + "ukin": 34996, + "ukip": 20360, + "uklabour": 36902, + "ukmfg": 38764, + "uko": 33562, + "ukone": 24682, + "ukrain": 15468, + "ukraine": 7768, + "ukrainian": 16927, + "ukrunchat": 34481, + "uku": 29541, + "uku": 36082, + "ukulele": 39094, + "ul": 914, + "ul": 6625, + "ula": 34104, + "ula": 9506, + "ular": 4927, + "ulary": 21701, + "ulate": 20467, + "ulation": 32896, + "ule": 35616, + "ules": 26274, + "ulf": 49331, + "uli": 41841, + "uli": 22174, + "ull": 33254, + "ulla": 30577, + "ullah": 45310, + "ullivan": 45252, + "ulls": 37418, + "ulo": 46084, + "ulo": 36738, + "ulous": 42490, + "ulous": 4281, + "ulously": 20167, + "ulster": 29709, + "ulster": 24639, + "ult": 4380, + "ulti": 11925, + "ulties": 21884, + "ultimat": 16522, + "ultimate": 34684, + "ultimate": 5377, + "ultimatefan": 48372, + "ultimatefanlive": 48644, + "ultimately": 23023, + "ultr": 25636, + "ultra": 11398, + "ultra": 8118, + "ultram": 44519, + "ultrasound": 29717, + "ulture": 22272, + "ulty": 8036, + "ulu": 41815, + "ulu": 15659, + "ulum": 17235, + "uly": 33220, + "ulysses": 46114, + "um": 1622, + "um": 1008, + "uma": 29982, + "uma": 9256, + "uman": 27112, + "umar": 25656, + "umass": 39390, + "umatic": 45006, + "umb": 7493, + "umber": 19195, + "umbrel": 34773, + "umbrella": 17143, + "umbrellas": 42782, + "umbria": 39287, + "umc": 39491, + "umd": 42067, + "ume": 38480, + "umen": 42832, + "uments": 25924, + "umer": 23539, + "umes": 21403, + "umi": 48772, + "umi": 15458, + "umich": 41294, + "umin": 31542, + "umm": 26129, + "umm": 21215, + "ummer": 47628, + "ummm": 33665, + "umni": 31739, + "ump": 22224, + "umpire": 36214, + "ums": 8643, + "umu": 39788, + "un": 569, + "un": 2271, + "una": 6385, + "unable": 17793, + "unacceptable": 25234, + "unanim": 20800, + "unanimous": 33520, + "unanimously": 31798, + "unanswered": 43611, + "unarmed": 41541, + "unas": 41366, + "unavailable": 48430, + "unaware": 33347, + "unbeat": 37056, + "unbeatable": 40267, + "unbeaten": 19228, + "unbeliev": 11383, + "unbelievable": 13306, + "unbelievably": 33781, + "unborn": 37257, + "unboxing": 32866, + "unbreakable": 32956, + "unbroken": 49271, + "unc": 24921, + "unc": 15322, + "uncanny": 32556, + "uncertain": 30384, + "uncertainty": 23956, + "unch": 1527, + "unchanged": 34272, + "uncharted": 34560, + "unci": 25521, + "unciation": 34117, + "uncle": 31537, + "uncle": 8002, + "unclear": 32955, + "uncles": 45335, + "uncomfortable": 22470, + "uncommon": 34888, + "uncondition": 46561, + "unconditional": 31112, + "unconscious": 34791, + "unconstitutional": 43585, + "unconventional": 39440, + "uncover": 33031, + "uncovered": 28234, + "uncture": 38736, + "uncut": 41056, + "und": 9762, + "und": 9732, + "unda": 39932, + "undant": 25377, + "unday": 29338, + "unde": 45226, + "undead": 40105, + "undecided": 49368, + "undefeated": 15326, + "undeni": 38424, + 
"under": 1473, + "under": 1798, + "underage": 45669, + "underattack": 35075, + "undercover": 21595, + "underdog": 44266, + "undere": 21675, + "underestim": 23348, + "underestimate": 31794, + "undergo": 31545, + "undergoing": 26419, + "undergrad": 38331, + "undergraduate": 24320, + "underground": 9396, + "undering": 30826, + "underlying": 31812, + "undermine": 42839, + "underneath": 20857, + "underrated": 19494, + "unders": 20376, + "understand": 47582, + "understand": 4600, + "understanding": 7522, + "understands": 21607, + "understatement": 38296, + "understood": 17303, + "undertaker": 40144, + "undertaking": 49067, + "undertale": 48283, + "underthe": 41161, + "underwater": 14760, + "underway": 6273, + "underwear": 21154, + "underwood": 21474, + "underworld": 34760, + "undi": 23845, + "undisclosed": 39334, + "undo": 35454, + "undocumented": 35414, + "undoub": 38836, + "undoubtedly": 42204, + "undp": 26691, + "une": 4522, + "une": 10966, + "unearth": 32716, + "unearthed": 36632, + "unemp": 15139, + "unemployed": 32721, + "unemployment": 19350, + "unes": 6394, + "unesco": 16216, + "uneven": 43204, + "unex": 9484, + "unexpe": 10802, + "unexpec": 31829, + "unexpected": 12293, + "unexpectedly": 35622, + "unf": 29285, + "unfair": 22193, + "unfinished": 26526, + "unfit": 45367, + "unfold": 38681, + "unfollow": 38797, + "unfor": 14010, + "unforgettable": 16173, + "unfortun": 10194, + "unfortunate": 22361, + "unfortunately": 12863, + "unfpa": 45048, + "ung": 10439, + "ung": 4334, + "unga": 19151, + "ungsoo": 25582, + "unh": 25365, + "unhappy": 26528, + "unhcr": 43451, + "unhealthy": 30994, + "uni": 1107, + "uni": 5926, + "unic": 7648, + "unicef": 38286, + "unicef": 19259, + "unicorn": 15660, + "unicorns": 35183, + "unidenti": 33707, + "unidentified": 35563, + "unification": 45036, + "unified": 20876, + "uniform": 11075, + "uniforms": 17838, + "unil": 32388, + "unilever": 48654, + "uniof": 21218, + "union": 14210, + "union": 3503, + "unions": 18353, + "unis": 30482, + "unis": 39266, + "unisex": 27609, + "unison": 46694, + "unit": 28522, + "unit": 5695, + "unite": 15078, + "unite": 11305, + "uniteblue": 20935, + "united": 10898, + "united": 2690, + "unitedstates": 39636, + "unitedway": 47486, + "unites": 32061, + "uniting": 31318, + "units": 10394, + "unity": 38300, + "unity": 8581, + "univ": 36680, + "univ": 14896, + "univer": 15574, + "univers": 5855, + "universal": 19148, + "universal": 8754, + "universe": 6104, + "universi": 41692, + "universit": 26019, + "universities": 16408, + "university": 40728, + "university": 2182, + "universityof": 46158, + "unk": 5542, + "unknown": 8685, + "unl": 43807, + "unlawful": 42305, + "unle": 19677, + "unlea": 23893, + "unleash": 26706, + "unleashed": 27955, + "unless": 10602, + "unlike": 16694, + "unlikely": 18904, + "unlimited": 11015, + "unlock": 18649, + "unlocked": 16770, + "unlocking": 40810, + "unlucky": 35029, + "unlv": 42283, + "unmanned": 36751, + "unmatched": 46054, + "unn": 38364, + "unnamed": 44985, + "unnecessary": 24100, + "unner": 31481, + "unning": 43282, + "unnoticed": 42807, + "uno": 32446, + "uno": 17078, + "unofficial": 22506, + "unpacking": 43589, + "unpaid": 32811, + "unparalleled": 44396, + "unplugged": 31724, + "unpopular": 40232, + "unprece": 23054, + "unprecedented": 23344, + "unpredictable": 38684, + "unra": 45150, + "unreal": 46980, + "unreal": 15636, + "unrelated": 38644, + "unreleased": 29654, + "unrest": 36452, + "uns": 25908, + "unsafe": 32071, + "unsc": 36395, + "unseen": 19069, + "unsigned": 39346, + "unsolved": 40836, + 
"unsplash": 46196, + "unstable": 34730, + "unstopp": 22105, + "unstoppable": 23484, + "unsuccessful": 47478, + "unsung": 33015, + "unsure": 26396, + "unt": 19654, + "unt": 6537, + "until": 1942, + "untitled": 21309, + "unto": 19801, + "untold": 32206, + "untouch": 44509, + "untouched": 42764, + "unused": 29636, + "unusual": 12613, + "unusually": 36465, + "unve": 6685, + "unveil": 20483, + "unveiled": 13572, + "unveiling": 20327, + "unveils": 15057, + "unwanted": 25285, + "unwind": 34064, + "unya": 37142, + "uo": 30874, + "uo": 36162, + "uof": 11155, + "uoft": 37329, + "uon": 48144, + "uous": 40185, + "up": 1083, + "up": 705, + "upa": 31727, + "upbeat": 39201, + "upcoming": 4196, + "upcycled": 46552, + "upd": 3226, + "update": 2491, + "updated": 5974, + "updates": 4904, + "updating": 22792, + "uper": 38082, + "uper": 33056, + "upfront": 42064, + "upgrade": 10365, + "upgraded": 18577, + "upgrades": 21253, + "upgrading": 34368, + "uph": 14128, + "uphill": 42767, + "uphol": 26195, + "uphold": 43897, + "upholstery": 44556, + "upl": 41939, + "uplift": 45389, + "uplifting": 29546, + "upload": 13968, + "uploaded": 16793, + "uploading": 30145, + "upon": 23524, + "upon": 5067, + "upp": 19549, + "upp": 45946, + "upper": 22465, + "upper": 7067, + "upri": 15982, + "upright": 29818, + "uprising": 26006, + "upro": 28922, + "ups": 6926, + "upscale": 47501, + "upset": 11214, + "upsets": 42637, + "upside": 15362, + "upstairs": 21387, + "upstate": 33335, + "upstream": 45517, + "upthe": 31510, + "upto": 26575, + "upton": 31910, + "uptown": 23807, + "upward": 32526, + "upwards": 34915, + "uq": 39591, + "ur": 565, + "ur": 1775, + "ura": 29337, + "ura": 3544, + "urable": 40194, + "ural": 23547, + "ural": 33948, + "uran": 16197, + "uranium": 29850, + "urban": 7931, + "urban": 5800, + "urbanart": 40834, + "urd": 47880, + "urday": 19742, + "urdu": 29976, + "ure": 5514, + "ure": 726, + "ured": 4210, + "urer": 20864, + "ures": 2288, + "urg": 35995, + "urge": 14852, + "urged": 23790, + "urgency": 47612, + "urgent": 13693, + "urgently": 34534, + "urges": 16692, + "urging": 27748, + "uri": 11052, + "uri": 8699, + "urie": 46429, + "urin": 45245, + "urine": 28864, + "uring": 1351, + "url": 23464, + "urn": 38075, + "uro": 17343, + "uro": 5925, + "urology": 48585, + "urope": 14918, + "urs": 4794, + "urself": 31942, + "urst": 19181, + "urstruly": 34751, + "urstrulymahesh": 35314, + "ursula": 38390, + "urt": 24309, + "uru": 16322, + "uru": 11768, + "uruguay": 27931, + "urus": 14246, + "urve": 24583, + "ury": 8642, + "ury": 2106, + "us": 904, + "us": 718, + "usa": 9491, + "usa": 2547, + "usability": 46736, + "usable": 22890, + "usaf": 25017, + "usage": 19137, + "usaid": 34507, + "usair": 36742, + "usairforce": 42179, + "usarmy": 19132, + "usatoday": 40263, + "usav": 36056, + "usb": 10281, + "usc": 13346, + "usc": 14995, + "uscg": 43932, + "usd": 7485, + "usda": 25829, + "use": 4419, + "use": 1483, + "used": 32289, + "used": 2026, + "useful": 9784, + "useless": 20154, + "usemb": 39700, + "user": 21248, + "user": 7031, + "username": 28162, + "users": 7433, + "uses": 5282, + "useum": 45189, + "usf": 32385, + "usf": 28942, + "usgs": 35103, + "ush": 12001, + "ush": 18335, + "usher": 27411, + "ushi": 47734, + "usi": 25540, + "usic": 34909, + "usic": 16753, + "using": 1996, + "usky": 45778, + "usl": 42113, + "usm": 40041, + "usmc": 21678, + "usmnt": 30662, + "usn": 40579, + "usnavy": 24500, + "usnews": 43752, + "uso": 21539, + "usopen": 21782, + "usp": 26651, + "usps": 39980, + "usrc": 33274, + "uss": 11545, + "uss": 9260, + "ussia": 
29553, + "ussoccer": 42828, + "ussr": 32697, + "ust": 35501, + "ust": 24725, + "usu": 4254, + "usu": 40434, + "usual": 6129, + "usually": 8296, + "usur": 45582, + "uswnt": 35255, + "ut": 1419, + "ut": 3641, + "uta": 42706, + "uta": 25925, + "utah": 27474, + "utah": 9312, + "utc": 18196, + "utd": 10493, + "ute": 16856, + "ute": 3130, + "uten": 32089, + "uter": 39197, + "utes": 2850, + "uth": 48819, + "uth": 44750, + "uti": 24568, + "util": 28824, + "utili": 17015, + "utilities": 27210, + "utility": 14941, + "utilize": 36861, + "utilized": 47604, + "utilizing": 40212, + "utm": 47853, + "utmost": 42352, + "uto": 18866, + "uto": 13683, + "utopia": 34433, + "utpol": 42605, + "utr": 48726, + "utrecht": 37216, + "uts": 11740, + "utsa": 37528, + "utt": 17096, + "uttar": 40168, + "uttarak": 33755, + "uttarakhand": 35655, + "utter": 18769, + "utter": 24558, + "utterly": 21353, + "utto": 42183, + "utv": 36351, + "utz": 45320, + "uu": 5702, + "uu": 14553, + "uuu": 44355, + "uuu": 27656, + "uuuu": 16720, + "uuuu": 40797, + "uv": 23777, + "uv": 15977, + "uva": 23908, + "uw": 13933, + "uw": 19166, + "uwe": 48785, + "uwu": 35544, + "ux": 9251, + "ux": 6213, + "uy": 31929, + "uy": 48113, + "uz": 19398, + "uz": 36991, + "uzbe": 43007, + "uzbekistan": 45024, + "uzzi": 48210, + "v": 85, + "v": 341, + "va": 4648, + "va": 1892, + "vaa": 37488, + "vable": 23088, + "vac": 3125, + "vac": 34085, + "vaca": 48215, + "vacancies": 26333, + "vacancy": 21247, + "vacant": 25262, + "vacation": 28336, + "vacation": 6561, + "vacations": 29002, + "vacay": 44716, + "vacc": 13342, + "vaccin": 19164, + "vaccinated": 48134, + "vaccination": 32518, + "vaccine": 47780, + "vaccine": 17493, + "vaccines": 25860, + "vach": 46211, + "vacu": 16058, + "vacuum": 18420, + "vad": 11880, + "vada": 46759, + "vader": 21908, + "vae": 39384, + "vag": 13015, + "vague": 42154, + "vah": 26921, + "vai": 26893, + "vai": 36802, + "vail": 21189, + "vain": 25538, + "vais": 28719, + "vaj": 34206, + "vak": 16288, + "vak": 41597, + "val": 1214, + "val": 1560, + "vala": 48525, + "valdez": 40617, + "vale": 35554, + "vale": 10820, + "valedic": 43525, + "valen": 12630, + "valence": 30225, + "valenci": 34183, + "valencia": 16559, + "valent": 3655, + "valent": 15300, + "valentin": 48631, + "valentina": 43741, + "valentine": 11208, + "valentine": 5876, + "valentines": 10259, + "valentinesday": 12369, + "valentino": 29624, + "valeri": 31951, + "valerie": 25592, + "valet": 45749, + "vali": 8230, + "valiant": 33804, + "valid": 15126, + "validation": 32536, + "valkyrie": 42326, + "vall": 23523, + "vall": 35295, + "vallarta": 47874, + "valle": 24857, + "valle": 29105, + "valley": 18354, + "valley": 3136, + "valleys": 28649, + "valor": 30930, + "vals": 7431, + "valu": 6291, + "valuable": 10056, + "valuation": 25894, + "value": 41358, + "value": 4602, + "valued": 17801, + "values": 8857, + "valve": 17001, + "valves": 33517, + "vam": 9983, + "vamo": 46718, + "vamos": 30346, + "vamp": 10680, + "vampi": 47017, + "vampire": 47576, + "vampire": 13220, + "vampires": 30868, + "vamps": 44810, + "van": 2446, + "van": 2451, + "vana": 20543, + "vanc": 6320, + "vance": 31447, + "vancou": 6750, + "vancouver": 31904, + "vancouver": 7208, + "vand": 11691, + "vandalism": 45664, + "vander": 16264, + "vanderbilt": 33524, + "vandy": 39268, + "vane": 43828, + "vaness": 13328, + "vanessa": 16836, + "vangogh": 47849, + "vanguard": 27916, + "vani": 15396, + "vani": 26459, + "vania": 10998, + "vanilla": 11974, + "vanished": 43783, + "vanishing": 48296, + "vanity": 48353, + "vanity": 22938, + 
"vans": 11711, + "vant": 26298, + "vantage": 31749, + "vanu": 42892, + "vanuatu": 48766, + "vap": 10462, + "vape": 25423, + "vape": 20219, + "vaping": 29403, + "vapor": 37167, + "vapor": 30729, + "vapori": 46183, + "var": 3187, + "var": 12998, + "vara": 47492, + "varan": 36585, + "varanasi": 39364, + "vard": 21866, + "vard": 8773, + "vardy": 47371, + "vare": 38159, + "vares": 42895, + "vargas": 32752, + "vari": 3354, + "variable": 26416, + "varian": 34334, + "variant": 20293, + "variants": 38312, + "variation": 26420, + "variations": 29025, + "varied": 32334, + "varies": 32543, + "varieties": 23805, + "variety": 8396, + "various": 7395, + "varsity": 43716, + "varsity": 8574, + "varun": 48120, + "varun": 22069, + "vary": 18855, + "varying": 36456, + "vas": 5669, + "vas": 5995, + "vasc": 40995, + "vascular": 19218, + "vase": 20431, + "vasi": 49092, + "vast": 24413, + "vast": 16414, + "vastly": 48257, + "vat": 11588, + "vat": 18363, + "vatican": 21030, + "vation": 37884, + "vau": 6391, + "vaugh": 25158, + "vaughan": 21392, + "vaughn": 29013, + "vaul": 27469, + "vault": 15240, + "vaus": 40217, + "vaux": 27403, + "vauxhall": 29173, + "vaw": 47952, + "vay": 48000, + "vaz": 38142, + "vb": 29365, + "vb": 8778, + "vball": 38329, + "vc": 28670, + "vc": 7952, + "vcs": 43528, + "vcu": 40102, + "vd": 9515, + "vday": 42055, + "ve": 673, + "ve": 563, + "vea": 43798, + "veal": 36616, + "veau": 24419, + "vec": 19912, + "vector": 40453, + "vector": 21533, + "ved": 19515, + "ved": 1102, + "veda": 44401, + "vedere": 45660, + "vedi": 47971, + "vee": 35708, + "vee": 17073, + "veen": 22432, + "veer": 21243, + "veer": 22058, + "veg": 9048, + "veg": 16460, + "vega": 22930, + "vegan": 15705, + "vegan": 5615, + "vegans": 48514, + "vegas": 20288, + "vegas": 4413, + "vege": 6219, + "vegetable": 15725, + "vegetables": 14119, + "vegetarian": 14600, + "vegetation": 33947, + "veggie": 19401, + "veggies": 16767, + "vehic": 3973, + "vehicle": 5299, + "vehicles": 8361, + "veil": 23516, + "vein": 29169, + "veins": 28867, + "veit": 30620, + "vel": 942, + "vel": 1287, + "vela": 34898, + "veld": 34011, + "veled": 15370, + "veli": 49166, + "veling": 37970, + "vell": 21173, + "vell": 32997, + "velo": 14357, + "velo": 33850, + "velocity": 23811, + "vels": 5109, + "velve": 37849, + "velvet": 11063, + "vely": 1708, + "vember": 3477, + "vement": 3129, + "vements": 11104, + "ven": 1240, + "ven": 1638, + "vena": 47442, + "vend": 10851, + "vending": 29202, + "vendor": 21261, + "vendors": 20353, + "vene": 5365, + "veness": 10516, + "venetian": 34336, + "venezia": 34139, + "venezu": 10939, + "venezuela": 12839, + "venezuelan": 34699, + "veng": 31526, + "venge": 27757, + "vengeance": 32057, + "veni": 31142, + "venice": 11010, + "vening": 47532, + "venison": 40037, + "venom": 42491, + "venom": 21588, + "vens": 20884, + "vent": 4373, + "vent": 5687, + "ventil": 39522, + "ventilation": 35066, + "venting": 15731, + "vention": 4122, + "vents": 12833, + "ventu": 48217, + "ventura": 20921, + "venture": 37046, + "venture": 12543, + "ventures": 20829, + "venue": 5097, + "venues": 18120, + "venus": 14691, + "ver": 624, + "ver": 667, + "vera": 13350, + "verage": 3725, + "verb": 34952, + "verbal": 26522, + "verbally": 39985, + "verbs": 45687, + "verde": 16935, + "verdi": 42306, + "verdict": 18030, + "vere": 11135, + "vere": 34707, + "vered": 2868, + "verge": 23913, + "veri": 11638, + "verification": 33521, + "verified": 22555, + "verify": 34722, + "vering": 4630, + "veriz": 19707, + "verizon": 21532, + "verma": 41261, + "vermont": 19241, + "vern": 
2214, + "vern": 12586, + "verne": 45553, + "vernon": 18348, + "vero": 45217, + "vero": 38208, + "verona": 31819, + "veronic": 39551, + "veronica": 24039, + "vers": 1219, + "vers": 2094, + "versa": 35765, + "versace": 25422, + "versail": 29857, + "versailles": 32129, + "versary": 2940, + "versatile": 18110, + "versatility": 41340, + "verse": 39466, + "verse": 3131, + "verses": 30769, + "versi": 8934, + "version": 3273, + "versions": 16190, + "versity": 1906, + "verst": 42484, + "verstappen": 45064, + "versus": 14548, + "versy": 18522, + "vert": 11742, + "verte": 35158, + "verted": 48173, + "verti": 30459, + "vertical": 14293, + "vertigo": 42477, + "verton": 40632, + "verts": 37265, + "very": 11698, + "very": 1070, + "veryday": 37944, + "verything": 45174, + "ves": 9616, + "ves": 1003, + "vesmatter": 47636, + "vespa": 46029, + "vessel": 16387, + "vessels": 22822, + "vest": 31657, + "vest": 12473, + "vesti": 40349, + "vests": 41906, + "vet": 12294, + "vet": 5951, + "veter": 4330, + "veteran": 20797, + "veteran": 8814, + "veterans": 7092, + "veteransday": 26409, + "veterin": 43959, + "veterinary": 25458, + "veto": 36570, + "vets": 13113, + "vette": 17045, + "vettel": 28700, + "vevo": 35141, + "vex": 36187, + "vex": 43978, + "vey": 34792, + "vey": 3884, + "vez": 35987, + "vez": 17226, + "vf": 25966, + "vfl": 33726, + "vfx": 30149, + "vg": 40591, + "vg": 22346, + "vh": 46953, + "vh": 23847, + "vhs": 21932, + "vi": 603, + "vi": 4259, + "via": 1048, + "viable": 25752, + "viadu": 37012, + "viaduct": 39113, + "vial": 39951, + "vian": 40487, + "vian": 16124, + "vibe": 37974, + "vibe": 12813, + "vibes": 7764, + "vibr": 9527, + "vibrant": 14270, + "vibration": 37456, + "vibrations": 43660, + "vic": 1555, + "vic": 4412, + "vica": 46168, + "vicar": 43899, + "vice": 43572, + "vice": 6931, + "vicente": 39411, + "vices": 8332, + "vich": 24143, + "vici": 46670, + "vicious": 25177, + "vick": 15116, + "vick": 29704, + "vickers": 48452, + "vicki": 34927, + "vicky": 37176, + "vicky": 25788, + "victi": 6861, + "victim": 9133, + "victims": 7131, + "victor": 2423, + "victor": 10690, + "victori": 17555, + "victoria": 39286, + "victoria": 6127, + "victorian": 12350, + "victorias": 47791, + "victories": 24577, + "victorious": 24033, + "victory": 36668, + "victory": 4127, + "vid": 17233, + "vid": 9284, + "vida": 19015, + "vidal": 36678, + "vide": 1334, + "vide": 45244, + "video": 9478, + "video": 1455, + "videogame": 35097, + "videogames": 21149, + "videos": 6081, + "vids": 23035, + "vidy": 29639, + "vidya": 45264, + "vie": 922, + "vie": 8538, + "vien": 36493, + "vienna": 12670, + "vier": 15352, + "vier": 11987, + "viera": 21114, + "viernes": 33826, + "vies": 22458, + "viest": 31979, + "viet": 17558, + "viet": 13128, + "vietnam": 19558, + "vietnam": 8623, + "vietnamese": 22382, + "view": 12004, + "view": 1093, + "viewed": 7226, + "viewer": 15061, + "viewers": 14275, + "viewing": 7124, + "viewpoint": 41604, + "views": 2758, + "vig": 8549, + "vig": 45083, + "vigil": 21538, + "vigil": 19896, + "vigilant": 43026, + "vigne": 40447, + "vigne": 34581, + "vigo": 44097, + "vigor": 26781, + "vii": 17759, + "viii": 20414, + "vijay": 12014, + "vijay": 10823, + "vijaysethu": 47966, + "vik": 10764, + "vik": 17181, + "vika": 39562, + "vikas": 37116, + "viking": 26663, + "viking": 15897, + "vikings": 11713, + "vikram": 41136, + "vikram": 24314, + "viktor": 36101, + "vil": 1338, + "vil": 3000, + "vila": 37505, + "vile": 27247, + "vill": 10481, + "vill": 45698, + "villa": 3203, + "villa": 7754, + "village": 34584, + "village": 4331, + 
"villagers": 34283, + "villages": 17621, + "villain": 15425, + "villains": 25271, + "villanova": 44025, + "villar": 35164, + "villas": 28907, + "ville": 11110, + "ville": 1930, + "villen": 46177, + "villi": 36907, + "vimeo": 48720, + "vin": 1379, + "vin": 2558, + "vina": 35682, + "vinai": 37396, + "vinaigrette": 39876, + "vinay": 43952, + "vince": 32429, + "vince": 6236, + "vincen": 33402, + "vincent": 29069, + "vincent": 10357, + "vinci": 30199, + "vind": 20275, + "vindic": 39582, + "vine": 8471, + "vine": 7721, + "vinegar": 23834, + "vines": 21268, + "vineyard": 16527, + "vineyards": 23082, + "ving": 5375, + "ving": 903, + "vingne": 42579, + "vings": 22510, + "vini": 48119, + "vinnie": 40885, + "vinny": 36794, + "vino": 14509, + "vinod": 43348, + "vins": 34820, + "vinson": 45945, + "vintag": 10936, + "vintage": 13654, + "vintage": 3266, + "viny": 40990, + "vinyl": 22835, + "vinyl": 5754, + "vio": 11913, + "vio": 20324, + "viol": 3164, + "viola": 27438, + "violate": 44875, + "violated": 38192, + "violating": 37554, + "violation": 22919, + "violations": 21969, + "violence": 5450, + "violent": 11565, + "violently": 47758, + "violet": 16118, + "violets": 42861, + "violin": 17058, + "violinist": 36299, + "vion": 35496, + "vious": 6418, + "viously": 7149, + "vip": 45714, + "vip": 7111, + "viper": 27401, + "vips": 41149, + "vir": 1790, + "vir": 25319, + "vira": 35910, + "viral": 11653, + "virat": 32473, + "virgil": 39076, + "virgin": 5651, + "virgin": 12103, + "virgini": 43426, + "virginia": 6728, + "virgo": 39978, + "viro": 32301, + "viron": 38309, + "virtu": 7977, + "virtual": 18059, + "virtual": 7790, + "virtually": 22475, + "virtualreality": 32608, + "virtue": 26860, + "virtues": 42167, + "virtuoso": 47027, + "virus": 11808, + "viruses": 34830, + "vis": 1301, + "vis": 5337, + "visa": 12802, + "visas": 41228, + "vise": 24977, + "vised": 14810, + "vish": 12024, + "vish": 29124, + "vishal": 33648, + "vishnu": 37816, + "visi": 1409, + "visibility": 15921, + "visible": 36658, + "visible": 8626, + "vising": 37439, + "vision": 11147, + "vision": 2515, + "visional": 24627, + "visionary": 22959, + "visions": 13804, + "visit": 3388, + "visit": 1600, + "visitation": 44370, + "visited": 5580, + "visiting": 4680, + "visitor": 13881, + "visitors": 9160, + "visits": 8489, + "visitscotland": 28760, + "visitspain": 48860, + "vism": 15514, + "viso": 46732, + "visor": 24217, + "vist": 21436, + "vista": 13865, + "visu": 7739, + "visual": 17004, + "visual": 7195, + "visualization": 28500, + "visualize": 45057, + "visually": 25743, + "visuals": 21315, + "viswas": 36513, + "viswasam": 47664, + "vit": 4056, + "vit": 35580, + "vita": 15700, + "vital": 32525, + "vital": 10585, + "vitality": 36385, + "vitam": 9856, + "vitamin": 13675, + "vitamins": 22582, + "vito": 36725, + "vity": 4893, + "vitz": 26188, + "vius": 41571, + "viv": 21827, + "viv": 35363, + "viva": 17399, + "vival": 35920, + "vive": 18980, + "vive": 24004, + "vivek": 36243, + "vivi": 11625, + "vivian": 30129, + "vivid": 22984, + "vivo": 28091, + "vivo": 25888, + "vix": 28976, + "vix": 34811, + "vixen": 38757, + "vixx": 32106, + "viz": 28251, + "viz": 31786, + "vj": 45439, + "vj": 30827, + "vk": 41893, + "vl": 37580, + "vl": 36442, + "vla": 23686, + "vlad": 41089, + "vladi": 19320, + "vladimir": 21702, + "vlive": 46797, + "vlog": 18894, + "vm": 16204, + "vm": 20269, + "vma": 35666, + "vmas": 30236, + "vmware": 29615, + "vn": 47098, + "vn": 25076, + "vo": 947, + "vo": 3951, + "voc": 4105, + "voc": 20855, + "vocab": 21346, + "vocabulary": 23804, + "vocal": 
34037, + "vocal": 13147, + "vocali": 19134, + "vocalist": 22102, + "vocals": 17666, + "vocation": 20521, + "vocational": 33751, + "vod": 11820, + "vod": 35854, + "vodaf": 28436, + "vodafone": 38695, + "vodka": 13646, + "vogel": 44960, + "vogue": 24418, + "vogue": 13178, + "voic": 29185, + "voice": 13179, + "voice": 3386, + "voiced": 34352, + "voiceof": 44966, + "voiceover": 41979, + "voices": 9144, + "void": 21561, + "voip": 42762, + "voir": 16036, + "vol": 1343, + "vol": 7945, + "volatile": 41022, + "volatility": 32355, + "volcan": 9916, + "volcanic": 24072, + "volcano": 14581, + "volcanoes": 38055, + "voli": 40138, + "volk": 13432, + "volkswag": 14407, + "volkswagen": 15342, + "volley": 7130, + "volley": 34656, + "volleyball": 7458, + "volo": 44791, + "vols": 20404, + "volt": 26430, + "volta": 29879, + "volta": 33480, + "voltage": 23118, + "voltron": 39314, + "volu": 3563, + "volume": 8284, + "volumes": 22651, + "volun": 3356, + "voluntar": 48823, + "voluntary": 23815, + "volunte": 3556, + "volunteer": 32331, + "volunteer": 7114, + "volunteered": 34000, + "volunteering": 14902, + "volunteers": 5939, + "volution": 24043, + "volved": 42888, + "volvo": 39991, + "volvo": 16906, + "vom": 24198, + "vomit": 46485, + "von": 11269, + "von": 8497, + "voo": 19497, + "voodoo": 26869, + "voor": 34291, + "voor": 34464, + "vor": 8338, + "vor": 5308, + "vore": 18215, + "vortex": 30071, + "vos": 16863, + "vot": 48558, + "vote": 6830, + "vote": 2187, + "voted": 6454, + "votel": 41379, + "voter": 44474, + "voter": 14065, + "voters": 8925, + "votes": 6693, + "voting": 5756, + "vou": 11045, + "voucher": 18190, + "vouchers": 23384, + "vous": 10636, + "vow": 34787, + "vows": 21677, + "vox": 29215, + "vox": 22692, + "voy": 10622, + "voy": 15021, + "voyage": 16299, + "voyager": 29669, + "vp": 32758, + "vp": 3896, + "vpn": 38212, + "vr": 16840, + "vr": 5921, + "vre": 44500, + "vre": 17501, + "vs": 11385, + "vs": 1547, + "vsco": 26752, + "vsco": 32822, + "vscocam": 34694, + "vsky": 37791, + "vss": 31919, + "vt": 31732, + "vt": 10291, + "vu": 8664, + "vu": 13230, + "vue": 43915, + "vue": 19313, + "vuel": 31312, + "vuelta": 43856, + "vuitton": 26705, + "vul": 6856, + "vulcan": 34767, + "vulner": 11213, + "vulnerability": 28797, + "vulnerable": 14332, + "vulture": 34593, + "vultures": 47197, + "vv": 19264, + "vv": 35686, + "vw": 28650, + "vw": 13250, + "vx": 47644, + "vy": 11566, + "vy": 5157, + "w": 86, + "w": 342, + "wa": 869, + "wa": 2663, + "waa": 35874, + "wab": 19893, + "wab": 36852, + "wac": 27445, + "wac": 37947, + "wack": 22880, + "wack": 38270, + "wacky": 34318, + "waco": 36035, + "wad": 11133, + "wad": 30451, + "wada": 40006, + "wade": 40237, + "wade": 14180, + "wadi": 37253, + "waf": 17638, + "wafc": 49086, + "waff": 13940, + "waffle": 20375, + "waffles": 24205, + "wag": 5764, + "wag": 19177, + "wage": 10716, + "wager": 43430, + "wages": 19114, + "wagner": 18081, + "wagon": 13260, + "wagons": 47944, + "wags": 48580, + "wah": 24812, + "wah": 18014, + "wahl": 27500, + "wahlberg": 35151, + "wahoo": 47995, + "wai": 11469, + "wai": 21569, + "waifu": 46551, + "waikiki": 44907, + "wain": 28358, + "wain": 20120, + "wainwright": 45878, + "waist": 36946, + "waist": 18459, + "wait": 10021, + "wait": 1885, + "waite": 24272, + "waited": 18492, + "waiter": 32946, + "waitin": 44482, + "waiting": 2680, + "waitress": 39760, + "waitrose": 37164, + "waits": 21361, + "waiver": 42866, + "waj": 49367, + "wak": 11172, + "wak": 36015, + "waka": 42696, + "wake": 10501, + "wake": 5731, + "wakefield": 26358, + "wakes": 29108, + 
"wakeup": 26328, + "wakeup": 35380, + "wakeupamerica": 37474, + "waking": 13025, + "wal": 1056, + "wal": 6903, + "wala": 16468, + "walang": 49180, + "walcott": 45744, + "wald": 46930, + "wald": 15724, + "walden": 39311, + "waldo": 32440, + "waldorf": 38227, + "wale": 41247, + "wale": 20336, + "wales": 25383, + "wales": 5110, + "walgreens": 38490, + "wali": 37576, + "wali": 14768, + "walia": 44455, + "walk": 8588, + "walk": 2374, + "walkaway": 48255, + "walked": 8667, + "walker": 24735, + "walker": 6150, + "walkers": 23366, + "walkin": 45792, + "walking": 12644, + "walking": 3941, + "walkingdead": 14948, + "walkout": 47470, + "walks": 8192, + "walkway": 36614, + "wall": 4316, + "wall": 2569, + "walla": 26007, + "walla": 39982, + "wallabies": 48926, + "wallace": 12535, + "wallart": 36223, + "walled": 36567, + "waller": 45340, + "wallet": 12154, + "wallets": 38550, + "walleye": 49099, + "wallis": 42206, + "wallpaper": 10560, + "wallpapers": 29841, + "walls": 8258, + "wallstreet": 45341, + "wally": 26024, + "walmart": 11972, + "walnut": 16310, + "walnuts": 38294, + "walsall": 42935, + "walsh": 12856, + "walt": 23535, + "walt": 14312, + "waltdisneyworld": 36505, + "walter": 31156, + "walter": 10645, + "walters": 25532, + "waltham": 42742, + "waltham": 45581, + "walton": 19485, + "waltz": 35982, + "wam": 20503, + "wamy": 46970, + "wan": 2060, + "wan": 4557, + "wana": 30830, + "wand": 14636, + "wand": 28559, + "wanda": 25070, + "wander": 12985, + "wander": 24473, + "wandered": 46593, + "wanderers": 27540, + "wandering": 22597, + "wanderlust": 16129, + "wane": 27459, + "wang": 19731, + "wang": 11900, + "wani": 21674, + "wankers": 42189, + "wann": 23622, + "wanna": 35940, + "wanna": 3836, + "wannabe": 40730, + "wannaone": 44832, + "want": 18356, + "want": 1280, + "wanted": 3146, + "wanting": 12801, + "wants": 3107, + "wap": 27393, + "wap": 30368, + "waq": 47512, + "war": 984, + "war": 2238, + "wara": 21631, + "warbler": 33891, + "warcraft": 13660, + "ward": 7728, + "ward": 1460, + "warden": 27798, + "wardly": 30780, + "wardro": 14247, + "wardrobe": 15020, + "wards": 2593, + "ware": 7416, + "ware": 4476, + "wareagle": 35716, + "warehouse": 13054, + "wareness": 41601, + "wareness": 35870, + "wares": 30692, + "warfare": 15739, + "warhammer": 26832, + "warhol": 27554, + "wari": 20977, + "wark": 46346, + "wark": 15164, + "warlock": 42455, + "warm": 14725, + "warm": 3616, + "warmed": 36695, + "warmer": 14328, + "warmest": 30910, + "warming": 8606, + "warmly": 45322, + "warmongers": 33205, + "warms": 32917, + "warmth": 19636, + "warmup": 29904, + "warmups": 44094, + "warn": 19360, + "warned": 16409, + "warner": 28564, + "warner": 13402, + "warning": 4994, + "warnings": 18098, + "warns": 14086, + "waron": 38947, + "warp": 32411, + "warped": 32125, + "warran": 17392, + "warrant": 22554, + "warrants": 45677, + "warranty": 23999, + "warren": 23143, + "warren": 9234, + "warri": 4109, + "warrington": 31203, + "warrior": 18998, + "warrior": 8148, + "warriors": 6421, + "wars": 3931, + "warsaw": 21072, + "warship": 47846, + "wart": 43535, + "wart": 7346, + "wartime": 42998, + "warts": 21781, + "warwick": 23081, + "warwick": 22215, + "warwickshire": 36766, + "wary": 36213, + "was": 3398, + "was": 739, + "wasabi": 47334, + "wash": 3363, + "wash": 7810, + "washed": 14092, + "washer": 24085, + "washes": 38950, + "washing": 13029, + "washington": 16774, + "washington": 4365, + "washingtondc": 40225, + "washingtonpost": 28426, + "wasn": 5044, + "wasnt": 29607, + "wasp": 24889, + "wasps": 35300, + "wassup": 45708, + 
"wast": 28886, + "waste": 18157, + "waste": 6065, + "wasted": 18278, + "wasteland": 44035, + "wastewater": 34463, + "wasting": 25577, + "wat": 800, + "wat": 10621, + "wata": 42509, + "watch": 7046, + "watch": 1239, + "watchdog": 35303, + "watched": 5775, + "watcher": 35971, + "watchers": 28443, + "watches": 9521, + "watchin": 32432, + "watching": 2113, + "water": 2505, + "water": 1573, + "watercolor": 14211, + "watercolour": 18377, + "waterfall": 16403, + "waterfalls": 26692, + "waterford": 24448, + "waterfront": 16605, + "waterhouse": 45072, + "watering": 19871, + "waterloo": 17465, + "watermelon": 19889, + "waterproof": 17613, + "waters": 7753, + "watershed": 33204, + "waterstones": 45014, + "waterways": 37395, + "watford": 23162, + "watfordfc": 37328, + "wati": 27966, + "watkins": 22539, + "watson": 35490, + "watson": 9294, + "watt": 22899, + "watt": 15805, + "wattpad": 32351, + "watts": 14750, + "wau": 9479, + "wav": 6054, + "wave": 17530, + "wave": 4535, + "waved": 44657, + "waver": 25997, + "waves": 7882, + "waving": 26545, + "wavy": 31941, + "waw": 22039, + "wawrinka": 48414, + "wawx": 47387, + "wax": 18789, + "wax": 11910, + "waxing": 38781, + "way": 3079, + "way": 923, + "wayback": 47822, + "wayne": 23632, + "wayne": 7003, + "ways": 1248, + "waz": 20889, + "waz": 48835, + "wb": 10726, + "wb": 12377, + "wba": 22675, + "wbb": 14482, + "wbc": 26745, + "wbo": 49053, + "wbz": 35471, + "wc": 4842, + "wc": 5755, + "wcc": 47166, + "wcc": 34926, + "wcpo": 46624, + "wcs": 39916, + "wcvb": 32709, + "wcw": 9041, + "wd": 15998, + "wd": 7494, + "wdw": 40334, + "we": 598, + "we": 649, + "wea": 37146, + "wea": 47301, + "weak": 12128, + "weak": 10128, + "weaker": 39735, + "weakness": 21448, + "weaknesses": 43487, + "weal": 14759, + "wealth": 33150, + "wealth": 7904, + "wealthy": 22617, + "weap": 6156, + "weapon": 42612, + "weapon": 10537, + "weapons": 10007, + "wear": 12206, + "wear": 2839, + "wearab": 22983, + "wearable": 44943, + "wearable": 24973, + "wearables": 30319, + "weare": 4264, + "weare": 27867, + "weareall": 45980, + "wearec": 43620, + "wearen": 45635, + "weareone": 16149, + "weareoneexo": 16448, + "wearethe": 40242, + "wearing": 3309, + "wears": 11869, + "weary": 38766, + "weasel": 44308, + "weather": 8808, + "weather": 2237, + "weathercee": 44980, + "weatherchannel": 42138, + "weav": 22260, + "weave": 22450, + "weaver": 20297, + "weaving": 27131, + "web": 2055, + "web": 4601, + "webb": 15708, + "webber": 34248, + "webcam": 24211, + "webcam": 22589, + "webcamtoy": 27719, + "webcast": 28256, + "webcomic": 34286, + "webcomics": 39811, + "webdesign": 20470, + "webdev": 37000, + "webdevelopment": 47553, + "weber": 20179, + "webin": 8460, + "webinar": 8921, + "webinars": 47755, + "webpage": 46964, + "webs": 32829, + "webseries": 44819, + "website": 3364, + "websites": 19278, + "webster": 19471, + "websummit": 48069, + "wec": 33152, + "wechat": 46124, + "wed": 1687, + "wed": 3478, + "wedd": 7576, + "wedding": 11204, + "wedding": 3101, + "weddings": 15964, + "wedge": 21446, + "wedges": 33179, + "wedne": 2380, + "wednesday": 9311, + "wednesday": 2689, + "wednesdaymotivation": 37860, + "wednesdays": 24943, + "wednesdaywisdom": 11445, + "wedo": 43432, + "weds": 19107, + "wee": 716, + "wee": 8288, + "weed": 36935, + "weed": 8015, + "weeds": 26326, + "week": 1286, + "week": 994, + "weekday": 29244, + "weekdays": 44330, + "weekend": 17205, + "weekend": 1456, + "weekender": 36547, + "weekends": 14564, + "weekly": 34652, + "weekly": 5885, + "weeknd": 29925, + "weeks": 2898, + "weeksary": 24628, + 
"ween": 17517, + "ween": 1599, + "weep": 39270, + "weeping": 36629, + "weer": 32491, + "weet": 17742, + "weets": 13454, + "wef": 23313, + "weg": 47867, + "weg": 47561, + "wego": 44784, + "wego": 28220, + "weh": 48458, + "weh": 40313, + "weho": 47798, + "wei": 6958, + "wei": 20952, + "weibo": 20613, + "weigh": 10565, + "weigh": 17346, + "weighed": 33210, + "weighing": 24455, + "weighs": 20481, + "weight": 12723, + "weight": 3868, + "weighted": 43179, + "weightlifting": 36164, + "weightloss": 20359, + "weights": 21374, + "weil": 43720, + "weiler": 42203, + "wein": 29134, + "wein": 37684, + "weiner": 38822, + "weinstein": 34367, + "weir": 11299, + "weir": 25517, + "weird": 27981, + "weird": 5613, + "weirdest": 29482, + "weirdo": 32476, + "weis": 26251, + "weiser": 34833, + "weiss": 24794, + "wel": 1267, + "wel": 8042, + "welch": 25820, + "welcom": 11578, + "welcome": 18318, + "welcome": 1881, + "welcomed": 12590, + "welcomes": 9304, + "welcometo": 47511, + "welcoming": 8775, + "weld": 39776, + "welding": 24956, + "welfare": 12129, + "well": 3277, + "well": 1123, + "wellbeing": 14273, + "weller": 40921, + "welling": 49165, + "wellington": 15389, + "wellness": 40574, + "wellness": 9904, + "wells": 42705, + "wells": 9804, + "welove": 13573, + "welp": 28391, + "wels": 20852, + "welsh": 19173, + "welsh": 10977, + "welt": 38595, + "welter": 37115, + "welterweight": 39617, + "wemb": 15213, + "wembley": 16579, + "wen": 6590, + "wen": 11278, + "wend": 15166, + "wendell": 42091, + "wendy": 31616, + "wendy": 14074, + "wenger": 21105, + "went": 18633, + "went": 2437, + "wentworth": 36423, + "wentz": 39179, + "wer": 6316, + "wer": 2980, + "were": 15461, + "were": 1365, + "wered": 6605, + "weren": 13611, + "werewolf": 32001, + "werk": 30176, + "werner": 29917, + "wers": 7110, + "wes": 18620, + "wes": 14738, + "wesle": 29606, + "wesley": 17332, + "wesleyan": 32509, + "wesome": 33292, + "wess": 44431, + "west": 2973, + "west": 1593, + "westbound": 29208, + "westbrook": 26948, + "westchester": 36675, + "westcoast": 44610, + "westend": 44815, + "wester": 9846, + "western": 17079, + "western": 4463, + "westfield": 32309, + "westh": 36798, + "westin": 43232, + "westlake": 41535, + "westminster": 15158, + "weston": 22771, + "westside": 33762, + "westwood": 26371, + "westworld": 42287, + "wet": 12406, + "wet": 6682, + "weta": 40946, + "wethenorth": 45281, + "wethepeople": 48030, + "wether": 33794, + "wether": 48405, + "wetland": 37357, + "wetlands": 26547, + "wett": 41971, + "wetter": 43957, + "wewant": 39280, + "wewill": 37241, + "wex": 17234, + "wexford": 29876, + "wexmondays": 49042, + "wey": 30376, + "wey": 19781, + "weymouth": 41433, + "wf": 14576, + "wf": 22313, + "wfa": 44606, + "wfc": 36431, + "wfp": 35193, + "wftv": 47075, + "wg": 21091, + "wg": 25857, + "wga": 32354, + "wgn": 48828, + "wh": 573, + "wh": 13844, + "wha": 18994, + "wha": 25884, + "whal": 38967, + "whale": 37083, + "whale": 11650, + "whales": 17722, + "wham": 42506, + "whar": 15517, + "wharf": 22452, + "wharton": 43320, + "what": 4268, + "what": 768, + "whatcha": 37160, + "whate": 6695, + "whatever": 6743, + "whati": 23500, + "whats": 9263, + "whats": 13084, + "whatsapp": 10119, + "whatsoever": 39928, + "whatson": 35632, + "whatyou": 30508, + "whe": 2009, + "whead": 34583, + "wheat": 20505, + "wheat": 10303, + "wheaton": 46933, + "wheel": 7360, + "wheel": 6744, + "wheelchair": 17713, + "wheeler": 18405, + "wheeling": 34839, + "wheels": 8025, + "whel": 9792, + "whelan": 40715, + "when": 8753, + "when": 827, + "whenever": 10500, + "where": 
7052, + "where": 1234, + "whereabouts": 47808, + "whereas": 42234, + "wheres": 46345, + "wherever": 14103, + "whereyou": 46837, + "whether": 5903, + "whew": 39016, + "whey": 34556, + "whi": 4295, + "whi": 33129, + "which": 1448, + "whiche": 48719, + "whichever": 49138, + "whil": 8499, + "while": 1519, + "whilst": 8596, + "whim": 27766, + "whimsical": 42282, + "whip": 14412, + "whipped": 22323, + "whipping": 41567, + "whir": 20873, + "whirl": 30962, + "whirlwind": 47771, + "whis": 6024, + "whiskey": 41381, + "whiskey": 11610, + "whisky": 37567, + "whisky": 12599, + "whisp": 21986, + "whispe": 30356, + "whisper": 27616, + "whisperer": 41368, + "whispering": 42599, + "whispers": 29133, + "whist": 13640, + "whistle": 23972, + "whistle": 19746, + "whistleblower": 40410, + "whistler": 29633, + "whit": 4398, + "whit": 31498, + "whitaker": 35851, + "whitby": 30858, + "white": 4699, + "white": 1579, + "whiteboard": 40839, + "whitec": 24575, + "whitehall": 42827, + "whitehead": 43560, + "whitehouse": 20776, + "whitening": 35540, + "whitepaper": 42713, + "whites": 35886, + "whites": 18835, + "whitesox": 28816, + "whitewater": 49350, + "whitfield": 48404, + "whitley": 40564, + "whitman": 32394, + "whitney": 43021, + "whitney": 18048, + "whitt": 33784, + "whittaker": 47595, + "whl": 25801, + "who": 2969, + "who": 822, + "whoa": 16943, + "whoever": 11137, + "whois": 41884, + "whole": 10360, + "whole": 2954, + "wholefoods": 42840, + "wholesale": 18306, + "wholesome": 35959, + "whom": 38158, + "whom": 12873, + "whoo": 20003, + "whoo": 49290, + "whoop": 22060, + "whoops": 28433, + "whopping": 34384, + "whore": 31690, + "whos": 41460, + "whos": 27130, + "whose": 6933, + "whouse": 45927, + "whs": 26292, + "wht": 32470, + "whufc": 31695, + "whun": 18272, + "why": 11040, + "why": 1182, + "whyte": 42386, + "wi": 820, + "wi": 5585, + "wib": 45303, + "wic": 7834, + "wich": 9759, + "wich": 5238, + "wichita": 22566, + "wick": 6798, + "wick": 6479, + "wicked": 32579, + "wicked": 12825, + "wicker": 38096, + "wicket": 19180, + "wickets": 22110, + "wicklow": 39039, + "wicz": 30121, + "wid": 11886, + "wid": 20886, + "wide": 19341, + "wide": 3184, + "widely": 16195, + "widening": 46598, + "wider": 21263, + "widesp": 20598, + "widespread": 21258, + "widget": 43906, + "wido": 28068, + "widow": 19949, + "widows": 42129, + "width": 23571, + "wie": 21378, + "wie": 9131, + "wielding": 47272, + "wien": 38131, + "wiener": 40567, + "wies": 42788, + "wif": 37572, + "wife": 3607, + "wifey": 35282, + "wifi": 11026, + "wig": 23690, + "wig": 12216, + "wigan": 23130, + "wiggins": 32329, + "wiggle": 47812, + "wight": 41278, + "wight": 15545, + "wigs": 31207, + "wii": 8005, + "wiiu": 40980, + "wiki": 10373, + "wiki": 24265, + "wikileaks": 28731, + "wikipedia": 15176, + "wil": 1352, + "wil": 20581, + "wilbur": 43069, + "wilcox": 43231, + "wild": 2780, + "wild": 3220, + "wildatlantic": 35500, + "wildatlanticway": 35776, + "wildcard": 37360, + "wildcat": 49077, + "wildcat": 25870, + "wildcats": 15909, + "wilde": 23498, + "wilder": 14343, + "wilder": 23499, + "wilderness": 16506, + "wildest": 43028, + "wildfire": 22788, + "wildfires": 29184, + "wildflower": 27628, + "wildflower": 33181, + "wildflowerhour": 31302, + "wildflowers": 29136, + "wildlife": 13298, + "wildlife": 5250, + "wildlifephotography": 32307, + "wildlifewednesday": 48537, + "wildly": 35981, + "wildoz": 40113, + "wiley": 32747, + "wilhelm": 39696, + "wilkes": 39548, + "wilkins": 36986, + "wilkinson": 26797, + "will": 5062, + "will": 751, + "willam": 43276, + "willard": 44920, 
+ "wille": 48739, + "willem": 38044, + "willi": 2256, + "william": 8420, + "william": 4705, + "williams": 38452, + "williams": 4075, + "williamsburg": 30683, + "williamson": 20793, + "willie": 13907, + "willing": 34160, + "willing": 11718, + "willingness": 40573, + "willis": 18491, + "willow": 33887, + "willow": 15665, + "wills": 26913, + "willy": 34502, + "willy": 19599, + "wilmington": 28052, + "wilms": 47879, + "wilshere": 48359, + "wilson": 23629, + "wilson": 5622, + "wilt": 23394, + "wilt": 47357, + "wilton": 46638, + "wiltshire": 28025, + "wim": 8662, + "wim": 27580, + "wimble": 11752, + "wimbledon": 12229, + "win": 831, + "win": 1225, + "winchester": 20647, + "wind": 6812, + "wind": 3630, + "winder": 44454, + "winder": 46245, + "winding": 22390, + "windmill": 34084, + "windo": 3110, + "window": 26675, + "window": 4879, + "windows": 5437, + "winds": 12668, + "winds": 7012, + "windshield": 33002, + "windsor": 44322, + "windsor": 12884, + "windy": 13446, + "wine": 7375, + "wine": 2604, + "winelover": 26357, + "winemaker": 41588, + "wineoclock": 43846, + "wineries": 49349, + "winery": 15500, + "wines": 8263, + "winetasting": 41288, + "winewednesday": 35447, + "wing": 8141, + "wing": 1340, + "winged": 24993, + "winger": 22727, + "winget": 44578, + "wings": 5178, + "wink": 34455, + "wink": 25859, + "winkle": 36430, + "winn": 38104, + "winne": 46273, + "winner": 32961, + "winner": 2520, + "winners": 4320, + "winni": 13018, + "winnie": 29022, + "winning": 42099, + "winning": 2577, + "winnings": 46490, + "winnipeg": 14369, + "winona": 49202, + "wins": 46839, + "wins": 2718, + "winslow": 39658, + "winston": 14848, + "winter": 7340, + "winter": 2541, + "winters": 21587, + "wintry": 39504, + "wip": 10447, + "wipe": 26761, + "wiped": 31822, + "wipes": 33463, + "wir": 16849, + "wir": 44838, + "wire": 7558, + "wire": 7794, + "wired": 18935, + "wireless": 9103, + "wires": 24311, + "wiring": 36434, + "wirral": 34675, + "wis": 3392, + "wis": 20405, + "wiscon": 9857, + "wisconsin": 10265, + "wisdom": 42474, + "wisdom": 5425, + "wise": 19116, + "wise": 5558, + "wisely": 26173, + "wiser": 44859, + "wish": 11328, + "wish": 2412, + "wished": 25883, + "wishes": 6045, + "wishing": 5307, + "wishlist": 31969, + "wit": 584, + "wit": 8531, + "witch": 20139, + "witch": 10083, + "witchcraft": 35065, + "witcher": 33684, + "witches": 21673, + "with": 1435, + "with": 593, + "withdra": 24696, + "withdraw": 31670, + "withdrawal": 25765, + "withdrawn": 46687, + "withdraws": 48637, + "wither": 39655, + "witherspoon": 45409, + "within": 4154, + "withme": 44670, + "without": 32836, + "without": 2193, + "withstand": 42236, + "withthe": 36872, + "withus": 30572, + "withyou": 30351, + "witne": 12096, + "witness": 8793, + "witnessed": 20187, + "witnesses": 22778, + "witnessing": 33618, + "wits": 30938, + "witt": 38194, + "witt": 17168, + "witter": 31597, + "witty": 29970, + "witz": 44186, + "witz": 13265, + "wiv": 48925, + "wives": 14378, + "wiwx": 44461, + "wiz": 7730, + "wiz": 23178, + "wizar": 49121, + "wizard": 30490, + "wizard": 14295, + "wizards": 19140, + "wizkid": 40146, + "wj": 19739, + "wj": 35453, + "wk": 11512, + "wk": 11528, + "wkend": 42336, + "wknd": 20851, + "wks": 25508, + "wku": 43377, + "wl": 13299, + "wl": 9613, + "wm": 20268, + "wm": 15790, + "wn": 1186, + "wn": 757, + "wnba": 32358, + "wned": 8628, + "wns": 12950, + "wnt": 22484, + "wny": 24833, + "wo": 1613, + "wo": 11132, + "woah": 17751, + "wob": 35984, + "woc": 39011, + "wod": 41522, + "woes": 27860, + "wof": 45671, + "woj": 48931, + "wok": 28912, + 
"woke": 9331, + "woken": 43697, + "woking": 43931, + "wol": 2798, + "wol": 48622, + "wold": 42399, + "wolf": 9453, + "wolf": 5916, + "wolfe": 24989, + "wolff": 34369, + "wolfgang": 34061, + "wolfpack": 30887, + "wolve": 45101, + "wolver": 14334, + "wolverhampton": 34518, + "wolverine": 23353, + "wolverines": 42003, + "wolves": 9372, + "wom": 1087, + "womack": 48980, + "woman": 15716, + "woman": 2308, + "womanc": 35630, + "womancrush": 37721, + "womancrushwednesday": 39714, + "womanin": 30562, + "womaninbiz": 36482, + "womb": 37023, + "women": 3648, + "women": 1507, + "womenin": 13062, + "womeninscience": 41343, + "womeninstem": 29380, + "womenintech": 31470, + "womenof": 48421, + "womens": 12822, + "womens": 14408, + "womensart": 38548, + "womensday": 13956, + "womenshi": 22887, + "womenshistorymonth": 24982, + "womensmarch": 30102, + "won": 1528, + "won": 1749, + "wonder": 2070, + "wonder": 3936, + "wondercon": 46944, + "wondered": 15550, + "wonderful": 2582, + "wonderfully": 23245, + "wondering": 8360, + "wonderland": 13874, + "wonders": 14048, + "wonderwoman": 31000, + "wondo": 38402, + "wondr": 46771, + "wong": 17876, + "wonka": 43463, + "wont": 43174, + "wont": 15952, + "woo": 1867, + "woo": 9322, + "wood": 3269, + "wood": 1704, + "woodbridge": 49074, + "wooden": 48226, + "wooden": 9057, + "woodland": 44314, + "woodland": 17447, + "woodlands": 32430, + "woodley": 40566, + "woodpecker": 32684, + "woods": 6267, + "woodson": 48967, + "woodstock": 29486, + "woodward": 27419, + "woodwork": 47386, + "woodworking": 29267, + "woody": 38627, + "woody": 17144, + "woof": 34234, + "woof": 24028, + "woohoo": 20172, + "wook": 29192, + "wool": 9967, + "wool": 13283, + "woolf": 43728, + "woolly": 47722, + "woon": 33126, + "wooo": 43217, + "woop": 31884, + "woot": 22466, + "wor": 641, + "worcester": 22172, + "worcester": 19580, + "worcestershire": 38440, + "worcestershirehour": 43644, + "word": 8272, + "word": 2653, + "wordof": 33500, + "wordoftheday": 43594, + "wordpress": 15193, + "words": 31007, + "words": 2709, + "wore": 8953, + "work": 1636, + "work": 951, + "workday": 29735, + "worked": 5410, + "worker": 8098, + "workers": 4795, + "workflow": 28502, + "workforce": 14672, + "workin": 31825, + "workin": 26323, + "working": 20806, + "working": 1699, + "workinprogress": 46086, + "workout": 6773, + "workouts": 22779, + "workplace": 11959, + "workplaces": 47383, + "works": 2322, + "workshop": 3832, + "workshops": 12262, + "workspace": 34470, + "worl": 5221, + "world": 2334, + "world": 1002, + "worlda": 46627, + "worldbank": 36759, + "worldbookday": 31191, + "worldcup": 42525, + "worldcup": 8650, + "worlden": 44668, + "worldenviron": 47115, + "worldenvironmentday": 47522, + "worldly": 36268, + "worldo": 41698, + "worldof": 22636, + "worldre": 33951, + "worlds": 7691, + "worldseries": 26695, + "worldtour": 23202, + "worldwater": 41176, + "worldwaterday": 44520, + "worldwide": 6214, + "worm": 33709, + "worm": 10945, + "worms": 20231, + "worn": 9037, + "worried": 11911, + "worries": 17684, + "worry": 7534, + "worrying": 24058, + "worse": 8236, + "worsen": 46344, + "worshi": 31840, + "worship": 46399, + "worship": 9023, + "worst": 5719, + "wort": 30209, + "worth": 10671, + "worth": 2450, + "worthing": 39929, + "worthit": 40830, + "worthless": 44736, + "worths": 44633, + "worthwhile": 36295, + "worthy": 8881, + "worx": 44973, + "wot": 24863, + "wou": 5279, + "would": 39873, + "would": 1311, + "wouldn": 5878, + "wouldnt": 41595, + "wound": 19231, + "wounded": 14859, + "wounds": 21290, + "woven": 19830, + 
"wow": 22191, + "wow": 2781, + "woz": 44558, + "wozni": 47782, + "wp": 15378, + "wp": 13302, + "wpg": 35048, + "wps": 33386, + "wq": 45195, + "wr": 1189, + "wr": 8028, + "wra": 3852, + "wra": 46004, + "wral": 49050, + "wrangler": 30923, + "wrap": 7094, + "wrapped": 9875, + "wrapping": 15223, + "wraps": 18236, + "wrath": 29783, + "wray": 48943, + "wrc": 16004, + "wre": 3168, + "wreath": 23091, + "wrec": 20879, + "wreck": 28775, + "wreck": 15017, + "wrecked": 32695, + "wreckem": 45676, + "wrecking": 36956, + "wrecks": 45545, + "wren": 20191, + "wren": 31970, + "wrench": 30980, + "wrest": 4177, + "wrestle": 17097, + "wrestle": 28086, + "wrestlemania": 18849, + "wrestler": 19790, + "wrestlers": 25902, + "wrestling": 31292, + "wrestling": 5904, + "wrexham": 34479, + "wri": 7667, + "wri": 42007, + "wright": 28616, + "wright": 6991, + "wrights": 43711, + "wrigley": 33538, + "wrink": 22201, + "wrinkle": 46642, + "wrinkles": 35525, + "wrist": 19243, + "wrist": 16139, + "wristband": 36890, + "wristbands": 44864, + "writ": 2902, + "write": 28874, + "write": 4946, + "writer": 27886, + "writer": 4422, + "writers": 18742, + "writers": 7307, + "writerslife": 25007, + "writes": 8023, + "writing": 16053, + "writing": 2979, + "writingcommunity": 39178, + "writings": 36259, + "written": 5231, + "wro": 5447, + "wrong": 18381, + "wrong": 3669, + "wrongly": 45642, + "wrote": 5796, + "wrought": 48125, + "wrs": 45280, + "ws": 6300, + "ws": 799, + "wsb": 30681, + "wsbtv": 38394, + "wsj": 19764, + "wski": 12548, + "wsl": 43706, + "wsoc": 40253, + "wson": 33954, + "wsop": 41231, + "wsu": 44674, + "wsu": 32913, + "wsw": 43285, + "wt": 15873, + "wt": 12255, + "wta": 25984, + "wtc": 39718, + "wtf": 6891, + "wth": 23021, + "wthr": 45269, + "wti": 47345, + "wto": 36406, + "wts": 32159, + "wu": 9710, + "wu": 9837, + "wud": 43870, + "wul": 35154, + "wunder": 36661, + "wur": 24040, + "wurst": 44409, + "wusa": 40021, + "wut": 28590, + "wv": 18920, + "wv": 14743, + "wvu": 44878, + "wvu": 25879, + "ww": 3181, + "ww": 4491, + "wwc": 26505, + "wwdc": 47441, + "wwe": 12112, + "wwe": 5290, + "wwen": 23308, + "wwenetwork": 37228, + "wwenxt": 39898, + "wwer": 32038, + "wwf": 23332, + "wwfc": 42681, + "wwg": 35322, + "wwi": 20194, + "wwii": 10261, + "www": 26074, + "www": 9667, + "wwwbigbaldhead": 30761, + "wwww": 34224, + "wwww": 25200, + "wwwww": 48268, + "wwx": 47431, + "wx": 18192, + "wx": 3561, + "wy": 4665, + "wy": 7625, + "wyatt": 21660, + "wyd": 33113, + "wye": 48436, + "wye": 43751, + "wylie": 49330, + "wyn": 11802, + "wyn": 17504, + "wynn": 36117, + "wynne": 35951, + "wynonna": 41456, + "wynonnaearp": 43755, + "wyoming": 18693, + "x": 87, + "x": 343, + "xa": 24831, + "xan": 45530, + "xander": 45601, + "xavi": 36342, + "xavier": 41044, + "xavier": 18567, + "xb": 33678, + "xbox": 18063, + "xbox": 7748, + "xboxone": 27410, + "xc": 12515, + "xchange": 49132, + "xd": 6380, + "xe": 42886, + "xe": 19183, + "xen": 15568, + "xer": 49005, + "xf": 35274, + "xfactor": 25211, + "xfinity": 35107, + "xford": 34732, + "xh": 45771, + "xham": 25284, + "xi": 2467, + "xi": 7376, + "xia": 19854, + "xia": 20724, + "xian": 42570, + "xiao": 49318, + "xiaomi": 27477, + "xico": 38469, + "xide": 17398, + "xie": 40122, + "xie": 15976, + "xii": 36525, + "xiii": 28199, + "xim": 11217, + "xin": 27053, + "xin": 41517, + "xing": 14383, + "xion": 24164, + "xis": 35793, + "xit": 5316, + "xiumin": 36563, + "xiv": 16125, + "xj": 42453, + "xl": 36529, + "xl": 8833, + "xley": 38223, + "xm": 18626, + "xma": 48805, + "xmas": 48848, + "xmas": 6425, + "xmen": 28708, 
+ "xn": 25388, + "xo": 26936, + "xo": 9000, + "xon": 29186, + "xon": 8482, + "xox": 11531, + "xox": 34050, + "xoxo": 13313, + "xp": 15651, + "xper": 32200, + "xperia": 37615, + "xpo": 44377, + "xpress": 31809, + "xq": 40606, + "xr": 26276, + "xrp": 26965, + "xs": 16397, + "xt": 1052, + "xtina": 45520, + "xton": 32666, + "xton": 10597, + "xtra": 26969, + "xtre": 27025, + "xtreme": 33483, + "xu": 42063, + "xu": 37198, + "xv": 17768, + "xvi": 44031, + "xx": 5675, + "xx": 3553, + "xxl": 29777, + "xxx": 33923, + "xxx": 8352, + "xxxx": 32035, + "xxxx": 22819, + "xxxxx": 44195, + "xy": 20023, + "xy": 11443, + "y": 88, + "y": 344, + "ya": 5018, + "ya": 1430, + "yaa": 48847, + "yaa": 34498, + "yaan": 34680, + "yab": 27737, + "yach": 9039, + "yacht": 43806, + "yacht": 12859, + "yachts": 29260, + "yad": 13276, + "yad": 40047, + "yadav": 26650, + "yaf": 38019, + "yag": 35081, + "yah": 16170, + "yah": 12381, + "yaho": 37929, + "yahoo": 38152, + "yahoo": 16846, + "yak": 11014, + "yak": 29074, + "yaki": 44677, + "yaku": 29572, + "yakuza": 42628, + "yal": 16198, + "yal": 13418, + "yale": 39926, + "yale": 17157, + "yall": 9210, + "yam": 6666, + "yam": 19318, + "yama": 23512, + "yamaha": 18854, + "yan": 3949, + "yan": 4788, + "yana": 18698, + "yand": 38609, + "yang": 23818, + "yang": 12605, + "yani": 26439, + "yankee": 21554, + "yankees": 11889, + "yann": 40246, + "yann": 38657, + "yao": 45231, + "yap": 48700, + "yap": 34468, + "yar": 6786, + "yar": 23071, + "yard": 20234, + "yard": 4313, + "yards": 7550, + "yarmouth": 45941, + "yarn": 19702, + "yarra": 46824, + "yas": 8168, + "yas": 20570, + "yash": 30216, + "yash": 37836, + "yasi": 37700, + "yasss": 23873, + "yat": 29443, + "yat": 34965, + "yates": 27677, + "yatra": 38932, + "yav": 41275, + "yaw": 31989, + "yawn": 48643, + "yay": 20614, + "yay": 6712, + "yaya": 37608, + "yaz": 19348, + "yaz": 42252, + "yb": 41785, + "yb": 27615, + "yc": 11931, + "ycle": 38089, + "yd": 29896, + "yd": 9534, + "yday": 15899, + "yds": 24819, + "ye": 693, + "ye": 4582, + "yea": 13687, + "yeah": 29405, + "yeah": 3908, + "year": 5163, + "year": 935, + "yearbook": 21636, + "yearling": 48392, + "yearly": 24541, + "yearof": 31944, + "yearofthe": 47899, + "years": 30864, + "years": 1151, + "yearsof": 14932, + "yearswith": 45249, + "yeast": 25819, + "yeats": 44903, + "yed": 28137, + "yed": 3301, + "yee": 18114, + "yee": 23108, + "yeezy": 24901, + "yeg": 16854, + "yeg": 11976, + "yegfood": 48711, + "yeh": 21331, + "yel": 3323, + "yel": 48164, + "yell": 30824, + "yelled": 39199, + "yelling": 26581, + "yellow": 12059, + "yellow": 4481, + "yellowstone": 29241, + "yelp": 31674, + "yemen": 29276, + "yemen": 12513, + "yemeni": 44656, + "yemi": 42267, + "yen": 29602, + "yen": 17960, + "yeo": 32292, + "yeo": 43830, + "yeol": 15808, + "yeon": 16602, + "yep": 10964, + "yer": 15491, + "yer": 2371, + "yers": 3722, + "yes": 21620, + "yes": 1958, + "yess": 42778, + "yess": 40189, + "yesss": 36210, + "yessss": 45620, + "yester": 1905, + "yesterday": 1926, + "yesterdays": 36238, + "yesung": 38527, + "yet": 2296, + "yeti": 34228, + "yev": 39855, + "yew": 34660, + "yey": 45447, + "yg": 16396, + "ygk": 44758, + "ygo": 46166, + "yh": 41978, + "yi": 5826, + "yi": 14762, + "yield": 16825, + "yields": 24856, + "yikes": 25094, + "yin": 26476, + "yin": 23543, + "ying": 42933, + "ying": 910, + "yixing": 32120, + "yk": 30965, + "yl": 2656, + "yl": 4045, + "ylan": 41875, + "ylde": 42850, + "yle": 32305, + "yle": 10770, + "ylene": 34239, + "yler": 48081, + "yles": 42860, + "ylon": 22375, + "ylor": 48468, + "ym": 
1786, + "ym": 19587, + "yman": 29077, + "ymc": 47101, + "ymca": 22369, + "yment": 8199, + "ymes": 39968, + "ymi": 5271, + "ymm": 37133, + "ymoun": 41426, + "ymouth": 36429, + "yn": 2823, + "yn": 4100, + "yne": 18238, + "ynes": 18020, + "ynn": 10499, + "ynna": 48292, + "ynwa": 27372, + "yo": 586, + "yo": 3497, + "yoda": 31922, + "yof": 5966, + "yofficial": 21818, + "yofthe": 43983, + "yog": 34985, + "yog": 36539, + "yoga": 25872, + "yoga": 5523, + "yogh": 32626, + "yoghurt": 33491, + "yogi": 22766, + "yogur": 16137, + "yogurt": 16819, + "yoh": 48880, + "yoke": 41969, + "yoko": 25929, + "yoko": 32256, + "yokohama": 42409, + "yol": 19387, + "yol": 35218, + "yolanda": 43845, + "yolo": 20905, + "yom": 34718, + "yom": 44527, + "yon": 10147, + "yon": 7604, + "yong": 27960, + "yong": 20887, + "yonge": 48592, + "yoo": 25842, + "yoo": 20775, + "yoon": 30863, + "yoon": 22113, + "yoona": 32736, + "yoongi": 24037, + "yor": 2028, + "yor": 21132, + "york": 5318, + "york": 2705, + "yorker": 23865, + "yorkers": 41041, + "yorks": 39093, + "yorkshi": 43367, + "yorkshire": 27007, + "yorkshire": 8633, + "yoruba": 46083, + "yos": 35607, + "yosemite": 25893, + "yoshi": 22920, + "yoshi": 25354, + "yot": 22875, + "yotes": 46157, + "yotpo": 26113, + "you": 1562, + "you": 592, + "youare": 33879, + "youcan": 32498, + "youknow": 47919, + "youknow": 41088, + "youn": 1596, + "young": 6939, + "young": 1888, + "younger": 10414, + "youngest": 12316, + "youngjae": 46426, + "youngster": 35881, + "youngsters": 28098, + "younow": 33831, + "your": 2130, + "your": 695, + "youre": 28344, + "youre": 19695, + "yourown": 28583, + "yours": 3834, + "yourself": 3053, + "yourselves": 19747, + "youth": 10743, + "youth": 3281, + "youthful": 37480, + "youths": 23614, + "youts": 22737, + "youtu": 13868, + "youtube": 31258, + "youtube": 3895, + "youtuber": 24720, + "youtubers": 36822, + "youu": 35055, + "youuu": 35324, + "youuuu": 47123, + "yoy": 41865, + "yp": 38370, + "yp": 34734, + "ypg": 37386, + "yql": 46122, + "yqr": 36881, + "yr": 18395, + "yr": 4333, + "yrs": 4822, + "ys": 1971, + "ys": 961, + "yser": 33121, + "ysis": 4843, + "ysl": 45681, + "ysm": 23842, + "yst": 40528, + "yt": 36777, + "yt": 14779, + "ytd": 47524, + "yte": 48172, + "yu": 3371, + "yu": 8887, + "yuan": 26236, + "yuck": 48282, + "yugo": 48231, + "yuh": 42547, + "yui": 47932, + "yuk": 17037, + "yuk": 24063, + "yuki": 34010, + "yukon": 27094, + "yul": 39832, + "yum": 6869, + "yum": 7259, + "yuma": 47566, + "yummy": 7687, + "yun": 14976, + "yun": 18288, + "yung": 44545, + "yung": 17676, + "yunho": 39748, + "yup": 13231, + "yur": 42533, + "yuri": 23823, + "yusuf": 33222, + "yuv": 36784, + "yves": 33698, + "yvon": 23327, + "yvonne": 32583, + "yvr": 29058, + "yw": 33741, + "yx": 35624, + "yxe": 34240, + "yy": 3433, + "yy": 8321, + "yya": 37444, + "yyc": 27542, + "yyc": 11741, + "yyj": 26203, + "yyy": 11514, + "yyyy": 38749, + "yyyy": 16955, + "yyyyy": 26089, + "yyyyyy": 47055, + "yz": 37579, + "yz": 46451, + "yü": 48232, + "z": 89, + "z": 345, + "za": 3710, + "za": 2186, + "zab": 22982, + "zable": 37002, + "zac": 25501, + "zac": 19159, + "zach": 13401, + "zach": 11815, + "zachary": 32401, + "zack": 30567, + "zack": 19120, + "zad": 47314, + "zad": 27838, + "zada": 34889, + "zaf": 21837, + "zafar": 46668, + "zag": 26091, + "zag": 29346, + "zagre": 34107, + "zagreb": 35355, + "zah": 23258, + "zah": 43297, + "zaha": 44408, + "zai": 44329, + "zai": 27065, + "zain": 34400, + "zain": 45366, + "zak": 13050, + "zak": 20738, + "zaki": 48091, + "zal": 20552, + "zal": 33298, + "zam": 
7218, + "zam": 41578, + "zambia": 21671, + "zan": 7284, + "zan": 17835, + "zana": 39643, + "zand": 37712, + "zane": 34786, + "zani": 45373, + "zania": 15059, + "zano": 27637, + "zanzi": 47835, + "zap": 24134, + "zapp": 33504, + "zappa": 46592, + "zar": 5458, + "zar": 16392, + "zara": 24454, + "zardari": 20174, + "zas": 48261, + "zation": 3683, + "zawa": 49281, + "zay": 7102, + "zayed": 36726, + "zayn": 22292, + "zayn": 10308, + "zaynmalik": 25278, + "zazzle": 47857, + "ze": 2254, + "ze": 1298, + "zeal": 44951, + "zealand": 7618, + "zeb": 46518, + "zebra": 47394, + "zebra": 22548, + "zed": 21047, + "zed": 1993, + "zedd": 45608, + "zee": 25468, + "zee": 14080, + "zeiss": 47460, + "zeit": 37898, + "zeit": 37906, + "zek": 40829, + "zeke": 47065, + "zel": 10389, + "zel": 12027, + "zelda": 17138, + "zell": 39526, + "zen": 8518, + "zen": 3928, + "zend": 33478, + "zendaya": 35956, + "zenith": 44740, + "zens": 15298, + "zeph": 40726, + "zepp": 22977, + "zeppelin": 25408, + "zer": 6118, + "zer": 3716, + "zero": 14867, + "zero": 5848, + "zers": 9547, + "zes": 4073, + "zest": 37709, + "zet": 34098, + "zeta": 30954, + "zetta": 45993, + "zeus": 32800, + "zey": 46647, + "zh": 33389, + "zh": 41621, + "zhang": 21127, + "zhen": 37374, + "zhen": 33236, + "zhou": 17384, + "zhu": 42049, + "zi": 2651, + "zi": 5819, + "zia": 13764, + "zid": 30235, + "zidane": 34643, + "zie": 29316, + "zie": 8956, + "zieg": 40157, + "ziegler": 46812, + "ziel": 32151, + "zier": 15399, + "zies": 38001, + "ziest": 28159, + "zig": 15950, + "zig": 21345, + "ziggy": 39274, + "zik": 30125, + "zika": 28783, + "zil": 25039, + "zil": 33190, + "zilla": 17879, + "zim": 8112, + "zim": 22577, + "zimbab": 12373, + "zimbabwe": 45668, + "zimbabwe": 13583, + "zimmer": 27452, + "zimmer": 35211, + "zimmerman": 38231, + "zin": 14085, + "zin": 21278, + "zinc": 27458, + "zind": 26206, + "zindabad": 42208, + "zine": 16100, + "zing": 25062, + "zing": 3152, + "zinger": 42027, + "zio": 13906, + "zion": 31763, + "zion": 20963, + "zione": 36161, + "zionist": 33078, + "zip": 26479, + "zip": 16083, + "zipper": 33670, + "zir": 31892, + "zl": 39168, + "zlat": 32489, + "zlatan": 37877, + "zm": 43691, + "zman": 24248, + "zn": 18004, + "zo": 4397, + "zo": 5056, + "zodi": 22660, + "zodiac": 27753, + "zoe": 43114, + "zoe": 16662, + "zoey": 39871, + "zog": 40680, + "zol": 25939, + "zola": 46105, + "zom": 6623, + "zombi": 29452, + "zombie": 11819, + "zombies": 46702, + "zombies": 16517, + "zon": 15109, + "zon": 14618, + "zona": 42134, + "zone": 37197, + "zone": 4442, + "zones": 17247, + "zoning": 36790, + "zoo": 8182, + "zoo": 7147, + "zoom": 32671, + "zoom": 13909, + "zor": 17605, + "zou": 38072, + "zr": 39275, + "zs": 35248, + "zshq": 41442, + "zt": 42629, + "zu": 4091, + "zu": 14184, + "zucchini": 29873, + "zucker": 26890, + "zuckerberg": 30066, + "zul": 31146, + "zulu": 32821, + "zum": 35094, + "zuma": 23326, + "zumba": 32976, + "zun": 42440, + "zur": 17128, + "zurich": 21288, + "zw": 42188, + "zx": 31604, + "zy": 6615, + "zy": 2303, + "zyk": 39112, + "zyme": 36472, + "zyn": 45287, + "zz": 1544, + "zz": 4943, + "zza": 14642, + "zzi": 13974, + "zzie": 18635, + "zzle": 7873, + "zzled": 39075, + "zzo": 14036, + "zzy": 21275, + "zzy": 8353, + "zzz": 20055, + "zzzz": 35742, + "zzzz": 43103, + "{": 90, + "{": 346, + "{}": 39025, + "|": 91, + "|#": 31183, + "|": 347, + "|@": 41677, + "||": 7566, + "}": 92, + "}": 348, + "~": 93, + "~!": 31181, + "~\"": 48442, + "~": 349, + "~>": 43291, + "~@": 44247, + "~~": 11461, + "~~": 16671, + "~~~": 32472, + "~~~~": 28295, + "¡": 
94, + "¡": 350, + "¡ï¸ı": 15113, + "¡ï¸ı": 4174, + "¡ľ": 43991, + "¢": 95, + "¢": 351, + "£": 96, + "£": 352, + "£ï¸ı": 18446, + "¤": 97, + "¤": 353, + "¥": 98, + "¥": 354, + "¦": 99, + "¦": 355, + "¦Ī": 47615, + "§": 100, + "§": 356, + "¨": 101, + "¨": 357, + "©": 102, + "©": 358, + "ª": 103, + "ª": 359, + "«": 104, + "«": 360, + "¬": 105, + "¬": 361, + "¬ë": 31736, + "®": 106, + "®": 362, + "¯": 107, + "¯": 363, + "°": 108, + "°:": 21787, + "°": 364, + "°ï¸ı": 34777, + "±": 109, + "±": 365, + "±ï¸ı": 41020, + "²": 110, + "²": 366, + "³": 111, + "³": 367, + "³ï¸ı": 22195, + "³ï¸ı": 24706, + "´": 112, + "´": 368, + "µ": 113, + "µ": 369, + "µï¸ı": 27605, + "¶": 114, + "¶": 370, + "·": 115, + "·": 371, + "¸": 116, + "¸": 372, + "¸ë": 19693, + "¹": 117, + "¹": 373, + "º": 118, + "º": 374, + "»": 119, + "»": 375, + "¼": 120, + "¼": 376, + "½": 121, + "½": 377, + "½ï¸ı": 31333, + "¾": 122, + "¾": 378, + "¿": 123, + "¿": 379, + "À": 124, + "À": 380, + "Á": 125, + "Á": 381, + "Â": 126, + "Â": 382, + "¡": 26868, + "¡": 10830, + "¡¡": 45505, + "¢": 41359, + "£": 31117, + "£": 1950, + "Â¥": 20199, + "¨": 19957, + "¨¨": 23089, + "¨¨¨¨": 41223, + "©": 31148, + "©": 5811, + "«": 14434, + "®": 30857, + "®": 8436, + "¯": 38682, + "¯": 43593, + "¯\\": 44096, + "¯\\_(": 45115, + "°": 21305, + "°": 6858, + "²": 41175, + "´": 30560, + "´": 12559, + "·": 14844, + "º": 28059, + "»": 31642, + "»": 7599, + "½": 33613, + "¿": 44559, + "¿": 17133, + "ÂŃ": 22618, + "Ã": 127, + "Ã": 383, + "á": 7261, + "á": 22229, + "án": 38340, + "án": 21385, + "â": 26170, + "ã": 19339, + "ão": 21141, + "ä": 10896, + "ä": 47276, + "än": 42787, + "Ã¥": 23176, + "æ": 42495, + "ç": 10067, + "ça": 22711, + "è": 12138, + "è": 37761, + "ère": 30272, + "ès": 41210, + "é": 3459, + "é": 4166, + "éal": 45251, + "ée": 13489, + "és": 20507, + "ê": 27515, + "ë": 29526, + "ë": 40520, + "î": 48704, + "ï": 35689, + "ñ": 6445, + "ña": 17753, + "ño": 16574, + "ños": 40104, + "ó": 8891, + "ó": 27733, + "ón": 13926, + "ô": 26815, + "ö": 7255, + "ö": 37423, + "ör": 31762, + "ø": 17483, + "ø": 45598, + "ú": 17963, + "ú": 36019, + "ü": 6522, + "ü": 47177, + "ür": 26132, + "ÃĹ": 16165, + "Ãł": 36149, + "Ãł": 21259, + "ÃŃ": 8366, + "ÃŃ": 23928, + "ÃŃa": 16609, + "ÃŃn": 33623, + "Ä": 128, + "Ä": 384, + "ı": 18562, + "ı": 41901, + "Äģ": 23134, + "Äĩ": 31719, + "Äį": 45414, + "ÄŁ": 26540, + "Å": 129, + "Å": 385, + "Å¡": 35621, + "ÅĤ": 40419, + "Åį": 41267, + "ÅŁ": 21254, + "ÅŁ": 40706, + "Æ": 130, + "Æ": 386, + "Ç": 131, + "Ç": 387, + "È": 132, + "È": 388, + "É": 133, + "É": 389, + "Ê": 134, + "Ê": 390, + "Ë": 135, + "Ë": 391, + "Ì": 136, + "Ì": 392, + "Ìĩ": 16384, + "Í": 137, + "Í": 393, + "Î": 138, + "Î": 394, + "Ï": 139, + "Ï": 395, + "Ïī": 38065, + "Ð": 140, + "Ð": 396, + "а": 16912, + "а": 27080, + "аÐ": 31090, + "в": 39813, + "е": 22176, + "и": 16701, + "иÐ": 29503, + "к": 27152, + "л": 47611, + "м": 38018, + "н": 22705, + "о": 13506, + "о": 29386, + "оÐ": 20978, + "од": 38416, + "оÑĤ": 28599, + "п": 26302, + "пÑĢи": 46321, + "пÑĢиÑĢода": 48150, + "Ñ": 141, + "Ñ": 397, + "ÑĢ": 16370, + "ÑĢи": 41092, + "ÑĢод": 47039, + "ÑĢода": 47929, + "Ñģ": 23669, + "ÑĤ": 17875, + "Ñĥ": 39729, + "ÑĦ": 27993, + "ÑĦоÑĤ": 35155, + "ÑĦоÑĤо": 38981, + "Ñĭ": 45001, + "Ò": 142, + "Ò": 398, + "Ó": 143, + "Ó": 399, + "Ô": 144, + "Ô": 400, + "Õ": 145, + "Õ": 401, + "Ö": 146, + "Ö": 402, + "×": 147, + "×": 403, + "Ø": 148, + "Ø": 404, + "ا": 6042, + "ا": 22625, + "اØ": 13189, + "ار": 40137, + "اÙ": 8453, + "اÙĦ": 12973, + "اÙħ": 47626, + "اÙĨ": 42773, + "اÙĨ": 33200, + "ب": 
16378, + "ب": 35330, + "Ø©": 20915, + "ت": 18197, + "ت": 44333, + "ج": 26375, + "Ø®": 41495, + "د": 19872, + "د": 35566, + "ر": 10948, + "ر": 24933, + "رÙĬ": 43273, + "ز": 36169, + "س": 17856, + "Ø´": 28770, + "ص": 27271, + "Ø·": 32050, + "ع": 18843, + "غ": 48510, + "ØŃ": 25722, + "Ù": 149, + "Ù": 405, + "Ùģ": 24112, + "ÙĤ": 27585, + "Ùĥ": 33499, + "ÙĦ": 14251, + "ÙĦ": 37899, + "Ùħ": 12986, + "Ùħ": 29945, + "ÙĨ": 16655, + "ÙĨ": 25386, + "Ùĩ": 34274, + "Ùĩ": 31343, + "ÙĪ": 12203, + "ÙĪ": 38310, + "ÙĪØ±": 48242, + "ÙĬ": 12046, + "ÙĬ": 23853, + "Ú": 150, + "Ú": 406, + "Ú©": 26475, + "Û": 151, + "Û": 407, + "Ûģ": 40480, + "ÛĮ": 21452, + "ÛĮ": 32703, + "Ü": 152, + "Ü": 408, + "Ý": 153, + "Ý": 409, + "Þ": 154, + "Þ": 410, + "ß": 155, + "ß": 411, + "à": 156, + "à": 412, + "à¤": 3124, + "त": 27263, + "द": 29552, + "न": 26090, + "प": 44149, + "ब": 43599, + "म": 48254, + "म": 26774, + "य": 37299, + "र": 39136, + "र": 19052, + "ल": 30881, + "व": 39545, + "श": 43181, + "स": 28505, + "ह": 29446, + "ा": 37973, + "ा": 13343, + "ि": 26721, + "à¤Ĥ": 30833, + "à¤ķ": 22067, + "à¤Ĺ": 42598, + "à¤ľ": 39561, + "à¥": 7410, + "à¥Ģ": 45791, + "à¥Ģ": 25751, + "à¥ģ": 39653, + "à¥ĩ": 48612, + "à¥ĩ": 25130, + "à¥ĭ": 34452, + "à¥į": 19389, + "à¦": 11322, + "া": 41532, + "à§": 26339, + "à¨": 15741, + "à©": 32086, + "àª": 22990, + "à«": 48347, + "à¬": 32791, + "à®": 6022, + "த": 34691, + "ன": 43394, + "ப": 47388, + "à®®": 35463, + "à®°": 43270, + "ல": 47705, + "ா": 32831, + "ி": 27126, + "à®ķ": 36168, + "à®Ł": 45263, + "à¯": 11259, + "à¯ģ": 33115, + "à¯į": 16631, + "à°": 12100, + "à±": 23550, + "à±į": 46098, + "à²": 9992, + "ಿ": 47797, + "à³": 20745, + "à³į": 36148, + "à´": 15418, + "àµ": 27392, + "àµį": 45266, + "à¶": 29881, + "à·": 30766, + "à¸": 1777, + "ม": 26137, + "ม": 29570, + "ย": 27241, + "ย": 33091, + "ร": 32225, + "ร": 27331, + "ล": 34696, + "ล": 32746, + "ว": 26990, + "ว": 30245, + "ส": 37883, + "ส": 35737, + "ห": 33064, + "ะ": 43920, + "ะ": 49234, + "ั": 14978, + "า": 11529, + "า": 38476, + "าà¸": 12330, + "ิ": 17092, + "ี": 22421, + "ี": 20278, + "ีà¹Ī": 31511, + "ื": 47991, + "ุ": 30524, + "ู": 35273, + "à¸ģ": 30767, + "à¸ģà¸": 31474, + "à¸Ħ": 31757, + "à¸Ħà¸": 39628, + "à¸ĩ": 24603, + "à¸ĩ": 33382, + "à¸Ī": 47608, + "à¸Ĭ": 46324, + "à¸Ķ": 31107, + "à¸Ķ": 38825, + "à¸ķ": 40273, + "à¸ķ": 41108, + "à¸Ĺ": 36171, + "à¸Ļ": 17474, + "à¸Ļ": 17639, + "à¸Ļà¸": 23121, + "à¸ļ": 33859, + "à¸ļ": 39616, + "à¸ŀ": 48171, + "à¸Ń": 13398, + "à¸Ń": 32818, + "à¸Ńà¸": 14649, + "à¸Ńà¸ĩ": 46622, + "à¹": 4484, + "à¹Ģ": 13729, + "à¹Ģà¸": 14076, + "à¹ģà¸": 23916, + "à¹Ĥ": 33118, + "à¹ĥ": 40962, + "à¹Ħà¸": 31718, + "à¹ĩ": 38699, + "à¹Ī": 11722, + "à¹ī": 13123, + "à¹Į": 28353, + "à¼": 46186, + "à½": 39219, + "á": 157, + "á": 413, + "á´": 19036, + "áµ": 17330, + "áĢ": 45932, + "áĥ": 24829, + "áĥ¦": 32193, + "â": 158, + "â": 414, + "â¤": 25087, + "⤵ï¸ı": 36026, + "â¬": 7930, + "â¬ħï¸ı": 42111, + "â¬Ĩ": 27718, + "â¬Ĩï¸ı": 32798, + "â¬ĩ": 10917, + "â¬ĩ": 39370, + "â¬ĩï¸ı": 25621, + "â¬ĩï¸ı": 13984, + "â¬ĩï¸ıâ¬ĩï¸ı": 40159, + "âĢ": 728, + "âĢ¢": 9485, + "âĢ¢": 2701, + "âĢ¢âĢ¢": 15006, + "âĢ¢âĢ¢": 47575, + "âĢ¢âĢ¢âĢ¢âĢ¢": 27502, + "âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢âĢ¢": 48630, + "â̦": 7095, + "â̦\"": 20215, + "â̦..": 47779, + "â̦.": 18615, + "â̦/": 29842, + "â̦": 959, + "â̦â̦": 40066, + "â̲": 32633, + "â̳": 25061, + "â̼": 6578, + "â̼ï¸ı": 15622, + "â̼ï¸ı": 8310, + "â̼ï¸ıâ̼ï¸ı": 33218, + "âĢĭ": 17086, + "âĢĭ": 9844, + "âĢį": 4244, + "âĢįâĻ": 5177, + "âĢįâĻĢï¸ı": 18897, + "âĢįâĻĢï¸ı": 9605, + "âĢįâĻĤ": 8832, + "âĢįâĻĤï¸ı": 21779, + "âĢįâĻĤï¸ı": 10613, + 
"âĢİ": 31001, + "âĢIJ": 34512, + "âĢĵ": 21070, + "âĢĵ": 1224, + "âĢĶ": 6718, + "âĢĶ": 2005, + "âĢĶ>": 26341, + "âĢĶ@": 28470, + "âĢĶâĢĶ": 10037, + "âĢĶâĢĶ": 44800, + "âĢĶâĢĶâĢĶâĢĶ": 17797, + "âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ": 34432, + "âĢķ": 14236, + "âģ": 1667, + "âģ£": 31089, + "âģ£": 16845, + "âģ¦": 2773, + "âģ¦": 34855, + "âģ¦@": 2859, + "âģ¦âģ¦@": 27783, + "âģ©": 20097, + "âģ©,": 48749, + "âģ©.": 35777, + "âģ©": 2918, + "âģīï¸ı": 46534, + "âģł": 23881, + "âģł": 13503, + "âģłâģł": 33488, + "âĤ": 5227, + "âĤ¬": 34919, + "âĤ¬": 6309, + "âĤ¹": 21777, + "âĥ": 2805, + "âĥ£": 11250, + "âĥ£": 3076, + "âĥ£@": 48291, + "âĦ": 8604, + "âĦ¢": 29438, + "âĦ¢": 11675, + "âĦ¹": 45462, + "âĨ": 6059, + "âĨĴ": 7481, + "âĨĵ": 41603, + "âĩ": 27228, + "âĪ": 17788, + "âī": 22684, + "âīĪ": 45451, + "âĮ": 17848, + "âĮļ": 31301, + "âĮļï¸ı": 35931, + "âı": 7960, + "âı©": 40847, + "âı°": 12714, + "âı±": 33149, + "âı³": 47617, + "âĵ": 27400, + "âĶ": 13389, + "âĶĢ": 45139, + "âĶģ": 42022, + "âķ": 17027, + "âķIJ": 48039, + "âĸ": 4168, + "âĸª": 21203, + "âĸª": 36628, + "âĸªï¸ı": 24974, + "âĸ«": 39478, + "âĸ¬": 33798, + "âĸ¬âĸ¬": 36975, + "âĸ¶": 12509, + "âĸ¶": 21126, + "âĸ¶ï¸ı": 14442, + "âĸº": 46061, + "âĸº": 12086, + "âĸ½": 45634, + "âĸł": 36791, + "âĹ": 9323, + "âĹĨ": 48961, + "âĹı": 26999, + "âĺ": 1741, + "âĺ®": 45851, + "âĺ¹": 28811, + "âĺ¹ï¸ı": 39605, + "âĺº": 5010, + "âĺº": 8703, + "âĺºâĺº": 46051, + "âĺºï¸ı": 11506, + "âĺºï¸ı": 7779, + "âĺºï¸ıâĺºï¸ı": 41315, + "âĺ¼": 38877, + "âĺĢ": 32146, + "âĺĢ": 22242, + "âĺĢï¸ı": 12817, + "âĺĢï¸ı": 8219, + "âĺĢï¸ıâĺĢï¸ı": 44550, + "âĺģ": 25195, + "âĺģï¸ı": 35197, + "âĺĥ": 38972, + "âĺħ": 9339, + "âĺħ": 10643, + "âĺħâĺħ": 12681, + "âĺħâĺħ": 36644, + "âĺħâĺħâĺħâĺħ": 34431, + "âĺħâĺħâĺħâĺħ": 44034, + "âĺħâĺħâĺħâĺħâĺħ": 45984, + "âĺĨ": 23941, + "âĺĨ": 13439, + "âĺİ": 24045, + "âĺİ": 45493, + "âĺİï¸ı": 27219, + "âĺij": 20983, + "âĺij": 42300, + "âĺijï¸ı": 22291, + "âĺĶï¸ı": 31238, + "âĺķ": 11454, + "âĺķ": 26561, + "âĺķï¸ı": 25839, + "âĺķï¸ı": 15499, + "âĺĺ": 23483, + "âĺĺï¸ı": 31454, + "âĺĿ": 21982, + "âĺĿï¸ı": 38891, + "âĺŀ": 31255, + "âĺłï¸ı": 34672, + "âĻ": 1548, + "âĻ¡": 11091, + "âĻ¡": 6251, + "âĻ¡âĻ¡": 22360, + "âĻ¡âĻ¡": 34267, + "âĻ¡âĻ¡âĻ¡": 36611, + "âϤ": 47435, + "âĻ¥": 4622, + "âĻ¥": 3405, + "âĻ¥âĻ¥": 12975, + "âĻ¥âĻ¥": 19604, + "âĻ¥âĻ¥âĻ¥": 23255, + "âĻ¥âĻ¥âĻ¥âĻ¥": 49020, + "âĻ¥ï¸ı": 17774, + "âĻ¥ï¸ı": 10561, + "âĻ¥ï¸ıâĻ¥ï¸ı": 40309, + "âϦ": 32376, + "âϦ": 47547, + "âĻ©": 30339, + "âĻ©âĻ«": 31636, + "âĻª": 27364, + "âĻª": 12382, + "âĻ«": 39217, + "âĻ«": 10814, + "âϬ": 24753, + "âĻ»": 39611, + "âĻ»ï¸ı": 46075, + "âļ": 2234, + "âļ¡": 40098, + "âļ¡": 20712, + "âļ¡ï¸ı": 19500, + "âļ¡ï¸ı": 11605, + "âļ¡ï¸ıâļ¡ï¸ı": 45922, + "âļª": 11922, + "âļª": 36373, + "âļªï¸ı": 22251, + "âļªï¸ı": 17885, + "âļ«": 15374, + "âļ«ï¸ı": 26529, + "âļ«ï¸ı": 24649, + "âļ½": 4867, + "âļ½": 13173, + "âļ½âļ½": 43259, + "âļ½ï¸ı": 11342, + "âļ½ï¸ı": 6768, + "âļ½ï¸ıâļ½ï¸ı": 30358, + "âļ½ï¸ıâļ½ï¸ı": 44148, + "âļ¾": 11314, + "âļ¾": 34717, + "âļ¾ï¸ı": 24727, + "âļ¾ï¸ı": 14858, + "âļĵ": 23522, + "âļĵï¸ı": 35299, + "âļĶï¸ı": 29361, + "âļľ": 47491, + "âļł": 39203, + "âļłï¸ı": 40966, + "âļłï¸ı": 15596, + "âĽ": 7956, + "âĽ³ï¸ı": 29204, + "âĽĦ": 30668, + "âĽĦï¸ı": 45465, + "âľ": 1508, + "⾨": 7181, + "⾨": 3531, + "⾨⾨": 35174, + "⾨⾨": 21985, + "⾨⾨⾨": 39424, + "âľĤ": 38602, + "âľħ": 29544, + "âľħ": 5564, + "âľĪ": 10682, + "âľĪ": 30712, + "âľĪï¸ı": 26176, + "âľĪï¸ı": 13413, + "âľĬ": 12392, + "âľĬ": 17819, + "âľĬðŁı½": 48547, + "âľĬðŁı¾": 41185, + "âľĭ": 39383, + "âľĭ": 30239, + "âľĮ": 6419, + "âľĮ": 12656, + "âľĮï¸ı": 
[… several thousand additional byte-level BPE vocabulary entries (emoji and other multi-byte tokens mapped to token ids) …] +} diff --git a/src/comfyui/comfy/sdxl_clip.py b/src/comfyui/comfy/sdxl_clip.py new file mode 100644 index 0000000000000000000000000000000000000000..4d0a4e8e75a0a4e069a671fce113e1798c29751a --- /dev/null +++ b/src/comfyui/comfy/sdxl_clip.py @@ -0,0 +1,95 @@ +from comfy import sd1_clip +import torch +import os + +class SDXLClipG(sd1_clip.SDClipModel): + def __init__(self, device="cpu", max_length=77, freeze=True, layer="penultimate", layer_idx=None, dtype=None, model_options={}): + if layer == "penultimate": + layer="hidden" + layer_idx=-2 + + textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "clip_config_bigg.json") + super().__init__(device=device, freeze=freeze, layer=layer, layer_idx=layer_idx, textmodel_json_config=textmodel_json_config, dtype=dtype, + special_tokens={"start": 49406, "end": 49407, "pad": 0}, layer_norm_hidden_state=False, return_projected_pooled=True, model_options=model_options) + + def load_sd(self, sd): + return super().load_sd(sd) + +class SDXLClipGTokenizer(sd1_clip.SDTokenizer): + def __init__(self, tokenizer_path=None, embedding_directory=None, tokenizer_data={}):
super().__init__(tokenizer_path, pad_with_end=False, embedding_directory=embedding_directory, embedding_size=1280, embedding_key='clip_g') + + +class SDXLTokenizer: + def __init__(self, embedding_directory=None, tokenizer_data={}): + clip_l_tokenizer_class = tokenizer_data.get("clip_l_tokenizer_class", sd1_clip.SDTokenizer) + self.clip_l = clip_l_tokenizer_class(embedding_directory=embedding_directory) + self.clip_g = SDXLClipGTokenizer(embedding_directory=embedding_directory) + + def tokenize_with_weights(self, text:str, return_word_ids=False): + out = {} + out["g"] = self.clip_g.tokenize_with_weights(text, return_word_ids) + out["l"] = self.clip_l.tokenize_with_weights(text, return_word_ids) + return out + + def untokenize(self, token_weight_pair): + return self.clip_g.untokenize(token_weight_pair) + + def state_dict(self): + return {} + +class SDXLClipModel(torch.nn.Module): + def __init__(self, device="cpu", dtype=None, model_options={}): + super().__init__() + clip_l_class = model_options.get("clip_l_class", sd1_clip.SDClipModel) + self.clip_l = clip_l_class(layer="hidden", layer_idx=-2, device=device, dtype=dtype, layer_norm_hidden_state=False, model_options=model_options) + self.clip_g = SDXLClipG(device=device, dtype=dtype, model_options=model_options) + self.dtypes = set([dtype]) + + def set_clip_options(self, options): + self.clip_l.set_clip_options(options) + self.clip_g.set_clip_options(options) + + def reset_clip_options(self): + self.clip_g.reset_clip_options() + self.clip_l.reset_clip_options() + + def encode_token_weights(self, token_weight_pairs): + token_weight_pairs_g = token_weight_pairs["g"] + token_weight_pairs_l = token_weight_pairs["l"] + g_out, g_pooled = self.clip_g.encode_token_weights(token_weight_pairs_g) + l_out, l_pooled = self.clip_l.encode_token_weights(token_weight_pairs_l) + cut_to = min(l_out.shape[1], g_out.shape[1]) + return torch.cat([l_out[:,:cut_to], g_out[:,:cut_to]], dim=-1), g_pooled + + def load_sd(self, sd): + if "text_model.encoder.layers.30.mlp.fc1.weight" in sd: + return self.clip_g.load_sd(sd) + else: + return self.clip_l.load_sd(sd) + +class SDXLRefinerClipModel(sd1_clip.SD1ClipModel): + def __init__(self, device="cpu", dtype=None, model_options={}): + super().__init__(device=device, dtype=dtype, clip_name="g", clip_model=SDXLClipG, model_options=model_options) + + +class StableCascadeClipGTokenizer(sd1_clip.SDTokenizer): + def __init__(self, tokenizer_path=None, embedding_directory=None, tokenizer_data={}): + super().__init__(tokenizer_path, pad_with_end=True, embedding_directory=embedding_directory, embedding_size=1280, embedding_key='clip_g') + +class StableCascadeTokenizer(sd1_clip.SD1Tokenizer): + def __init__(self, embedding_directory=None, tokenizer_data={}): + super().__init__(embedding_directory=embedding_directory, tokenizer_data=tokenizer_data, clip_name="g", tokenizer=StableCascadeClipGTokenizer) + +class StableCascadeClipG(sd1_clip.SDClipModel): + def __init__(self, device="cpu", max_length=77, freeze=True, layer="hidden", layer_idx=-1, dtype=None, model_options={}): + textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "clip_config_bigg.json") + super().__init__(device=device, freeze=freeze, layer=layer, layer_idx=layer_idx, textmodel_json_config=textmodel_json_config, dtype=dtype, + special_tokens={"start": 49406, "end": 49407, "pad": 49407}, layer_norm_hidden_state=False, enable_attention_masks=True, return_projected_pooled=True, model_options=model_options) + + def load_sd(self, sd): + return 
super().load_sd(sd) + +class StableCascadeClipModel(sd1_clip.SD1ClipModel): + def __init__(self, device="cpu", dtype=None, model_options={}): + super().__init__(device=device, dtype=dtype, clip_name="g", clip_model=StableCascadeClipG, model_options=model_options) diff --git a/src/comfyui/comfy/supported_models.py b/src/comfyui/comfy/supported_models.py new file mode 100644 index 0000000000000000000000000000000000000000..9931f4c5d10edb22c5f88a7a575b1e7c97d5c33b --- /dev/null +++ b/src/comfyui/comfy/supported_models.py @@ -0,0 +1,706 @@ +import torch +from . import model_base +from . import utils + +from . import sd1_clip +from . import sdxl_clip +import comfy.text_encoders.sd2_clip +import comfy.text_encoders.sd3_clip +import comfy.text_encoders.sa_t5 +import comfy.text_encoders.aura_t5 +import comfy.text_encoders.hydit +import comfy.text_encoders.flux +import comfy.text_encoders.genmo + +from . import supported_models_base +from . import latent_formats + +from . import diffusers_convert + +class SD15(supported_models_base.BASE): + unet_config = { + "context_dim": 768, + "model_channels": 320, + "use_linear_in_transformer": False, + "adm_in_channels": None, + "use_temporal_attention": False, + } + + unet_extra_config = { + "num_heads": 8, + "num_head_channels": -1, + } + + latent_format = latent_formats.SD15 + memory_usage_factor = 1.0 + + def process_clip_state_dict(self, state_dict): + k = list(state_dict.keys()) + for x in k: + if x.startswith("cond_stage_model.transformer.") and not x.startswith("cond_stage_model.transformer.text_model."): + y = x.replace("cond_stage_model.transformer.", "cond_stage_model.transformer.text_model.") + state_dict[y] = state_dict.pop(x) + + if 'cond_stage_model.transformer.text_model.embeddings.position_ids' in state_dict: + ids = state_dict['cond_stage_model.transformer.text_model.embeddings.position_ids'] + if ids.dtype == torch.float32: + state_dict['cond_stage_model.transformer.text_model.embeddings.position_ids'] = ids.round() + + replace_prefix = {} + replace_prefix["cond_stage_model."] = "clip_l." + state_dict = utils.state_dict_prefix_replace(state_dict, replace_prefix, filter_keys=True) + return state_dict + + def process_clip_state_dict_for_saving(self, state_dict): + pop_keys = ["clip_l.transformer.text_projection.weight", "clip_l.logit_scale"] + for p in pop_keys: + if p in state_dict: + state_dict.pop(p) + + replace_prefix = {"clip_l.": "cond_stage_model."} + return utils.state_dict_prefix_replace(state_dict, replace_prefix) + + def clip_target(self, state_dict={}): + return supported_models_base.ClipTarget(sd1_clip.SD1Tokenizer, sd1_clip.SD1ClipModel) + +class SD20(supported_models_base.BASE): + unet_config = { + "context_dim": 1024, + "model_channels": 320, + "use_linear_in_transformer": True, + "adm_in_channels": None, + "use_temporal_attention": False, + } + + unet_extra_config = { + "num_heads": -1, + "num_head_channels": 64, + "attn_precision": torch.float32, + } + + latent_format = latent_formats.SD15 + memory_usage_factor = 1.0 + + def model_type(self, state_dict, prefix=""): + if self.unet_config["in_channels"] == 4: #SD2.0 inpainting models are not v prediction + k = "{}output_blocks.11.1.transformer_blocks.0.norm1.bias".format(prefix) + out = state_dict.get(k, None) + if out is not None and torch.std(out, unbiased=False) > 0.09: # not sure how well this will actually work. I guess we will find out. 
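+ # eps- and v-prediction SD2.x checkpoints are structurally identical, so the model type + # is sniffed from a weight statistic: in the known 768-v checkpoints the std of this late + # norm bias is noticeably larger than in the 512-base (eps) ones. The 0.09 cutoff is an + # empirical threshold, not part of any spec.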
+ return model_base.ModelType.V_PREDICTION + return model_base.ModelType.EPS + + def process_clip_state_dict(self, state_dict): + replace_prefix = {} + replace_prefix["conditioner.embedders.0.model."] = "clip_h." #SD2 in sgm format + replace_prefix["cond_stage_model.model."] = "clip_h." + state_dict = utils.state_dict_prefix_replace(state_dict, replace_prefix, filter_keys=True) + state_dict = utils.clip_text_transformers_convert(state_dict, "clip_h.", "clip_h.transformer.") + return state_dict + + def process_clip_state_dict_for_saving(self, state_dict): + replace_prefix = {} + replace_prefix["clip_h"] = "cond_stage_model.model" + state_dict = utils.state_dict_prefix_replace(state_dict, replace_prefix) + state_dict = diffusers_convert.convert_text_enc_state_dict_v20(state_dict) + return state_dict + + def clip_target(self, state_dict={}): + return supported_models_base.ClipTarget(comfy.text_encoders.sd2_clip.SD2Tokenizer, comfy.text_encoders.sd2_clip.SD2ClipModel) + +class SD21UnclipL(SD20): + unet_config = { + "context_dim": 1024, + "model_channels": 320, + "use_linear_in_transformer": True, + "adm_in_channels": 1536, + "use_temporal_attention": False, + } + + clip_vision_prefix = "embedder.model.visual." + noise_aug_config = {"noise_schedule_config": {"timesteps": 1000, "beta_schedule": "squaredcos_cap_v2"}, "timestep_dim": 768} + + +class SD21UnclipH(SD20): + unet_config = { + "context_dim": 1024, + "model_channels": 320, + "use_linear_in_transformer": True, + "adm_in_channels": 2048, + "use_temporal_attention": False, + } + + clip_vision_prefix = "embedder.model.visual." + noise_aug_config = {"noise_schedule_config": {"timesteps": 1000, "beta_schedule": "squaredcos_cap_v2"}, "timestep_dim": 1024} + +class SDXLRefiner(supported_models_base.BASE): + unet_config = { + "model_channels": 384, + "use_linear_in_transformer": True, + "context_dim": 1280, + "adm_in_channels": 2560, + "transformer_depth": [0, 0, 4, 4, 4, 4, 0, 0], + "use_temporal_attention": False, + } + + latent_format = latent_formats.SDXL + memory_usage_factor = 1.0 + + def get_model(self, state_dict, prefix="", device=None): + return model_base.SDXLRefiner(self, device=device) + + def process_clip_state_dict(self, state_dict): + keys_to_replace = {} + replace_prefix = {} + replace_prefix["conditioner.embedders.0.model."] = "clip_g." 
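+ # The refiner ships a single text encoder (OpenCLIP bigG) stored under sgm's + # "conditioner.embedders.0.model." prefix, so everything is folded into comfy's + # "clip_g." namespace.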
+ state_dict = utils.state_dict_prefix_replace(state_dict, replace_prefix, filter_keys=True) + + state_dict = utils.clip_text_transformers_convert(state_dict, "clip_g.", "clip_g.transformer.") + state_dict = utils.state_dict_key_replace(state_dict, keys_to_replace) + return state_dict + + def process_clip_state_dict_for_saving(self, state_dict): + replace_prefix = {} + state_dict_g = diffusers_convert.convert_text_enc_state_dict_v20(state_dict, "clip_g") + if "clip_g.transformer.text_model.embeddings.position_ids" in state_dict_g: + state_dict_g.pop("clip_g.transformer.text_model.embeddings.position_ids") + replace_prefix["clip_g"] = "conditioner.embedders.0.model" + state_dict_g = utils.state_dict_prefix_replace(state_dict_g, replace_prefix) + return state_dict_g + + def clip_target(self, state_dict={}): + return supported_models_base.ClipTarget(sdxl_clip.SDXLTokenizer, sdxl_clip.SDXLRefinerClipModel) + +class SDXL(supported_models_base.BASE): + unet_config = { + "model_channels": 320, + "use_linear_in_transformer": True, + "transformer_depth": [0, 0, 2, 2, 10, 10], + "context_dim": 2048, + "adm_in_channels": 2816, + "use_temporal_attention": False, + } + + latent_format = latent_formats.SDXL + + memory_usage_factor = 0.8 + + def model_type(self, state_dict, prefix=""): + if 'edm_mean' in state_dict and 'edm_std' in state_dict: #Playground V2.5 + self.latent_format = latent_formats.SDXL_Playground_2_5() + self.sampling_settings["sigma_data"] = 0.5 + self.sampling_settings["sigma_max"] = 80.0 + self.sampling_settings["sigma_min"] = 0.002 + return model_base.ModelType.EDM + elif "edm_vpred.sigma_max" in state_dict: + self.sampling_settings["sigma_max"] = float(state_dict["edm_vpred.sigma_max"].item()) + if "edm_vpred.sigma_min" in state_dict: + self.sampling_settings["sigma_min"] = float(state_dict["edm_vpred.sigma_min"].item()) + return model_base.ModelType.V_PREDICTION_EDM + elif "v_pred" in state_dict: + return model_base.ModelType.V_PREDICTION + else: + return model_base.ModelType.EPS + + def get_model(self, state_dict, prefix="", device=None): + out = model_base.SDXL(self, model_type=self.model_type(state_dict, prefix), device=device) + if self.inpaint_model(): + out.set_inpaint() + return out + + def process_clip_state_dict(self, state_dict): + keys_to_replace = {} + replace_prefix = {} + + replace_prefix["conditioner.embedders.0.transformer.text_model"] = "clip_l.transformer.text_model" + replace_prefix["conditioner.embedders.1.model."] = "clip_g." 
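+ # SDXL base carries two text encoders: embedders.0 is CLIP-L (renamed to "clip_l.") and + # embedders.1 is OpenCLIP bigG (renamed to "clip_g."). SDXLClipModel.encode_token_weights + # later concatenates their hidden states (768 + 1280) to form the 2048-dim context the + # unet_config above expects.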
+ state_dict = utils.state_dict_prefix_replace(state_dict, replace_prefix, filter_keys=True) + + state_dict = utils.state_dict_key_replace(state_dict, keys_to_replace) + state_dict = utils.clip_text_transformers_convert(state_dict, "clip_g.", "clip_g.transformer.") + return state_dict + + def process_clip_state_dict_for_saving(self, state_dict): + replace_prefix = {} + keys_to_replace = {} + state_dict_g = diffusers_convert.convert_text_enc_state_dict_v20(state_dict, "clip_g") + for k in state_dict: + if k.startswith("clip_l"): + state_dict_g[k] = state_dict[k] + + state_dict_g["clip_l.transformer.text_model.embeddings.position_ids"] = torch.arange(77).expand((1, -1)) + pop_keys = ["clip_l.transformer.text_projection.weight", "clip_l.logit_scale"] + for p in pop_keys: + if p in state_dict_g: + state_dict_g.pop(p) + + replace_prefix["clip_g"] = "conditioner.embedders.1.model" + replace_prefix["clip_l"] = "conditioner.embedders.0" + state_dict_g = utils.state_dict_prefix_replace(state_dict_g, replace_prefix) + return state_dict_g + + def clip_target(self, state_dict={}): + return supported_models_base.ClipTarget(sdxl_clip.SDXLTokenizer, sdxl_clip.SDXLClipModel) + +class SSD1B(SDXL): + unet_config = { + "model_channels": 320, + "use_linear_in_transformer": True, + "transformer_depth": [0, 0, 2, 2, 4, 4], + "context_dim": 2048, + "adm_in_channels": 2816, + "use_temporal_attention": False, + } + +class Segmind_Vega(SDXL): + unet_config = { + "model_channels": 320, + "use_linear_in_transformer": True, + "transformer_depth": [0, 0, 1, 1, 2, 2], + "context_dim": 2048, + "adm_in_channels": 2816, + "use_temporal_attention": False, + } + +class KOALA_700M(SDXL): + unet_config = { + "model_channels": 320, + "use_linear_in_transformer": True, + "transformer_depth": [0, 2, 5], + "context_dim": 2048, + "adm_in_channels": 2816, + "use_temporal_attention": False, + } + +class KOALA_1B(SDXL): + unet_config = { + "model_channels": 320, + "use_linear_in_transformer": True, + "transformer_depth": [0, 2, 6], + "context_dim": 2048, + "adm_in_channels": 2816, + "use_temporal_attention": False, + } + +class SVD_img2vid(supported_models_base.BASE): + unet_config = { + "model_channels": 320, + "in_channels": 8, + "use_linear_in_transformer": True, + "transformer_depth": [1, 1, 1, 1, 1, 1, 0, 0], + "context_dim": 1024, + "adm_in_channels": 768, + "use_temporal_attention": True, + "use_temporal_resblock": True + } + + unet_extra_config = { + "num_heads": -1, + "num_head_channels": 64, + "attn_precision": torch.float32, + } + + clip_vision_prefix = "conditioner.embedders.0.open_clip.model.visual." 
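+ # SVD conditions on CLIP vision embeddings of the input image rather than on text + # (clip_target below returns None), hence only a clip_vision prefix is declared here.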
+ + latent_format = latent_formats.SD15 + + sampling_settings = {"sigma_max": 700.0, "sigma_min": 0.002} + + def get_model(self, state_dict, prefix="", device=None): + out = model_base.SVD_img2vid(self, device=device) + return out + + def clip_target(self, state_dict={}): + return None + +class SV3D_u(SVD_img2vid): + unet_config = { + "model_channels": 320, + "in_channels": 8, + "use_linear_in_transformer": True, + "transformer_depth": [1, 1, 1, 1, 1, 1, 0, 0], + "context_dim": 1024, + "adm_in_channels": 256, + "use_temporal_attention": True, + "use_temporal_resblock": True + } + + vae_key_prefix = ["conditioner.embedders.1.encoder."] + + def get_model(self, state_dict, prefix="", device=None): + out = model_base.SV3D_u(self, device=device) + return out + +class SV3D_p(SV3D_u): + unet_config = { + "model_channels": 320, + "in_channels": 8, + "use_linear_in_transformer": True, + "transformer_depth": [1, 1, 1, 1, 1, 1, 0, 0], + "context_dim": 1024, + "adm_in_channels": 1280, + "use_temporal_attention": True, + "use_temporal_resblock": True + } + + + def get_model(self, state_dict, prefix="", device=None): + out = model_base.SV3D_p(self, device=device) + return out + +class Stable_Zero123(supported_models_base.BASE): + unet_config = { + "context_dim": 768, + "model_channels": 320, + "use_linear_in_transformer": False, + "adm_in_channels": None, + "use_temporal_attention": False, + "in_channels": 8, + } + + unet_extra_config = { + "num_heads": 8, + "num_head_channels": -1, + } + + required_keys = { + "cc_projection.weight": None, + "cc_projection.bias": None, + } + + clip_vision_prefix = "cond_stage_model.model.visual." + + latent_format = latent_formats.SD15 + + def get_model(self, state_dict, prefix="", device=None): + out = model_base.Stable_Zero123(self, device=device, cc_projection_weight=state_dict["cc_projection.weight"], cc_projection_bias=state_dict["cc_projection.bias"]) + return out + + def clip_target(self, state_dict={}): + return None + +class SD_X4Upscaler(SD20): + unet_config = { + "context_dim": 1024, + "model_channels": 256, + 'in_channels': 7, + "use_linear_in_transformer": True, + "adm_in_channels": None, + "use_temporal_attention": False, + } + + unet_extra_config = { + "disable_self_attentions": [True, True, True, False], + "num_classes": 1000, + "num_heads": 8, + "num_head_channels": -1, + } + + latent_format = latent_formats.SD_X4 + + sampling_settings = { + "linear_start": 0.0001, + "linear_end": 0.02, + } + + def get_model(self, state_dict, prefix="", device=None): + out = model_base.SD_X4Upscaler(self, device=device) + return out + +class Stable_Cascade_C(supported_models_base.BASE): + unet_config = { + "stable_cascade_stage": 'c', + } + + unet_extra_config = {} + + latent_format = latent_formats.SC_Prior + supported_inference_dtypes = [torch.bfloat16, torch.float32] + + sampling_settings = { + "shift": 2.0, + } + + vae_key_prefix = ["vae."] + text_encoder_key_prefix = ["text_encoder."] + clip_vision_prefix = "clip_l_vision." 
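+ # Cascade checkpoints store fused attention projections as in_proj_{weight,bias}; + # process_unet_state_dict below slices each [3*dim, ...] tensor into equal thirds and + # rewrites the keys as .to_q/.to_k/.to_v so comfy's attention modules can load them.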
+ + def process_unet_state_dict(self, state_dict): + key_list = list(state_dict.keys()) + for y in ["weight", "bias"]: + suffix = "in_proj_{}".format(y) + keys = filter(lambda a: a.endswith(suffix), key_list) + for k_from in keys: + weights = state_dict.pop(k_from) + prefix = k_from[:-(len(suffix) + 1)] + shape_from = weights.shape[0] // 3 + for x in range(3): + p = ["to_q", "to_k", "to_v"] + k_to = "{}.{}.{}".format(prefix, p[x], y) + state_dict[k_to] = weights[shape_from*x:shape_from*(x + 1)] + return state_dict + + def process_clip_state_dict(self, state_dict): + state_dict = utils.state_dict_prefix_replace(state_dict, {k: "" for k in self.text_encoder_key_prefix}, filter_keys=True) + if "clip_g.text_projection" in state_dict: + state_dict["clip_g.transformer.text_projection.weight"] = state_dict.pop("clip_g.text_projection").transpose(0, 1) + return state_dict + + def get_model(self, state_dict, prefix="", device=None): + out = model_base.StableCascade_C(self, device=device) + return out + + def clip_target(self, state_dict={}): + return supported_models_base.ClipTarget(sdxl_clip.StableCascadeTokenizer, sdxl_clip.StableCascadeClipModel) + +class Stable_Cascade_B(Stable_Cascade_C): + unet_config = { + "stable_cascade_stage": 'b', + } + + unet_extra_config = {} + + latent_format = latent_formats.SC_B + supported_inference_dtypes = [torch.float16, torch.bfloat16, torch.float32] + + sampling_settings = { + "shift": 1.0, + } + + clip_vision_prefix = None + + def get_model(self, state_dict, prefix="", device=None): + out = model_base.StableCascade_B(self, device=device) + return out + +class SD15_instructpix2pix(SD15): + unet_config = { + "context_dim": 768, + "model_channels": 320, + "use_linear_in_transformer": False, + "adm_in_channels": None, + "use_temporal_attention": False, + "in_channels": 8, + } + + def get_model(self, state_dict, prefix="", device=None): + return model_base.SD15_instructpix2pix(self, device=device) + +class SDXL_instructpix2pix(SDXL): + unet_config = { + "model_channels": 320, + "use_linear_in_transformer": True, + "transformer_depth": [0, 0, 2, 2, 10, 10], + "context_dim": 2048, + "adm_in_channels": 2816, + "use_temporal_attention": False, + "in_channels": 8, + } + + def get_model(self, state_dict, prefix="", device=None): + return model_base.SDXL_instructpix2pix(self, model_type=self.model_type(state_dict, prefix), device=device) + +class SD3(supported_models_base.BASE): + unet_config = { + "in_channels": 16, + "pos_embed_scaling_factor": None, + } + + sampling_settings = { + "shift": 3.0, + } + + unet_extra_config = {} + latent_format = latent_formats.SD3 + + memory_usage_factor = 1.2 + + text_encoder_key_prefix = ["text_encoders."] + + def get_model(self, state_dict, prefix="", device=None): + out = model_base.SD3(self, device=device) + return out + + def clip_target(self, state_dict={}): + clip_l = False + clip_g = False + t5 = False + dtype_t5 = None + pref = self.text_encoder_key_prefix[0] + if "{}clip_l.transformer.text_model.final_layer_norm.weight".format(pref) in state_dict: + clip_l = True + if "{}clip_g.transformer.text_model.final_layer_norm.weight".format(pref) in state_dict: + clip_g = True + t5_detect = comfy.text_encoders.sd3_clip.t5_xxl_detect(state_dict, "{}t5xxl.transformer.".format(pref)) + if "dtype_t5" in t5_detect: + t5 = True + + return supported_models_base.ClipTarget(comfy.text_encoders.sd3_clip.SD3Tokenizer, comfy.text_encoders.sd3_clip.sd3_clip(clip_l=clip_l, clip_g=clip_g, t5=t5, **t5_detect)) + +class 
StableAudio(supported_models_base.BASE): + unet_config = { + "audio_model": "dit1.0", + } + + sampling_settings = {"sigma_max": 500.0, "sigma_min": 0.03} + + unet_extra_config = {} + latent_format = latent_formats.StableAudio1 + + text_encoder_key_prefix = ["text_encoders."] + vae_key_prefix = ["pretransform.model."] + + def get_model(self, state_dict, prefix="", device=None): + seconds_start_sd = utils.state_dict_prefix_replace(state_dict, {"conditioner.conditioners.seconds_start.": ""}, filter_keys=True) + seconds_total_sd = utils.state_dict_prefix_replace(state_dict, {"conditioner.conditioners.seconds_total.": ""}, filter_keys=True) + return model_base.StableAudio1(self, seconds_start_embedder_weights=seconds_start_sd, seconds_total_embedder_weights=seconds_total_sd, device=device) + + def process_unet_state_dict(self, state_dict): + for k in list(state_dict.keys()): + if k.endswith(".cross_attend_norm.beta") or k.endswith(".ff_norm.beta") or k.endswith(".pre_norm.beta"): #These weights are all zero + state_dict.pop(k) + return state_dict + + def process_unet_state_dict_for_saving(self, state_dict): + replace_prefix = {"": "model.model."} + return utils.state_dict_prefix_replace(state_dict, replace_prefix) + + def clip_target(self, state_dict={}): + return supported_models_base.ClipTarget(comfy.text_encoders.sa_t5.SAT5Tokenizer, comfy.text_encoders.sa_t5.SAT5Model) + +class AuraFlow(supported_models_base.BASE): + unet_config = { + "cond_seq_dim": 2048, + } + + sampling_settings = { + "multiplier": 1.0, + "shift": 1.73, + } + + unet_extra_config = {} + latent_format = latent_formats.SDXL + + vae_key_prefix = ["vae."] + text_encoder_key_prefix = ["text_encoders."] + + def get_model(self, state_dict, prefix="", device=None): + out = model_base.AuraFlow(self, device=device) + return out + + def clip_target(self, state_dict={}): + return supported_models_base.ClipTarget(comfy.text_encoders.aura_t5.AuraT5Tokenizer, comfy.text_encoders.aura_t5.AuraT5Model) + +class HunyuanDiT(supported_models_base.BASE): + unet_config = { + "image_model": "hydit", + } + + unet_extra_config = { + "attn_precision": torch.float32, + } + + sampling_settings = { + "linear_start": 0.00085, + "linear_end": 0.018, + } + + latent_format = latent_formats.SDXL + + vae_key_prefix = ["vae."] + text_encoder_key_prefix = ["text_encoders."] + + def get_model(self, state_dict, prefix="", device=None): + out = model_base.HunyuanDiT(self, device=device) + return out + + def clip_target(self, state_dict={}): + return supported_models_base.ClipTarget(comfy.text_encoders.hydit.HyditTokenizer, comfy.text_encoders.hydit.HyditModel) + +class HunyuanDiT1(HunyuanDiT): + unet_config = { + "image_model": "hydit1", + } + + unet_extra_config = {} + + sampling_settings = { + "linear_start" : 0.00085, + "linear_end" : 0.03, + } + +class Flux(supported_models_base.BASE): + unet_config = { + "image_model": "flux", + "guidance_embed": True, + } + + sampling_settings = { + } + + unet_extra_config = {} + latent_format = latent_formats.Flux + + memory_usage_factor = 2.8 + + supported_inference_dtypes = [torch.bfloat16, torch.float16, torch.float32] + + vae_key_prefix = ["vae."] + text_encoder_key_prefix = ["text_encoders."] + + def get_model(self, state_dict, prefix="", device=None): + out = model_base.Flux(self, device=device) + return out + + def clip_target(self, state_dict={}): + pref = self.text_encoder_key_prefix[0] + t5_detect = comfy.text_encoders.sd3_clip.t5_xxl_detect(state_dict, "{}t5xxl.transformer.".format(pref)) + return 
supported_models_base.ClipTarget(comfy.text_encoders.flux.FluxTokenizer, comfy.text_encoders.flux.flux_clip(**t5_detect)) + +class FluxSchnell(Flux): + unet_config = { + "image_model": "flux", + "guidance_embed": False, + } + + sampling_settings = { + "multiplier": 1.0, + "shift": 1.0, + } + + def get_model(self, state_dict, prefix="", device=None): + out = model_base.Flux(self, model_type=model_base.ModelType.FLOW, device=device) + return out + +class GenmoMochi(supported_models_base.BASE): + unet_config = { + "image_model": "mochi_preview", + } + + sampling_settings = { + "multiplier": 1.0, + "shift": 6.0, + } + + unet_extra_config = {} + latent_format = latent_formats.Mochi + + memory_usage_factor = 2.0 #TODO + + supported_inference_dtypes = [torch.bfloat16, torch.float32] + + vae_key_prefix = ["vae."] + text_encoder_key_prefix = ["text_encoders."] + + def get_model(self, state_dict, prefix="", device=None): + out = model_base.GenmoMochi(self, device=device) + return out + + def clip_target(self, state_dict={}): + pref = self.text_encoder_key_prefix[0] + t5_detect = comfy.text_encoders.sd3_clip.t5_xxl_detect(state_dict, "{}t5xxl.transformer.".format(pref)) + return supported_models_base.ClipTarget(comfy.text_encoders.genmo.MochiT5Tokenizer, comfy.text_encoders.genmo.mochi_te(**t5_detect)) + + +models = [Stable_Zero123, SD15_instructpix2pix, SD15, SD20, SD21UnclipL, SD21UnclipH, SDXL_instructpix2pix, SDXLRefiner, SDXL, SSD1B, KOALA_700M, KOALA_1B, Segmind_Vega, SD_X4Upscaler, Stable_Cascade_C, Stable_Cascade_B, SV3D_u, SV3D_p, SD3, StableAudio, AuraFlow, HunyuanDiT, HunyuanDiT1, Flux, FluxSchnell, GenmoMochi] + +models += [SVD_img2vid] diff --git a/src/comfyui/comfy/supported_models_base.py b/src/comfyui/comfy/supported_models_base.py new file mode 100644 index 0000000000000000000000000000000000000000..54573abb110d8cc5e190ecefa0f9aecf95da0b99 --- /dev/null +++ b/src/comfyui/comfy/supported_models_base.py @@ -0,0 +1,119 @@ +""" + This file is part of ComfyUI. + Copyright (C) 2024 Comfy + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. +""" + +import torch +from . import model_base +from . import utils +from .
import latent_formats + +class ClipTarget: + def __init__(self, tokenizer, clip): + self.clip = clip + self.tokenizer = tokenizer + self.params = {} + +class BASE: + unet_config = {} + unet_extra_config = { + "num_heads": -1, + "num_head_channels": 64, + } + + required_keys = {} + + clip_prefix = [] + clip_vision_prefix = None + noise_aug_config = None + sampling_settings = {} + latent_format = latent_formats.LatentFormat + vae_key_prefix = ["first_stage_model."] + text_encoder_key_prefix = ["cond_stage_model."] + supported_inference_dtypes = [torch.float16, torch.bfloat16, torch.float32] + + memory_usage_factor = 2.0 + + manual_cast_dtype = None + custom_operations = None + scaled_fp8 = None + optimizations = {"fp8": False} + + @classmethod + def matches(s, unet_config, state_dict=None): + for k in s.unet_config: + if k not in unet_config or s.unet_config[k] != unet_config[k]: + return False + if state_dict is not None: + for k in s.required_keys: + if k not in state_dict: + return False + return True + + def model_type(self, state_dict, prefix=""): + return model_base.ModelType.EPS + + def inpaint_model(self): + return self.unet_config["in_channels"] > 4 + + def __init__(self, unet_config): + self.unet_config = unet_config.copy() + self.sampling_settings = self.sampling_settings.copy() + self.latent_format = self.latent_format() + self.optimizations = self.optimizations.copy() + for x in self.unet_extra_config: + self.unet_config[x] = self.unet_extra_config[x] + + def get_model(self, state_dict, prefix="", device=None): + if self.noise_aug_config is not None: + out = model_base.SD21UNCLIP(self, self.noise_aug_config, model_type=self.model_type(state_dict, prefix), device=device) + else: + out = model_base.BaseModel(self, model_type=self.model_type(state_dict, prefix), device=device) + if self.inpaint_model(): + out.set_inpaint() + return out + + def process_clip_state_dict(self, state_dict): + state_dict = utils.state_dict_prefix_replace(state_dict, {k: "" for k in self.text_encoder_key_prefix}, filter_keys=True) + return state_dict + + def process_unet_state_dict(self, state_dict): + return state_dict + + def process_vae_state_dict(self, state_dict): + return state_dict + + def process_clip_state_dict_for_saving(self, state_dict): + replace_prefix = {"": self.text_encoder_key_prefix[0]} + return utils.state_dict_prefix_replace(state_dict, replace_prefix) + + def process_clip_vision_state_dict_for_saving(self, state_dict): + replace_prefix = {} + if self.clip_vision_prefix is not None: + replace_prefix[""] = self.clip_vision_prefix + return utils.state_dict_prefix_replace(state_dict, replace_prefix) + + def process_unet_state_dict_for_saving(self, state_dict): + replace_prefix = {"": "model.diffusion_model."} + return utils.state_dict_prefix_replace(state_dict, replace_prefix) + + def process_vae_state_dict_for_saving(self, state_dict): + replace_prefix = {"": self.vae_key_prefix[0]} + return utils.state_dict_prefix_replace(state_dict, replace_prefix) + + def set_inference_dtype(self, dtype, manual_cast_dtype): + self.unet_config['dtype'] = dtype + self.manual_cast_dtype = manual_cast_dtype diff --git a/src/comfyui/comfy/t2i_adapter/__pycache__/adapter.cpython-310.pyc b/src/comfyui/comfy/t2i_adapter/__pycache__/adapter.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c2a216090139e142fb5ccd7afef3971fca597948 Binary files /dev/null and b/src/comfyui/comfy/t2i_adapter/__pycache__/adapter.cpython-310.pyc differ diff --git 
a/src/comfyui/comfy/t2i_adapter/adapter.py b/src/comfyui/comfy/t2i_adapter/adapter.py new file mode 100644 index 0000000000000000000000000000000000000000..10ea18e326693f237b3b219970c86e3808f6d334 --- /dev/null +++ b/src/comfyui/comfy/t2i_adapter/adapter.py @@ -0,0 +1,299 @@ +#taken from https://github.com/TencentARC/T2I-Adapter +import torch +import torch.nn as nn +from collections import OrderedDict + + +def conv_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D convolution module. + """ + if dims == 1: + return nn.Conv1d(*args, **kwargs) + elif dims == 2: + return nn.Conv2d(*args, **kwargs) + elif dims == 3: + return nn.Conv3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +def avg_pool_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D average pooling module. + """ + if dims == 1: + return nn.AvgPool1d(*args, **kwargs) + elif dims == 2: + return nn.AvgPool2d(*args, **kwargs) + elif dims == 3: + return nn.AvgPool3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +class Downsample(nn.Module): + """ + A downsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + downsampling occurs in the inner-two dimensions. + """ + + def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + stride = 2 if dims != 3 else (1, 2, 2) + if use_conv: + self.op = conv_nd( + dims, self.channels, self.out_channels, 3, stride=stride, padding=padding + ) + else: + assert self.channels == self.out_channels + self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride) + + def forward(self, x): + assert x.shape[1] == self.channels + if not self.use_conv: + padding = [x.shape[2] % 2, x.shape[3] % 2] + self.op.padding = padding + + x = self.op(x) + return x + + +class ResnetBlock(nn.Module): + def __init__(self, in_c, out_c, down, ksize=3, sk=False, use_conv=True): + super().__init__() + ps = ksize // 2 + if in_c != out_c or sk == False: + self.in_conv = nn.Conv2d(in_c, out_c, ksize, 1, ps) + else: + # print('n_in') + self.in_conv = None + self.block1 = nn.Conv2d(out_c, out_c, 3, 1, 1) + self.act = nn.ReLU() + self.block2 = nn.Conv2d(out_c, out_c, ksize, 1, ps) + if sk == False: + self.skep = nn.Conv2d(in_c, out_c, ksize, 1, ps) + else: + self.skep = None + + self.down = down + if self.down == True: + self.down_opt = Downsample(in_c, use_conv=use_conv) + + def forward(self, x): + if self.down == True: + x = self.down_opt(x) + if self.in_conv is not None: # edit + x = self.in_conv(x) + + h = self.block1(x) + h = self.act(h) + h = self.block2(h) + if self.skep is not None: + return h + self.skep(x) + else: + return h + x + + +class Adapter(nn.Module): + def __init__(self, channels=[320, 640, 1280, 1280], nums_rb=3, cin=64, ksize=3, sk=False, use_conv=True, xl=True): + super(Adapter, self).__init__() + self.unshuffle_amount = 8 + resblock_no_downsample = [] + resblock_downsample = [3, 2, 1] + self.xl = xl + if self.xl: + self.unshuffle_amount = 16 + resblock_no_downsample = [1] + resblock_downsample = [2] + + self.input_channels = cin // (self.unshuffle_amount * self.unshuffle_amount) + self.unshuffle = nn.PixelUnshuffle(self.unshuffle_amount) + self.channels = channels + self.nums_rb = nums_rb + self.body = [] 
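+ # Body layout: len(channels) stages with nums_rb ResnetBlocks each. The first block of a + # stage listed in resblock_downsample halves the resolution while remapping channels; one + # listed in resblock_no_downsample only remaps channels; every other block keeps its shape. + # The xl variant pixel-unshuffles by 16 (instead of 8) and downsamples only once, which + # appears intended to match SDXL's feature-map sizes.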
+ for i in range(len(channels)): + for j in range(nums_rb): + if (i in resblock_downsample) and (j == 0): + self.body.append( + ResnetBlock(channels[i - 1], channels[i], down=True, ksize=ksize, sk=sk, use_conv=use_conv)) + elif (i in resblock_no_downsample) and (j == 0): + self.body.append( + ResnetBlock(channels[i - 1], channels[i], down=False, ksize=ksize, sk=sk, use_conv=use_conv)) + else: + self.body.append( + ResnetBlock(channels[i], channels[i], down=False, ksize=ksize, sk=sk, use_conv=use_conv)) + self.body = nn.ModuleList(self.body) + self.conv_in = nn.Conv2d(cin, channels[0], 3, 1, 1) + + def forward(self, x): + # unshuffle + x = self.unshuffle(x) + # extract features + features = [] + x = self.conv_in(x) + for i in range(len(self.channels)): + for j in range(self.nums_rb): + idx = i * self.nums_rb + j + x = self.body[idx](x) + if self.xl: + features.append(None) + if i == 0: + features.append(None) + features.append(None) + if i == 2: + features.append(None) + else: + features.append(None) + features.append(None) + features.append(x) + + features = features[::-1] + + if self.xl: + return {"input": features[1:], "middle": features[:1]} + else: + return {"input": features} + + + +class LayerNorm(nn.LayerNorm): + """Subclass torch's LayerNorm to handle fp16.""" + + def forward(self, x: torch.Tensor): + orig_type = x.dtype + ret = super().forward(x.type(torch.float32)) + return ret.type(orig_type) + + +class QuickGELU(nn.Module): + + def forward(self, x: torch.Tensor): + return x * torch.sigmoid(1.702 * x) + + +class ResidualAttentionBlock(nn.Module): + + def __init__(self, d_model: int, n_head: int, attn_mask: torch.Tensor = None): + super().__init__() + + self.attn = nn.MultiheadAttention(d_model, n_head) + self.ln_1 = LayerNorm(d_model) + self.mlp = nn.Sequential( + OrderedDict([("c_fc", nn.Linear(d_model, d_model * 4)), ("gelu", QuickGELU()), + ("c_proj", nn.Linear(d_model * 4, d_model))])) + self.ln_2 = LayerNorm(d_model) + self.attn_mask = attn_mask + + def attention(self, x: torch.Tensor): + self.attn_mask = self.attn_mask.to(dtype=x.dtype, device=x.device) if self.attn_mask is not None else None + return self.attn(x, x, x, need_weights=False, attn_mask=self.attn_mask)[0] + + def forward(self, x: torch.Tensor): + x = x + self.attention(self.ln_1(x)) + x = x + self.mlp(self.ln_2(x)) + return x + + +class StyleAdapter(nn.Module): + + def __init__(self, width=1024, context_dim=768, num_head=8, n_layes=3, num_token=4): + super().__init__() + + scale = width ** -0.5 + self.transformer_layes = nn.Sequential(*[ResidualAttentionBlock(width, num_head) for _ in range(n_layes)]) + self.num_token = num_token + self.style_embedding = nn.Parameter(torch.randn(1, num_token, width) * scale) + self.ln_post = LayerNorm(width) + self.ln_pre = LayerNorm(width) + self.proj = nn.Parameter(scale * torch.randn(width, context_dim)) + + def forward(self, x): + # x shape [N, HW+1, C] + style_embedding = self.style_embedding + torch.zeros( + (x.shape[0], self.num_token, self.style_embedding.shape[-1]), device=x.device) + x = torch.cat([x, style_embedding], dim=1) + x = self.ln_pre(x) + x = x.permute(1, 0, 2) # NLD -> LND + x = self.transformer_layes(x) + x = x.permute(1, 0, 2) # LND -> NLD + + x = self.ln_post(x[:, -self.num_token:, :]) + x = x @ self.proj + + return x + + +class ResnetBlock_light(nn.Module): + def __init__(self, in_c): + super().__init__() + self.block1 = nn.Conv2d(in_c, in_c, 3, 1, 1) + self.act = nn.ReLU() + self.block2 = nn.Conv2d(in_c, in_c, 3, 1, 1) + + def forward(self, x): + h = 
+
+
+class ResnetBlock_light(nn.Module):
+    def __init__(self, in_c):
+        super().__init__()
+        self.block1 = nn.Conv2d(in_c, in_c, 3, 1, 1)
+        self.act = nn.ReLU()
+        self.block2 = nn.Conv2d(in_c, in_c, 3, 1, 1)
+
+    def forward(self, x):
+        h = self.block1(x)
+        h = self.act(h)
+        h = self.block2(h)
+
+        return h + x
+
+
+class extractor(nn.Module):
+    def __init__(self, in_c, inter_c, out_c, nums_rb, down=False):
+        super().__init__()
+        self.in_conv = nn.Conv2d(in_c, inter_c, 1, 1, 0)
+        self.body = []
+        for _ in range(nums_rb):
+            self.body.append(ResnetBlock_light(inter_c))
+        self.body = nn.Sequential(*self.body)
+        self.out_conv = nn.Conv2d(inter_c, out_c, 1, 1, 0)
+        self.down = down
+        if self.down == True:
+            self.down_opt = Downsample(in_c, use_conv=False)
+
+    def forward(self, x):
+        if self.down == True:
+            x = self.down_opt(x)
+        x = self.in_conv(x)
+        x = self.body(x)
+        x = self.out_conv(x)
+
+        return x
+
+
+class Adapter_light(nn.Module):
+    def __init__(self, channels=[320, 640, 1280, 1280], nums_rb=3, cin=64):
+        super(Adapter_light, self).__init__()
+        self.unshuffle_amount = 8
+        self.unshuffle = nn.PixelUnshuffle(self.unshuffle_amount)
+        self.input_channels = cin // (self.unshuffle_amount * self.unshuffle_amount)
+        self.channels = channels
+        self.nums_rb = nums_rb
+        self.body = []
+        self.xl = False
+
+        for i in range(len(channels)):
+            if i == 0:
+                self.body.append(extractor(in_c=cin, inter_c=channels[i]//4, out_c=channels[i], nums_rb=nums_rb, down=False))
+            else:
+                self.body.append(extractor(in_c=channels[i-1], inter_c=channels[i]//4, out_c=channels[i], nums_rb=nums_rb, down=True))
+        self.body = nn.ModuleList(self.body)
+
+    def forward(self, x):
+        # unshuffle
+        x = self.unshuffle(x)
+        # extract features
+        features = []
+        for i in range(len(self.channels)):
+            x = self.body[i](x)
+            features.append(None)
+            features.append(None)
+            features.append(x)
+
+        return {"input": features[::-1]}
diff --git a/src/comfyui/comfy/taesd/__pycache__/taesd.cpython-310.pyc b/src/comfyui/comfy/taesd/__pycache__/taesd.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ae901224f646e8cd4407cf2c2c08e0d267930649
Binary files /dev/null and b/src/comfyui/comfy/taesd/__pycache__/taesd.cpython-310.pyc differ
diff --git a/src/comfyui/comfy/taesd/taesd.py b/src/comfyui/comfy/taesd/taesd.py
new file mode 100644
index 0000000000000000000000000000000000000000..ce36f1a84dae599a35e84a8da3462408c0f0ccc6
--- /dev/null
+++ b/src/comfyui/comfy/taesd/taesd.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python3
+"""
+Tiny AutoEncoder for Stable Diffusion
+(DNN for encoding / decoding SD's latent space)
+"""
+import torch
+import torch.nn as nn
+
+import comfy.utils
+import comfy.ops
+
+def conv(n_in, n_out, **kwargs):
+    return comfy.ops.disable_weight_init.Conv2d(n_in, n_out, 3, padding=1, **kwargs)
+
+class Clamp(nn.Module):
+    def forward(self, x):
+        return torch.tanh(x / 3) * 3
+
+class Block(nn.Module):
+    def __init__(self, n_in, n_out):
+        super().__init__()
+        self.conv = nn.Sequential(conv(n_in, n_out), nn.ReLU(), conv(n_out, n_out), nn.ReLU(), conv(n_out, n_out))
+        self.skip = comfy.ops.disable_weight_init.Conv2d(n_in, n_out, 1, bias=False) if n_in != n_out else nn.Identity()
+        self.fuse = nn.ReLU()
+    def forward(self, x):
+        return self.fuse(self.conv(x) + self.skip(x))
+
+def Encoder(latent_channels=4):
+    return nn.Sequential(
+        conv(3, 64), Block(64, 64),
+        conv(64, 64, stride=2, bias=False), Block(64, 64), Block(64, 64), Block(64, 64),
+        conv(64, 64, stride=2, bias=False), Block(64, 64), Block(64, 64), Block(64, 64),
+        conv(64, 64, stride=2, bias=False), Block(64, 64), Block(64, 64), Block(64, 64),
+        conv(64, latent_channels),
+    )
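Each stride-2 conv in the Encoder above halves the spatial resolution, so three of them take a 512x512 RGB input down to a 64x64 latent, matching SD's 8x VAE downscale. A quick sanity check, assuming a ComfyUI environment where comfy.ops is importable:

    import torch

    enc = Encoder(latent_channels=4)
    img = torch.randn(1, 3, 512, 512)  # TAESD.encode feeds the encoder images scaled to roughly [0, 1]
    with torch.no_grad():
        lat = enc(img)
    print(lat.shape)                   # torch.Size([1, 4, 64, 64]), i.e. 512 / 2**3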
+
+
+def Decoder(latent_channels=4):
+    return nn.Sequential(
+        Clamp(), conv(latent_channels, 64), nn.ReLU(),
+        Block(64, 64), Block(64, 64), Block(64, 64), nn.Upsample(scale_factor=2), conv(64, 64, bias=False),
+        Block(64, 64), Block(64, 64), Block(64, 64), nn.Upsample(scale_factor=2), conv(64, 64, bias=False),
+        Block(64, 64), Block(64, 64), Block(64, 64), nn.Upsample(scale_factor=2), conv(64, 64, bias=False),
+        Block(64, 64), conv(64, 3),
+    )
+
+class TAESD(nn.Module):
+    latent_magnitude = 3
+    latent_shift = 0.5
+
+    def __init__(self, encoder_path=None, decoder_path=None, latent_channels=4):
+        """Initialize pretrained TAESD on the given device from the given checkpoints."""
+        super().__init__()
+        self.taesd_encoder = Encoder(latent_channels=latent_channels)
+        self.taesd_decoder = Decoder(latent_channels=latent_channels)
+        self.vae_scale = torch.nn.Parameter(torch.tensor(1.0))
+        self.vae_shift = torch.nn.Parameter(torch.tensor(0.0))
+        if encoder_path is not None:
+            self.taesd_encoder.load_state_dict(comfy.utils.load_torch_file(encoder_path, safe_load=True))
+        if decoder_path is not None:
+            self.taesd_decoder.load_state_dict(comfy.utils.load_torch_file(decoder_path, safe_load=True))
+
+    @staticmethod
+    def scale_latents(x):
+        """raw latents -> [0, 1]"""
+        return x.div(2 * TAESD.latent_magnitude).add(TAESD.latent_shift).clamp(0, 1)
+
+    @staticmethod
+    def unscale_latents(x):
+        """[0, 1] -> raw latents"""
+        return x.sub(TAESD.latent_shift).mul(2 * TAESD.latent_magnitude)
+
+    def decode(self, x):
+        x_sample = self.taesd_decoder((x - self.vae_shift) * self.vae_scale)
+        x_sample = x_sample.sub(0.5).mul(2)
+        return x_sample
+
+    def encode(self, x):
+        return (self.taesd_encoder(x * 0.5 + 0.5) / self.vae_scale) + self.vae_shift
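scale_latents and unscale_latents convert between raw SD latents and a displayable [0, 1] range: with latent_magnitude = 3 the map is x / 6 + 0.5, so anything within [-3, 3] round-trips exactly and values outside that band are clamped away. A worked example using only the class above:

    import torch

    raw = torch.tensor([-3.0, 0.0, 1.5, 3.0])
    vis = TAESD.scale_latents(raw)     # x / 6 + 0.5 -> tensor([0.0000, 0.5000, 0.7500, 1.0000])
    back = TAESD.unscale_latents(vis)  # (x - 0.5) * 6 -> recovers raw exactly
    assert torch.allclose(raw, back)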
diff --git a/src/comfyui/comfy/text_encoders/__pycache__/aura_t5.cpython-310.pyc b/src/comfyui/comfy/text_encoders/__pycache__/aura_t5.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bdedaf5671d7520be57dbb0bc140226259454357
Binary files /dev/null and b/src/comfyui/comfy/text_encoders/__pycache__/aura_t5.cpython-310.pyc differ
diff --git a/src/comfyui/comfy/text_encoders/__pycache__/bert.cpython-310.pyc b/src/comfyui/comfy/text_encoders/__pycache__/bert.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4f4e24e2ed46446b80fa43df74330bdc6eb7f1f2
Binary files /dev/null and b/src/comfyui/comfy/text_encoders/__pycache__/bert.cpython-310.pyc differ
diff --git a/src/comfyui/comfy/text_encoders/__pycache__/flux.cpython-310.pyc b/src/comfyui/comfy/text_encoders/__pycache__/flux.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b8d60c3c7049679f70662a175ee989aa641ae6c1
Binary files /dev/null and b/src/comfyui/comfy/text_encoders/__pycache__/flux.cpython-310.pyc differ
diff --git a/src/comfyui/comfy/text_encoders/__pycache__/genmo.cpython-310.pyc b/src/comfyui/comfy/text_encoders/__pycache__/genmo.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ff86693b2a554d8cfb76b91f74c68ddd1466a98e
Binary files /dev/null and b/src/comfyui/comfy/text_encoders/__pycache__/genmo.cpython-310.pyc differ
diff --git a/src/comfyui/comfy/text_encoders/__pycache__/hydit.cpython-310.pyc b/src/comfyui/comfy/text_encoders/__pycache__/hydit.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3723fd5a5414f53dd4a14c9ab2ca90ffc17cb68d
Binary files /dev/null and b/src/comfyui/comfy/text_encoders/__pycache__/hydit.cpython-310.pyc differ
diff --git a/src/comfyui/comfy/text_encoders/__pycache__/long_clipl.cpython-310.pyc b/src/comfyui/comfy/text_encoders/__pycache__/long_clipl.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..105b467aff9fe9e3d0b130e267b5e12cc3015941
Binary files /dev/null and b/src/comfyui/comfy/text_encoders/__pycache__/long_clipl.cpython-310.pyc differ
diff --git a/src/comfyui/comfy/text_encoders/__pycache__/sa_t5.cpython-310.pyc b/src/comfyui/comfy/text_encoders/__pycache__/sa_t5.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6f2a7e02297cc36b36d0bafd56e8d418c311ff15
Binary files /dev/null and b/src/comfyui/comfy/text_encoders/__pycache__/sa_t5.cpython-310.pyc differ
diff --git a/src/comfyui/comfy/text_encoders/__pycache__/sd2_clip.cpython-310.pyc b/src/comfyui/comfy/text_encoders/__pycache__/sd2_clip.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..11a5fb5fb6420cfd3828e09cde654c76e356b243
Binary files /dev/null and b/src/comfyui/comfy/text_encoders/__pycache__/sd2_clip.cpython-310.pyc differ
diff --git a/src/comfyui/comfy/text_encoders/__pycache__/sd3_clip.cpython-310.pyc b/src/comfyui/comfy/text_encoders/__pycache__/sd3_clip.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..80e89b29f69703f92759d589b2aea1e2e215575f
Binary files /dev/null and b/src/comfyui/comfy/text_encoders/__pycache__/sd3_clip.cpython-310.pyc differ
diff --git a/src/comfyui/comfy/text_encoders/__pycache__/spiece_tokenizer.cpython-310.pyc b/src/comfyui/comfy/text_encoders/__pycache__/spiece_tokenizer.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0892923d0a2fcae3ecbf9dacf496ed7d93f5b833
Binary files /dev/null and b/src/comfyui/comfy/text_encoders/__pycache__/spiece_tokenizer.cpython-310.pyc differ
diff --git a/src/comfyui/comfy/text_encoders/__pycache__/t5.cpython-310.pyc b/src/comfyui/comfy/text_encoders/__pycache__/t5.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d3118f643ee6a1a79c4cf553429a289f5ed9449b
Binary files /dev/null and b/src/comfyui/comfy/text_encoders/__pycache__/t5.cpython-310.pyc differ
diff --git a/src/comfyui/comfy/text_encoders/aura_t5.py b/src/comfyui/comfy/text_encoders/aura_t5.py
new file mode 100644
index 0000000000000000000000000000000000000000..e9ad45a7fcb3e31b9e08b36fb58b2f86c2a2084a
--- /dev/null
+++ b/src/comfyui/comfy/text_encoders/aura_t5.py
@@ -0,0 +1,22 @@
+from comfy import sd1_clip
+from .spiece_tokenizer import SPieceTokenizer
+import comfy.text_encoders.t5
+import os
+
+class PT5XlModel(sd1_clip.SDClipModel):
+    def __init__(self, device="cpu", layer="last", layer_idx=None, dtype=None, model_options={}):
+        textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "t5_pile_config_xl.json")
+        super().__init__(device=device, layer=layer, layer_idx=layer_idx, textmodel_json_config=textmodel_json_config, dtype=dtype, special_tokens={"end": 2, "pad": 1}, model_class=comfy.text_encoders.t5.T5, enable_attention_masks=True, zero_out_masked=True, model_options=model_options)
+
+class PT5XlTokenizer(sd1_clip.SDTokenizer):
+    def __init__(self, embedding_directory=None, tokenizer_data={}):
+        tokenizer_path = os.path.join(os.path.join(os.path.dirname(os.path.realpath(__file__)), "t5_pile_tokenizer"), "tokenizer.model")
+        super().__init__(tokenizer_path, pad_with_end=False, embedding_size=2048, embedding_key='pile_t5xl', tokenizer_class=SPieceTokenizer, has_start_token=False, pad_to_max_length=False, max_length=99999999, min_length=256, pad_token=1)
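The file finishes with the two-part wrapper pattern used throughout comfy/text_encoders: an SD1Tokenizer subclass naming the clip ("pile_t5xl") and an SD1ClipModel subclass binding it to PT5XlModel. A hypothetical usage sketch, assuming the tokenize_with_weights / encode_token_weights interface that the sd1_clip base classes provide elsewhere in the repo:

    # Hypothetical wiring; AuraT5Tokenizer / AuraT5Model are defined just below.
    tokenizer = AuraT5Tokenizer()
    model = AuraT5Model(device="cpu")

    pairs = tokenizer.tokenize_with_weights("a watercolor fox")  # keyed by "pile_t5xl"
    cond, pooled = model.encode_token_weights(pairs)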
+class AuraT5Tokenizer(sd1_clip.SD1Tokenizer):
+    def __init__(self, embedding_directory=None, tokenizer_data={}):
+        super().__init__(embedding_directory=embedding_directory, tokenizer_data=tokenizer_data, clip_name="pile_t5xl", tokenizer=PT5XlTokenizer)
+
+class AuraT5Model(sd1_clip.SD1ClipModel):
+    def __init__(self, device="cpu", dtype=None, model_options={}, **kwargs):
+        super().__init__(device=device, dtype=dtype, model_options=model_options, name="pile_t5xl", clip_model=PT5XlModel, **kwargs)
diff --git a/src/comfyui/comfy/text_encoders/bert.py b/src/comfyui/comfy/text_encoders/bert.py
new file mode 100644
index 0000000000000000000000000000000000000000..fc9bac1d24c088cf17cae4f3af86b8646960f808
--- /dev/null
+++ b/src/comfyui/comfy/text_encoders/bert.py
@@ -0,0 +1,140 @@
+import torch
+from comfy.ldm.modules.attention import optimized_attention_for_device
+import comfy.ops
+
+class BertAttention(torch.nn.Module):
+    def __init__(self, embed_dim, heads, dtype, device, operations):
+        super().__init__()
+
+        self.heads = heads
+        self.query = operations.Linear(embed_dim, embed_dim, bias=True, dtype=dtype, device=device)
+        self.key = operations.Linear(embed_dim, embed_dim, bias=True, dtype=dtype, device=device)
+        self.value = operations.Linear(embed_dim, embed_dim, bias=True, dtype=dtype, device=device)
+
+
+    def forward(self, x, mask=None, optimized_attention=None):
+        q = self.query(x)
+        k = self.key(x)
+        v = self.value(x)
+
+        out = optimized_attention(q, k, v, self.heads, mask)
+        return out
+
+class BertOutput(torch.nn.Module):
+    def __init__(self, input_dim, output_dim, layer_norm_eps, dtype, device, operations):
+        super().__init__()
+        self.dense = operations.Linear(input_dim, output_dim, dtype=dtype, device=device)
+        self.LayerNorm = operations.LayerNorm(output_dim, eps=layer_norm_eps, dtype=dtype, device=device)
+        # self.dropout = nn.Dropout(0.0)
+
+    def forward(self, x, y):
+        x = self.dense(x)
+        # hidden_states = self.dropout(hidden_states)
+        x = self.LayerNorm(x + y)
+        return x
+
+class BertAttentionBlock(torch.nn.Module):
+    def __init__(self, embed_dim, heads, layer_norm_eps, dtype, device, operations):
+        super().__init__()
+        self.self = BertAttention(embed_dim, heads, dtype, device, operations)
+        self.output = BertOutput(embed_dim, embed_dim, layer_norm_eps, dtype, device, operations)
+
+    def forward(self, x, mask, optimized_attention):
+        y = self.self(x, mask, optimized_attention)
+        return self.output(y, x)
+
+class BertIntermediate(torch.nn.Module):
+    def __init__(self, embed_dim, intermediate_dim, dtype, device, operations):
+        super().__init__()
+        self.dense = operations.Linear(embed_dim, intermediate_dim, dtype=dtype, device=device)
+
+    def forward(self, x):
+        x = self.dense(x)
+        return torch.nn.functional.gelu(x)
+
+
+class BertBlock(torch.nn.Module):
+    def __init__(self, embed_dim, intermediate_dim, heads, layer_norm_eps, dtype, device, operations):
+        super().__init__()
+        self.attention = BertAttentionBlock(embed_dim, heads, layer_norm_eps, dtype, device, operations)
+        self.intermediate = BertIntermediate(embed_dim, intermediate_dim, dtype, device, operations)
+        self.output = BertOutput(intermediate_dim, embed_dim, layer_norm_eps, dtype, device, operations)
+
+    def forward(self, x, mask, optimized_attention):
+        x = self.attention(x, mask, optimized_attention)
+        y = self.intermediate(x)
+        return self.output(y, x)
+class BertEncoder(torch.nn.Module):
+    def __init__(self, num_layers, embed_dim, intermediate_dim, heads, layer_norm_eps, dtype, device, operations):
+        super().__init__()
+        self.layer = torch.nn.ModuleList([BertBlock(embed_dim, intermediate_dim, heads, layer_norm_eps, dtype, device, operations) for i in range(num_layers)])
+
+    def forward(self, x, mask=None, intermediate_output=None):
+        optimized_attention = optimized_attention_for_device(x.device, mask=mask is not None, small_input=True)
+
+        if intermediate_output is not None:
+            if intermediate_output < 0:
+                intermediate_output = len(self.layer) + intermediate_output
+
+        intermediate = None
+        for i, l in enumerate(self.layer):
+            x = l(x, mask, optimized_attention)
+            if i == intermediate_output:
+                intermediate = x.clone()
+        return x, intermediate
+
+class BertEmbeddings(torch.nn.Module):
+    def __init__(self, vocab_size, max_position_embeddings, type_vocab_size, pad_token_id, embed_dim, layer_norm_eps, dtype, device, operations):
+        super().__init__()
+        self.word_embeddings = operations.Embedding(vocab_size, embed_dim, padding_idx=pad_token_id, dtype=dtype, device=device)
+        self.position_embeddings = operations.Embedding(max_position_embeddings, embed_dim, dtype=dtype, device=device)
+        self.token_type_embeddings = operations.Embedding(type_vocab_size, embed_dim, dtype=dtype, device=device)
+
+        self.LayerNorm = operations.LayerNorm(embed_dim, eps=layer_norm_eps, dtype=dtype, device=device)
+
+    def forward(self, input_tokens, token_type_ids=None, dtype=None):
+        x = self.word_embeddings(input_tokens, out_dtype=dtype)
+        x += comfy.ops.cast_to_input(self.position_embeddings.weight[:x.shape[1]], x)
+        if token_type_ids is not None:
+            x += self.token_type_embeddings(token_type_ids, out_dtype=x.dtype)
+        else:
+            x += comfy.ops.cast_to_input(self.token_type_embeddings.weight[0], x)
+        x = self.LayerNorm(x)
+        return x
+
+
+class BertModel_(torch.nn.Module):
+    def __init__(self, config_dict, dtype, device, operations):
+        super().__init__()
+        embed_dim = config_dict["hidden_size"]
+        layer_norm_eps = config_dict["layer_norm_eps"]
+
+        self.embeddings = BertEmbeddings(config_dict["vocab_size"], config_dict["max_position_embeddings"], config_dict["type_vocab_size"], config_dict["pad_token_id"], embed_dim, layer_norm_eps, dtype, device, operations)
+        self.encoder = BertEncoder(config_dict["num_hidden_layers"], embed_dim, config_dict["intermediate_size"], config_dict["num_attention_heads"], layer_norm_eps, dtype, device, operations)
+
+    def forward(self, input_tokens, attention_mask=None, intermediate_output=None, final_layer_norm_intermediate=True, dtype=None):
+        x = self.embeddings(input_tokens, dtype=dtype)
+        mask = None
+        if attention_mask is not None:
+            mask = 1.0 - attention_mask.to(x.dtype).reshape((attention_mask.shape[0], 1, -1, attention_mask.shape[-1])).expand(attention_mask.shape[0], 1, attention_mask.shape[-1], attention_mask.shape[-1])
+            mask = mask.masked_fill(mask.to(torch.bool), float("-inf"))
+
+        x, i = self.encoder(x, mask, intermediate_output)
+        return x, i
+
+
+class BertModel(torch.nn.Module):
+    def __init__(self, config_dict, dtype, device, operations):
+        super().__init__()
+        self.bert = BertModel_(config_dict, dtype, device, operations)
+        self.num_layers = config_dict["num_hidden_layers"]
+
+    def get_input_embeddings(self):
+        return self.bert.embeddings.word_embeddings
+
+    def set_input_embeddings(self, embeddings):
+        self.bert.embeddings.word_embeddings = embeddings
+
+    def forward(self, *args, **kwargs):
+        return self.bert(*args, **kwargs)
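BertModel_.forward turns a {0, 1} padding mask into an additive attention bias: the mask is inverted, broadcast to [batch, 1, seq, seq], and every position attending to a padded key is filled with -inf so it vanishes in the softmax. The same construction in isolation:

    import torch

    attention_mask = torch.tensor([[1, 1, 1, 0]], dtype=torch.float32)  # last position is padding
    b, s = attention_mask.shape

    mask = 1.0 - attention_mask.reshape(b, 1, -1, s).expand(b, 1, s, s)
    mask = mask.masked_fill(mask.to(torch.bool), float("-inf"))

    print(mask[0, 0])
    # tensor([[0., 0., 0., -inf],
    #         [0., 0., 0., -inf],
    #         [0., 0., 0., -inf],
    #         [0., 0., 0., -inf]])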
diff --git a/src/comfyui/comfy/text_encoders/flux.py b/src/comfyui/comfy/text_encoders/flux.py
new file mode 100644
index 0000000000000000000000000000000000000000..b945b1aaace8c1776caf2f6387caecdabe561e8d
--- /dev/null
+++ b/src/comfyui/comfy/text_encoders/flux.py
@@ -0,0 +1,72 @@
+from comfy import sd1_clip
+import comfy.text_encoders.t5
+import comfy.text_encoders.sd3_clip
+import comfy.model_management
+from transformers import T5TokenizerFast
+import torch
+import os
+
+class T5XXLTokenizer(sd1_clip.SDTokenizer):
+    def __init__(self, embedding_directory=None, tokenizer_data={}):
+        tokenizer_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "t5_tokenizer")
+        super().__init__(tokenizer_path, embedding_directory=embedding_directory, pad_with_end=False, embedding_size=4096, embedding_key='t5xxl', tokenizer_class=T5TokenizerFast, has_start_token=False, pad_to_max_length=False, max_length=99999999, min_length=256)
+
+
+class FluxTokenizer:
+    def __init__(self, embedding_directory=None, tokenizer_data={}):
+        clip_l_tokenizer_class = tokenizer_data.get("clip_l_tokenizer_class", sd1_clip.SDTokenizer)
+        self.clip_l = clip_l_tokenizer_class(embedding_directory=embedding_directory)
+        self.t5xxl = T5XXLTokenizer(embedding_directory=embedding_directory)
+
+    def tokenize_with_weights(self, text:str, return_word_ids=False):
+        out = {}
+        out["l"] = self.clip_l.tokenize_with_weights(text, return_word_ids)
+        out["t5xxl"] = self.t5xxl.tokenize_with_weights(text, return_word_ids)
+        return out
+
+    def untokenize(self, token_weight_pair):
+        return self.clip_l.untokenize(token_weight_pair)
+
+    def state_dict(self):
+        return {}
+
+
+class FluxClipModel(torch.nn.Module):
+    def __init__(self, dtype_t5=None, device="cpu", dtype=None, model_options={}):
+        super().__init__()
+        dtype_t5 = comfy.model_management.pick_weight_dtype(dtype_t5, dtype, device)
+        clip_l_class = model_options.get("clip_l_class", sd1_clip.SDClipModel)
+        self.clip_l = clip_l_class(device=device, dtype=dtype, return_projected_pooled=False, model_options=model_options)
+        self.t5xxl = comfy.text_encoders.sd3_clip.T5XXLModel(device=device, dtype=dtype_t5, model_options=model_options)
+        self.dtypes = set([dtype, dtype_t5])
+
+    def set_clip_options(self, options):
+        self.clip_l.set_clip_options(options)
+        self.t5xxl.set_clip_options(options)
+
+    def reset_clip_options(self):
+        self.clip_l.reset_clip_options()
+        self.t5xxl.reset_clip_options()
+
+    def encode_token_weights(self, token_weight_pairs):
+        token_weight_pairs_l = token_weight_pairs["l"]
+        token_weight_pairs_t5 = token_weight_pairs["t5xxl"]
+
+        t5_out, t5_pooled = self.t5xxl.encode_token_weights(token_weight_pairs_t5)
+        l_out, l_pooled = self.clip_l.encode_token_weights(token_weight_pairs_l)
+        return t5_out, l_pooled
+
+    def load_sd(self, sd):
+        if "text_model.encoder.layers.1.mlp.fc1.weight" in sd:
+            return self.clip_l.load_sd(sd)
+        else:
+            return self.t5xxl.load_sd(sd)
+
+def flux_clip(dtype_t5=None, t5xxl_scaled_fp8=None):
+    class FluxClipModel_(FluxClipModel):
+        def __init__(self, device="cpu", dtype=None, model_options={}):
+            if t5xxl_scaled_fp8 is not None and "t5xxl_scaled_fp8" not in model_options:
+                model_options = model_options.copy()
+                model_options["t5xxl_scaled_fp8"] = t5xxl_scaled_fp8
+            super().__init__(dtype_t5=dtype_t5, device=device, dtype=dtype, model_options=model_options)
+    return FluxClipModel_
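flux_clip above (and mochi_te in the next file) are class factories: they bake pre-negotiated dtype / fp8 settings into a subclass via a closure, so callers that only know the uniform Model(device, dtype, model_options) constructor still end up with the right configuration. The pattern reduced to a self-contained sketch with hypothetical names:

    class BaseModel:
        def __init__(self, device, option):
            self.device, self.option = device, option

    def make_model_class(baked_option):
        class Configured(BaseModel):
            def __init__(self, device="cpu"):
                # the closure supplies the option the caller never sees
                super().__init__(device=device, option=baked_option)
        return Configured

    ModelCls = make_model_class("scaled_fp8")
    m = ModelCls()
    print(m.option)   # scaled_fp8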
diff --git a/src/comfyui/comfy/text_encoders/genmo.py b/src/comfyui/comfy/text_encoders/genmo.py
new file mode 100644
index 0000000000000000000000000000000000000000..5e96cea6817025c1d334fa425c97892424cf971b
--- /dev/null
+++ b/src/comfyui/comfy/text_encoders/genmo.py
@@ -0,0 +1,38 @@
+from comfy import sd1_clip
+import comfy.text_encoders.sd3_clip
+import os
+from transformers import T5TokenizerFast
+
+
+class T5XXLModel(comfy.text_encoders.sd3_clip.T5XXLModel):
+    def __init__(self, **kwargs):
+        kwargs["attention_mask"] = True
+        super().__init__(**kwargs)
+
+
+class MochiT5XXL(sd1_clip.SD1ClipModel):
+    def __init__(self, device="cpu", dtype=None, model_options={}):
+        super().__init__(device=device, dtype=dtype, clip_name="t5xxl", clip_model=T5XXLModel, model_options=model_options)
+
+
+class T5XXLTokenizer(sd1_clip.SDTokenizer):
+    def __init__(self, embedding_directory=None, tokenizer_data={}):
+        tokenizer_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "t5_tokenizer")
+        super().__init__(tokenizer_path, embedding_directory=embedding_directory, pad_with_end=False, embedding_size=4096, embedding_key='t5xxl', tokenizer_class=T5TokenizerFast, has_start_token=False, pad_to_max_length=False, max_length=99999999, min_length=256)
+
+
+class MochiT5Tokenizer(sd1_clip.SD1Tokenizer):
+    def __init__(self, embedding_directory=None, tokenizer_data={}):
+        super().__init__(embedding_directory=embedding_directory, tokenizer_data=tokenizer_data, clip_name="t5xxl", tokenizer=T5XXLTokenizer)
+
+
+def mochi_te(dtype_t5=None, t5xxl_scaled_fp8=None):
+    class MochiTEModel_(MochiT5XXL):
+        def __init__(self, device="cpu", dtype=None, model_options={}):
+            if t5xxl_scaled_fp8 is not None and "t5xxl_scaled_fp8" not in model_options:
+                model_options = model_options.copy()
+                model_options["t5xxl_scaled_fp8"] = t5xxl_scaled_fp8
+            if dtype is None:
+                dtype = dtype_t5
+            super().__init__(device=device, dtype=dtype, model_options=model_options)
+    return MochiTEModel_
diff --git a/src/comfyui/comfy/text_encoders/hydit.py b/src/comfyui/comfy/text_encoders/hydit.py
new file mode 100644
index 0000000000000000000000000000000000000000..7cb790f45e2d87dba0ba8e4d74aa0537bcd1068e
--- /dev/null
+++ b/src/comfyui/comfy/text_encoders/hydit.py
@@ -0,0 +1,79 @@
+from comfy import sd1_clip
+from transformers import BertTokenizer
+from .spiece_tokenizer import SPieceTokenizer
+from .bert import BertModel
+import comfy.text_encoders.t5
+import os
+import torch
+
+class HyditBertModel(sd1_clip.SDClipModel):
+    def __init__(self, device="cpu", layer="last", layer_idx=None, dtype=None, model_options={}):
+        textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "hydit_clip.json")
+        super().__init__(device=device, layer=layer, layer_idx=layer_idx, textmodel_json_config=textmodel_json_config, dtype=dtype, special_tokens={"start": 101, "end": 102, "pad": 0}, model_class=BertModel, enable_attention_masks=True, return_attention_masks=True, model_options=model_options)
+
+class HyditBertTokenizer(sd1_clip.SDTokenizer):
+    def __init__(self, embedding_directory=None, tokenizer_data={}):
+        tokenizer_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "hydit_clip_tokenizer")
+        super().__init__(tokenizer_path, pad_with_end=False, embedding_size=1024, embedding_key='chinese_roberta', tokenizer_class=BertTokenizer, pad_to_max_length=False, max_length=512, min_length=77)
+
+
+class MT5XLModel(sd1_clip.SDClipModel):
+    def __init__(self, device="cpu", layer="last", layer_idx=None, dtype=None, model_options={}):
+        textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "mt5_config_xl.json")
+        super().__init__(device=device, layer=layer, layer_idx=layer_idx, textmodel_json_config=textmodel_json_config, dtype=dtype, special_tokens={"end": 1, "pad": 0}, model_class=comfy.text_encoders.t5.T5, enable_attention_masks=True, return_attention_masks=True, model_options=model_options)
+
+class MT5XLTokenizer(sd1_clip.SDTokenizer):
+    def __init__(self, embedding_directory=None, tokenizer_data={}):
+        #tokenizer_path = os.path.join(os.path.join(os.path.dirname(os.path.realpath(__file__)), "mt5_tokenizer"), "spiece.model")
+        tokenizer = tokenizer_data.get("spiece_model", None)
+        super().__init__(tokenizer, pad_with_end=False, embedding_size=2048, embedding_key='mt5xl', tokenizer_class=SPieceTokenizer, has_start_token=False, pad_to_max_length=False, max_length=99999999, min_length=256)
+
+    def state_dict(self):
+        return {"spiece_model": self.tokenizer.serialize_model()}
+
+class HyditTokenizer:
+    def __init__(self, embedding_directory=None, tokenizer_data={}):
+        mt5_tokenizer_data = tokenizer_data.get("mt5xl.spiece_model", None)
+        self.hydit_clip = HyditBertTokenizer(embedding_directory=embedding_directory)
+        self.mt5xl = MT5XLTokenizer(tokenizer_data={"spiece_model": mt5_tokenizer_data}, embedding_directory=embedding_directory)
+
+    def tokenize_with_weights(self, text:str, return_word_ids=False):
+        out = {}
+        out["hydit_clip"] = self.hydit_clip.tokenize_with_weights(text, return_word_ids)
+        out["mt5xl"] = self.mt5xl.tokenize_with_weights(text, return_word_ids)
+        return out
+
+    def untokenize(self, token_weight_pair):
+        return self.hydit_clip.untokenize(token_weight_pair)
+
+    def state_dict(self):
+        return {"mt5xl.spiece_model": self.mt5xl.state_dict()["spiece_model"]}
+
+class HyditModel(torch.nn.Module):
+    def __init__(self, device="cpu", dtype=None, model_options={}):
+        super().__init__()
+        self.hydit_clip = HyditBertModel(dtype=dtype, model_options=model_options)
+        self.mt5xl = MT5XLModel(dtype=dtype, model_options=model_options)
+
+        self.dtypes = set()
+        if dtype is not None:
+            self.dtypes.add(dtype)
+
+    def encode_token_weights(self, token_weight_pairs):
+        hydit_out = self.hydit_clip.encode_token_weights(token_weight_pairs["hydit_clip"])
+        mt5_out = self.mt5xl.encode_token_weights(token_weight_pairs["mt5xl"])
+        return hydit_out[0], hydit_out[1], {"attention_mask": hydit_out[2]["attention_mask"], "conditioning_mt5xl": mt5_out[0], "attention_mask_mt5xl": mt5_out[2]["attention_mask"]}
+
+    def load_sd(self, sd):
+        if "bert.encoder.layer.0.attention.self.query.weight" in sd:
+            return self.hydit_clip.load_sd(sd)
+        else:
+            return self.mt5xl.load_sd(sd)
+
+    def set_clip_options(self, options):
+        self.hydit_clip.set_clip_options(options)
+        self.mt5xl.set_clip_options(options)
+
+    def reset_clip_options(self):
+        self.hydit_clip.reset_clip_options()
+        self.mt5xl.reset_clip_options()
diff --git a/src/comfyui/comfy/text_encoders/hydit_clip.json b/src/comfyui/comfy/text_encoders/hydit_clip.json
new file mode 100644
index 0000000000000000000000000000000000000000..c41c7c1ff376407f42e3ff20ab26faaa98bb5e65
--- /dev/null
+++ b/src/comfyui/comfy/text_encoders/hydit_clip.json
@@ -0,0 +1,35 @@
+{
+  "_name_or_path": "hfl/chinese-roberta-wwm-ext-large",
+  "architectures": [
+    "BertModel"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "bos_token_id": 0,
+  "classifier_dropout": null,
+  "directionality": "bidi",
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 1024,
+  "initializer_range": 0.02,
+  "intermediate_size": 4096,
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "bert",
+  "num_attention_heads": 16,
+  "num_hidden_layers": 24,
+  "output_past": true,
+  "pad_token_id": 0,
+  "pooler_fc_size": 768,
"pooler_num_attention_heads": 12, + "pooler_num_fc_layers": 3, + "pooler_size_per_head": 128, + "pooler_type": "first_token_transform", + "position_embedding_type": "absolute", + "torch_dtype": "float32", + "transformers_version": "4.22.1", + "type_vocab_size": 2, + "use_cache": true, + "vocab_size": 47020 +} + diff --git a/src/comfyui/comfy/text_encoders/hydit_clip_tokenizer/special_tokens_map.json b/src/comfyui/comfy/text_encoders/hydit_clip_tokenizer/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..a8b3208c2884c4efb86e49300fdd3dc877220cdf --- /dev/null +++ b/src/comfyui/comfy/text_encoders/hydit_clip_tokenizer/special_tokens_map.json @@ -0,0 +1,7 @@ +{ + "cls_token": "[CLS]", + "mask_token": "[MASK]", + "pad_token": "[PAD]", + "sep_token": "[SEP]", + "unk_token": "[UNK]" +} diff --git a/src/comfyui/comfy/text_encoders/hydit_clip_tokenizer/tokenizer_config.json b/src/comfyui/comfy/text_encoders/hydit_clip_tokenizer/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..a14356073e11a885074a7cdbddc749463cefd911 --- /dev/null +++ b/src/comfyui/comfy/text_encoders/hydit_clip_tokenizer/tokenizer_config.json @@ -0,0 +1,16 @@ +{ + "cls_token": "[CLS]", + "do_basic_tokenize": true, + "do_lower_case": true, + "mask_token": "[MASK]", + "name_or_path": "hfl/chinese-roberta-wwm-ext", + "never_split": null, + "pad_token": "[PAD]", + "sep_token": "[SEP]", + "special_tokens_map_file": "/home/chenweifeng/.cache/huggingface/hub/models--hfl--chinese-roberta-wwm-ext/snapshots/5c58d0b8ec1d9014354d691c538661bf00bfdb44/special_tokens_map.json", + "strip_accents": null, + "tokenize_chinese_chars": true, + "tokenizer_class": "BertTokenizer", + "unk_token": "[UNK]", + "model_max_length": 77 +} diff --git a/src/comfyui/comfy/text_encoders/hydit_clip_tokenizer/vocab.txt b/src/comfyui/comfy/text_encoders/hydit_clip_tokenizer/vocab.txt new file mode 100644 index 0000000000000000000000000000000000000000..6246906805d02aca01714c71e4c8d77b69a7a131 --- /dev/null +++ b/src/comfyui/comfy/text_encoders/hydit_clip_tokenizer/vocab.txt @@ -0,0 +1,47020 @@ +[PAD] +[unused1] +[unused2] +[unused3] +[unused4] +[unused5] +[unused6] +[unused7] +[unused8] +[unused9] +[unused10] +[unused11] +[unused12] +[unused13] +[unused14] +[unused15] +[unused16] +[unused17] +[unused18] +[unused19] +[unused20] +[unused21] +[unused22] +[unused23] +[unused24] +[unused25] +[unused26] +[unused27] +[unused28] +[unused29] +[unused30] +[unused31] +[unused32] +[unused33] +[unused34] +[unused35] +[unused36] +[unused37] +[unused38] +[unused39] +[unused40] +[unused41] +[unused42] +[unused43] +[unused44] +[unused45] +[unused46] +[unused47] +[unused48] +[unused49] +[unused50] +[unused51] +[unused52] +[unused53] +[unused54] +[unused55] +[unused56] +[unused57] +[unused58] +[unused59] +[unused60] +[unused61] +[unused62] +[unused63] +[unused64] +[unused65] +[unused66] +[unused67] +[unused68] +[unused69] +[unused70] +[unused71] +[unused72] +[unused73] +[unused74] +[unused75] +[unused76] +[unused77] +[unused78] +[unused79] +[unused80] +[unused81] +[unused82] +[unused83] +[unused84] +[unused85] +[unused86] +[unused87] +[unused88] +[unused89] +[unused90] +[unused91] +[unused92] +[unused93] +[unused94] +[unused95] +[unused96] +[unused97] +[unused98] +[unused99] +[UNK] +[CLS] +[SEP] +[MASK] + + +! +" +# +$ +% +& +' +( +) +* ++ +, +- +. +/ +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +: +; +< += +> +? 
+@ +[ +\ +] +^ +_ +a +b +c +d +e +f +g +h +i +j +k +l +m +n +o +p +q +r +s +t +u +v +w +x +y +z +{ +| +} +~ +£ +¤ +¥ +§ +© +« +® +° +± +² +³ +µ +· +¹ +º +» +¼ +× +ß +æ +÷ +ø +đ +ŋ +ɔ +ə +ɡ +ʰ +ˇ +ˈ +ˊ +ˋ +ˍ +ː +˙ +˚ +ˢ +α +β +γ +δ +ε +η +θ +ι +κ +λ +μ +ν +ο +π +ρ +ς +σ +τ +υ +φ +χ +ψ +ω +а +б +в +г +д +е +ж +з +и +к +л +м +н +о +п +р +с +т +у +ф +х +ц +ч +ш +ы +ь +я +і +ا +ب +ة +ت +د +ر +س +ع +ل +م +ن +ه +و +ي +۩ +ก +ง +น +ม +ย +ร +อ +า +เ +๑ +་ +ღ +ᄀ +ᄁ +ᄂ +ᄃ +ᄅ +ᄆ +ᄇ +ᄈ +ᄉ +ᄋ +ᄌ +ᄎ +ᄏ +ᄐ +ᄑ +ᄒ +ᅡ +ᅢ +ᅣ +ᅥ +ᅦ +ᅧ +ᅨ +ᅩ +ᅪ +ᅬ +ᅭ +ᅮ +ᅯ +ᅲ +ᅳ +ᅴ +ᅵ +ᆨ +ᆫ +ᆯ +ᆷ +ᆸ +ᆺ +ᆻ +ᆼ +ᗜ +ᵃ +ᵉ +ᵍ +ᵏ +ᵐ +ᵒ +ᵘ +‖ +„ +† +• +‥ +‧ +
 +‰ +′ +″ +‹ +› +※ +‿ +⁄ +ⁱ +⁺ +ⁿ +₁ +₂ +₃ +₄ +€ +℃ +№ +™ +ⅰ +ⅱ +ⅲ +ⅳ +ⅴ +← +↑ +→ +↓ +↔ +↗ +↘ +⇒ +∀ +− +∕ +∙ +√ +∞ +∟ +∠ +∣ +∥ +∩ +∮ +∶ +∼ +∽ +≈ +≒ +≡ +≤ +≥ +≦ +≧ +≪ +≫ +⊙ +⋅ +⋈ +⋯ +⌒ +① +② +③ +④ +⑤ +⑥ +⑦ +⑧ +⑨ +⑩ +⑴ +⑵ +⑶ +⑷ +⑸ +⒈ +⒉ +⒊ +⒋ +ⓒ +ⓔ +ⓘ +─ +━ +│ +┃ +┅ +┆ +┊ +┌ +└ +├ +┣ +═ +║ +╚ +╞ +╠ +╭ +╮ +╯ +╰ +╱ +╳ +▂ +▃ +▅ +▇ +█ +▉ +▋ +▌ +▍ +▎ +■ +□ +▪ +▫ +▬ +▲ +△ +▶ +► +▼ +▽ +◆ +◇ +○ +◎ +● +◕ +◠ +◢ +◤ +☀ +★ +☆ +☕ +☞ +☺ +☼ +♀ +♂ +♠ +♡ +♣ +♥ +♦ +♪ +♫ +♬ +✈ +✔ +✕ +✖ +✦ +✨ +✪ +✰ +✿ +❀ +❤ +➜ +➤ +⦿ +、 +。 +〃 +々 +〇 +〈 +〉 +《 +》 +「 +」 +『 +』 +【 +】 +〓 +〔 +〕 +〖 +〗 +〜 +〝 +〞 +ぁ +あ +ぃ +い +う +ぇ +え +お +か +き +く +け +こ +さ +し +す +せ +そ +た +ち +っ +つ +て +と +な +に +ぬ +ね +の +は +ひ +ふ +へ +ほ +ま +み +む +め +も +ゃ +や +ゅ +ゆ +ょ +よ +ら +り +る +れ +ろ +わ +を +ん +゜ +ゝ +ァ +ア +ィ +イ +ゥ +ウ +ェ +エ +ォ +オ +カ +キ +ク +ケ +コ +サ +シ +ス +セ +ソ +タ +チ +ッ +ツ +テ +ト +ナ +ニ +ヌ +ネ +ノ +ハ +ヒ +フ +ヘ +ホ +マ +ミ +ム +メ +モ +ャ +ヤ +ュ +ユ +ョ +ヨ +ラ +リ +ル +レ +ロ +ワ +ヲ +ン +ヶ +・ +ー +ヽ +ㄅ +ㄆ +ㄇ +ㄉ +ㄋ +ㄌ +ㄍ +ㄎ +ㄏ +ㄒ +ㄚ +ㄛ +ㄞ +ㄟ +ㄢ +ㄤ +ㄥ +ㄧ +ㄨ +ㆍ +㈦ +㊣ +㎡ +㗎 +一 +丁 +七 +万 +丈 +三 +上 +下 +不 +与 +丐 +丑 +专 +且 +丕 +世 +丘 +丙 +业 +丛 +东 +丝 +丞 +丟 +両 +丢 +两 +严 +並 +丧 +丨 +个 +丫 +中 +丰 +串 +临 +丶 +丸 +丹 +为 +主 +丼 +丽 +举 +丿 +乂 +乃 +久 +么 +义 +之 +乌 +乍 +乎 +乏 +乐 +乒 +乓 +乔 +乖 +乗 +乘 +乙 +乜 +九 +乞 +也 +习 +乡 +书 +乩 +买 +乱 +乳 +乾 +亀 +亂 +了 +予 +争 +事 +二 +于 +亏 +云 +互 +五 +井 +亘 +亙 +亚 +些 +亜 +亞 +亟 +亡 +亢 +交 +亥 +亦 +产 +亨 +亩 +享 +京 +亭 +亮 +亲 +亳 +亵 +人 +亿 +什 +仁 +仃 +仄 +仅 +仆 +仇 +今 +介 +仍 +从 +仏 +仑 +仓 +仔 +仕 +他 +仗 +付 +仙 +仝 +仞 +仟 +代 +令 +以 +仨 +仪 +们 +仮 +仰 +仲 +件 +价 +任 +份 +仿 +企 +伉 +伊 +伍 +伎 +伏 +伐 +休 +伕 +众 +优 +伙 +会 +伝 +伞 +伟 +传 +伢 +伤 +伦 +伪 +伫 +伯 +估 +伴 +伶 +伸 +伺 +似 +伽 +佃 +但 +佇 +佈 +位 +低 +住 +佐 +佑 +体 +佔 +何 +佗 +佘 +余 +佚 +佛 +作 +佝 +佞 +佟 +你 +佢 +佣 +佤 +佥 +佩 +佬 +佯 +佰 +佳 +併 +佶 +佻 +佼 +使 +侃 +侄 +來 +侈 +例 +侍 +侏 +侑 +侖 +侗 +供 +依 +侠 +価 +侣 +侥 +侦 +侧 +侨 +侬 +侮 +侯 +侵 +侶 +侷 +便 +係 +促 +俄 +俊 +俎 +俏 +俐 +俑 +俗 +俘 +俚 +保 +俞 +俟 +俠 +信 +俨 +俩 +俪 +俬 +俭 +修 +俯 +俱 +俳 +俸 +俺 +俾 +倆 +倉 +個 +倌 +倍 +倏 +們 +倒 +倔 +倖 +倘 +候 +倚 +倜 +借 +倡 +値 +倦 +倩 +倪 +倫 +倬 +倭 +倶 +债 +值 +倾 +偃 +假 +偈 +偉 +偌 +偎 +偏 +偕 +做 +停 +健 +側 +偵 +偶 +偷 +偻 +偽 +偿 +傀 +傅 +傍 +傑 +傘 +備 +傚 +傢 +傣 +傥 +储 +傩 +催 +傭 +傲 +傳 +債 +傷 +傻 +傾 +僅 +働 +像 +僑 +僕 +僖 +僚 +僥 +僧 +僭 +僮 +僱 +僵 +價 +僻 +儀 +儂 +億 +儆 +儉 +儋 +儒 +儕 +儘 +償 +儡 +優 +儲 +儷 +儼 +儿 +兀 +允 +元 +兄 +充 +兆 +兇 +先 +光 +克 +兌 +免 +児 +兑 +兒 +兔 +兖 +党 +兜 +兢 +入 +內 +全 +兩 +八 +公 +六 +兮 +兰 +共 +兲 +关 +兴 +兵 +其 +具 +典 +兹 +养 +兼 +兽 +冀 +内 +円 +冇 +冈 +冉 +冊 +册 +再 +冏 +冒 +冕 +冗 +写 +军 +农 +冠 +冢 +冤 +冥 +冨 +冪 +冬 +冯 +冰 +冲 +决 +况 +冶 +冷 +冻 +冼 +冽 +冾 +净 +凄 +准 +凇 +凈 +凉 +凋 +凌 +凍 +减 +凑 +凛 +凜 +凝 +几 +凡 +凤 +処 +凪 +凭 +凯 +凰 +凱 +凳 +凶 +凸 +凹 +出 +击 +函 +凿 +刀 +刁 +刃 +分 +切 +刈 +刊 +刍 +刎 +刑 +划 +列 +刘 +则 +刚 +创 +初 +删 +判 +別 +刨 +利 +刪 +别 +刮 +到 +制 +刷 +券 +刹 +刺 +刻 +刽 +剁 +剂 +剃 +則 +剉 +削 +剋 +剌 +前 +剎 +剐 +剑 +剔 +剖 +剛 +剜 +剝 +剣 +剤 +剥 +剧 +剩 +剪 +副 +割 +創 +剷 +剽 +剿 +劃 +劇 +劈 +劉 +劊 +劍 +劏 +劑 +力 +劝 +办 +功 +加 +务 +劣 +动 +助 +努 +劫 +劭 +励 +劲 +劳 +労 +劵 +効 +劾 +势 +勁 +勃 +勇 +勉 +勋 +勐 +勒 +動 +勖 +勘 +務 +勛 +勝 +勞 +募 +勢 +勤 +勧 +勳 +勵 +勸 +勺 +勻 +勾 +勿 +匀 +包 +匆 +匈 +匍 +匐 +匕 +化 +北 +匙 +匝 +匠 +匡 +匣 +匪 +匮 +匯 +匱 +匹 +区 +医 +匾 +匿 +區 +十 +千 +卅 +升 +午 +卉 +半 +卍 +华 +协 +卑 +卒 +卓 +協 +单 +卖 +南 +単 +博 +卜 +卞 +卟 +占 +卡 +卢 +卤 +卦 +卧 +卫 +卮 +卯 +印 +危 +即 +却 +卵 +卷 +卸 +卻 +卿 +厂 +厄 +厅 +历 +厉 +压 +厌 +厕 +厘 +厚 +厝 +原 +厢 +厥 +厦 +厨 +厩 +厭 +厮 +厲 +厳 +去 +县 +叁 +参 +參 +又 +叉 +及 +友 +双 +反 +収 +发 +叔 +取 +受 +变 +叙 +叛 +叟 +叠 +叡 +叢 +口 +古 +句 +另 +叨 +叩 +只 +叫 +召 +叭 +叮 +可 +台 +叱 +史 +右 +叵 +叶 +号 +司 +叹 +叻 +叼 +叽 +吁 +吃 +各 +吆 +合 +吉 +吊 +吋 +同 +名 +后 +吏 +吐 +向 +吒 +吓 +吕 +吖 +吗 +君 +吝 +吞 +吟 +吠 +吡 +否 +吧 +吨 +吩 +含 +听 +吭 +吮 +启 +吱 +吳 +吴 +吵 +吶 +吸 +吹 +吻 +吼 +吽 +吾 +呀 +呂 +呃 +呆 +呈 +告 +呋 +呎 +呐 +呓 +呕 +呗 +员 +呛 +呜 +呢 +呤 +呦 +周 +呱 +呲 +味 +呵 +呷 +呸 +呻 +呼 +命 +咀 +咁 +咂 +咄 +咆 +咋 +和 +咎 +咏 +咐 +咒 +咔 +咕 +咖 +咗 +咘 +咙 +咚 +咛 +咣 +咤 +咦 +咧 +咨 +咩 +咪 +咫 +咬 +咭 +咯 +咱 +咲 +咳 +咸 +咻 +咽 +咿 +哀 +品 +哂 +哄 +哆 +哇 +哈 +哉 +哋 +哌 +响 +哎 +哏 +哐 +哑 +哒 +哔 +哗 +哟 +員 +哥 +哦 +哧 +哨 +哩 +哪 +哭 +哮 
+哲 +哺 +哼 +哽 +唁 +唄 +唆 +唇 +唉 +唏 +唐 +唑 +唔 +唠 +唤 +唧 +唬 +售 +唯 +唰 +唱 +唳 +唷 +唸 +唾 +啃 +啄 +商 +啉 +啊 +問 +啓 +啕 +啖 +啜 +啞 +啟 +啡 +啤 +啥 +啦 +啧 +啪 +啫 +啬 +啮 +啰 +啱 +啲 +啵 +啶 +啷 +啸 +啻 +啼 +啾 +喀 +喂 +喃 +善 +喆 +喇 +喉 +喊 +喋 +喎 +喏 +喔 +喘 +喙 +喚 +喜 +喝 +喟 +喧 +喪 +喫 +喬 +單 +喰 +喱 +喲 +喳 +喵 +営 +喷 +喹 +喺 +喻 +喽 +嗅 +嗆 +嗇 +嗎 +嗑 +嗒 +嗓 +嗔 +嗖 +嗚 +嗜 +嗝 +嗟 +嗡 +嗣 +嗤 +嗦 +嗨 +嗪 +嗬 +嗯 +嗰 +嗲 +嗳 +嗶 +嗷 +嗽 +嘀 +嘅 +嘆 +嘈 +嘉 +嘌 +嘍 +嘎 +嘔 +嘖 +嘗 +嘘 +嘚 +嘛 +嘜 +嘞 +嘟 +嘢 +嘣 +嘤 +嘧 +嘩 +嘭 +嘮 +嘯 +嘰 +嘱 +嘲 +嘴 +嘶 +嘸 +嘹 +嘻 +嘿 +噁 +噌 +噎 +噓 +噔 +噗 +噙 +噜 +噠 +噢 +噤 +器 +噩 +噪 +噬 +噱 +噴 +噶 +噸 +噹 +噻 +噼 +嚀 +嚇 +嚎 +嚏 +嚐 +嚓 +嚕 +嚟 +嚣 +嚥 +嚨 +嚮 +嚴 +嚷 +嚼 +囂 +囉 +囊 +囍 +囑 +囔 +囗 +囚 +四 +囝 +回 +囟 +因 +囡 +团 +団 +囤 +囧 +囪 +囫 +园 +困 +囱 +囲 +図 +围 +囹 +固 +国 +图 +囿 +圃 +圄 +圆 +圈 +國 +圍 +圏 +園 +圓 +圖 +團 +圜 +土 +圣 +圧 +在 +圩 +圭 +地 +圳 +场 +圻 +圾 +址 +坂 +均 +坊 +坍 +坎 +坏 +坐 +坑 +块 +坚 +坛 +坝 +坞 +坟 +坠 +坡 +坤 +坦 +坨 +坪 +坯 +坳 +坵 +坷 +垂 +垃 +垄 +型 +垒 +垚 +垛 +垠 +垢 +垣 +垦 +垩 +垫 +垭 +垮 +垵 +埂 +埃 +埋 +城 +埔 +埕 +埗 +域 +埠 +埤 +埵 +執 +埸 +培 +基 +埼 +堀 +堂 +堃 +堅 +堆 +堇 +堑 +堕 +堙 +堡 +堤 +堪 +堯 +堰 +報 +場 +堵 +堺 +堿 +塊 +塌 +塑 +塔 +塗 +塘 +塚 +塞 +塢 +塩 +填 +塬 +塭 +塵 +塾 +墀 +境 +墅 +墉 +墊 +墒 +墓 +増 +墘 +墙 +墜 +增 +墟 +墨 +墩 +墮 +墳 +墻 +墾 +壁 +壅 +壆 +壇 +壊 +壑 +壓 +壕 +壘 +壞 +壟 +壢 +壤 +壩 +士 +壬 +壮 +壯 +声 +売 +壳 +壶 +壹 +壺 +壽 +处 +备 +変 +复 +夏 +夔 +夕 +外 +夙 +多 +夜 +够 +夠 +夢 +夥 +大 +天 +太 +夫 +夭 +央 +夯 +失 +头 +夷 +夸 +夹 +夺 +夾 +奂 +奄 +奇 +奈 +奉 +奋 +奎 +奏 +奐 +契 +奔 +奕 +奖 +套 +奘 +奚 +奠 +奢 +奥 +奧 +奪 +奬 +奮 +女 +奴 +奶 +奸 +她 +好 +如 +妃 +妄 +妆 +妇 +妈 +妊 +妍 +妒 +妓 +妖 +妘 +妙 +妝 +妞 +妣 +妤 +妥 +妨 +妩 +妪 +妮 +妲 +妳 +妹 +妻 +妾 +姆 +姉 +姊 +始 +姍 +姐 +姑 +姒 +姓 +委 +姗 +姚 +姜 +姝 +姣 +姥 +姦 +姨 +姪 +姫 +姬 +姹 +姻 +姿 +威 +娃 +娄 +娅 +娆 +娇 +娉 +娑 +娓 +娘 +娛 +娜 +娟 +娠 +娣 +娥 +娩 +娱 +娲 +娴 +娶 +娼 +婀 +婁 +婆 +婉 +婊 +婕 +婚 +婢 +婦 +婧 +婪 +婭 +婴 +婵 +婶 +婷 +婺 +婿 +媒 +媚 +媛 +媞 +媧 +媲 +媳 +媽 +媾 +嫁 +嫂 +嫉 +嫌 +嫑 +嫔 +嫖 +嫘 +嫚 +嫡 +嫣 +嫦 +嫩 +嫲 +嫵 +嫻 +嬅 +嬉 +嬌 +嬗 +嬛 +嬢 +嬤 +嬪 +嬰 +嬴 +嬷 +嬸 +嬿 +孀 +孃 +子 +孑 +孔 +孕 +孖 +字 +存 +孙 +孚 +孛 +孜 +孝 +孟 +孢 +季 +孤 +学 +孩 +孪 +孫 +孬 +孰 +孱 +孳 +孵 +學 +孺 +孽 +孿 +宁 +它 +宅 +宇 +守 +安 +宋 +完 +宏 +宓 +宕 +宗 +官 +宙 +定 +宛 +宜 +宝 +实 +実 +宠 +审 +客 +宣 +室 +宥 +宦 +宪 +宫 +宮 +宰 +害 +宴 +宵 +家 +宸 +容 +宽 +宾 +宿 +寂 +寄 +寅 +密 +寇 +富 +寐 +寒 +寓 +寛 +寝 +寞 +察 +寡 +寢 +寥 +實 +寧 +寨 +審 +寫 +寬 +寮 +寰 +寵 +寶 +寸 +对 +寺 +寻 +导 +対 +寿 +封 +専 +射 +将 +將 +專 +尉 +尊 +尋 +對 +導 +小 +少 +尔 +尕 +尖 +尘 +尚 +尝 +尤 +尧 +尬 +就 +尴 +尷 +尸 +尹 +尺 +尻 +尼 +尽 +尾 +尿 +局 +屁 +层 +屄 +居 +屆 +屈 +屉 +届 +屋 +屌 +屍 +屎 +屏 +屐 +屑 +展 +屜 +属 +屠 +屡 +屢 +層 +履 +屬 +屯 +山 +屹 +屿 +岀 +岁 +岂 +岌 +岐 +岑 +岔 +岖 +岗 +岘 +岙 +岚 +岛 +岡 +岩 +岫 +岬 +岭 +岱 +岳 +岷 +岸 +峇 +峋 +峒 +峙 +峡 +峤 +峥 +峦 +峨 +峪 +峭 +峯 +峰 +峴 +島 +峻 +峽 +崁 +崂 +崆 +崇 +崎 +崑 +崔 +崖 +崗 +崙 +崛 +崧 +崩 +崭 +崴 +崽 +嵇 +嵊 +嵋 +嵌 +嵐 +嵘 +嵩 +嵬 +嵯 +嶂 +嶄 +嶇 +嶋 +嶙 +嶺 +嶼 +嶽 +巅 +巍 +巒 +巔 +巖 +川 +州 +巡 +巢 +工 +左 +巧 +巨 +巩 +巫 +差 +己 +已 +巳 +巴 +巷 +巻 +巽 +巾 +巿 +币 +市 +布 +帅 +帆 +师 +希 +帐 +帑 +帕 +帖 +帘 +帚 +帛 +帜 +帝 +帥 +带 +帧 +師 +席 +帮 +帯 +帰 +帳 +帶 +帷 +常 +帼 +帽 +幀 +幂 +幄 +幅 +幌 +幔 +幕 +幟 +幡 +幢 +幣 +幫 +干 +平 +年 +并 +幸 +幹 +幺 +幻 +幼 +幽 +幾 +广 +庁 +広 +庄 +庆 +庇 +床 +序 +庐 +库 +应 +底 +庖 +店 +庙 +庚 +府 +庞 +废 +庠 +度 +座 +庫 +庭 +庵 +庶 +康 +庸 +庹 +庾 +廁 +廂 +廃 +廈 +廉 +廊 +廓 +廖 +廚 +廝 +廟 +廠 +廢 +廣 +廬 +廳 +延 +廷 +建 +廿 +开 +弁 +异 +弃 +弄 +弈 +弊 +弋 +式 +弑 +弒 +弓 +弔 +引 +弗 +弘 +弛 +弟 +张 +弥 +弦 +弧 +弩 +弭 +弯 +弱 +張 +強 +弹 +强 +弼 +弾 +彅 +彆 +彈 +彌 +彎 +归 +当 +录 +彗 +彙 +彝 +形 +彤 +彥 +彦 +彧 +彩 +彪 +彫 +彬 +彭 +彰 +影 +彷 +役 +彻 +彼 +彿 +往 +征 +径 +待 +徇 +很 +徉 +徊 +律 +後 +徐 +徑 +徒 +従 +徕 +得 +徘 +徙 +徜 +從 +徠 +御 +徨 +復 +循 +徬 +微 +徳 +徴 +徵 +德 +徹 +徼 +徽 +心 +必 +忆 +忌 +忍 +忏 +忐 +忑 +忒 +忖 +志 +忘 +忙 +応 +忠 +忡 +忤 +忧 +忪 +快 +忱 +念 +忻 +忽 +忿 +怀 +态 +怂 +怅 +怆 +怎 +怏 +怒 +怔 +怕 +怖 +怙 +怜 +思 +怠 +怡 +急 +怦 +性 +怨 +怪 +怯 +怵 +总 +怼 +恁 +恃 +恆 +恋 +恍 +恐 +恒 +恕 +恙 +恚 +恢 +恣 +恤 +恥 +恨 +恩 +恪 +恫 +恬 +恭 +息 +恰 +恳 +恵 +恶 +恸 +恺 +恻 +恼 +恿 +悄 +悅 +悉 +悌 +悍 +悔 +悖 +悚 +悟 +悠 +患 +悦 +您 +悩 +悪 +悬 +悯 +悱 +悲 +悴 +悵 +悶 +悸 +悻 +悼 +悽 +情 +惆 +惇 +惊 +惋 +惑 +惕 +惘 +惚 +惜 +惟 +惠 +惡 +惦 +惧 +惨 +惩 +惫 +惬 +惭 +惮 +惯 +惰 +惱 +想 +惴 +惶 +惹 +惺 +愁 +愆 +愈 +愉 +愍 +意 +愕 +愚 +愛 +愜 +感 +愣 +愤 +愧 +愫 +愷 +愿 +慄 +慈 +態 +慌 +慎 +慑 +慕 +慘 +慚 
+慟 +慢 +慣 +慧 +慨 +慫 +慮 +慰 +慳 +慵 +慶 +慷 +慾 +憂 +憊 +憋 +憎 +憐 +憑 +憔 +憚 +憤 +憧 +憨 +憩 +憫 +憬 +憲 +憶 +憾 +懂 +懇 +懈 +應 +懊 +懋 +懑 +懒 +懦 +懲 +懵 +懶 +懷 +懸 +懺 +懼 +懾 +懿 +戀 +戈 +戊 +戌 +戍 +戎 +戏 +成 +我 +戒 +戕 +或 +战 +戚 +戛 +戟 +戡 +戦 +截 +戬 +戮 +戰 +戲 +戳 +戴 +戶 +户 +戸 +戻 +戾 +房 +所 +扁 +扇 +扈 +扉 +手 +才 +扎 +扑 +扒 +打 +扔 +払 +托 +扛 +扣 +扦 +执 +扩 +扪 +扫 +扬 +扭 +扮 +扯 +扰 +扱 +扳 +扶 +批 +扼 +找 +承 +技 +抄 +抉 +把 +抑 +抒 +抓 +投 +抖 +抗 +折 +抚 +抛 +抜 +択 +抟 +抠 +抡 +抢 +护 +报 +抨 +披 +抬 +抱 +抵 +抹 +押 +抽 +抿 +拂 +拄 +担 +拆 +拇 +拈 +拉 +拋 +拌 +拍 +拎 +拐 +拒 +拓 +拔 +拖 +拗 +拘 +拙 +拚 +招 +拜 +拟 +拡 +拢 +拣 +拥 +拦 +拧 +拨 +择 +括 +拭 +拮 +拯 +拱 +拳 +拴 +拷 +拼 +拽 +拾 +拿 +持 +挂 +指 +挈 +按 +挎 +挑 +挖 +挙 +挚 +挛 +挝 +挞 +挟 +挠 +挡 +挣 +挤 +挥 +挨 +挪 +挫 +振 +挲 +挹 +挺 +挽 +挾 +捂 +捅 +捆 +捉 +捋 +捌 +捍 +捎 +捏 +捐 +捕 +捞 +损 +捡 +换 +捣 +捧 +捨 +捩 +据 +捱 +捲 +捶 +捷 +捺 +捻 +掀 +掂 +掃 +掇 +授 +掉 +掌 +掏 +掐 +排 +掖 +掘 +掙 +掛 +掠 +採 +探 +掣 +接 +控 +推 +掩 +措 +掬 +掰 +掲 +掳 +掴 +掷 +掸 +掺 +揀 +揃 +揄 +揆 +揉 +揍 +描 +提 +插 +揖 +揚 +換 +握 +揣 +揩 +揪 +揭 +揮 +援 +揶 +揸 +揹 +揽 +搀 +搁 +搂 +搅 +損 +搏 +搐 +搓 +搔 +搖 +搗 +搜 +搞 +搡 +搪 +搬 +搭 +搵 +搶 +携 +搽 +摀 +摁 +摄 +摆 +摇 +摈 +摊 +摒 +摔 +摘 +摞 +摟 +摧 +摩 +摯 +摳 +摸 +摹 +摺 +摻 +撂 +撃 +撅 +撇 +撈 +撐 +撑 +撒 +撓 +撕 +撚 +撞 +撤 +撥 +撩 +撫 +撬 +播 +撮 +撰 +撲 +撵 +撷 +撸 +撻 +撼 +撿 +擀 +擁 +擂 +擄 +擅 +擇 +擊 +擋 +操 +擎 +擒 +擔 +擘 +據 +擞 +擠 +擡 +擢 +擦 +擬 +擰 +擱 +擲 +擴 +擷 +擺 +擼 +擾 +攀 +攏 +攒 +攔 +攘 +攙 +攜 +攝 +攞 +攢 +攣 +攤 +攥 +攪 +攫 +攬 +支 +收 +攸 +改 +攻 +放 +政 +故 +效 +敌 +敍 +敎 +敏 +救 +敕 +敖 +敗 +敘 +教 +敛 +敝 +敞 +敢 +散 +敦 +敬 +数 +敲 +整 +敵 +敷 +數 +斂 +斃 +文 +斋 +斌 +斎 +斐 +斑 +斓 +斗 +料 +斛 +斜 +斟 +斡 +斤 +斥 +斧 +斩 +斫 +斬 +断 +斯 +新 +斷 +方 +於 +施 +旁 +旃 +旅 +旋 +旌 +旎 +族 +旖 +旗 +无 +既 +日 +旦 +旧 +旨 +早 +旬 +旭 +旮 +旱 +时 +旷 +旺 +旻 +昀 +昂 +昆 +昇 +昉 +昊 +昌 +明 +昏 +易 +昔 +昕 +昙 +星 +映 +春 +昧 +昨 +昭 +是 +昱 +昴 +昵 +昶 +昼 +显 +晁 +時 +晃 +晉 +晋 +晌 +晏 +晒 +晓 +晔 +晕 +晖 +晗 +晚 +晝 +晞 +晟 +晤 +晦 +晨 +晩 +普 +景 +晰 +晴 +晶 +晷 +智 +晾 +暂 +暄 +暇 +暈 +暉 +暌 +暐 +暑 +暖 +暗 +暝 +暢 +暧 +暨 +暫 +暮 +暱 +暴 +暸 +暹 +曄 +曆 +曇 +曉 +曖 +曙 +曜 +曝 +曠 +曦 +曬 +曰 +曲 +曳 +更 +書 +曹 +曼 +曾 +替 +最 +會 +月 +有 +朋 +服 +朐 +朔 +朕 +朗 +望 +朝 +期 +朦 +朧 +木 +未 +末 +本 +札 +朮 +术 +朱 +朴 +朵 +机 +朽 +杀 +杂 +权 +杆 +杈 +杉 +李 +杏 +材 +村 +杓 +杖 +杜 +杞 +束 +杠 +条 +来 +杨 +杭 +杯 +杰 +東 +杳 +杵 +杷 +杼 +松 +板 +极 +构 +枇 +枉 +枋 +析 +枕 +林 +枚 +果 +枝 +枢 +枣 +枪 +枫 +枭 +枯 +枰 +枱 +枳 +架 +枷 +枸 +柄 +柏 +某 +柑 +柒 +染 +柔 +柘 +柚 +柜 +柞 +柠 +柢 +查 +柩 +柬 +柯 +柱 +柳 +柴 +柵 +査 +柿 +栀 +栃 +栄 +栅 +标 +栈 +栉 +栋 +栎 +栏 +树 +栓 +栖 +栗 +校 +栩 +株 +样 +核 +根 +格 +栽 +栾 +桀 +桁 +桂 +桃 +桅 +框 +案 +桉 +桌 +桎 +桐 +桑 +桓 +桔 +桜 +桠 +桡 +桢 +档 +桥 +桦 +桧 +桨 +桩 +桶 +桿 +梁 +梅 +梆 +梏 +梓 +梗 +條 +梟 +梢 +梦 +梧 +梨 +梭 +梯 +械 +梳 +梵 +梶 +检 +棂 +棄 +棉 +棋 +棍 +棒 +棕 +棗 +棘 +棚 +棟 +棠 +棣 +棧 +森 +棱 +棲 +棵 +棹 +棺 +椁 +椅 +椋 +植 +椎 +椒 +検 +椪 +椭 +椰 +椹 +椽 +椿 +楂 +楊 +楓 +楔 +楚 +楝 +楞 +楠 +楣 +楨 +楫 +業 +楮 +極 +楷 +楸 +楹 +楼 +楽 +概 +榄 +榆 +榈 +榉 +榔 +榕 +榖 +榛 +榜 +榨 +榫 +榭 +榮 +榱 +榴 +榷 +榻 +槁 +槃 +構 +槌 +槍 +槎 +槐 +槓 +様 +槛 +槟 +槤 +槭 +槲 +槳 +槻 +槽 +槿 +樁 +樂 +樊 +樑 +樓 +標 +樞 +樟 +模 +樣 +権 +横 +樫 +樯 +樱 +樵 +樸 +樹 +樺 +樽 +樾 +橄 +橇 +橋 +橐 +橘 +橙 +機 +橡 +橢 +橫 +橱 +橹 +橼 +檀 +檄 +檎 +檐 +檔 +檗 +檜 +檢 +檬 +檯 +檳 +檸 +檻 +櫃 +櫚 +櫛 +櫥 +櫸 +櫻 +欄 +權 +欒 +欖 +欠 +次 +欢 +欣 +欧 +欲 +欸 +欺 +欽 +款 +歆 +歇 +歉 +歌 +歎 +歐 +歓 +歙 +歛 +歡 +止 +正 +此 +步 +武 +歧 +歩 +歪 +歯 +歲 +歳 +歴 +歷 +歸 +歹 +死 +歼 +殁 +殃 +殆 +殇 +殉 +殊 +残 +殒 +殓 +殖 +殘 +殞 +殡 +殤 +殭 +殯 +殲 +殴 +段 +殷 +殺 +殼 +殿 +毀 +毁 +毂 +毅 +毆 +毋 +母 +毎 +每 +毒 +毓 +比 +毕 +毗 +毘 +毙 +毛 +毡 +毫 +毯 +毽 +氈 +氏 +氐 +民 +氓 +气 +氖 +気 +氙 +氛 +氟 +氡 +氢 +氣 +氤 +氦 +氧 +氨 +氪 +氫 +氮 +氯 +氰 +氲 +水 +氷 +永 +氹 +氾 +汀 +汁 +求 +汆 +汇 +汉 +汎 +汐 +汕 +汗 +汙 +汛 +汝 +汞 +江 +池 +污 +汤 +汨 +汩 +汪 +汰 +汲 +汴 +汶 +汹 +決 +汽 +汾 +沁 +沂 +沃 +沅 +沈 +沉 +沌 +沏 +沐 +沒 +沓 +沖 +沙 +沛 +沟 +没 +沢 +沣 +沥 +沦 +沧 +沪 +沫 +沭 +沮 +沱 +河 +沸 +油 +治 +沼 +沽 +沾 +沿 +況 +泄 +泉 +泊 +泌 +泓 +法 +泗 +泛 +泞 +泠 +泡 +波 +泣 +泥 +注 +泪 +泫 +泮 +泯 +泰 +泱 +泳 +泵 +泷 +泸 +泻 +泼 +泽 +泾 +洁 +洄 +洋 +洒 +洗 +洙 +洛 +洞 +津 +洩 +洪 +洮 +洱 +洲 +洵 +洶 +洸 +洹 +活 +洼 +洽 +派 +流 +浃 +浄 +浅 +浆 +浇 +浊 +测 +济 +浏 +浑 +浒 +浓 +浔 +浙 +浚 +浜 +浣 +浦 +浩 +浪 +浬 +浮 +浯 +浴 +海 +浸 +涂 +涅 +涇 +消 +涉 +涌 +涎 +涓 +涔 +涕 +涙 +涛 +涝 +涞 +涟 +涠 +涡 +涣 +涤 +润 +涧 +涨 +涩 +涪 +涮 +涯 +液 +涵 +涸 +涼 +涿 +淀 +淄 +淅 
+淆 +淇 +淋 +淌 +淑 +淒 +淖 +淘 +淙 +淚 +淞 +淡 +淤 +淦 +淨 +淩 +淪 +淫 +淬 +淮 +深 +淳 +淵 +混 +淹 +淺 +添 +淼 +清 +済 +渉 +渊 +渋 +渍 +渎 +渐 +渔 +渗 +渙 +渚 +減 +渝 +渠 +渡 +渣 +渤 +渥 +渦 +温 +測 +渭 +港 +渲 +渴 +游 +渺 +渾 +湃 +湄 +湊 +湍 +湖 +湘 +湛 +湟 +湧 +湫 +湮 +湯 +湳 +湾 +湿 +満 +溃 +溅 +溉 +溏 +源 +準 +溜 +溝 +溟 +溢 +溥 +溧 +溪 +溫 +溯 +溱 +溴 +溶 +溺 +溼 +滁 +滂 +滄 +滅 +滇 +滋 +滌 +滑 +滓 +滔 +滕 +滙 +滚 +滝 +滞 +滟 +满 +滢 +滤 +滥 +滦 +滨 +滩 +滬 +滯 +滲 +滴 +滷 +滸 +滾 +滿 +漁 +漂 +漆 +漉 +漏 +漓 +演 +漕 +漠 +漢 +漣 +漩 +漪 +漫 +漬 +漯 +漱 +漲 +漳 +漸 +漾 +漿 +潆 +潇 +潋 +潍 +潑 +潔 +潘 +潛 +潜 +潞 +潟 +潢 +潤 +潦 +潧 +潭 +潮 +潰 +潴 +潸 +潺 +潼 +澀 +澄 +澆 +澈 +澍 +澎 +澗 +澜 +澡 +澤 +澧 +澱 +澳 +澹 +激 +濁 +濂 +濃 +濑 +濒 +濕 +濘 +濛 +濟 +濠 +濡 +濤 +濫 +濬 +濮 +濯 +濱 +濺 +濾 +瀅 +瀆 +瀉 +瀋 +瀏 +瀑 +瀕 +瀘 +瀚 +瀛 +瀝 +瀞 +瀟 +瀧 +瀨 +瀬 +瀰 +瀾 +灌 +灏 +灑 +灘 +灝 +灞 +灣 +火 +灬 +灭 +灯 +灰 +灵 +灶 +灸 +灼 +災 +灾 +灿 +炀 +炁 +炅 +炉 +炊 +炎 +炒 +炔 +炕 +炖 +炙 +炜 +炫 +炬 +炭 +炮 +炯 +炳 +炷 +炸 +点 +為 +炼 +炽 +烁 +烂 +烃 +烈 +烊 +烏 +烘 +烙 +烛 +烟 +烤 +烦 +烧 +烨 +烩 +烫 +烬 +热 +烯 +烷 +烹 +烽 +焉 +焊 +焕 +焖 +焗 +焘 +焙 +焚 +焜 +無 +焦 +焯 +焰 +焱 +然 +焼 +煅 +煉 +煊 +煌 +煎 +煒 +煖 +煙 +煜 +煞 +煤 +煥 +煦 +照 +煨 +煩 +煮 +煲 +煸 +煽 +熄 +熊 +熏 +熒 +熔 +熙 +熟 +熠 +熨 +熬 +熱 +熵 +熹 +熾 +燁 +燃 +燄 +燈 +燉 +燊 +燎 +燒 +燔 +燕 +燙 +燜 +營 +燥 +燦 +燧 +燭 +燮 +燴 +燻 +燼 +燿 +爆 +爍 +爐 +爛 +爪 +爬 +爭 +爰 +爱 +爲 +爵 +父 +爷 +爸 +爹 +爺 +爻 +爽 +爾 +牆 +片 +版 +牌 +牍 +牒 +牙 +牛 +牝 +牟 +牠 +牡 +牢 +牦 +牧 +物 +牯 +牲 +牴 +牵 +特 +牺 +牽 +犀 +犁 +犄 +犊 +犍 +犒 +犢 +犧 +犬 +犯 +状 +犷 +犸 +犹 +狀 +狂 +狄 +狈 +狎 +狐 +狒 +狗 +狙 +狞 +狠 +狡 +狩 +独 +狭 +狮 +狰 +狱 +狸 +狹 +狼 +狽 +猎 +猕 +猖 +猗 +猙 +猛 +猜 +猝 +猥 +猩 +猪 +猫 +猬 +献 +猴 +猶 +猷 +猾 +猿 +獄 +獅 +獎 +獐 +獒 +獗 +獠 +獣 +獨 +獭 +獰 +獲 +獵 +獷 +獸 +獺 +獻 +獼 +獾 +玄 +率 +玉 +王 +玑 +玖 +玛 +玟 +玠 +玥 +玩 +玫 +玮 +环 +现 +玲 +玳 +玷 +玺 +玻 +珀 +珂 +珅 +珈 +珉 +珊 +珍 +珏 +珐 +珑 +珙 +珞 +珠 +珣 +珥 +珩 +珪 +班 +珮 +珲 +珺 +現 +球 +琅 +理 +琇 +琉 +琊 +琍 +琏 +琐 +琛 +琢 +琥 +琦 +琨 +琪 +琬 +琮 +琰 +琲 +琳 +琴 +琵 +琶 +琺 +琼 +瑀 +瑁 +瑄 +瑋 +瑕 +瑗 +瑙 +瑚 +瑛 +瑜 +瑞 +瑟 +瑠 +瑣 +瑤 +瑩 +瑪 +瑯 +瑰 +瑶 +瑾 +璀 +璁 +璃 +璇 +璉 +璋 +璎 +璐 +璜 +璞 +璟 +璧 +璨 +環 +璽 +璿 +瓊 +瓏 +瓒 +瓜 +瓢 +瓣 +瓤 +瓦 +瓮 +瓯 +瓴 +瓶 +瓷 +甄 +甌 +甕 +甘 +甙 +甚 +甜 +生 +產 +産 +甥 +甦 +用 +甩 +甫 +甬 +甭 +甯 +田 +由 +甲 +申 +电 +男 +甸 +町 +画 +甾 +畀 +畅 +界 +畏 +畑 +畔 +留 +畜 +畝 +畢 +略 +畦 +番 +畫 +異 +畲 +畳 +畴 +當 +畸 +畹 +畿 +疆 +疇 +疊 +疏 +疑 +疔 +疖 +疗 +疙 +疚 +疝 +疟 +疡 +疣 +疤 +疥 +疫 +疮 +疯 +疱 +疲 +疳 +疵 +疸 +疹 +疼 +疽 +疾 +痂 +病 +症 +痈 +痉 +痊 +痍 +痒 +痔 +痕 +痘 +痙 +痛 +痞 +痠 +痢 +痣 +痤 +痧 +痨 +痪 +痫 +痰 +痱 +痴 +痹 +痺 +痼 +痿 +瘀 +瘁 +瘋 +瘍 +瘓 +瘘 +瘙 +瘟 +瘠 +瘡 +瘢 +瘤 +瘦 +瘧 +瘩 +瘪 +瘫 +瘴 +瘸 +瘾 +療 +癇 +癌 +癒 +癖 +癜 +癞 +癡 +癢 +癣 +癥 +癫 +癬 +癮 +癱 +癲 +癸 +発 +登 +發 +白 +百 +皂 +的 +皆 +皇 +皈 +皋 +皎 +皑 +皓 +皖 +皙 +皚 +皮 +皰 +皱 +皴 +皺 +皿 +盂 +盃 +盅 +盆 +盈 +益 +盎 +盏 +盐 +监 +盒 +盔 +盖 +盗 +盘 +盛 +盜 +盞 +盟 +盡 +監 +盤 +盥 +盧 +盪 +目 +盯 +盱 +盲 +直 +相 +盹 +盼 +盾 +省 +眈 +眉 +看 +県 +眙 +眞 +真 +眠 +眦 +眨 +眩 +眯 +眶 +眷 +眸 +眺 +眼 +眾 +着 +睁 +睇 +睏 +睐 +睑 +睛 +睜 +睞 +睡 +睢 +督 +睥 +睦 +睨 +睪 +睫 +睬 +睹 +睽 +睾 +睿 +瞄 +瞅 +瞇 +瞋 +瞌 +瞎 +瞑 +瞒 +瞓 +瞞 +瞟 +瞠 +瞥 +瞧 +瞩 +瞪 +瞬 +瞭 +瞰 +瞳 +瞻 +瞼 +瞿 +矇 +矍 +矗 +矚 +矛 +矜 +矢 +矣 +知 +矩 +矫 +短 +矮 +矯 +石 +矶 +矽 +矾 +矿 +码 +砂 +砌 +砍 +砒 +研 +砖 +砗 +砚 +砝 +砣 +砥 +砧 +砭 +砰 +砲 +破 +砷 +砸 +砺 +砼 +砾 +础 +硅 +硐 +硒 +硕 +硝 +硫 +硬 +确 +硯 +硼 +碁 +碇 +碉 +碌 +碍 +碎 +碑 +碓 +碗 +碘 +碚 +碛 +碟 +碣 +碧 +碩 +碰 +碱 +碳 +碴 +確 +碼 +碾 +磁 +磅 +磊 +磋 +磐 +磕 +磚 +磡 +磨 +磬 +磯 +磲 +磷 +磺 +礁 +礎 +礙 +礡 +礦 +礪 +礫 +礴 +示 +礼 +社 +祀 +祁 +祂 +祇 +祈 +祉 +祎 +祐 +祕 +祖 +祗 +祚 +祛 +祜 +祝 +神 +祟 +祠 +祢 +祥 +票 +祭 +祯 +祷 +祸 +祺 +祿 +禀 +禁 +禄 +禅 +禍 +禎 +福 +禛 +禦 +禧 +禪 +禮 +禱 +禹 +禺 +离 +禽 +禾 +禿 +秀 +私 +秃 +秆 +秉 +秋 +种 +科 +秒 +秘 +租 +秣 +秤 +秦 +秧 +秩 +秭 +积 +称 +秸 +移 +秽 +稀 +稅 +程 +稍 +税 +稔 +稗 +稚 +稜 +稞 +稟 +稠 +稣 +種 +稱 +稲 +稳 +稷 +稹 +稻 +稼 +稽 +稿 +穀 +穂 +穆 +穌 +積 +穎 +穗 +穢 +穩 +穫 +穴 +究 +穷 +穹 +空 +穿 +突 +窃 +窄 +窈 +窍 +窑 +窒 +窓 +窕 +窖 +窗 +窘 +窜 +窝 +窟 +窠 +窥 +窦 +窨 +窩 +窪 +窮 +窯 +窺 +窿 +竄 +竅 +竇 +竊 +立 +竖 +站 +竜 +竞 +竟 +章 +竣 +童 +竭 +端 +競 +竹 +竺 +竽 +竿 +笃 +笆 +笈 +笋 +笏 +笑 +笔 +笙 +笛 +笞 +笠 +符 +笨 +第 +笹 +笺 +笼 +筆 +等 +筊 +筋 +筍 +筏 +筐 +筑 +筒 +答 +策 +筛 +筝 +筠 +筱 +筲 +筵 +筷 +筹 +签 +简 +箇 +箋 +箍 +箏 +箐 +箔 +箕 +算 +箝 +管 +箩 +箫 +箭 +箱 +箴 +箸 +節 +篁 +範 +篆 +篇 +築 +篑 +篓 +篙 +篝 +篠 +篡 +篤 +篩 +篪 +篮 +篱 +篷 +簇 +簌 +簍 +簡 +簦 +簧 
+簪 +簫 +簷 +簸 +簽 +簾 +簿 +籁 +籃 +籌 +籍 +籐 +籟 +籠 +籤 +籬 +籮 +籲 +米 +类 +籼 +籽 +粄 +粉 +粑 +粒 +粕 +粗 +粘 +粟 +粤 +粥 +粧 +粪 +粮 +粱 +粲 +粳 +粵 +粹 +粼 +粽 +精 +粿 +糅 +糊 +糍 +糕 +糖 +糗 +糙 +糜 +糞 +糟 +糠 +糧 +糬 +糯 +糰 +糸 +系 +糾 +紀 +紂 +約 +紅 +紉 +紊 +紋 +納 +紐 +紓 +純 +紗 +紘 +紙 +級 +紛 +紜 +素 +紡 +索 +紧 +紫 +紮 +累 +細 +紳 +紹 +紺 +終 +絃 +組 +絆 +経 +結 +絕 +絞 +絡 +絢 +給 +絨 +絮 +統 +絲 +絳 +絵 +絶 +絹 +綁 +綏 +綑 +經 +継 +続 +綜 +綠 +綢 +綦 +綫 +綬 +維 +綱 +網 +綴 +綵 +綸 +綺 +綻 +綽 +綾 +綿 +緊 +緋 +総 +緑 +緒 +緘 +線 +緝 +緞 +締 +緣 +編 +緩 +緬 +緯 +練 +緹 +緻 +縁 +縄 +縈 +縛 +縝 +縣 +縫 +縮 +縱 +縴 +縷 +總 +績 +繁 +繃 +繆 +繇 +繋 +織 +繕 +繚 +繞 +繡 +繩 +繪 +繫 +繭 +繳 +繹 +繼 +繽 +纂 +續 +纍 +纏 +纓 +纔 +纖 +纜 +纠 +红 +纣 +纤 +约 +级 +纨 +纪 +纫 +纬 +纭 +纯 +纰 +纱 +纲 +纳 +纵 +纶 +纷 +纸 +纹 +纺 +纽 +纾 +线 +绀 +练 +组 +绅 +细 +织 +终 +绊 +绍 +绎 +经 +绑 +绒 +结 +绔 +绕 +绘 +给 +绚 +绛 +络 +绝 +绞 +统 +绡 +绢 +绣 +绥 +绦 +继 +绩 +绪 +绫 +续 +绮 +绯 +绰 +绳 +维 +绵 +绶 +绷 +绸 +绻 +综 +绽 +绾 +绿 +缀 +缄 +缅 +缆 +缇 +缈 +缉 +缎 +缓 +缔 +缕 +编 +缘 +缙 +缚 +缜 +缝 +缠 +缢 +缤 +缥 +缨 +缩 +缪 +缭 +缮 +缰 +缱 +缴 +缸 +缺 +缽 +罂 +罄 +罌 +罐 +网 +罔 +罕 +罗 +罚 +罡 +罢 +罩 +罪 +置 +罰 +署 +罵 +罷 +罹 +羁 +羅 +羈 +羊 +羌 +美 +羔 +羚 +羞 +羟 +羡 +羣 +群 +羥 +羧 +羨 +義 +羯 +羲 +羸 +羹 +羽 +羿 +翁 +翅 +翊 +翌 +翎 +習 +翔 +翘 +翟 +翠 +翡 +翦 +翩 +翰 +翱 +翳 +翹 +翻 +翼 +耀 +老 +考 +耄 +者 +耆 +耋 +而 +耍 +耐 +耒 +耕 +耗 +耘 +耙 +耦 +耨 +耳 +耶 +耷 +耸 +耻 +耽 +耿 +聂 +聆 +聊 +聋 +职 +聒 +联 +聖 +聘 +聚 +聞 +聪 +聯 +聰 +聲 +聳 +聴 +聶 +職 +聽 +聾 +聿 +肃 +肄 +肅 +肆 +肇 +肉 +肋 +肌 +肏 +肓 +肖 +肘 +肚 +肛 +肝 +肠 +股 +肢 +肤 +肥 +肩 +肪 +肮 +肯 +肱 +育 +肴 +肺 +肽 +肾 +肿 +胀 +胁 +胃 +胄 +胆 +背 +胍 +胎 +胖 +胚 +胛 +胜 +胝 +胞 +胡 +胤 +胥 +胧 +胫 +胭 +胯 +胰 +胱 +胳 +胴 +胶 +胸 +胺 +能 +脂 +脅 +脆 +脇 +脈 +脉 +脊 +脍 +脏 +脐 +脑 +脓 +脖 +脘 +脚 +脛 +脣 +脩 +脫 +脯 +脱 +脲 +脳 +脸 +脹 +脾 +腆 +腈 +腊 +腋 +腌 +腎 +腐 +腑 +腓 +腔 +腕 +腥 +腦 +腩 +腫 +腭 +腮 +腰 +腱 +腳 +腴 +腸 +腹 +腺 +腻 +腼 +腾 +腿 +膀 +膈 +膊 +膏 +膑 +膘 +膚 +膛 +膜 +膝 +膠 +膦 +膨 +膩 +膳 +膺 +膻 +膽 +膾 +膿 +臀 +臂 +臃 +臆 +臉 +臊 +臍 +臓 +臘 +臟 +臣 +臥 +臧 +臨 +自 +臬 +臭 +至 +致 +臺 +臻 +臼 +臾 +舀 +舂 +舅 +舆 +與 +興 +舉 +舊 +舌 +舍 +舎 +舐 +舒 +舔 +舖 +舗 +舛 +舜 +舞 +舟 +航 +舫 +般 +舰 +舱 +舵 +舶 +舷 +舸 +船 +舺 +舾 +艇 +艋 +艘 +艙 +艦 +艮 +良 +艰 +艱 +色 +艳 +艷 +艹 +艺 +艾 +节 +芃 +芈 +芊 +芋 +芍 +芎 +芒 +芙 +芜 +芝 +芡 +芥 +芦 +芩 +芪 +芫 +芬 +芭 +芮 +芯 +花 +芳 +芷 +芸 +芹 +芻 +芽 +芾 +苁 +苄 +苇 +苋 +苍 +苏 +苑 +苒 +苓 +苔 +苕 +苗 +苛 +苜 +苞 +苟 +苡 +苣 +若 +苦 +苫 +苯 +英 +苷 +苹 +苻 +茁 +茂 +范 +茄 +茅 +茉 +茎 +茏 +茗 +茜 +茧 +茨 +茫 +茬 +茭 +茯 +茱 +茲 +茴 +茵 +茶 +茸 +茹 +茼 +荀 +荃 +荆 +草 +荊 +荏 +荐 +荒 +荔 +荖 +荘 +荚 +荞 +荟 +荠 +荡 +荣 +荤 +荥 +荧 +荨 +荪 +荫 +药 +荳 +荷 +荸 +荻 +荼 +荽 +莅 +莆 +莉 +莊 +莎 +莒 +莓 +莖 +莘 +莞 +莠 +莢 +莧 +莪 +莫 +莱 +莲 +莴 +获 +莹 +莺 +莽 +莿 +菀 +菁 +菅 +菇 +菈 +菊 +菌 +菏 +菓 +菖 +菘 +菜 +菟 +菠 +菡 +菩 +華 +菱 +菲 +菸 +菽 +萁 +萃 +萄 +萊 +萋 +萌 +萍 +萎 +萘 +萝 +萤 +营 +萦 +萧 +萨 +萩 +萬 +萱 +萵 +萸 +萼 +落 +葆 +葉 +著 +葚 +葛 +葡 +董 +葦 +葩 +葫 +葬 +葭 +葯 +葱 +葳 +葵 +葷 +葺 +蒂 +蒋 +蒐 +蒔 +蒙 +蒜 +蒞 +蒟 +蒡 +蒨 +蒲 +蒸 +蒹 +蒻 +蒼 +蒿 +蓁 +蓄 +蓆 +蓉 +蓋 +蓑 +蓓 +蓖 +蓝 +蓟 +蓦 +蓬 +蓮 +蓼 +蓿 +蔑 +蔓 +蔔 +蔗 +蔘 +蔚 +蔡 +蔣 +蔥 +蔫 +蔬 +蔭 +蔵 +蔷 +蔺 +蔻 +蔼 +蔽 +蕁 +蕃 +蕈 +蕉 +蕊 +蕎 +蕙 +蕤 +蕨 +蕩 +蕪 +蕭 +蕲 +蕴 +蕻 +蕾 +薄 +薅 +薇 +薈 +薊 +薏 +薑 +薔 +薙 +薛 +薦 +薨 +薩 +薪 +薬 +薯 +薰 +薹 +藉 +藍 +藏 +藐 +藓 +藕 +藜 +藝 +藤 +藥 +藩 +藹 +藻 +藿 +蘆 +蘇 +蘊 +蘋 +蘑 +蘚 +蘭 +蘸 +蘼 +蘿 +虎 +虏 +虐 +虑 +虔 +處 +虚 +虛 +虜 +虞 +號 +虢 +虧 +虫 +虬 +虱 +虹 +虻 +虽 +虾 +蚀 +蚁 +蚂 +蚊 +蚌 +蚓 +蚕 +蚜 +蚝 +蚣 +蚤 +蚩 +蚪 +蚯 +蚱 +蚵 +蛀 +蛆 +蛇 +蛊 +蛋 +蛎 +蛐 +蛔 +蛙 +蛛 +蛟 +蛤 +蛭 +蛮 +蛰 +蛳 +蛹 +蛻 +蛾 +蜀 +蜂 +蜃 +蜆 +蜇 +蜈 +蜊 +蜍 +蜒 +蜓 +蜕 +蜗 +蜘 +蜚 +蜜 +蜡 +蜢 +蜥 +蜱 +蜴 +蜷 +蜻 +蜿 +蝇 +蝈 +蝉 +蝌 +蝎 +蝕 +蝗 +蝙 +蝟 +蝠 +蝦 +蝨 +蝴 +蝶 +蝸 +蝼 +螂 +螃 +融 +螞 +螢 +螨 +螯 +螳 +螺 +蟀 +蟄 +蟆 +蟋 +蟎 +蟑 +蟒 +蟠 +蟬 +蟲 +蟹 +蟻 +蟾 +蠅 +蠍 +蠔 +蠕 +蠛 +蠟 +蠡 +蠢 +蠣 +蠱 +蠶 +蠹 +蠻 +血 +衄 +衅 +衆 +行 +衍 +術 +衔 +街 +衙 +衛 +衝 +衞 +衡 +衢 +衣 +补 +表 +衩 +衫 +衬 +衮 +衰 +衲 +衷 +衹 +衾 +衿 +袁 +袂 +袄 +袅 +袈 +袋 +袍 +袒 +袖 +袜 +袞 +袤 +袪 +被 +袭 +袱 +裁 +裂 +装 +裆 +裊 +裏 +裔 +裕 +裘 +裙 +補 +裝 +裟 +裡 +裤 +裨 +裱 +裳 +裴 +裸 +裹 +製 +裾 +褂 +複 +褐 +褒 +褓 +褔 +褚 +褥 +褪 +褫 +褲 +褶 +褻 +襁 +襄 +襟 +襠 +襪 +襬 +襯 +襲 +西 +要 +覃 +覆 +覇 +見 +規 +覓 +視 +覚 +覦 +覧 +親 +覬 +観 +覷 +覺 +覽 +觀 +见 +观 +规 +觅 +视 +览 +觉 +觊 +觎 +觐 +觑 +角 +觞 +解 +觥 +触 +觸 +言 +訂 +計 +訊 +討 +訓 +訕 +訖 +託 +記 +訛 +訝 +訟 +訣 +訥 +訪 +設 +許 +訳 +訴 +訶 +診 +註 +証 +詆 +詐 +詔 
+評 +詛 +詞 +詠 +詡 +詢 +詣 +試 +詩 +詫 +詬 +詭 +詮 +詰 +話 +該 +詳 +詹 +詼 +誅 +誇 +誉 +誌 +認 +誓 +誕 +誘 +語 +誠 +誡 +誣 +誤 +誥 +誦 +誨 +說 +説 +読 +誰 +課 +誹 +誼 +調 +諄 +談 +請 +諏 +諒 +論 +諗 +諜 +諡 +諦 +諧 +諫 +諭 +諮 +諱 +諳 +諷 +諸 +諺 +諾 +謀 +謁 +謂 +謄 +謊 +謎 +謐 +謔 +謗 +謙 +講 +謝 +謠 +謨 +謬 +謹 +謾 +譁 +證 +譎 +譏 +識 +譙 +譚 +譜 +警 +譬 +譯 +議 +譲 +譴 +護 +譽 +讀 +變 +讓 +讚 +讞 +计 +订 +认 +讥 +讧 +讨 +让 +讪 +讫 +训 +议 +讯 +记 +讲 +讳 +讴 +讶 +讷 +许 +讹 +论 +讼 +讽 +设 +访 +诀 +证 +诃 +评 +诅 +识 +诈 +诉 +诊 +诋 +词 +诏 +译 +试 +诗 +诘 +诙 +诚 +诛 +话 +诞 +诟 +诠 +诡 +询 +诣 +诤 +该 +详 +诧 +诩 +诫 +诬 +语 +误 +诰 +诱 +诲 +说 +诵 +诶 +请 +诸 +诺 +读 +诽 +课 +诿 +谀 +谁 +调 +谄 +谅 +谆 +谈 +谊 +谋 +谌 +谍 +谎 +谏 +谐 +谑 +谒 +谓 +谔 +谕 +谗 +谘 +谙 +谚 +谛 +谜 +谟 +谢 +谣 +谤 +谥 +谦 +谧 +谨 +谩 +谪 +谬 +谭 +谯 +谱 +谲 +谴 +谶 +谷 +豁 +豆 +豇 +豈 +豉 +豊 +豌 +豎 +豐 +豔 +豚 +象 +豢 +豪 +豫 +豬 +豹 +豺 +貂 +貅 +貌 +貓 +貔 +貘 +貝 +貞 +負 +財 +貢 +貧 +貨 +販 +貪 +貫 +責 +貯 +貰 +貳 +貴 +貶 +買 +貸 +費 +貼 +貽 +貿 +賀 +賁 +賂 +賃 +賄 +資 +賈 +賊 +賑 +賓 +賜 +賞 +賠 +賡 +賢 +賣 +賤 +賦 +質 +賬 +賭 +賴 +賺 +購 +賽 +贅 +贈 +贊 +贍 +贏 +贓 +贖 +贛 +贝 +贞 +负 +贡 +财 +责 +贤 +败 +账 +货 +质 +贩 +贪 +贫 +贬 +购 +贮 +贯 +贰 +贱 +贲 +贴 +贵 +贷 +贸 +费 +贺 +贻 +贼 +贾 +贿 +赁 +赂 +赃 +资 +赅 +赈 +赊 +赋 +赌 +赎 +赏 +赐 +赓 +赔 +赖 +赘 +赚 +赛 +赝 +赞 +赠 +赡 +赢 +赣 +赤 +赦 +赧 +赫 +赭 +走 +赳 +赴 +赵 +赶 +起 +趁 +超 +越 +趋 +趕 +趙 +趟 +趣 +趨 +足 +趴 +趵 +趸 +趺 +趾 +跃 +跄 +跆 +跋 +跌 +跎 +跑 +跖 +跚 +跛 +距 +跟 +跡 +跤 +跨 +跩 +跪 +路 +跳 +践 +跷 +跹 +跺 +跻 +踉 +踊 +踌 +踏 +踐 +踝 +踞 +踟 +踢 +踩 +踪 +踮 +踱 +踴 +踵 +踹 +蹂 +蹄 +蹇 +蹈 +蹉 +蹊 +蹋 +蹑 +蹒 +蹙 +蹟 +蹣 +蹤 +蹦 +蹩 +蹬 +蹭 +蹲 +蹴 +蹶 +蹺 +蹼 +蹿 +躁 +躇 +躉 +躊 +躋 +躍 +躏 +躪 +身 +躬 +躯 +躲 +躺 +軀 +車 +軋 +軌 +軍 +軒 +軟 +転 +軸 +軼 +軽 +軾 +較 +載 +輒 +輓 +輔 +輕 +輛 +輝 +輟 +輩 +輪 +輯 +輸 +輻 +輾 +輿 +轄 +轅 +轆 +轉 +轍 +轎 +轟 +车 +轧 +轨 +轩 +转 +轭 +轮 +软 +轰 +轲 +轴 +轶 +轻 +轼 +载 +轿 +较 +辄 +辅 +辆 +辇 +辈 +辉 +辊 +辍 +辐 +辑 +输 +辕 +辖 +辗 +辘 +辙 +辛 +辜 +辞 +辟 +辣 +辦 +辨 +辩 +辫 +辭 +辮 +辯 +辰 +辱 +農 +边 +辺 +辻 +込 +辽 +达 +迁 +迂 +迄 +迅 +过 +迈 +迎 +运 +近 +返 +还 +这 +进 +远 +违 +连 +迟 +迢 +迤 +迥 +迦 +迩 +迪 +迫 +迭 +述 +迴 +迷 +迸 +迹 +迺 +追 +退 +送 +适 +逃 +逅 +逆 +选 +逊 +逍 +透 +逐 +递 +途 +逕 +逗 +這 +通 +逛 +逝 +逞 +速 +造 +逢 +連 +逮 +週 +進 +逵 +逶 +逸 +逻 +逼 +逾 +遁 +遂 +遅 +遇 +遊 +運 +遍 +過 +遏 +遐 +遑 +遒 +道 +達 +違 +遗 +遙 +遛 +遜 +遞 +遠 +遢 +遣 +遥 +遨 +適 +遭 +遮 +遲 +遴 +遵 +遶 +遷 +選 +遺 +遼 +遽 +避 +邀 +邁 +邂 +邃 +還 +邇 +邈 +邊 +邋 +邏 +邑 +邓 +邕 +邛 +邝 +邢 +那 +邦 +邨 +邪 +邬 +邮 +邯 +邰 +邱 +邳 +邵 +邸 +邹 +邺 +邻 +郁 +郅 +郊 +郎 +郑 +郜 +郝 +郡 +郢 +郤 +郦 +郧 +部 +郫 +郭 +郴 +郵 +郷 +郸 +都 +鄂 +鄉 +鄒 +鄔 +鄙 +鄞 +鄢 +鄧 +鄭 +鄰 +鄱 +鄲 +鄺 +酉 +酊 +酋 +酌 +配 +酐 +酒 +酗 +酚 +酝 +酢 +酣 +酥 +酩 +酪 +酬 +酮 +酯 +酰 +酱 +酵 +酶 +酷 +酸 +酿 +醃 +醇 +醉 +醋 +醍 +醐 +醒 +醚 +醛 +醜 +醞 +醣 +醪 +醫 +醬 +醮 +醯 +醴 +醺 +釀 +釁 +采 +釉 +释 +釋 +里 +重 +野 +量 +釐 +金 +釗 +釘 +釜 +針 +釣 +釦 +釧 +釵 +鈀 +鈉 +鈍 +鈎 +鈔 +鈕 +鈞 +鈣 +鈦 +鈪 +鈴 +鈺 +鈾 +鉀 +鉄 +鉅 +鉉 +鉑 +鉗 +鉚 +鉛 +鉤 +鉴 +鉻 +銀 +銃 +銅 +銑 +銓 +銖 +銘 +銜 +銬 +銭 +銮 +銳 +銷 +銹 +鋁 +鋅 +鋒 +鋤 +鋪 +鋰 +鋸 +鋼 +錄 +錐 +錘 +錚 +錠 +錢 +錦 +錨 +錫 +錮 +錯 +録 +錳 +錶 +鍊 +鍋 +鍍 +鍛 +鍥 +鍰 +鍵 +鍺 +鍾 +鎂 +鎊 +鎌 +鎏 +鎔 +鎖 +鎗 +鎚 +鎧 +鎬 +鎮 +鎳 +鏈 +鏖 +鏗 +鏘 +鏞 +鏟 +鏡 +鏢 +鏤 +鏽 +鐘 +鐮 +鐲 +鐳 +鐵 +鐸 +鐺 +鑄 +鑊 +鑑 +鑒 +鑣 +鑫 +鑰 +鑲 +鑼 +鑽 +鑾 +鑿 +针 +钉 +钊 +钎 +钏 +钒 +钓 +钗 +钙 +钛 +钜 +钝 +钞 +钟 +钠 +钡 +钢 +钣 +钤 +钥 +钦 +钧 +钨 +钩 +钮 +钯 +钰 +钱 +钳 +钴 +钵 +钺 +钻 +钼 +钾 +钿 +铀 +铁 +铂 +铃 +铄 +铅 +铆 +铉 +铎 +铐 +铛 +铜 +铝 +铠 +铡 +铢 +铣 +铤 +铨 +铩 +铬 +铭 +铮 +铰 +铲 +铵 +银 +铸 +铺 +链 +铿 +销 +锁 +锂 +锄 +锅 +锆 +锈 +锉 +锋 +锌 +锏 +锐 +锑 +错 +锚 +锟 +锡 +锢 +锣 +锤 +锥 +锦 +锭 +键 +锯 +锰 +锲 +锵 +锹 +锺 +锻 +镀 +镁 +镂 +镇 +镉 +镌 +镍 +镐 +镑 +镕 +镖 +镗 +镛 +镜 +镣 +镭 +镯 +镰 +镳 +镶 +長 +长 +門 +閃 +閉 +開 +閎 +閏 +閑 +閒 +間 +閔 +閘 +閡 +関 +閣 +閥 +閨 +閩 +閱 +閲 +閹 +閻 +閾 +闆 +闇 +闊 +闌 +闍 +闔 +闕 +闖 +闘 +關 +闡 +闢 +门 +闪 +闫 +闭 +问 +闯 +闰 +闲 +间 +闵 +闷 +闸 +闹 +闺 +闻 +闽 +闾 +阀 +阁 +阂 +阅 +阆 +阇 +阈 +阉 +阎 +阐 +阑 +阔 +阕 +阖 +阙 +阚 +阜 +队 +阡 +阪 +阮 +阱 +防 +阳 +阴 +阵 +阶 +阻 +阿 +陀 +陂 +附 +际 +陆 +陇 +陈 +陋 +陌 +降 +限 +陕 +陛 +陝 +陞 +陟 +陡 +院 +陣 +除 +陨 +险 +陪 +陰 +陲 +陳 +陵 +陶 +陷 +陸 +険 +陽 +隅 +隆 +隈 +隊 +隋 +隍 +階 +随 +隐 +隔 +隕 +隘 +隙 +際 +障 +隠 +隣 +隧 +隨 +險 +隱 +隴 +隶 +隸 +隻 +隼 +隽 +难 +雀 +雁 +雄 +雅 +集 +雇 +雉 +雋 +雌 +雍 +雎 +雏 +雑 +雒 +雕 +雖 +雙 +雛 +雜 +雞 +離 +難 +雨 +雪 +雯 +雰 +雲 +雳 +零 +雷 +雹 +電 +雾 +需 +霁 +霄 +霆 +震 +霈 +霉 +霊 +霍 
+霎 +霏 +霑 +霓 +霖 +霜 +霞 +霧 +霭 +霰 +露 +霸 +霹 +霽 +霾 +靂 +靄 +靈 +青 +靓 +靖 +静 +靚 +靛 +靜 +非 +靠 +靡 +面 +靥 +靦 +革 +靳 +靴 +靶 +靼 +鞅 +鞋 +鞍 +鞏 +鞑 +鞘 +鞠 +鞣 +鞦 +鞭 +韆 +韋 +韌 +韓 +韜 +韦 +韧 +韩 +韬 +韭 +音 +韵 +韶 +韻 +響 +頁 +頂 +頃 +項 +順 +須 +頌 +預 +頑 +頒 +頓 +頗 +領 +頜 +頡 +頤 +頫 +頭 +頰 +頷 +頸 +頹 +頻 +頼 +顆 +題 +額 +顎 +顏 +顔 +願 +顛 +類 +顧 +顫 +顯 +顱 +顴 +页 +顶 +顷 +项 +顺 +须 +顼 +顽 +顾 +顿 +颁 +颂 +预 +颅 +领 +颇 +颈 +颉 +颊 +颌 +颍 +颐 +频 +颓 +颔 +颖 +颗 +题 +颚 +颛 +颜 +额 +颞 +颠 +颡 +颢 +颤 +颦 +颧 +風 +颯 +颱 +颳 +颶 +颼 +飄 +飆 +风 +飒 +飓 +飕 +飘 +飙 +飚 +飛 +飞 +食 +飢 +飨 +飩 +飪 +飯 +飲 +飼 +飽 +飾 +餃 +餅 +餉 +養 +餌 +餐 +餒 +餓 +餘 +餚 +餛 +餞 +餡 +館 +餮 +餵 +餾 +饅 +饈 +饋 +饌 +饍 +饑 +饒 +饕 +饗 +饞 +饥 +饨 +饪 +饬 +饭 +饮 +饯 +饰 +饱 +饲 +饴 +饵 +饶 +饷 +饺 +饼 +饽 +饿 +馀 +馁 +馄 +馅 +馆 +馈 +馋 +馍 +馏 +馒 +馔 +首 +馗 +香 +馥 +馨 +馬 +馭 +馮 +馳 +馴 +駁 +駄 +駅 +駆 +駐 +駒 +駕 +駛 +駝 +駭 +駱 +駿 +騁 +騎 +騏 +験 +騙 +騨 +騰 +騷 +驀 +驅 +驊 +驍 +驒 +驕 +驗 +驚 +驛 +驟 +驢 +驥 +马 +驭 +驮 +驯 +驰 +驱 +驳 +驴 +驶 +驷 +驸 +驹 +驻 +驼 +驾 +驿 +骁 +骂 +骄 +骅 +骆 +骇 +骈 +骊 +骋 +验 +骏 +骐 +骑 +骗 +骚 +骛 +骜 +骞 +骠 +骡 +骤 +骥 +骧 +骨 +骯 +骰 +骶 +骷 +骸 +骼 +髂 +髅 +髋 +髏 +髒 +髓 +體 +髖 +高 +髦 +髪 +髮 +髯 +髻 +鬃 +鬆 +鬍 +鬓 +鬚 +鬟 +鬢 +鬣 +鬥 +鬧 +鬱 +鬼 +魁 +魂 +魄 +魅 +魇 +魍 +魏 +魔 +魘 +魚 +魯 +魷 +鮑 +鮨 +鮪 +鮭 +鮮 +鯉 +鯊 +鯖 +鯛 +鯨 +鯰 +鯽 +鰍 +鰓 +鰭 +鰲 +鰻 +鰾 +鱈 +鱉 +鱔 +鱗 +鱷 +鱸 +鱼 +鱿 +鲁 +鲈 +鲍 +鲑 +鲛 +鲜 +鲟 +鲢 +鲤 +鲨 +鲫 +鲱 +鲲 +鲶 +鲷 +鲸 +鳃 +鳄 +鳅 +鳌 +鳍 +鳕 +鳖 +鳗 +鳝 +鳞 +鳥 +鳩 +鳳 +鳴 +鳶 +鴉 +鴕 +鴛 +鴦 +鴨 +鴻 +鴿 +鵑 +鵜 +鵝 +鵡 +鵬 +鵰 +鵲 +鶘 +鶩 +鶯 +鶴 +鷗 +鷲 +鷹 +鷺 +鸚 +鸞 +鸟 +鸠 +鸡 +鸢 +鸣 +鸥 +鸦 +鸨 +鸪 +鸭 +鸯 +鸳 +鸵 +鸽 +鸾 +鸿 +鹂 +鹃 +鹄 +鹅 +鹈 +鹉 +鹊 +鹌 +鹏 +鹑 +鹕 +鹘 +鹜 +鹞 +鹤 +鹦 +鹧 +鹫 +鹭 +鹰 +鹳 +鹵 +鹹 +鹼 +鹽 +鹿 +麂 +麋 +麒 +麓 +麗 +麝 +麟 +麥 +麦 +麩 +麴 +麵 +麸 +麺 +麻 +麼 +麽 +麾 +黃 +黄 +黍 +黎 +黏 +黑 +黒 +黔 +默 +黛 +黜 +黝 +點 +黠 +黨 +黯 +黴 +鼋 +鼎 +鼐 +鼓 +鼠 +鼬 +鼹 +鼻 +鼾 +齁 +齊 +齋 +齐 +齒 +齡 +齢 +齣 +齦 +齿 +龄 +龅 +龈 +龊 +龋 +龌 +龍 +龐 +龔 +龕 +龙 +龚 +龛 +龜 +龟 +︰ +︱ +︶ +︿ +﹁ +﹂ +﹍ +﹏ +﹐ +﹑ +﹒ +﹔ +﹕ +﹖ +﹗ +﹙ +﹚ +﹝ +﹞ +﹡ +﹣ +! +" +# +$ +% +& +' +( +) +* ++ +, +- +. +/ +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +: +; +< += +> +? +@ +[ +\ +] +^ +_ +` +a +b +c +d +e +f +g +h +i +j +k +l +m +n +o +p +q +r +s +t +u +v +w +x +y +z +{ +| +} +~ +。 +「 +」 +、 +・ +ッ +ー +イ +ク +シ +ス +ト +ノ +フ +ラ +ル +ン +゙ +゚ + ̄ +¥ +👍 +🔥 +😂 +😎 +... 
+yam +10 +2017 +12 +11 +2016 +20 +30 +15 +06 +lofter +##s +2015 +by +16 +14 +18 +13 +24 +17 +2014 +21 +##0 +22 +19 +25 +23 +com +100 +00 +05 +2013 +##a +03 +09 +08 +28 +##2 +50 +01 +04 +##1 +27 +02 +2012 +##3 +26 +##e +07 +##8 +##5 +##6 +##4 +##9 +##7 +29 +2011 +40 +##t +2010 +##o +##d +##i +2009 +##n +app +www +the +##m +31 +##c +##l +##y +##r +##g +2008 +60 +http +200 +qq +##p +80 +##f +google +pixnet +90 +cookies +tripadvisor +500 +##er +##k +35 +##h +facebook +2007 +2000 +70 +##b +of +##x +##u +45 +300 +iphone +32 +1000 +2006 +48 +ip +36 +in +38 +3d +##w +##ing +55 +ctrip +##on +##v +33 +##の +to +34 +400 +id +2005 +it +37 +windows +llc +top +99 +42 +39 +000 +led +at +##an +41 +51 +52 +46 +49 +43 +53 +44 +##z +android +58 +and +59 +2004 +56 +vr +##か +5000 +2003 +47 +blogthis +twitter +54 +##le +150 +ok +2018 +57 +75 +cn +no +ios +##in +##mm +##00 +800 +on +te +3000 +65 +2001 +360 +95 +ig +lv +120 +##ng +##を +##us +##に +pc +てす +── +600 +##te +85 +2002 +88 +##ed +html +ncc +wifi +email +64 +blog +is +##10 +##て +mail +online +##al +dvd +##ic +studio +##は +##℃ +##ia +##と +line +vip +72 +##q +98 +##ce +##en +for +##is +##ra +##es +##j +usb +net +cp +1999 +asia +4g +##cm +diy +new +3c +##お +ta +66 +language +vs +apple +tw +86 +web +##ne +ipad +62 +you +##re +101 +68 +##tion +ps +de +bt +pony +atm +##2017 +1998 +67 +##ch +ceo +##or +go +##na +av +pro +cafe +96 +pinterest +97 +63 +pixstyleme3c +##ta +more +said +##2016 +1997 +mp3 +700 +##ll +nba +jun +##20 +92 +tv +1995 +pm +61 +76 +nbsp +250 +##ie +linux +##ma +cd +110 +hd +##17 +78 +##ion +77 +6000 +am +##th +##st +94 +##se +##et +69 +180 +gdp +my +105 +81 +abc +89 +flash +79 +one +93 +1990 +1996 +##ck +gps +##も +##ly +web885 +106 +2020 +91 +##ge +4000 +1500 +xd +boss +isbn +1994 +org +##ry +me +love +##11 +0fork +73 +##12 +3g +##ter +##ar +71 +82 +##la +hotel +130 +1970 +pk +83 +87 +140 +ie +##os +##30 +##el +74 +##50 +seo +cpu +##ml +p2p +84 +may +##る +sun +tue +internet +cc +posted +youtube +##at +##ン +##man +ii +##ル +##15 +abs +nt +pdf +yahoo +ago +1980 +##it +news +mac +104 +##てす +##me +##り +java +1992 +spa +##de +##nt +hk +all +plus +la +1993 +##mb +##16 +##ve +west +##da +160 +air +##い +##ps +から +##to +1989 +logo +htc +php +https +fi +momo +##son +sat +##ke +##80 +ebd +suv +wi +day +apk +##88 +##um +mv +galaxy +wiki +or +brake +##ス +1200 +する +this +1991 +mon +##こ +❤2017 +po +##ない +javascript +life +home +june +##ss +system +900 +##ー +##0 +pp +1988 +world +fb +4k +br +##as +ic +ai +leonardo +safari +##60 +live +free +xx +wed +win7 +kiehl +##co +lg +o2o +##go +us +235 +1949 +mm +しい +vfm +kanye +##90 +##2015 +##id +jr +##ey +123 +rss +##sa +##ro +##am +##no +thu +fri +350 +##sh +##ki +103 +comments +name +##のて +##pe +##ine +max +1987 +8000 +uber +##mi +##ton +wordpress +office +1986 +1985 +##ment +107 +bd +win10 +##ld +##li +gmail +bb +dior +##rs +##ri +##rd +##ます +up +cad +##® +dr +して +read +##21 +をお +##io +##99 +url +1984 +pvc +paypal +show +policy +##40 +##ty +##18 +with +##★ +##01 +txt +102 +##ba +dna +from +post +mini +ar +taiwan +john +##ga +privacy +agoda +##13 +##ny +word +##24 +##22 +##by +##ur +##hz +1982 +##ang +265 +cookie +netscape +108 +##ka +##~ +##ad +house +share +note +ibm +code +hello +nike +sim +survey +##016 +1979 +1950 +wikia +##32 +##017 +5g +cbc +##tor +##kg +1983 +##rt +##14 +campaign +store +2500 +os +##ct +##ts +##° +170 +api +##ns +365 +excel +##な +##ao +##ら +##し +~~ +##nd +university +163 +には +518 +##70 +##ya +##il +##25 +pierre +ipo +0020 +897 +##23 +hotels +##ian +のお +125 +years +6606 +##ers +##26 +high 
+##day +time +##ay +bug +##line +##く +##す +##be +xp +talk2yam +yamservice +10000 +coco +##dy +sony +##ies +1978 +microsoft +david +people +##ha +1960 +instagram +intel +その +##ot +iso +1981 +##va +115 +##mo +##land +xxx +man +co +ltxsw +##ation +baby +220 +##pa +##ol +1945 +7000 +tag +450 +##ue +msn +##31 +oppo +##ト +##ca +control +##om +st +chrome +##ure +##ん +be +##き +lol +##19 +した +##bo +240 +lady +##100 +##way +##から +4600 +##ko +##do +##un +4s +corporation +168 +##ni +herme +##28 +cp +978 +##up +##06 +ui +##ds +ppt +admin +three +します +bbc +re +128 +##48 +ca +##015 +##35 +hp +##ee +tpp +##た +##ive +×× +root +##cc +##ました +##ble +##ity +adobe +park +114 +et +oled +city +##ex +##ler +##ap +china +##book +20000 +view +##ice +global +##km +your +hong +##mg +out +##ms +ng +ebay +##29 +menu +ubuntu +##cy +rom +##view +open +ktv +do +server +##lo +if +english +##ね +##5 +##oo +1600 +##02 +step1 +kong +club +135 +july +inc +1976 +mr +hi +##net +touch +##ls +##ii +michael +lcd +##05 +##33 +phone +james +step2 +1300 +ios9 +##box +dc +##2 +##ley +samsung +111 +280 +pokemon +css +##ent +##les +いいえ +##1 +s8 +atom +play +bmw +##said +sa +etf +ctrl +♥yoyo♥ +##55 +2025 +##2014 +##66 +adidas +amazon +1958 +##ber +##ner +visa +##77 +##der +1800 +connectivity +##hi +firefox +109 +118 +hr +so +style +mark +pop +ol +skip +1975 +as +##27 +##ir +##61 +190 +mba +##う +##ai +le +##ver +1900 +cafe2017 +lte +super +113 +129 +##ron +amd +like +##☆ +are +##ster +we +##sk +paul +data +international +##ft +longchamp +ssd +good +##ート +##ti +reply +##my +↓↓↓ +apr +star +##ker +source +136 +js +112 +get +force +photo +##one +126 +##2013 +##ow +link +bbs +1972 +goods +##lin +python +119 +##ip +game +##ics +##ません +blue +##● +520 +##45 +page +itunes +##03 +1955 +260 +1968 +gt +gif +618 +##ff +##47 +group +くたさい +about +bar +ganji +##nce +music +lee +not +1977 +1971 +1973 +##per +an +faq +comment +##って +days +##ock +116 +##bs +1974 +1969 +v1 +player +1956 +xbox +sql +fm +f1 +139 +##ah +210 +##lv +##mp +##000 +melody +1957 +##3 +550 +17life +199 +1966 +xml +market +##au +##71 +999 +##04 +what +gl +##95 +##age +tips +##68 +book +##ting +mysql +can +1959 +230 +##ung +wonderland +watch +10℃ +##ction +9000 +mar +mobile +1946 +1962 +article +##db +part +▲top +party +って +1967 +1964 +1948 +##07 +##ore +##op +この +dj +##78 +##38 +010 +main +225 +1965 +##ong +art +320 +ad +134 +020 +##73 +117 +pm2 +japan +228 +##08 +ts +1963 +##ica +der +sm +##36 +2019 +##wa +ct +##7 +##や +##64 +1937 +homemesh +search +##85 +##れは +##tv +##di +macbook +##9 +##くたさい +service +##♥ +type +った +750 +##ier +##si +##75 +##います +##ok +best +##ット +goris +lock +##った +cf +3m +big +##ut +ftp +carol +##vi +10 +1961 +happy +sd +##ac +122 +anti +pe +cnn +iii +1920 +138 +##ラ +1940 +esp +jan +tags +##98 +##51 +august +vol +##86 +154 +##™ +##fs +##れ +##sion +design +ac +##ム +press +jordan +ppp +that +key +check +##6 +##tt +##㎡ +1080p +##lt +power +##42 +1952 +##bc +vivi +##ック +he +133 +121 +jpg +##rry +201 +175 +3500 +1947 +nb +##ted +##rn +しています +1954 +usd +##t00 +master +##ンク +001 +model +##58 +al +##09 +1953 +##34 +ram +goo +ても +##ui +127 +1930 +red +##ary +rpg +item +##pm +##41 +270 +##za +project +##2012 +hot +td +blogabstract +##ger +##62 +650 +##44 +gr2 +##します +##m +black +electronic +nfc +year +asus +また +html5 +cindy +##hd +m3 +132 +esc +##od +booking +##53 +fed +tvb +##81 +##ina +mit +165 +##いる +chan +192 +distribution +next +になる +peter +bios +steam +cm +1941 +にも +pk10 +##ix +##65 +##91 +dec +nasa +##ana +icecat +00z +b1 +will +##46 +li +se +##ji +##み +##ard +oct 
+##ain +jp +##ze +##bi +cio +##56 +smart +h5 +##39 +##port +curve +vpn +##nm +##dia +utc +##あり +12345678910 +##52 +rmvb +chanel +a4 +miss +##and +##im +media +who +##63 +she +girl +5s +124 +vera +##して +class +vivo +king +##フ +##ei +national +ab +1951 +5cm +888 +145 +ipod +ap +1100 +5mm +211 +ms +2756 +##69 +mp4 +msci +##po +##89 +131 +mg +index +380 +##bit +##out +##zz +##97 +##67 +158 +apec +##8 +photoshop +opec +¥799 +ては +##96 +##tes +##ast +2g +○○ +##ール +¥2899 +##ling +##よ +##ory +1938 +##ical +kitty +content +##43 +step3 +##cn +win8 +155 +vc +1400 +iphone7 +robert +##した +tcl +137 +beauty +##87 +en +dollars +##ys +##oc +step +pay +yy +a1 +##2011 +##lly +##ks +##♪ +1939 +188 +download +1944 +sep +exe +ph +います +school +gb +center +pr +street +##board +uv +##37 +##lan +winrar +##que +##ua +##com +1942 +1936 +480 +gpu +##4 +ettoday +fu +tom +##54 +##ren +##via +149 +##72 +b2b +144 +##79 +##tch +rose +arm +mb +##49 +##ial +##nn +nvidia +step4 +mvp +00㎡ +york +156 +##イ +how +cpi +591 +2765 +gov +kg +joe +##xx +mandy +pa +##ser +copyright +fashion +1935 +don +##け +ecu +##ist +##art +erp +wap +have +##lm +talk +##ek +##ning +##if +ch +##ite +video +1943 +cs +san +iot +look +##84 +##2010 +##ku +october +##ux +trump +##hs +##ide +box +141 +first +##ins +april +##ight +##83 +185 +angel +protected +aa +151 +162 +x1 +m2 +##fe +##× +##ho +size +143 +min +ofo +fun +gomaji +ex +hdmi +food +dns +march +chris +kevin +##のか +##lla +##pp +##ec +ag +ems +6s +720p +##rm +##ham +off +##92 +asp +team +fandom +ed +299 +▌♥ +##ell +info +されています +##82 +sina +4066 +161 +##able +##ctor +330 +399 +315 +dll +rights +ltd +idc +jul +3kg +1927 +142 +ma +surface +##76 +##ク +~~~ +304 +mall +eps +146 +green +##59 +map +space +donald +v2 +sodu +##light +1931 +148 +1700 +まて +310 +reserved +htm +##han +##57 +2d +178 +mod +##ise +##tions +152 +ti +##shi +doc +1933 +icp +055 +wang +##ram +shopping +aug +##pi +##well +now +wam +b2 +からお +##hu +236 +1928 +##gb +266 +f2 +##93 +153 +mix +##ef +##uan +bwl +##plus +##res +core +##ess +tea +5℃ +hktvmall +nhk +##ate +list +##ese +301 +feb +4m +inn +ての +nov +159 +12345 +daniel +##ci +pass +##bet +##nk +coffee +202 +ssl +airbnb +##ute +fbi +woshipm +skype +ea +cg +sp +##fc +##www +yes +edge +alt +007 +##94 +fpga +##ght +##gs +iso9001 +さい +##ile +##wood +##uo +image +lin +icon +american +##em +1932 +set +says +##king +##tive +blogger +##74 +なと +256 +147 +##ox +##zy +##red +##ium +##lf +nokia +claire +##リ +##ding +november +lohas +##500 +##tic +##マ +##cs +##ある +##che +##ire +##gy +##ult +db +january +win +##カ +166 +road +ptt +##ま +##つ +198 +##fa +##mer +anna +pchome +はい +udn +ef +420 +##time +##tte +2030 +##ア +g20 +white +かかります +1929 +308 +garden +eleven +di +##おります +chen +309b +777 +172 +young +cosplay +ちてない +4500 +bat +##123 +##tra +##ては +kindle +npc +steve +etc +##ern +##| +call +xperia +ces +travel +sk +s7 +##ous +1934 +##int +みいたたけます +183 +edu +file +cho +qr +##car +##our +186 +##ant +##d +eric +1914 +rends +##jo +##する +mastercard +##2000 +kb +##min +290 +##ino +vista +##ris +##ud +jack +2400 +##set +169 +pos +1912 +##her +##ou +taipei +しく +205 +beta +##ませんか +232 +##fi +express +255 +body +##ill +aphojoy +user +december +meiki +##ick +tweet +richard +##av +##ᆫ +iphone6 +##dd +ちてすか +views +##mark +321 +pd +##00 +times +##▲ +level +##ash +10g +point +5l +##ome +208 +koreanmall +##ak +george +q2 +206 +wma +tcp +##200 +スタッフ +full +mlb +##lle +##watch +tm +run +179 +911 +smith +business +##und +1919 +color +##tal +222 +171 +##less +moon +4399 +##rl +update +pcb +shop +499 +157 +little +なし 
+end +##mhz +van +dsp +easy +660 +##house +##key +history +##o +oh +##001 +##hy +##web +oem +let +was +##2009 +##gg +review +##wan +182 +##°c +203 +uc +title +##val +united +233 +2021 +##ons +doi +trivago +overdope +sbs +##ance +##ち +grand +special +573032185 +imf +216 +wx17house +##so +##ーム +audi +##he +london +william +##rp +##ake +science +beach +cfa +amp +ps4 +880 +##800 +##link +##hp +crm +ferragamo +bell +make +##eng +195 +under +zh +photos +2300 +##style +##ント +via +176 +da +##gi +company +i7 +##ray +thomas +370 +ufo +i5 +##max +plc +ben +back +research +8g +173 +mike +##pc +##ッフ +september +189 +##ace +vps +february +167 +pantos +wp +lisa +1921 +★★ +jquery +night +long +offer +##berg +##news +1911 +##いて +ray +fks +wto +せます +over +164 +340 +##all +##rus +1924 +##888 +##works +blogtitle +loftpermalink +##→ +187 +martin +test +ling +km +##め +15000 +fda +v3 +##ja +##ロ +wedding +かある +outlet +family +##ea +をこ +##top +story +##ness +salvatore +##lu +204 +swift +215 +room +している +oracle +##ul +1925 +sam +b2c +week +pi +rock +##のは +##a +##けと +##ean +##300 +##gle +cctv +after +chinese +##back +powered +x2 +##tan +1918 +##nes +##イン +canon +only +181 +##zi +##las +say +##oe +184 +##sd +221 +##bot +##world +##zo +sky +made +top100 +just +1926 +pmi +802 +234 +gap +##vr +177 +les +174 +▲topoct +ball +vogue +vi +ing +ofweek +cos +##list +##ort +▲topmay +##なら +##lon +として +last +##tc +##of +##bus +##gen +real +eva +##コ +a3 +nas +##lie +##ria +##coin +##bt +▲topapr +his +212 +cat +nata +vive +health +⋯⋯ +drive +sir +▲topmar +du +cup +##カー +##ook +##よう +##sy +alex +msg +tour +しました +3ce +##word +193 +ebooks +r8 +block +318 +##より +2200 +nice +pvp +207 +months +1905 +rewards +##ther +1917 +0800 +##xi +##チ +##sc +micro +850 +gg +blogfp +op +1922 +daily +m1 +264 +true +##bb +ml +##tar +##のお +##ky +anthony +196 +253 +##yo +state +218 +##ara +##aa +##rc +##tz +##ston +より +gear +##eo +##ade +ge +see +1923 +##win +##ura +ss +heart +##den +##ita +down +##sm +el +png +2100 +610 +rakuten +whatsapp +bay +dream +add +##use +680 +311 +pad +gucci +mpv +##ode +##fo +island +▲topjun +##▼ +223 +jason +214 +chicago +##❤ +しの +##hone +io +##れる +##ことか +sogo +be2 +##ology +990 +cloud +vcd +##con +2~3 +##ford +##joy +##kb +##こさいます +##rade +but +##ach +docker +##ful +rfid +ul +##ase +hit +ford +##star +580 +##○ +11 +a2 +sdk +reading +edited +##are +cmos +##mc +238 +siri +light +##ella +##ため +bloomberg +##read +pizza +##ison +jimmy +##vm +college +node +journal +ba +18k +##play +245 +##cer +20 +magic +##yu +191 +jump +288 +tt +##ings +asr +##lia +3200 +step5 +network +##cd +mc +いします +1234 +pixstyleme +273 +##600 +2800 +money +★★★★★ +1280 +12 +430 +bl +みの +act +##tus +tokyo +##rial +##life +emba +##ae +saas +tcs +##rk +##wang +summer +##sp +ko +##ving +390 +premium +##その +netflix +##ヒ +uk +mt +##lton +right +frank +two +209 +える +##ple +##cal +021 +##んな +##sen +##ville +hold +nexus +dd +##ius +てお +##mah +##なく +tila +zero +820 +ce +##tin +resort +##ws +charles +old +p10 +5d +report +##360 +##ru +##には +bus +vans +lt +##est +pv +##レ +links +rebecca +##ツ +##dm +azure +##365 +きな +limited +bit +4gb +##mon +1910 +moto +##eam +213 +1913 +var +eos +なとの +226 +blogspot +された +699 +e3 +dos +dm +fc +##ments +##ik +##kw +boy +##bin +##ata +960 +er +##せ +219 +##vin +##tu +##ula +194 +##∥ +station +##ろ +##ature +835 +files +zara +hdr +top10 +nature +950 +magazine +s6 +marriott +##シ +avira +case +##っと +tab +##ran +tony +##home +oculus +im +##ral +jean +saint +cry +307 +rosie +##force +##ini +ice +##bert +のある +##nder +##mber +pet +2600 +##◆ +plurk 
+▲topdec +##sis +00kg +▲topnov +720 +##ence +tim +##ω +##nc +##ても +##name +log +ips +great +ikea +malaysia +unix +##イト +3600 +##ncy +##nie +12000 +akb48 +##ye +##oid +404 +##chi +##いた +oa +xuehai +##1000 +##orm +##rf +275 +さん +##ware +##リー +980 +ho +##pro +text +##era +560 +bob +227 +##ub +##2008 +8891 +scp +avi +##zen +2022 +mi +wu +museum +qvod +apache +lake +jcb +▲topaug +★★★ +ni +##hr +hill +302 +ne +weibo +490 +ruby +##ーシ +##ヶ +##row +4d +▲topjul +iv +##ish +github +306 +mate +312 +##スト +##lot +##ane +andrew +のハイト +##tina +t1 +rf +ed2k +##vel +##900 +way +final +りの +ns +5a +705 +197 +##メ +sweet +bytes +##ene +▲topjan +231 +##cker +##2007 +##px +100g +topapp +229 +helpapp +rs +low +14k +g4g +care +630 +ldquo +あり +##fork +leave +rm +edition +##gan +##zon +##qq +▲topsep +##google +##ism +gold +224 +explorer +##zer +toyota +category +select +visual +##labels +restaurant +##md +posts +s1 +##ico +もっと +angelababy +123456 +217 +sports +s3 +mbc +1915 +してくたさい +shell +x86 +candy +##new +kbs +face +xl +470 +##here +4a +swissinfo +v8 +▲topfeb +dram +##ual +##vice +3a +##wer +sport +q1 +ios10 +public +int +card +##c +ep +au +rt +##れた +1080 +bill +##mll +kim +30 +460 +wan +##uk +##ミ +x3 +298 +0t +scott +##ming +239 +e5 +##3d +h7n9 +worldcat +brown +##あります +##vo +##led +##580 +##ax +249 +410 +##ert +paris +##~6 +polo +925 +##lr +599 +##ナ +capital +##hing +bank +cv +1g +##chat +##s +##たい +adc +##ule +2m +##e +digital +hotmail +268 +##pad +870 +bbq +quot +##ring +before +wali +##まて +mcu +2k +2b +という +costco +316 +north +333 +switch +##city +##p +philips +##mann +management +panasonic +##cl +##vd +##ping +##rge +alice +##lk +##ましょう +css3 +##ney +vision +alpha +##ular +##400 +##tter +lz +にお +##ありません +mode +gre +1916 +pci +##tm +237 +1~2 +##yan +##そ +について +##let +##キ +work +war +coach +ah +mary +##ᅵ +huang +##pt +a8 +pt +follow +##berry +1895 +##ew +a5 +ghost +##ション +##wn +##og +south +##code +girls +##rid +action +villa +git +r11 +table +games +##cket +error +##anonymoussaid +##ag +here +##ame +##gc +qa +##■ +##lis +gmp +##gin +vmalife +##cher +yu +wedding +##tis +demo +dragon +530 +soho +social +bye +##rant +river +orz +acer +325 +##↑ +##ース +##ats +261 +del +##ven +440 +ups +##ように +##ター +305 +value +macd +yougou +##dn +661 +##ano +ll +##urt +##rent +continue +script +##wen +##ect +paper +263 +319 +shift +##chel +##フト +##cat +258 +x5 +fox +243 +##さん +car +aaa +##blog +loading +##yn +##tp +kuso +799 +si +sns +イカせるテンマ +ヒンクテンマ3 +rmb +vdc +forest +central +prime +help +ultra +##rmb +##ような +241 +square +688 +##しい +のないフロクに +##field +##reen +##ors +##ju +c1 +start +510 +##air +##map +cdn +##wo +cba +stephen +m8 +100km +##get +opera +##base +##ood +vsa +com™ +##aw +##ail +251 +なのて +count +t2 +##ᅡ +##een +2700 +hop +##gp +vsc +tree +##eg +##ose +816 +285 +##ories +##shop +alphago +v4 +1909 +simon +##ᆼ +fluke62max +zip +スホンサー +##sta +louis +cr +bas +##~10 +bc +##yer +hadoop +##ube +##wi +1906 +0755 +hola +##low +place +centre +5v +d3 +##fer +252 +##750 +##media +281 +540 +0l +exchange +262 +series +##ハー +##san +eb +##bank +##k +q3 +##nge +##mail +take +##lp +259 +1888 +client +east +cache +event +vincent +##ールを +きを +##nse +sui +855 +adchoice +##и +##stry +##なたの +246 +##zone +ga +apps +sea +##ab +248 +cisco +##タ +##rner +kymco +##care +dha +##pu +##yi +minkoff +royal +p1 +への +annie +269 +collection +kpi +playstation +257 +になります +866 +bh +##bar +queen +505 +radio +1904 +andy +armani +##xy +manager +iherb +##ery +##share +spring +raid +johnson +1908 +##ob +volvo +hall +##ball +v6 +our +taylor +##hk +bi +242 +##cp 
+kate +bo +water +technology +##rie +サイトは +277 +##ona +##sl +hpv +303 +gtx +hip +rdquo +jayz +stone +##lex +##rum +namespace +##やり +620 +##ale +##atic +des +##erson +##ql +##ves +##type +enter +##この +##てきます +d2 +##168 +##mix +##bian +との +a9 +jj +ky +##lc +access +movie +##hc +リストに +tower +##ration +##mit +ます +##nch +ua +tel +prefix +##o2 +1907 +##point +1901 +ott +~10 +##http +##ury +baidu +##ink +member +##logy +bigbang +nownews +##js +##shot +##tb +##こと +247 +eba +##tics +##lus +ける +v5 +spark +##ama +there +##ions +god +##lls +##down +hiv +##ress +burberry +day2 +##kv +◆◆ +jeff +related +film +edit +joseph +283 +##ark +cx +32gb +order +g9 +30000 +##ans +##tty +s5 +##bee +かあります +thread +xr +buy +sh +005 +land +spotify +mx +##ari +276 +##verse +×email +sf +why +##ことて +244 +7headlines +nego +sunny +dom +exo +401 +666 +positioning +fit +rgb +##tton +278 +kiss +alexa +adam +lp +みリストを +##g +mp +##ties +##llow +amy +##du +np +002 +institute +271 +##rth +##lar +2345 +590 +##des +sidebar +15 +imax +site +##cky +##kit +##ime +##009 +season +323 +##fun +##ンター +##ひ +gogoro +a7 +pu +lily +fire +twd600 +##ッセーシを +いて +##vis +30ml +##cture +##をお +information +##オ +close +friday +##くれる +yi +nick +てすか +##tta +##tel +6500 +##lock +cbd +economy +254 +かお +267 +tinker +double +375 +8gb +voice +##app +oops +channel +today +985 +##right +raw +xyz +##+ +jim +edm +##cent +7500 +supreme +814 +ds +##its +##asia +dropbox +##てすか +##tti +books +272 +100ml +##tle +##ller +##ken +##more +##boy +sex +309 +##dom +t3 +##ider +##なります +##unch +1903 +810 +feel +5500 +##かった +##put +により +s2 +mo +##gh +men +ka +amoled +div +##tr +##n1 +port +howard +##tags +ken +dnf +##nus +adsense +##а +ide +##へ +buff +thunder +##town +##ique +has +##body +auto +pin +##erry +tee +てした +295 +number +##the +##013 +object +psp +cool +udnbkk +16gb +##mic +miui +##tro +most +r2 +##alk +##nity +1880 +±0 +##いました +428 +s4 +law +version +##oa +n1 +sgs +docomo +##tf +##ack +henry +fc2 +##ded +##sco +##014 +##rite +286 +0mm +linkedin +##ada +##now +wii +##ndy +ucbug +##◎ +sputniknews +legalminer +##ika +##xp +2gb +##bu +q10 +oo +b6 +come +##rman +cheese +ming +maker +##gm +nikon +##fig +ppi +kelly +##ります +jchere +てきます +ted +md +003 +fgo +tech +##tto +dan +soc +##gl +##len +hair +earth +640 +521 +img +##pper +##a1 +##てきる +##ロク +acca +##ition +##ference +suite +##ig +outlook +##mond +##cation +398 +##pr +279 +101vip +358 +##999 +282 +64gb +3800 +345 +airport +##over +284 +##おり +jones +##ith +lab +##su +##いるのて +co2 +town +piece +##llo +no1 +vmware +24h +##qi +focus +reader +##admin +##ora +tb +false +##log +1898 +know +lan +838 +##ces +f4 +##ume +motel +stop +##oper +na +flickr +netcomponents +##af +##─ +pose +williams +local +##ound +##cg +##site +##iko +いお +274 +5m +gsm +con +##ath +1902 +friends +##hip +cell +317 +##rey +780 +cream +##cks +012 +##dp +facebooktwitterpinterestgoogle +sso +324 +shtml +song +swiss +##mw +##キンク +lumia +xdd +string +tiffany +522 +marc +られた +insee +russell +sc +dell +##ations +ok +camera +289 +##vs +##flow +##late +classic +287 +##nter +stay +g1 +mtv +512 +##ever +##lab +##nger +qe +sata +ryan +d1 +50ml +cms +##cing +su +292 +3300 +editor +296 +##nap +security +sunday +association +##ens +##700 +##bra +acg +##かり +sofascore +とは +mkv +##ign +jonathan +gary +build +labels +##oto +tesla +moba +qi +gohappy +general +ajax +1024 +##かる +サイト +society +##test +##urs +wps +fedora +##ich +mozilla +328 +##480 +##dr +usa +urn +##lina +##r +grace +##die +##try +##ader +1250 +##なり +elle +570 +##chen +##ᆯ +price +##ten +uhz +##ough +eq +##hen 
+states +push +session +balance +wow +506 +##cus +##py +when +##ward +##ep +34e +wong +library +prada +##サイト +##cle +running +##ree +313 +ck +date +q4 +##ctive +##ool +##> +mk +##ira +##163 +388 +die +secret +rq +dota +buffet +は1ヶ +e6 +##ez +pan +368 +ha +##card +##cha +2a +##さ +alan +day3 +eye +f3 +##end +france +keep +adi +rna +tvbs +##ala +solo +nova +##え +##tail +##ょう +support +##ries +##なる +##ved +base +copy +iis +fps +##ways +hero +hgih +profile +fish +mu +ssh +entertainment +chang +##wd +click +cake +##ond +pre +##tom +kic +pixel +##ov +##fl +product +6a +##pd +dear +##gate +es +yumi +audio +##² +##sky +echo +bin +where +##ture +329 +##ape +find +sap +isis +##なと +nand +##101 +##load +##ream +band +a6 +525 +never +##post +festival +50cm +##we +555 +guide +314 +zenfone +##ike +335 +gd +forum +jessica +strong +alexander +##ould +software +allen +##ious +program +360° +else +lohasthree +##gar +することかてきます +please +##れます +rc +##ggle +##ric +bim +50000 +##own +eclipse +355 +brian +3ds +##side +061 +361 +##other +##ける +##tech +##ator +485 +engine +##ged +##t +plaza +##fit +cia +ngo +westbrook +shi +tbs +50mm +##みませんか +sci +291 +reuters +##ily +contextlink +##hn +af +##cil +bridge +very +##cel +1890 +cambridge +##ize +15g +##aid +##data +790 +frm +##head +award +butler +##sun +meta +##mar +america +ps3 +puma +pmid +##すか +lc +670 +kitchen +##lic +オーフン5 +きなしソフトサーヒス +そして +day1 +future +★★★★ +##text +##page +##rris +pm1 +##ket +fans +##っています +1001 +christian +bot +kids +trackback +##hai +c3 +display +##hl +n2 +1896 +idea +さんも +##sent +airmail +##ug +##men +pwm +けます +028 +##lution +369 +852 +awards +schemas +354 +asics +wikipedia +font +##tional +##vy +c2 +293 +##れている +##dget +##ein +っている +contact +pepper +スキル +339 +##~5 +294 +##uel +##ument +730 +##hang +みてす +q5 +##sue +rain +##ndi +wei +swatch +##cept +わせ +331 +popular +##ste +##tag +p2 +501 +trc +1899 +##west +##live +justin +honda +ping +messenger +##rap +v9 +543 +##とは +unity +appqq +はすへて +025 +leo +##tone +##テ +##ass +uniqlo +##010 +502 +her +jane +memory +moneydj +##tical +human +12306 +していると +##m2 +coc +miacare +##mn +tmt +##core +vim +kk +##may +fan +target +use +too +338 +435 +2050 +867 +737 +fast +##2c +services +##ope +omega +energy +##わ +pinkoi +1a +##なから +##rain +jackson +##ement +##シャンルの +374 +366 +そんな +p9 +rd +##ᆨ +1111 +##tier +##vic +zone +##│ +385 +690 +dl +isofix +cpa +m4 +322 +kimi +めて +davis +##lay +lulu +##uck +050 +weeks +qs +##hop +920 +##n +ae +##ear +~5 +eia +405 +##fly +korea +jpeg +boost +##ship +small +##リア +1860 +eur +297 +425 +valley +##iel +simple +##ude +rn +k2 +##ena +されます +non +patrick +しているから +##ナー +feed +5757 +30g +process +well +qqmei +##thing +they +aws +lu +pink +##ters +##kin +または +board +##vertisement +wine +##ien +unicode +##dge +r1 +359 +##tant +いを +##twitter +##3c +cool1 +される +##れて +##l +isp +##012 +standard +45㎡2 +402 +##150 +matt +##fu +326 +##iner +googlemsn +pixnetfacebookyahoo +##ラン +x7 +886 +##uce +メーカー +sao +##ev +##きました +##file +9678 +403 +xddd +shirt +6l +##rio +##hat +3mm +givenchy +ya +bang +##lio +monday +crystal +ロクイン +##abc +336 +head +890 +ubuntuforumwikilinuxpastechat +##vc +##~20 +##rity +cnc +7866 +ipv6 +null +1897 +##ost +yang +imsean +tiger +##fet +##ンス +352 +##= +dji +327 +ji +maria +##come +##んて +foundation +3100 +##beth +##なった +1m +601 +active +##aft +##don +3p +sr +349 +emma +##khz +living +415 +353 +1889 +341 +709 +457 +sas +x6 +##face +pptv +x4 +##mate +han +sophie +##jing +337 +fifa +##mand +other +sale +inwedding +##gn +てきちゃいます +##mmy +##pmlast +bad +nana +nbc +してみてくたさいね 
+なとはお +##wu +##かあります +##あ +note7 +single +##340 +せからこ +してくたさい♪この +しにはとんとんワークケートを +するとあなたにもっとマッチした +ならワークケートへ +もみつかっちゃうかも +ワークケートの +##bel +window +##dio +##ht +union +age +382 +14 +##ivity +##y +コメント +domain +neo +##isa +##lter +5k +f5 +steven +##cts +powerpoint +tft +self +g2 +ft +##テル +zol +##act +mwc +381 +343 +もう +nbapop +408 +てある +eds +ace +##room +previous +author +tomtom +il +##ets +hu +financial +☆☆☆ +っています +bp +5t +chi +1gb +##hg +fairmont +cross +008 +gay +h2 +function +##けて +356 +also +1b +625 +##ータ +##raph +1894 +3~5 +##ils +i3 +334 +avenue +##host +による +##bon +##tsu +message +navigation +50g +fintech +h6 +##ことを +8cm +##ject +##vas +##firm +credit +##wf +xxxx +form +##nor +##space +huawei +plan +json +sbl +##dc +machine +921 +392 +wish +##120 +##sol +windows7 +edward +##ために +development +washington +##nsis +lo +818 +##sio +##ym +##bor +planet +##~8 +##wt +ieee +gpa +##めて +camp +ann +gm +##tw +##oka +connect +##rss +##work +##atus +wall +chicken +soul +2mm +##times +fa +##ather +##cord +009 +##eep +hitachi +gui +harry +##pan +e1 +disney +##press +##ーション +wind +386 +frigidaire +##tl +liu +hsu +332 +basic +von +ev +いた +てきる +スホンサーサイト +learning +##ull +expedia +archives +change +##wei +santa +cut +ins +6gb +turbo +brand +cf1 +508 +004 +return +747 +##rip +h1 +##nis +##をこ +128gb +##にお +3t +application +しており +emc +rx +##oon +384 +quick +412 +15058 +wilson +wing +chapter +##bug +beyond +##cms +##dar +##oh +zoom +e2 +trip +sb +##nba +rcep +342 +aspx +ci +080 +gc +gnu +める +##count +advanced +dance +dv +##url +##ging +367 +8591 +am09 +shadow +battle +346 +##i +##cia +##という +emily +##のてす +##tation +host +ff +techorz +sars +##mini +##mporary +##ering +nc +4200 +798 +##next +cma +##mbps +##gas +##ift +##dot +##ィ +455 +##~17 +amana +##りの +426 +##ros +ir +00㎡1 +##eet +##ible +##↓ +710 +ˋ▽ˊ +##aka +dcs +iq +##v +l1 +##lor +maggie +##011 +##iu +588 +##~1 +830 +##gt +1tb +articles +create +##burg +##iki +database +fantasy +##rex +##cam +dlc +dean +##you +hard +path +gaming +victoria +maps +cb +##lee +##itor +overchicstoretvhome +systems +##xt +416 +p3 +sarah +760 +##nan +407 +486 +x9 +install +second +626 +##ann +##ph +##rcle +##nic +860 +##nar +ec +##とう +768 +metro +chocolate +##rian +~4 +##table +##しています +skin +##sn +395 +mountain +##0mm +inparadise +6m +7x24 +ib +4800 +##jia +eeworld +creative +g5 +g3 +357 +parker +ecfa +village +からの +18000 +sylvia +サーヒス +hbl +##ques +##onsored +##x2 +##きます +##v4 +##tein +ie6 +383 +##stack +389 +ver +##ads +##baby +sound +bbe +##110 +##lone +##uid +ads +022 +gundam +351 +thinkpad +006 +scrum +match +##ave +mems +##470 +##oy +##なりました +##talk +glass +lamigo +span +##eme +job +##a5 +jay +wade +kde +498 +##lace +ocean +tvg +##covery +##r3 +##ners +##rea +junior +think +##aine +cover +##ision +##sia +↓↓ +##bow +msi +413 +458 +406 +##love +711 +801 +soft +z2 +##pl +456 +1840 +mobil +mind +##uy +427 +nginx +##oi +めた +##rr +6221 +##mple +##sson +##ーシてす +371 +##nts +91tv +comhd +crv3000 +##uard +1868 +397 +deep +lost +field +gallery +##bia +rate +spf +redis +traction +930 +icloud +011 +なら +fe +jose +372 +##tory +into +sohu +fx +899 +379 +kicstart2 +##hia +すく +##~3 +##sit +ra +24 +##walk +##xure +500g +##pact +pacific +xa +natural +carlo +##250 +##walker +1850 +##can +cto +gigi +516 +##サー +pen +##hoo +ob +matlab +##b +##yy +13913459 +##iti +mango +##bbs +sense +c5 +oxford +##ニア +walker +jennifer +##ola +course +##bre +701 +##pus +##rder +lucky +075 +##ぁ +ivy +なお +##nia +sotheby +side +##ugh +joy +##orage +##ush +##bat +##dt +364 +r9 +##2d +##gio +511 +country +wear 
+##lax +##~7 +##moon +393 +seven +study +411 +348 +lonzo +8k +##ェ +evolution +##イフ +##kk +gs +kd +##レス +arduino +344 +b12 +##lux +arpg +##rdon +cook +##x5 +dark +five +##als +##ida +とても +sign +362 +##ちの +something +20mm +##nda +387 +##posted +fresh +tf +1870 +422 +cam +##mine +##skip +##form +##ssion +education +394 +##tee +dyson +stage +##jie +want +##night +epson +pack +あります +##ppy +テリヘル +##█ +wd +##eh +##rence +left +##lvin +golden +mhz +discovery +##trix +##n2 +loft +##uch +##dra +##sse +speed +~1 +1mdb +sorry +welcome +##urn +wave +gaga +##lmer +teddy +##160 +トラックハック +せよ +611 +##f2016 +378 +rp +##sha +rar +##あなたに +##きた +840 +holiday +##ュー +373 +074 +##vg +##nos +##rail +gartner +gi +6p +##dium +kit +488 +b3 +eco +##ろう +20g +sean +##stone +autocad +nu +##np +f16 +write +029 +m5 +##ias +images +atp +##dk +fsm +504 +1350 +ve +52kb +##xxx +##のに +##cake +414 +unit +lim +ru +1v +##ification +published +angela +16g +analytics +ak +##q +##nel +gmt +##icon +again +##₂ +##bby +ios11 +445 +かこさいます +waze +いてす +##ハ +9985 +##ust +##ティー +framework +##007 +iptv +delete +52sykb +cl +wwdc +027 +30cm +##fw +##ての +1389 +##xon +brandt +##ses +##dragon +tc +vetements +anne +monte +modern +official +##へて +##ere +##nne +##oud +もちろん +50 +etnews +##a2 +##graphy +421 +863 +##ちゃん +444 +##rtex +##てお +l2 +##gma +mount +ccd +たと +archive +morning +tan +ddos +e7 +##ホ +day4 +##ウ +gis +453 +its +495 +factory +bruce +pg +##ito +ってくたさい +guest +cdma +##lling +536 +n3 +しかし +3~4 +mega +eyes +ro +13 +women +dac +church +##jun +singapore +##facebook +6991 +starbucks +##tos +##stin +##shine +zen +##mu +tina +20℃ +1893 +##たけて +503 +465 +request +##gence +qt +##っ +1886 +347 +363 +q7 +##zzi +diary +##tore +409 +##ead +468 +cst +##osa +canada +agent +va +##jiang +##ちは +##ーク +##lam +sg +##nix +##sday +##よって +g6 +##master +bing +##zl +charlie +16 +8mm +nb40 +##ーン +thai +##ルフ +ln284ct +##itz +##2f +bonnie +##food +##lent +originals +##stro +##lts +418 +∟∣ +##bscribe +children +ntd +yesstyle +##かも +hmv +##tment +d5 +2cm +arts +sms +##pn +##я +##いい +topios9 +539 +lifestyle +virtual +##ague +xz +##deo +muji +024 +unt +##nnis +##ᅩ +faq1 +1884 +396 +##ette +fly +64㎡ +はしめまして +441 +curry +##pop +のこ +release +##← +##◆◆ +##cast +073 +ありな +500ml +##ews +5c +##stle +ios7 +##ima +787 +dog +lenovo +##r4 +roger +013 +cbs +vornado +100m +417 +##desk +##クok +##ald +1867 +9595 +2900 +##van +oil +##x +some +break +common +##jy +##lines +g7 +twice +419 +ella +nano +belle +にこ +##mes +##self +##note +jb +##ことかてきます +benz +##との +##ova +451 +save +##wing +##ますのて +kai +りは +##hua +##rect +rainer +##unge +448 +##0m +adsl +##かな +guestname +##uma +##kins +##zu +tokichoi +##price +county +##med +##mus +rmk +391 +address +vm +えて +openload +##group +##hin +##iginal +amg +urban +##oz +jobs +emi +##public +beautiful +##sch +album +##dden +##bell +jerry +works +hostel +miller +##drive +##rmin +##10 +376 +boot +828 +##370 +##fx +##cm~ +1885 +##nome +##ctionary +##oman +##lish +##cr +##hm +433 +##how +432 +francis +xi +c919 +b5 +evernote +##uc +vga +##3000 +coupe +##urg +##cca +##uality +019 +6g +れる +multi +##また +##ett +em +hey +##ani +##tax +##rma +inside +than +740 +leonnhurt +##jin +ict +れた +bird +notes +200mm +くの +##dical +##lli +result +442 +iu +ee +438 +smap +gopro +##last +yin +pure +998 +32g +けた +5kg +##dan +##rame +mama +##oot +bean +marketing +##hur +2l +bella +sync +xuite +##ground +515 +discuz +##getrelax +##ince +##bay +##5s +cj +##イス +gmat +apt +##pass +jing +##rix +c4 +rich +##とても +niusnews +##ello +bag +770 +##eting +##mobile +18 +culture +015 +##のてすか 
+377 +1020 +area +##ience +616 +details +gp +universal +silver +dit +はお +private +ddd +u11 +kanshu +##ified +fung +##nny +dx +##520 +tai +475 +023 +##fr +##lean +3s +##pin +429 +##rin +25000 +ly +rick +##bility +usb3 +banner +##baru +##gion +metal +dt +vdf +1871 +karl +qualcomm +bear +1010 +oldid +ian +jo +##tors +population +##ernel +1882 +mmorpg +##mv +##bike +603 +##© +ww +friend +##ager +exhibition +##del +##pods +fpx +structure +##free +##tings +kl +##rley +##copyright +##mma +california +3400 +orange +yoga +4l +canmake +honey +##anda +##コメント +595 +nikkie +##ルハイト +dhl +publishing +##mall +##gnet +20cm +513 +##クセス +##┅ +e88 +970 +##dog +fishbase +##! +##" +### +##$ +##% +##& +##' +##( +##) +##* +##+ +##, +##- +##. +##/ +##: +##; +##< +##= +##> +##? +##@ +##[ +##\ +##] +##^ +##_ +##{ +##| +##} +##~ +##£ +##¤ +##¥ +##§ +##« +##± +##³ +##µ +##· +##¹ +##º +##» +##¼ +##ß +##æ +##÷ +##ø +##đ +##ŋ +##ɔ +##ə +##ɡ +##ʰ +##ˇ +##ˈ +##ˊ +##ˋ +##ˍ +##ː +##˙ +##˚ +##ˢ +##α +##β +##γ +##δ +##ε +##η +##θ +##ι +##κ +##λ +##μ +##ν +##ο +##π +##ρ +##ς +##σ +##τ +##υ +##φ +##χ +##ψ +##б +##в +##г +##д +##е +##ж +##з +##к +##л +##м +##н +##о +##п +##р +##с +##т +##у +##ф +##х +##ц +##ч +##ш +##ы +##ь +##і +##ا +##ب +##ة +##ت +##د +##ر +##س +##ع +##ل +##م +##ن +##ه +##و +##ي +##۩ +##ก +##ง +##น +##ม +##ย +##ร +##อ +##า +##เ +##๑ +##་ +##ღ +##ᄀ +##ᄁ +##ᄂ +##ᄃ +##ᄅ +##ᄆ +##ᄇ +##ᄈ +##ᄉ +##ᄋ +##ᄌ +##ᄎ +##ᄏ +##ᄐ +##ᄑ +##ᄒ +##ᅢ +##ᅣ +##ᅥ +##ᅦ +##ᅧ +##ᅨ +##ᅪ +##ᅬ +##ᅭ +##ᅮ +##ᅯ +##ᅲ +##ᅳ +##ᅴ +##ᆷ +##ᆸ +##ᆺ +##ᆻ +##ᗜ +##ᵃ +##ᵉ +##ᵍ +##ᵏ +##ᵐ +##ᵒ +##ᵘ +##‖ +##„ +##† +##• +##‥ +##‧ +##
 +##‰ +##′ +##″ +##‹ +##› +##※ +##‿ +##⁄ +##ⁱ +##⁺ +##ⁿ +##₁ +##₃ +##₄ +##€ +##№ +##ⅰ +##ⅱ +##ⅲ +##ⅳ +##ⅴ +##↔ +##↗ +##↘ +##⇒ +##∀ +##− +##∕ +##∙ +##√ +##∞ +##∟ +##∠ +##∣ +##∩ +##∮ +##∶ +##∼ +##∽ +##≈ +##≒ +##≡ +##≤ +##≥ +##≦ +##≧ +##≪ +##≫ +##⊙ +##⋅ +##⋈ +##⋯ +##⌒ +##① +##② +##③ +##④ +##⑤ +##⑥ +##⑦ +##⑧ +##⑨ +##⑩ +##⑴ +##⑵ +##⑶ +##⑷ +##⑸ +##⒈ +##⒉ +##⒊ +##⒋ +##ⓒ +##ⓔ +##ⓘ +##━ +##┃ +##┆ +##┊ +##┌ +##└ +##├ +##┣ +##═ +##║ +##╚ +##╞ +##╠ +##╭ +##╮ +##╯ +##╰ +##╱ +##╳ +##▂ +##▃ +##▅ +##▇ +##▉ +##▋ +##▌ +##▍ +##▎ +##□ +##▪ +##▫ +##▬ +##△ +##▶ +##► +##▽ +##◇ +##◕ +##◠ +##◢ +##◤ +##☀ +##☕ +##☞ +##☺ +##☼ +##♀ +##♂ +##♠ +##♡ +##♣ +##♦ +##♫ +##♬ +##✈ +##✔ +##✕ +##✖ +##✦ +##✨ +##✪ +##✰ +##✿ +##❀ +##➜ +##➤ +##⦿ +##、 +##。 +##〃 +##々 +##〇 +##〈 +##〉 +##《 +##》 +##「 +##」 +##『 +##』 +##【 +##】 +##〓 +##〔 +##〕 +##〖 +##〗 +##〜 +##〝 +##〞 +##ぃ +##ぇ +##ぬ +##ふ +##ほ +##む +##ゃ +##ゅ +##ゆ +##ょ +##゜ +##ゝ +##ァ +##ゥ +##エ +##ォ +##ケ +##サ +##セ +##ソ +##ッ +##ニ +##ヌ +##ネ +##ノ +##ヘ +##モ +##ャ +##ヤ +##ュ +##ユ +##ョ +##ヨ +##ワ +##ヲ +##・ +##ヽ +##ㄅ +##ㄆ +##ㄇ +##ㄉ +##ㄋ +##ㄌ +##ㄍ +##ㄎ +##ㄏ +##ㄒ +##ㄚ +##ㄛ +##ㄞ +##ㄟ +##ㄢ +##ㄤ +##ㄥ +##ㄧ +##ㄨ +##ㆍ +##㈦ +##㊣ +##㗎 +##一 +##丁 +##七 +##万 +##丈 +##三 +##上 +##下 +##不 +##与 +##丐 +##丑 +##专 +##且 +##丕 +##世 +##丘 +##丙 +##业 +##丛 +##东 +##丝 +##丞 +##丟 +##両 +##丢 +##两 +##严 +##並 +##丧 +##丨 +##个 +##丫 +##中 +##丰 +##串 +##临 +##丶 +##丸 +##丹 +##为 +##主 +##丼 +##丽 +##举 +##丿 +##乂 +##乃 +##久 +##么 +##义 +##之 +##乌 +##乍 +##乎 +##乏 +##乐 +##乒 +##乓 +##乔 +##乖 +##乗 +##乘 +##乙 +##乜 +##九 +##乞 +##也 +##习 +##乡 +##书 +##乩 +##买 +##乱 +##乳 +##乾 +##亀 +##亂 +##了 +##予 +##争 +##事 +##二 +##于 +##亏 +##云 +##互 +##五 +##井 +##亘 +##亙 +##亚 +##些 +##亜 +##亞 +##亟 +##亡 +##亢 +##交 +##亥 +##亦 +##产 +##亨 +##亩 +##享 +##京 +##亭 +##亮 +##亲 +##亳 +##亵 +##人 +##亿 +##什 +##仁 +##仃 +##仄 +##仅 +##仆 +##仇 +##今 +##介 +##仍 +##从 +##仏 +##仑 +##仓 +##仔 +##仕 +##他 +##仗 +##付 +##仙 +##仝 +##仞 +##仟 +##代 +##令 +##以 +##仨 +##仪 +##们 +##仮 +##仰 +##仲 +##件 +##价 +##任 +##份 +##仿 +##企 +##伉 +##伊 +##伍 +##伎 +##伏 +##伐 +##休 +##伕 +##众 +##优 +##伙 +##会 +##伝 +##伞 +##伟 +##传 +##伢 +##伤 +##伦 +##伪 +##伫 +##伯 +##估 +##伴 +##伶 +##伸 +##伺 +##似 +##伽 +##佃 +##但 +##佇 +##佈 +##位 +##低 +##住 +##佐 +##佑 +##体 +##佔 +##何 +##佗 +##佘 +##余 +##佚 +##佛 +##作 +##佝 +##佞 +##佟 +##你 +##佢 +##佣 +##佤 +##佥 +##佩 +##佬 +##佯 +##佰 +##佳 +##併 +##佶 +##佻 +##佼 +##使 +##侃 +##侄 +##來 +##侈 +##例 +##侍 +##侏 +##侑 +##侖 +##侗 +##供 +##依 +##侠 +##価 +##侣 +##侥 +##侦 +##侧 +##侨 +##侬 +##侮 +##侯 +##侵 +##侶 +##侷 +##便 +##係 +##促 +##俄 +##俊 +##俎 +##俏 +##俐 +##俑 +##俗 +##俘 +##俚 +##保 +##俞 +##俟 +##俠 +##信 +##俨 +##俩 +##俪 +##俬 +##俭 +##修 +##俯 +##俱 +##俳 +##俸 +##俺 +##俾 +##倆 +##倉 +##個 +##倌 +##倍 +##倏 +##們 +##倒 +##倔 +##倖 +##倘 +##候 +##倚 +##倜 +##借 +##倡 +##値 +##倦 +##倩 +##倪 +##倫 +##倬 +##倭 +##倶 +##债 +##值 +##倾 +##偃 +##假 +##偈 +##偉 +##偌 +##偎 +##偏 +##偕 +##做 +##停 +##健 +##側 +##偵 +##偶 +##偷 +##偻 +##偽 +##偿 +##傀 +##傅 +##傍 +##傑 +##傘 +##備 +##傚 +##傢 +##傣 +##傥 +##储 +##傩 +##催 +##傭 +##傲 +##傳 +##債 +##傷 +##傻 +##傾 +##僅 +##働 +##像 +##僑 +##僕 +##僖 +##僚 +##僥 +##僧 +##僭 +##僮 +##僱 +##僵 +##價 +##僻 +##儀 +##儂 +##億 +##儆 +##儉 +##儋 +##儒 +##儕 +##儘 +##償 +##儡 +##優 +##儲 +##儷 +##儼 +##儿 +##兀 +##允 +##元 +##兄 +##充 +##兆 +##兇 +##先 +##光 +##克 +##兌 +##免 +##児 +##兑 +##兒 +##兔 +##兖 +##党 +##兜 +##兢 +##入 +##內 +##全 +##兩 +##八 +##公 +##六 +##兮 +##兰 +##共 +##兲 +##关 +##兴 +##兵 +##其 +##具 +##典 +##兹 +##养 +##兼 +##兽 +##冀 +##内 +##円 +##冇 +##冈 +##冉 +##冊 +##册 +##再 +##冏 +##冒 +##冕 +##冗 +##写 +##军 +##农 +##冠 +##冢 +##冤 +##冥 +##冨 +##冪 +##冬 +##冯 +##冰 +##冲 +##决 +##况 +##冶 +##冷 +##冻 +##冼 +##冽 +##冾 +##净 +##凄 +##准 +##凇 +##凈 +##凉 +##凋 +##凌 +##凍 +##减 +##凑 +##凛 +##凜 +##凝 +##几 +##凡 +##凤 +##処 +##凪 +##凭 +##凯 +##凰 +##凱 +##凳 +##凶 +##凸 +##凹 +##出 +##击 +##函 +##凿 +##刀 +##刁 +##刃 +##分 +##切 +##刈 +##刊 +##刍 +##刎 +##刑 +##划 +##列 +##刘 
+##则 +##刚 +##创 +##初 +##删 +##判 +##別 +##刨 +##利 +##刪 +##别 +##刮 +##到 +##制 +##刷 +##券 +##刹 +##刺 +##刻 +##刽 +##剁 +##剂 +##剃 +##則 +##剉 +##削 +##剋 +##剌 +##前 +##剎 +##剐 +##剑 +##剔 +##剖 +##剛 +##剜 +##剝 +##剣 +##剤 +##剥 +##剧 +##剩 +##剪 +##副 +##割 +##創 +##剷 +##剽 +##剿 +##劃 +##劇 +##劈 +##劉 +##劊 +##劍 +##劏 +##劑 +##力 +##劝 +##办 +##功 +##加 +##务 +##劣 +##动 +##助 +##努 +##劫 +##劭 +##励 +##劲 +##劳 +##労 +##劵 +##効 +##劾 +##势 +##勁 +##勃 +##勇 +##勉 +##勋 +##勐 +##勒 +##動 +##勖 +##勘 +##務 +##勛 +##勝 +##勞 +##募 +##勢 +##勤 +##勧 +##勳 +##勵 +##勸 +##勺 +##勻 +##勾 +##勿 +##匀 +##包 +##匆 +##匈 +##匍 +##匐 +##匕 +##化 +##北 +##匙 +##匝 +##匠 +##匡 +##匣 +##匪 +##匮 +##匯 +##匱 +##匹 +##区 +##医 +##匾 +##匿 +##區 +##十 +##千 +##卅 +##升 +##午 +##卉 +##半 +##卍 +##华 +##协 +##卑 +##卒 +##卓 +##協 +##单 +##卖 +##南 +##単 +##博 +##卜 +##卞 +##卟 +##占 +##卡 +##卢 +##卤 +##卦 +##卧 +##卫 +##卮 +##卯 +##印 +##危 +##即 +##却 +##卵 +##卷 +##卸 +##卻 +##卿 +##厂 +##厄 +##厅 +##历 +##厉 +##压 +##厌 +##厕 +##厘 +##厚 +##厝 +##原 +##厢 +##厥 +##厦 +##厨 +##厩 +##厭 +##厮 +##厲 +##厳 +##去 +##县 +##叁 +##参 +##參 +##又 +##叉 +##及 +##友 +##双 +##反 +##収 +##发 +##叔 +##取 +##受 +##变 +##叙 +##叛 +##叟 +##叠 +##叡 +##叢 +##口 +##古 +##句 +##另 +##叨 +##叩 +##只 +##叫 +##召 +##叭 +##叮 +##可 +##台 +##叱 +##史 +##右 +##叵 +##叶 +##号 +##司 +##叹 +##叻 +##叼 +##叽 +##吁 +##吃 +##各 +##吆 +##合 +##吉 +##吊 +##吋 +##同 +##名 +##后 +##吏 +##吐 +##向 +##吒 +##吓 +##吕 +##吖 +##吗 +##君 +##吝 +##吞 +##吟 +##吠 +##吡 +##否 +##吧 +##吨 +##吩 +##含 +##听 +##吭 +##吮 +##启 +##吱 +##吳 +##吴 +##吵 +##吶 +##吸 +##吹 +##吻 +##吼 +##吽 +##吾 +##呀 +##呂 +##呃 +##呆 +##呈 +##告 +##呋 +##呎 +##呐 +##呓 +##呕 +##呗 +##员 +##呛 +##呜 +##呢 +##呤 +##呦 +##周 +##呱 +##呲 +##味 +##呵 +##呷 +##呸 +##呻 +##呼 +##命 +##咀 +##咁 +##咂 +##咄 +##咆 +##咋 +##和 +##咎 +##咏 +##咐 +##咒 +##咔 +##咕 +##咖 +##咗 +##咘 +##咙 +##咚 +##咛 +##咣 +##咤 +##咦 +##咧 +##咨 +##咩 +##咪 +##咫 +##咬 +##咭 +##咯 +##咱 +##咲 +##咳 +##咸 +##咻 +##咽 +##咿 +##哀 +##品 +##哂 +##哄 +##哆 +##哇 +##哈 +##哉 +##哋 +##哌 +##响 +##哎 +##哏 +##哐 +##哑 +##哒 +##哔 +##哗 +##哟 +##員 +##哥 +##哦 +##哧 +##哨 +##哩 +##哪 +##哭 +##哮 +##哲 +##哺 +##哼 +##哽 +##唁 +##唄 +##唆 +##唇 +##唉 +##唏 +##唐 +##唑 +##唔 +##唠 +##唤 +##唧 +##唬 +##售 +##唯 +##唰 +##唱 +##唳 +##唷 +##唸 +##唾 +##啃 +##啄 +##商 +##啉 +##啊 +##問 +##啓 +##啕 +##啖 +##啜 +##啞 +##啟 +##啡 +##啤 +##啥 +##啦 +##啧 +##啪 +##啫 +##啬 +##啮 +##啰 +##啱 +##啲 +##啵 +##啶 +##啷 +##啸 +##啻 +##啼 +##啾 +##喀 +##喂 +##喃 +##善 +##喆 +##喇 +##喉 +##喊 +##喋 +##喎 +##喏 +##喔 +##喘 +##喙 +##喚 +##喜 +##喝 +##喟 +##喧 +##喪 +##喫 +##喬 +##單 +##喰 +##喱 +##喲 +##喳 +##喵 +##営 +##喷 +##喹 +##喺 +##喻 +##喽 +##嗅 +##嗆 +##嗇 +##嗎 +##嗑 +##嗒 +##嗓 +##嗔 +##嗖 +##嗚 +##嗜 +##嗝 +##嗟 +##嗡 +##嗣 +##嗤 +##嗦 +##嗨 +##嗪 +##嗬 +##嗯 +##嗰 +##嗲 +##嗳 +##嗶 +##嗷 +##嗽 +##嘀 +##嘅 +##嘆 +##嘈 +##嘉 +##嘌 +##嘍 +##嘎 +##嘔 +##嘖 +##嘗 +##嘘 +##嘚 +##嘛 +##嘜 +##嘞 +##嘟 +##嘢 +##嘣 +##嘤 +##嘧 +##嘩 +##嘭 +##嘮 +##嘯 +##嘰 +##嘱 +##嘲 +##嘴 +##嘶 +##嘸 +##嘹 +##嘻 +##嘿 +##噁 +##噌 +##噎 +##噓 +##噔 +##噗 +##噙 +##噜 +##噠 +##噢 +##噤 +##器 +##噩 +##噪 +##噬 +##噱 +##噴 +##噶 +##噸 +##噹 +##噻 +##噼 +##嚀 +##嚇 +##嚎 +##嚏 +##嚐 +##嚓 +##嚕 +##嚟 +##嚣 +##嚥 +##嚨 +##嚮 +##嚴 +##嚷 +##嚼 +##囂 +##囉 +##囊 +##囍 +##囑 +##囔 +##囗 +##囚 +##四 +##囝 +##回 +##囟 +##因 +##囡 +##团 +##団 +##囤 +##囧 +##囪 +##囫 +##园 +##困 +##囱 +##囲 +##図 +##围 +##囹 +##固 +##国 +##图 +##囿 +##圃 +##圄 +##圆 +##圈 +##國 +##圍 +##圏 +##園 +##圓 +##圖 +##團 +##圜 +##土 +##圣 +##圧 +##在 +##圩 +##圭 +##地 +##圳 +##场 +##圻 +##圾 +##址 +##坂 +##均 +##坊 +##坍 +##坎 +##坏 +##坐 +##坑 +##块 +##坚 +##坛 +##坝 +##坞 +##坟 +##坠 +##坡 +##坤 +##坦 +##坨 +##坪 +##坯 +##坳 +##坵 +##坷 +##垂 +##垃 +##垄 +##型 +##垒 +##垚 +##垛 +##垠 +##垢 +##垣 +##垦 +##垩 +##垫 +##垭 +##垮 +##垵 +##埂 +##埃 +##埋 +##城 +##埔 +##埕 +##埗 +##域 +##埠 +##埤 +##埵 +##執 +##埸 +##培 +##基 +##埼 +##堀 +##堂 +##堃 +##堅 +##堆 +##堇 +##堑 +##堕 +##堙 +##堡 +##堤 +##堪 +##堯 +##堰 +##報 +##場 +##堵 +##堺 +##堿 +##塊 +##塌 +##塑 +##塔 +##塗 +##塘 +##塚 +##塞 +##塢 +##塩 +##填 +##塬 +##塭 +##塵 +##塾 +##墀 +##境 +##墅 +##墉 +##墊 +##墒 
+##墓 +##増 +##墘 +##墙 +##墜 +##增 +##墟 +##墨 +##墩 +##墮 +##墳 +##墻 +##墾 +##壁 +##壅 +##壆 +##壇 +##壊 +##壑 +##壓 +##壕 +##壘 +##壞 +##壟 +##壢 +##壤 +##壩 +##士 +##壬 +##壮 +##壯 +##声 +##売 +##壳 +##壶 +##壹 +##壺 +##壽 +##处 +##备 +##変 +##复 +##夏 +##夔 +##夕 +##外 +##夙 +##多 +##夜 +##够 +##夠 +##夢 +##夥 +##大 +##天 +##太 +##夫 +##夭 +##央 +##夯 +##失 +##头 +##夷 +##夸 +##夹 +##夺 +##夾 +##奂 +##奄 +##奇 +##奈 +##奉 +##奋 +##奎 +##奏 +##奐 +##契 +##奔 +##奕 +##奖 +##套 +##奘 +##奚 +##奠 +##奢 +##奥 +##奧 +##奪 +##奬 +##奮 +##女 +##奴 +##奶 +##奸 +##她 +##好 +##如 +##妃 +##妄 +##妆 +##妇 +##妈 +##妊 +##妍 +##妒 +##妓 +##妖 +##妘 +##妙 +##妝 +##妞 +##妣 +##妤 +##妥 +##妨 +##妩 +##妪 +##妮 +##妲 +##妳 +##妹 +##妻 +##妾 +##姆 +##姉 +##姊 +##始 +##姍 +##姐 +##姑 +##姒 +##姓 +##委 +##姗 +##姚 +##姜 +##姝 +##姣 +##姥 +##姦 +##姨 +##姪 +##姫 +##姬 +##姹 +##姻 +##姿 +##威 +##娃 +##娄 +##娅 +##娆 +##娇 +##娉 +##娑 +##娓 +##娘 +##娛 +##娜 +##娟 +##娠 +##娣 +##娥 +##娩 +##娱 +##娲 +##娴 +##娶 +##娼 +##婀 +##婁 +##婆 +##婉 +##婊 +##婕 +##婚 +##婢 +##婦 +##婧 +##婪 +##婭 +##婴 +##婵 +##婶 +##婷 +##婺 +##婿 +##媒 +##媚 +##媛 +##媞 +##媧 +##媲 +##媳 +##媽 +##媾 +##嫁 +##嫂 +##嫉 +##嫌 +##嫑 +##嫔 +##嫖 +##嫘 +##嫚 +##嫡 +##嫣 +##嫦 +##嫩 +##嫲 +##嫵 +##嫻 +##嬅 +##嬉 +##嬌 +##嬗 +##嬛 +##嬢 +##嬤 +##嬪 +##嬰 +##嬴 +##嬷 +##嬸 +##嬿 +##孀 +##孃 +##子 +##孑 +##孔 +##孕 +##孖 +##字 +##存 +##孙 +##孚 +##孛 +##孜 +##孝 +##孟 +##孢 +##季 +##孤 +##学 +##孩 +##孪 +##孫 +##孬 +##孰 +##孱 +##孳 +##孵 +##學 +##孺 +##孽 +##孿 +##宁 +##它 +##宅 +##宇 +##守 +##安 +##宋 +##完 +##宏 +##宓 +##宕 +##宗 +##官 +##宙 +##定 +##宛 +##宜 +##宝 +##实 +##実 +##宠 +##审 +##客 +##宣 +##室 +##宥 +##宦 +##宪 +##宫 +##宮 +##宰 +##害 +##宴 +##宵 +##家 +##宸 +##容 +##宽 +##宾 +##宿 +##寂 +##寄 +##寅 +##密 +##寇 +##富 +##寐 +##寒 +##寓 +##寛 +##寝 +##寞 +##察 +##寡 +##寢 +##寥 +##實 +##寧 +##寨 +##審 +##寫 +##寬 +##寮 +##寰 +##寵 +##寶 +##寸 +##对 +##寺 +##寻 +##导 +##対 +##寿 +##封 +##専 +##射 +##将 +##將 +##專 +##尉 +##尊 +##尋 +##對 +##導 +##小 +##少 +##尔 +##尕 +##尖 +##尘 +##尚 +##尝 +##尤 +##尧 +##尬 +##就 +##尴 +##尷 +##尸 +##尹 +##尺 +##尻 +##尼 +##尽 +##尾 +##尿 +##局 +##屁 +##层 +##屄 +##居 +##屆 +##屈 +##屉 +##届 +##屋 +##屌 +##屍 +##屎 +##屏 +##屐 +##屑 +##展 +##屜 +##属 +##屠 +##屡 +##屢 +##層 +##履 +##屬 +##屯 +##山 +##屹 +##屿 +##岀 +##岁 +##岂 +##岌 +##岐 +##岑 +##岔 +##岖 +##岗 +##岘 +##岙 +##岚 +##岛 +##岡 +##岩 +##岫 +##岬 +##岭 +##岱 +##岳 +##岷 +##岸 +##峇 +##峋 +##峒 +##峙 +##峡 +##峤 +##峥 +##峦 +##峨 +##峪 +##峭 +##峯 +##峰 +##峴 +##島 +##峻 +##峽 +##崁 +##崂 +##崆 +##崇 +##崎 +##崑 +##崔 +##崖 +##崗 +##崙 +##崛 +##崧 +##崩 +##崭 +##崴 +##崽 +##嵇 +##嵊 +##嵋 +##嵌 +##嵐 +##嵘 +##嵩 +##嵬 +##嵯 +##嶂 +##嶄 +##嶇 +##嶋 +##嶙 +##嶺 +##嶼 +##嶽 +##巅 +##巍 +##巒 +##巔 +##巖 +##川 +##州 +##巡 +##巢 +##工 +##左 +##巧 +##巨 +##巩 +##巫 +##差 +##己 +##已 +##巳 +##巴 +##巷 +##巻 +##巽 +##巾 +##巿 +##币 +##市 +##布 +##帅 +##帆 +##师 +##希 +##帐 +##帑 +##帕 +##帖 +##帘 +##帚 +##帛 +##帜 +##帝 +##帥 +##带 +##帧 +##師 +##席 +##帮 +##帯 +##帰 +##帳 +##帶 +##帷 +##常 +##帼 +##帽 +##幀 +##幂 +##幄 +##幅 +##幌 +##幔 +##幕 +##幟 +##幡 +##幢 +##幣 +##幫 +##干 +##平 +##年 +##并 +##幸 +##幹 +##幺 +##幻 +##幼 +##幽 +##幾 +##广 +##庁 +##広 +##庄 +##庆 +##庇 +##床 +##序 +##庐 +##库 +##应 +##底 +##庖 +##店 +##庙 +##庚 +##府 +##庞 +##废 +##庠 +##度 +##座 +##庫 +##庭 +##庵 +##庶 +##康 +##庸 +##庹 +##庾 +##廁 +##廂 +##廃 +##廈 +##廉 +##廊 +##廓 +##廖 +##廚 +##廝 +##廟 +##廠 +##廢 +##廣 +##廬 +##廳 +##延 +##廷 +##建 +##廿 +##开 +##弁 +##异 +##弃 +##弄 +##弈 +##弊 +##弋 +##式 +##弑 +##弒 +##弓 +##弔 +##引 +##弗 +##弘 +##弛 +##弟 +##张 +##弥 +##弦 +##弧 +##弩 +##弭 +##弯 +##弱 +##張 +##強 +##弹 +##强 +##弼 +##弾 +##彅 +##彆 +##彈 +##彌 +##彎 +##归 +##当 +##录 +##彗 +##彙 +##彝 +##形 +##彤 +##彥 +##彦 +##彧 +##彩 +##彪 +##彫 +##彬 +##彭 +##彰 +##影 +##彷 +##役 +##彻 +##彼 +##彿 +##往 +##征 +##径 +##待 +##徇 +##很 +##徉 +##徊 +##律 +##後 +##徐 +##徑 +##徒 +##従 +##徕 +##得 +##徘 +##徙 +##徜 +##從 +##徠 +##御 +##徨 +##復 +##循 +##徬 +##微 +##徳 +##徴 +##徵 +##德 +##徹 +##徼 +##徽 +##心 +##必 +##忆 +##忌 +##忍 +##忏 +##忐 +##忑 +##忒 +##忖 +##志 +##忘 +##忙 +##応 +##忠 +##忡 +##忤 +##忧 +##忪 +##快 +##忱 +##念 +##忻 +##忽 +##忿 +##怀 
+##态 +##怂 +##怅 +##怆 +##怎 +##怏 +##怒 +##怔 +##怕 +##怖 +##怙 +##怜 +##思 +##怠 +##怡 +##急 +##怦 +##性 +##怨 +##怪 +##怯 +##怵 +##总 +##怼 +##恁 +##恃 +##恆 +##恋 +##恍 +##恐 +##恒 +##恕 +##恙 +##恚 +##恢 +##恣 +##恤 +##恥 +##恨 +##恩 +##恪 +##恫 +##恬 +##恭 +##息 +##恰 +##恳 +##恵 +##恶 +##恸 +##恺 +##恻 +##恼 +##恿 +##悄 +##悅 +##悉 +##悌 +##悍 +##悔 +##悖 +##悚 +##悟 +##悠 +##患 +##悦 +##您 +##悩 +##悪 +##悬 +##悯 +##悱 +##悲 +##悴 +##悵 +##悶 +##悸 +##悻 +##悼 +##悽 +##情 +##惆 +##惇 +##惊 +##惋 +##惑 +##惕 +##惘 +##惚 +##惜 +##惟 +##惠 +##惡 +##惦 +##惧 +##惨 +##惩 +##惫 +##惬 +##惭 +##惮 +##惯 +##惰 +##惱 +##想 +##惴 +##惶 +##惹 +##惺 +##愁 +##愆 +##愈 +##愉 +##愍 +##意 +##愕 +##愚 +##愛 +##愜 +##感 +##愣 +##愤 +##愧 +##愫 +##愷 +##愿 +##慄 +##慈 +##態 +##慌 +##慎 +##慑 +##慕 +##慘 +##慚 +##慟 +##慢 +##慣 +##慧 +##慨 +##慫 +##慮 +##慰 +##慳 +##慵 +##慶 +##慷 +##慾 +##憂 +##憊 +##憋 +##憎 +##憐 +##憑 +##憔 +##憚 +##憤 +##憧 +##憨 +##憩 +##憫 +##憬 +##憲 +##憶 +##憾 +##懂 +##懇 +##懈 +##應 +##懊 +##懋 +##懑 +##懒 +##懦 +##懲 +##懵 +##懶 +##懷 +##懸 +##懺 +##懼 +##懾 +##懿 +##戀 +##戈 +##戊 +##戌 +##戍 +##戎 +##戏 +##成 +##我 +##戒 +##戕 +##或 +##战 +##戚 +##戛 +##戟 +##戡 +##戦 +##截 +##戬 +##戮 +##戰 +##戲 +##戳 +##戴 +##戶 +##户 +##戸 +##戻 +##戾 +##房 +##所 +##扁 +##扇 +##扈 +##扉 +##手 +##才 +##扎 +##扑 +##扒 +##打 +##扔 +##払 +##托 +##扛 +##扣 +##扦 +##执 +##扩 +##扪 +##扫 +##扬 +##扭 +##扮 +##扯 +##扰 +##扱 +##扳 +##扶 +##批 +##扼 +##找 +##承 +##技 +##抄 +##抉 +##把 +##抑 +##抒 +##抓 +##投 +##抖 +##抗 +##折 +##抚 +##抛 +##抜 +##択 +##抟 +##抠 +##抡 +##抢 +##护 +##报 +##抨 +##披 +##抬 +##抱 +##抵 +##抹 +##押 +##抽 +##抿 +##拂 +##拄 +##担 +##拆 +##拇 +##拈 +##拉 +##拋 +##拌 +##拍 +##拎 +##拐 +##拒 +##拓 +##拔 +##拖 +##拗 +##拘 +##拙 +##拚 +##招 +##拜 +##拟 +##拡 +##拢 +##拣 +##拥 +##拦 +##拧 +##拨 +##择 +##括 +##拭 +##拮 +##拯 +##拱 +##拳 +##拴 +##拷 +##拼 +##拽 +##拾 +##拿 +##持 +##挂 +##指 +##挈 +##按 +##挎 +##挑 +##挖 +##挙 +##挚 +##挛 +##挝 +##挞 +##挟 +##挠 +##挡 +##挣 +##挤 +##挥 +##挨 +##挪 +##挫 +##振 +##挲 +##挹 +##挺 +##挽 +##挾 +##捂 +##捅 +##捆 +##捉 +##捋 +##捌 +##捍 +##捎 +##捏 +##捐 +##捕 +##捞 +##损 +##捡 +##换 +##捣 +##捧 +##捨 +##捩 +##据 +##捱 +##捲 +##捶 +##捷 +##捺 +##捻 +##掀 +##掂 +##掃 +##掇 +##授 +##掉 +##掌 +##掏 +##掐 +##排 +##掖 +##掘 +##掙 +##掛 +##掠 +##採 +##探 +##掣 +##接 +##控 +##推 +##掩 +##措 +##掬 +##掰 +##掲 +##掳 +##掴 +##掷 +##掸 +##掺 +##揀 +##揃 +##揄 +##揆 +##揉 +##揍 +##描 +##提 +##插 +##揖 +##揚 +##換 +##握 +##揣 +##揩 +##揪 +##揭 +##揮 +##援 +##揶 +##揸 +##揹 +##揽 +##搀 +##搁 +##搂 +##搅 +##損 +##搏 +##搐 +##搓 +##搔 +##搖 +##搗 +##搜 +##搞 +##搡 +##搪 +##搬 +##搭 +##搵 +##搶 +##携 +##搽 +##摀 +##摁 +##摄 +##摆 +##摇 +##摈 +##摊 +##摒 +##摔 +##摘 +##摞 +##摟 +##摧 +##摩 +##摯 +##摳 +##摸 +##摹 +##摺 +##摻 +##撂 +##撃 +##撅 +##撇 +##撈 +##撐 +##撑 +##撒 +##撓 +##撕 +##撚 +##撞 +##撤 +##撥 +##撩 +##撫 +##撬 +##播 +##撮 +##撰 +##撲 +##撵 +##撷 +##撸 +##撻 +##撼 +##撿 +##擀 +##擁 +##擂 +##擄 +##擅 +##擇 +##擊 +##擋 +##操 +##擎 +##擒 +##擔 +##擘 +##據 +##擞 +##擠 +##擡 +##擢 +##擦 +##擬 +##擰 +##擱 +##擲 +##擴 +##擷 +##擺 +##擼 +##擾 +##攀 +##攏 +##攒 +##攔 +##攘 +##攙 +##攜 +##攝 +##攞 +##攢 +##攣 +##攤 +##攥 +##攪 +##攫 +##攬 +##支 +##收 +##攸 +##改 +##攻 +##放 +##政 +##故 +##效 +##敌 +##敍 +##敎 +##敏 +##救 +##敕 +##敖 +##敗 +##敘 +##教 +##敛 +##敝 +##敞 +##敢 +##散 +##敦 +##敬 +##数 +##敲 +##整 +##敵 +##敷 +##數 +##斂 +##斃 +##文 +##斋 +##斌 +##斎 +##斐 +##斑 +##斓 +##斗 +##料 +##斛 +##斜 +##斟 +##斡 +##斤 +##斥 +##斧 +##斩 +##斫 +##斬 +##断 +##斯 +##新 +##斷 +##方 +##於 +##施 +##旁 +##旃 +##旅 +##旋 +##旌 +##旎 +##族 +##旖 +##旗 +##无 +##既 +##日 +##旦 +##旧 +##旨 +##早 +##旬 +##旭 +##旮 +##旱 +##时 +##旷 +##旺 +##旻 +##昀 +##昂 +##昆 +##昇 +##昉 +##昊 +##昌 +##明 +##昏 +##易 +##昔 +##昕 +##昙 +##星 +##映 +##春 +##昧 +##昨 +##昭 +##是 +##昱 +##昴 +##昵 +##昶 +##昼 +##显 +##晁 +##時 +##晃 +##晉 +##晋 +##晌 +##晏 +##晒 +##晓 +##晔 +##晕 +##晖 +##晗 +##晚 +##晝 +##晞 +##晟 +##晤 +##晦 +##晨 +##晩 +##普 +##景 +##晰 +##晴 +##晶 +##晷 +##智 +##晾 +##暂 +##暄 +##暇 +##暈 +##暉 +##暌 +##暐 +##暑 +##暖 +##暗 +##暝 +##暢 +##暧 +##暨 +##暫 +##暮 +##暱 +##暴 +##暸 +##暹 +##曄 +##曆 +##曇 +##曉 +##曖 +##曙 +##曜 +##曝 +##曠 +##曦 +##曬 +##曰 
+##曲 +##曳 +##更 +##書 +##曹 +##曼 +##曾 +##替 +##最 +##會 +##月 +##有 +##朋 +##服 +##朐 +##朔 +##朕 +##朗 +##望 +##朝 +##期 +##朦 +##朧 +##木 +##未 +##末 +##本 +##札 +##朮 +##术 +##朱 +##朴 +##朵 +##机 +##朽 +##杀 +##杂 +##权 +##杆 +##杈 +##杉 +##李 +##杏 +##材 +##村 +##杓 +##杖 +##杜 +##杞 +##束 +##杠 +##条 +##来 +##杨 +##杭 +##杯 +##杰 +##東 +##杳 +##杵 +##杷 +##杼 +##松 +##板 +##极 +##构 +##枇 +##枉 +##枋 +##析 +##枕 +##林 +##枚 +##果 +##枝 +##枢 +##枣 +##枪 +##枫 +##枭 +##枯 +##枰 +##枱 +##枳 +##架 +##枷 +##枸 +##柄 +##柏 +##某 +##柑 +##柒 +##染 +##柔 +##柘 +##柚 +##柜 +##柞 +##柠 +##柢 +##查 +##柩 +##柬 +##柯 +##柱 +##柳 +##柴 +##柵 +##査 +##柿 +##栀 +##栃 +##栄 +##栅 +##标 +##栈 +##栉 +##栋 +##栎 +##栏 +##树 +##栓 +##栖 +##栗 +##校 +##栩 +##株 +##样 +##核 +##根 +##格 +##栽 +##栾 +##桀 +##桁 +##桂 +##桃 +##桅 +##框 +##案 +##桉 +##桌 +##桎 +##桐 +##桑 +##桓 +##桔 +##桜 +##桠 +##桡 +##桢 +##档 +##桥 +##桦 +##桧 +##桨 +##桩 +##桶 +##桿 +##梁 +##梅 +##梆 +##梏 +##梓 +##梗 +##條 +##梟 +##梢 +##梦 +##梧 +##梨 +##梭 +##梯 +##械 +##梳 +##梵 +##梶 +##检 +##棂 +##棄 +##棉 +##棋 +##棍 +##棒 +##棕 +##棗 +##棘 +##棚 +##棟 +##棠 +##棣 +##棧 +##森 +##棱 +##棲 +##棵 +##棹 +##棺 +##椁 +##椅 +##椋 +##植 +##椎 +##椒 +##検 +##椪 +##椭 +##椰 +##椹 +##椽 +##椿 +##楂 +##楊 +##楓 +##楔 +##楚 +##楝 +##楞 +##楠 +##楣 +##楨 +##楫 +##業 +##楮 +##極 +##楷 +##楸 +##楹 +##楼 +##楽 +##概 +##榄 +##榆 +##榈 +##榉 +##榔 +##榕 +##榖 +##榛 +##榜 +##榨 +##榫 +##榭 +##榮 +##榱 +##榴 +##榷 +##榻 +##槁 +##槃 +##構 +##槌 +##槍 +##槎 +##槐 +##槓 +##様 +##槛 +##槟 +##槤 +##槭 +##槲 +##槳 +##槻 +##槽 +##槿 +##樁 +##樂 +##樊 +##樑 +##樓 +##標 +##樞 +##樟 +##模 +##樣 +##権 +##横 +##樫 +##樯 +##樱 +##樵 +##樸 +##樹 +##樺 +##樽 +##樾 +##橄 +##橇 +##橋 +##橐 +##橘 +##橙 +##機 +##橡 +##橢 +##橫 +##橱 +##橹 +##橼 +##檀 +##檄 +##檎 +##檐 +##檔 +##檗 +##檜 +##檢 +##檬 +##檯 +##檳 +##檸 +##檻 +##櫃 +##櫚 +##櫛 +##櫥 +##櫸 +##櫻 +##欄 +##權 +##欒 +##欖 +##欠 +##次 +##欢 +##欣 +##欧 +##欲 +##欸 +##欺 +##欽 +##款 +##歆 +##歇 +##歉 +##歌 +##歎 +##歐 +##歓 +##歙 +##歛 +##歡 +##止 +##正 +##此 +##步 +##武 +##歧 +##歩 +##歪 +##歯 +##歲 +##歳 +##歴 +##歷 +##歸 +##歹 +##死 +##歼 +##殁 +##殃 +##殆 +##殇 +##殉 +##殊 +##残 +##殒 +##殓 +##殖 +##殘 +##殞 +##殡 +##殤 +##殭 +##殯 +##殲 +##殴 +##段 +##殷 +##殺 +##殼 +##殿 +##毀 +##毁 +##毂 +##毅 +##毆 +##毋 +##母 +##毎 +##每 +##毒 +##毓 +##比 +##毕 +##毗 +##毘 +##毙 +##毛 +##毡 +##毫 +##毯 +##毽 +##氈 +##氏 +##氐 +##民 +##氓 +##气 +##氖 +##気 +##氙 +##氛 +##氟 +##氡 +##氢 +##氣 +##氤 +##氦 +##氧 +##氨 +##氪 +##氫 +##氮 +##氯 +##氰 +##氲 +##水 +##氷 +##永 +##氹 +##氾 +##汀 +##汁 +##求 +##汆 +##汇 +##汉 +##汎 +##汐 +##汕 +##汗 +##汙 +##汛 +##汝 +##汞 +##江 +##池 +##污 +##汤 +##汨 +##汩 +##汪 +##汰 +##汲 +##汴 +##汶 +##汹 +##決 +##汽 +##汾 +##沁 +##沂 +##沃 +##沅 +##沈 +##沉 +##沌 +##沏 +##沐 +##沒 +##沓 +##沖 +##沙 +##沛 +##沟 +##没 +##沢 +##沣 +##沥 +##沦 +##沧 +##沪 +##沫 +##沭 +##沮 +##沱 +##河 +##沸 +##油 +##治 +##沼 +##沽 +##沾 +##沿 +##況 +##泄 +##泉 +##泊 +##泌 +##泓 +##法 +##泗 +##泛 +##泞 +##泠 +##泡 +##波 +##泣 +##泥 +##注 +##泪 +##泫 +##泮 +##泯 +##泰 +##泱 +##泳 +##泵 +##泷 +##泸 +##泻 +##泼 +##泽 +##泾 +##洁 +##洄 +##洋 +##洒 +##洗 +##洙 +##洛 +##洞 +##津 +##洩 +##洪 +##洮 +##洱 +##洲 +##洵 +##洶 +##洸 +##洹 +##活 +##洼 +##洽 +##派 +##流 +##浃 +##浄 +##浅 +##浆 +##浇 +##浊 +##测 +##济 +##浏 +##浑 +##浒 +##浓 +##浔 +##浙 +##浚 +##浜 +##浣 +##浦 +##浩 +##浪 +##浬 +##浮 +##浯 +##浴 +##海 +##浸 +##涂 +##涅 +##涇 +##消 +##涉 +##涌 +##涎 +##涓 +##涔 +##涕 +##涙 +##涛 +##涝 +##涞 +##涟 +##涠 +##涡 +##涣 +##涤 +##润 +##涧 +##涨 +##涩 +##涪 +##涮 +##涯 +##液 +##涵 +##涸 +##涼 +##涿 +##淀 +##淄 +##淅 +##淆 +##淇 +##淋 +##淌 +##淑 +##淒 +##淖 +##淘 +##淙 +##淚 +##淞 +##淡 +##淤 +##淦 +##淨 +##淩 +##淪 +##淫 +##淬 +##淮 +##深 +##淳 +##淵 +##混 +##淹 +##淺 +##添 +##淼 +##清 +##済 +##渉 +##渊 +##渋 +##渍 +##渎 +##渐 +##渔 +##渗 +##渙 +##渚 +##減 +##渝 +##渠 +##渡 +##渣 +##渤 +##渥 +##渦 +##温 +##測 +##渭 +##港 +##渲 +##渴 +##游 +##渺 +##渾 +##湃 +##湄 +##湊 +##湍 +##湖 +##湘 +##湛 +##湟 +##湧 +##湫 +##湮 +##湯 +##湳 +##湾 +##湿 +##満 +##溃 +##溅 +##溉 +##溏 +##源 +##準 +##溜 +##溝 +##溟 +##溢 +##溥 +##溧 +##溪 +##溫 +##溯 +##溱 +##溴 +##溶 +##溺 +##溼 +##滁 +##滂 +##滄 +##滅 +##滇 +##滋 +##滌 +##滑 +##滓 
+##滔 +##滕 +##滙 +##滚 +##滝 +##滞 +##滟 +##满 +##滢 +##滤 +##滥 +##滦 +##滨 +##滩 +##滬 +##滯 +##滲 +##滴 +##滷 +##滸 +##滾 +##滿 +##漁 +##漂 +##漆 +##漉 +##漏 +##漓 +##演 +##漕 +##漠 +##漢 +##漣 +##漩 +##漪 +##漫 +##漬 +##漯 +##漱 +##漲 +##漳 +##漸 +##漾 +##漿 +##潆 +##潇 +##潋 +##潍 +##潑 +##潔 +##潘 +##潛 +##潜 +##潞 +##潟 +##潢 +##潤 +##潦 +##潧 +##潭 +##潮 +##潰 +##潴 +##潸 +##潺 +##潼 +##澀 +##澄 +##澆 +##澈 +##澍 +##澎 +##澗 +##澜 +##澡 +##澤 +##澧 +##澱 +##澳 +##澹 +##激 +##濁 +##濂 +##濃 +##濑 +##濒 +##濕 +##濘 +##濛 +##濟 +##濠 +##濡 +##濤 +##濫 +##濬 +##濮 +##濯 +##濱 +##濺 +##濾 +##瀅 +##瀆 +##瀉 +##瀋 +##瀏 +##瀑 +##瀕 +##瀘 +##瀚 +##瀛 +##瀝 +##瀞 +##瀟 +##瀧 +##瀨 +##瀬 +##瀰 +##瀾 +##灌 +##灏 +##灑 +##灘 +##灝 +##灞 +##灣 +##火 +##灬 +##灭 +##灯 +##灰 +##灵 +##灶 +##灸 +##灼 +##災 +##灾 +##灿 +##炀 +##炁 +##炅 +##炉 +##炊 +##炎 +##炒 +##炔 +##炕 +##炖 +##炙 +##炜 +##炫 +##炬 +##炭 +##炮 +##炯 +##炳 +##炷 +##炸 +##点 +##為 +##炼 +##炽 +##烁 +##烂 +##烃 +##烈 +##烊 +##烏 +##烘 +##烙 +##烛 +##烟 +##烤 +##烦 +##烧 +##烨 +##烩 +##烫 +##烬 +##热 +##烯 +##烷 +##烹 +##烽 +##焉 +##焊 +##焕 +##焖 +##焗 +##焘 +##焙 +##焚 +##焜 +##無 +##焦 +##焯 +##焰 +##焱 +##然 +##焼 +##煅 +##煉 +##煊 +##煌 +##煎 +##煒 +##煖 +##煙 +##煜 +##煞 +##煤 +##煥 +##煦 +##照 +##煨 +##煩 +##煮 +##煲 +##煸 +##煽 +##熄 +##熊 +##熏 +##熒 +##熔 +##熙 +##熟 +##熠 +##熨 +##熬 +##熱 +##熵 +##熹 +##熾 +##燁 +##燃 +##燄 +##燈 +##燉 +##燊 +##燎 +##燒 +##燔 +##燕 +##燙 +##燜 +##營 +##燥 +##燦 +##燧 +##燭 +##燮 +##燴 +##燻 +##燼 +##燿 +##爆 +##爍 +##爐 +##爛 +##爪 +##爬 +##爭 +##爰 +##爱 +##爲 +##爵 +##父 +##爷 +##爸 +##爹 +##爺 +##爻 +##爽 +##爾 +##牆 +##片 +##版 +##牌 +##牍 +##牒 +##牙 +##牛 +##牝 +##牟 +##牠 +##牡 +##牢 +##牦 +##牧 +##物 +##牯 +##牲 +##牴 +##牵 +##特 +##牺 +##牽 +##犀 +##犁 +##犄 +##犊 +##犍 +##犒 +##犢 +##犧 +##犬 +##犯 +##状 +##犷 +##犸 +##犹 +##狀 +##狂 +##狄 +##狈 +##狎 +##狐 +##狒 +##狗 +##狙 +##狞 +##狠 +##狡 +##狩 +##独 +##狭 +##狮 +##狰 +##狱 +##狸 +##狹 +##狼 +##狽 +##猎 +##猕 +##猖 +##猗 +##猙 +##猛 +##猜 +##猝 +##猥 +##猩 +##猪 +##猫 +##猬 +##献 +##猴 +##猶 +##猷 +##猾 +##猿 +##獄 +##獅 +##獎 +##獐 +##獒 +##獗 +##獠 +##獣 +##獨 +##獭 +##獰 +##獲 +##獵 +##獷 +##獸 +##獺 +##獻 +##獼 +##獾 +##玄 +##率 +##玉 +##王 +##玑 +##玖 +##玛 +##玟 +##玠 +##玥 +##玩 +##玫 +##玮 +##环 +##现 +##玲 +##玳 +##玷 +##玺 +##玻 +##珀 +##珂 +##珅 +##珈 +##珉 +##珊 +##珍 +##珏 +##珐 +##珑 +##珙 +##珞 +##珠 +##珣 +##珥 +##珩 +##珪 +##班 +##珮 +##珲 +##珺 +##現 +##球 +##琅 +##理 +##琇 +##琉 +##琊 +##琍 +##琏 +##琐 +##琛 +##琢 +##琥 +##琦 +##琨 +##琪 +##琬 +##琮 +##琰 +##琲 +##琳 +##琴 +##琵 +##琶 +##琺 +##琼 +##瑀 +##瑁 +##瑄 +##瑋 +##瑕 +##瑗 +##瑙 +##瑚 +##瑛 +##瑜 +##瑞 +##瑟 +##瑠 +##瑣 +##瑤 +##瑩 +##瑪 +##瑯 +##瑰 +##瑶 +##瑾 +##璀 +##璁 +##璃 +##璇 +##璉 +##璋 +##璎 +##璐 +##璜 +##璞 +##璟 +##璧 +##璨 +##環 +##璽 +##璿 +##瓊 +##瓏 +##瓒 +##瓜 +##瓢 +##瓣 +##瓤 +##瓦 +##瓮 +##瓯 +##瓴 +##瓶 +##瓷 +##甄 +##甌 +##甕 +##甘 +##甙 +##甚 +##甜 +##生 +##產 +##産 +##甥 +##甦 +##用 +##甩 +##甫 +##甬 +##甭 +##甯 +##田 +##由 +##甲 +##申 +##电 +##男 +##甸 +##町 +##画 +##甾 +##畀 +##畅 +##界 +##畏 +##畑 +##畔 +##留 +##畜 +##畝 +##畢 +##略 +##畦 +##番 +##畫 +##異 +##畲 +##畳 +##畴 +##當 +##畸 +##畹 +##畿 +##疆 +##疇 +##疊 +##疏 +##疑 +##疔 +##疖 +##疗 +##疙 +##疚 +##疝 +##疟 +##疡 +##疣 +##疤 +##疥 +##疫 +##疮 +##疯 +##疱 +##疲 +##疳 +##疵 +##疸 +##疹 +##疼 +##疽 +##疾 +##痂 +##病 +##症 +##痈 +##痉 +##痊 +##痍 +##痒 +##痔 +##痕 +##痘 +##痙 +##痛 +##痞 +##痠 +##痢 +##痣 +##痤 +##痧 +##痨 +##痪 +##痫 +##痰 +##痱 +##痴 +##痹 +##痺 +##痼 +##痿 +##瘀 +##瘁 +##瘋 +##瘍 +##瘓 +##瘘 +##瘙 +##瘟 +##瘠 +##瘡 +##瘢 +##瘤 +##瘦 +##瘧 +##瘩 +##瘪 +##瘫 +##瘴 +##瘸 +##瘾 +##療 +##癇 +##癌 +##癒 +##癖 +##癜 +##癞 +##癡 +##癢 +##癣 +##癥 +##癫 +##癬 +##癮 +##癱 +##癲 +##癸 +##発 +##登 +##發 +##白 +##百 +##皂 +##的 +##皆 +##皇 +##皈 +##皋 +##皎 +##皑 +##皓 +##皖 +##皙 +##皚 +##皮 +##皰 +##皱 +##皴 +##皺 +##皿 +##盂 +##盃 +##盅 +##盆 +##盈 +##益 +##盎 +##盏 +##盐 +##监 +##盒 +##盔 +##盖 +##盗 +##盘 +##盛 +##盜 +##盞 +##盟 +##盡 +##監 +##盤 +##盥 +##盧 +##盪 +##目 +##盯 +##盱 +##盲 +##直 +##相 +##盹 +##盼 +##盾 +##省 +##眈 +##眉 +##看 +##県 +##眙 +##眞 +##真 +##眠 +##眦 +##眨 +##眩 +##眯 +##眶 +##眷 +##眸 +##眺 +##眼 +##眾 +##着 +##睁 +##睇 
+##睏 +##睐 +##睑 +##睛 +##睜 +##睞 +##睡 +##睢 +##督 +##睥 +##睦 +##睨 +##睪 +##睫 +##睬 +##睹 +##睽 +##睾 +##睿 +##瞄 +##瞅 +##瞇 +##瞋 +##瞌 +##瞎 +##瞑 +##瞒 +##瞓 +##瞞 +##瞟 +##瞠 +##瞥 +##瞧 +##瞩 +##瞪 +##瞬 +##瞭 +##瞰 +##瞳 +##瞻 +##瞼 +##瞿 +##矇 +##矍 +##矗 +##矚 +##矛 +##矜 +##矢 +##矣 +##知 +##矩 +##矫 +##短 +##矮 +##矯 +##石 +##矶 +##矽 +##矾 +##矿 +##码 +##砂 +##砌 +##砍 +##砒 +##研 +##砖 +##砗 +##砚 +##砝 +##砣 +##砥 +##砧 +##砭 +##砰 +##砲 +##破 +##砷 +##砸 +##砺 +##砼 +##砾 +##础 +##硅 +##硐 +##硒 +##硕 +##硝 +##硫 +##硬 +##确 +##硯 +##硼 +##碁 +##碇 +##碉 +##碌 +##碍 +##碎 +##碑 +##碓 +##碗 +##碘 +##碚 +##碛 +##碟 +##碣 +##碧 +##碩 +##碰 +##碱 +##碳 +##碴 +##確 +##碼 +##碾 +##磁 +##磅 +##磊 +##磋 +##磐 +##磕 +##磚 +##磡 +##磨 +##磬 +##磯 +##磲 +##磷 +##磺 +##礁 +##礎 +##礙 +##礡 +##礦 +##礪 +##礫 +##礴 +##示 +##礼 +##社 +##祀 +##祁 +##祂 +##祇 +##祈 +##祉 +##祎 +##祐 +##祕 +##祖 +##祗 +##祚 +##祛 +##祜 +##祝 +##神 +##祟 +##祠 +##祢 +##祥 +##票 +##祭 +##祯 +##祷 +##祸 +##祺 +##祿 +##禀 +##禁 +##禄 +##禅 +##禍 +##禎 +##福 +##禛 +##禦 +##禧 +##禪 +##禮 +##禱 +##禹 +##禺 +##离 +##禽 +##禾 +##禿 +##秀 +##私 +##秃 +##秆 +##秉 +##秋 +##种 +##科 +##秒 +##秘 +##租 +##秣 +##秤 +##秦 +##秧 +##秩 +##秭 +##积 +##称 +##秸 +##移 +##秽 +##稀 +##稅 +##程 +##稍 +##税 +##稔 +##稗 +##稚 +##稜 +##稞 +##稟 +##稠 +##稣 +##種 +##稱 +##稲 +##稳 +##稷 +##稹 +##稻 +##稼 +##稽 +##稿 +##穀 +##穂 +##穆 +##穌 +##積 +##穎 +##穗 +##穢 +##穩 +##穫 +##穴 +##究 +##穷 +##穹 +##空 +##穿 +##突 +##窃 +##窄 +##窈 +##窍 +##窑 +##窒 +##窓 +##窕 +##窖 +##窗 +##窘 +##窜 +##窝 +##窟 +##窠 +##窥 +##窦 +##窨 +##窩 +##窪 +##窮 +##窯 +##窺 +##窿 +##竄 +##竅 +##竇 +##竊 +##立 +##竖 +##站 +##竜 +##竞 +##竟 +##章 +##竣 +##童 +##竭 +##端 +##競 +##竹 +##竺 +##竽 +##竿 +##笃 +##笆 +##笈 +##笋 +##笏 +##笑 +##笔 +##笙 +##笛 +##笞 +##笠 +##符 +##笨 +##第 +##笹 +##笺 +##笼 +##筆 +##等 +##筊 +##筋 +##筍 +##筏 +##筐 +##筑 +##筒 +##答 +##策 +##筛 +##筝 +##筠 +##筱 +##筲 +##筵 +##筷 +##筹 +##签 +##简 +##箇 +##箋 +##箍 +##箏 +##箐 +##箔 +##箕 +##算 +##箝 +##管 +##箩 +##箫 +##箭 +##箱 +##箴 +##箸 +##節 +##篁 +##範 +##篆 +##篇 +##築 +##篑 +##篓 +##篙 +##篝 +##篠 +##篡 +##篤 +##篩 +##篪 +##篮 +##篱 +##篷 +##簇 +##簌 +##簍 +##簡 +##簦 +##簧 +##簪 +##簫 +##簷 +##簸 +##簽 +##簾 +##簿 +##籁 +##籃 +##籌 +##籍 +##籐 +##籟 +##籠 +##籤 +##籬 +##籮 +##籲 +##米 +##类 +##籼 +##籽 +##粄 +##粉 +##粑 +##粒 +##粕 +##粗 +##粘 +##粟 +##粤 +##粥 +##粧 +##粪 +##粮 +##粱 +##粲 +##粳 +##粵 +##粹 +##粼 +##粽 +##精 +##粿 +##糅 +##糊 +##糍 +##糕 +##糖 +##糗 +##糙 +##糜 +##糞 +##糟 +##糠 +##糧 +##糬 +##糯 +##糰 +##糸 +##系 +##糾 +##紀 +##紂 +##約 +##紅 +##紉 +##紊 +##紋 +##納 +##紐 +##紓 +##純 +##紗 +##紘 +##紙 +##級 +##紛 +##紜 +##素 +##紡 +##索 +##紧 +##紫 +##紮 +##累 +##細 +##紳 +##紹 +##紺 +##終 +##絃 +##組 +##絆 +##経 +##結 +##絕 +##絞 +##絡 +##絢 +##給 +##絨 +##絮 +##統 +##絲 +##絳 +##絵 +##絶 +##絹 +##綁 +##綏 +##綑 +##經 +##継 +##続 +##綜 +##綠 +##綢 +##綦 +##綫 +##綬 +##維 +##綱 +##網 +##綴 +##綵 +##綸 +##綺 +##綻 +##綽 +##綾 +##綿 +##緊 +##緋 +##総 +##緑 +##緒 +##緘 +##線 +##緝 +##緞 +##締 +##緣 +##編 +##緩 +##緬 +##緯 +##練 +##緹 +##緻 +##縁 +##縄 +##縈 +##縛 +##縝 +##縣 +##縫 +##縮 +##縱 +##縴 +##縷 +##總 +##績 +##繁 +##繃 +##繆 +##繇 +##繋 +##織 +##繕 +##繚 +##繞 +##繡 +##繩 +##繪 +##繫 +##繭 +##繳 +##繹 +##繼 +##繽 +##纂 +##續 +##纍 +##纏 +##纓 +##纔 +##纖 +##纜 +##纠 +##红 +##纣 +##纤 +##约 +##级 +##纨 +##纪 +##纫 +##纬 +##纭 +##纯 +##纰 +##纱 +##纲 +##纳 +##纵 +##纶 +##纷 +##纸 +##纹 +##纺 +##纽 +##纾 +##线 +##绀 +##练 +##组 +##绅 +##细 +##织 +##终 +##绊 +##绍 +##绎 +##经 +##绑 +##绒 +##结 +##绔 +##绕 +##绘 +##给 +##绚 +##绛 +##络 +##绝 +##绞 +##统 +##绡 +##绢 +##绣 +##绥 +##绦 +##继 +##绩 +##绪 +##绫 +##续 +##绮 +##绯 +##绰 +##绳 +##维 +##绵 +##绶 +##绷 +##绸 +##绻 +##综 +##绽 +##绾 +##绿 +##缀 +##缄 +##缅 +##缆 +##缇 +##缈 +##缉 +##缎 +##缓 +##缔 +##缕 +##编 +##缘 +##缙 +##缚 +##缜 +##缝 +##缠 +##缢 +##缤 +##缥 +##缨 +##缩 +##缪 +##缭 +##缮 +##缰 +##缱 +##缴 +##缸 +##缺 +##缽 +##罂 +##罄 +##罌 +##罐 +##网 +##罔 +##罕 +##罗 +##罚 +##罡 +##罢 +##罩 +##罪 +##置 +##罰 +##署 +##罵 +##罷 +##罹 +##羁 +##羅 +##羈 +##羊 +##羌 +##美 +##羔 +##羚 +##羞 +##羟 +##羡 +##羣 +##群 +##羥 +##羧 +##羨 +##義 +##羯 +##羲 +##羸 +##羹 +##羽 +##羿 +##翁 +##翅 +##翊 
+##翌 +##翎 +##習 +##翔 +##翘 +##翟 +##翠 +##翡 +##翦 +##翩 +##翰 +##翱 +##翳 +##翹 +##翻 +##翼 +##耀 +##老 +##考 +##耄 +##者 +##耆 +##耋 +##而 +##耍 +##耐 +##耒 +##耕 +##耗 +##耘 +##耙 +##耦 +##耨 +##耳 +##耶 +##耷 +##耸 +##耻 +##耽 +##耿 +##聂 +##聆 +##聊 +##聋 +##职 +##聒 +##联 +##聖 +##聘 +##聚 +##聞 +##聪 +##聯 +##聰 +##聲 +##聳 +##聴 +##聶 +##職 +##聽 +##聾 +##聿 +##肃 +##肄 +##肅 +##肆 +##肇 +##肉 +##肋 +##肌 +##肏 +##肓 +##肖 +##肘 +##肚 +##肛 +##肝 +##肠 +##股 +##肢 +##肤 +##肥 +##肩 +##肪 +##肮 +##肯 +##肱 +##育 +##肴 +##肺 +##肽 +##肾 +##肿 +##胀 +##胁 +##胃 +##胄 +##胆 +##背 +##胍 +##胎 +##胖 +##胚 +##胛 +##胜 +##胝 +##胞 +##胡 +##胤 +##胥 +##胧 +##胫 +##胭 +##胯 +##胰 +##胱 +##胳 +##胴 +##胶 +##胸 +##胺 +##能 +##脂 +##脅 +##脆 +##脇 +##脈 +##脉 +##脊 +##脍 +##脏 +##脐 +##脑 +##脓 +##脖 +##脘 +##脚 +##脛 +##脣 +##脩 +##脫 +##脯 +##脱 +##脲 +##脳 +##脸 +##脹 +##脾 +##腆 +##腈 +##腊 +##腋 +##腌 +##腎 +##腐 +##腑 +##腓 +##腔 +##腕 +##腥 +##腦 +##腩 +##腫 +##腭 +##腮 +##腰 +##腱 +##腳 +##腴 +##腸 +##腹 +##腺 +##腻 +##腼 +##腾 +##腿 +##膀 +##膈 +##膊 +##膏 +##膑 +##膘 +##膚 +##膛 +##膜 +##膝 +##膠 +##膦 +##膨 +##膩 +##膳 +##膺 +##膻 +##膽 +##膾 +##膿 +##臀 +##臂 +##臃 +##臆 +##臉 +##臊 +##臍 +##臓 +##臘 +##臟 +##臣 +##臥 +##臧 +##臨 +##自 +##臬 +##臭 +##至 +##致 +##臺 +##臻 +##臼 +##臾 +##舀 +##舂 +##舅 +##舆 +##與 +##興 +##舉 +##舊 +##舌 +##舍 +##舎 +##舐 +##舒 +##舔 +##舖 +##舗 +##舛 +##舜 +##舞 +##舟 +##航 +##舫 +##般 +##舰 +##舱 +##舵 +##舶 +##舷 +##舸 +##船 +##舺 +##舾 +##艇 +##艋 +##艘 +##艙 +##艦 +##艮 +##良 +##艰 +##艱 +##色 +##艳 +##艷 +##艹 +##艺 +##艾 +##节 +##芃 +##芈 +##芊 +##芋 +##芍 +##芎 +##芒 +##芙 +##芜 +##芝 +##芡 +##芥 +##芦 +##芩 +##芪 +##芫 +##芬 +##芭 +##芮 +##芯 +##花 +##芳 +##芷 +##芸 +##芹 +##芻 +##芽 +##芾 +##苁 +##苄 +##苇 +##苋 +##苍 +##苏 +##苑 +##苒 +##苓 +##苔 +##苕 +##苗 +##苛 +##苜 +##苞 +##苟 +##苡 +##苣 +##若 +##苦 +##苫 +##苯 +##英 +##苷 +##苹 +##苻 +##茁 +##茂 +##范 +##茄 +##茅 +##茉 +##茎 +##茏 +##茗 +##茜 +##茧 +##茨 +##茫 +##茬 +##茭 +##茯 +##茱 +##茲 +##茴 +##茵 +##茶 +##茸 +##茹 +##茼 +##荀 +##荃 +##荆 +##草 +##荊 +##荏 +##荐 +##荒 +##荔 +##荖 +##荘 +##荚 +##荞 +##荟 +##荠 +##荡 +##荣 +##荤 +##荥 +##荧 +##荨 +##荪 +##荫 +##药 +##荳 +##荷 +##荸 +##荻 +##荼 +##荽 +##莅 +##莆 +##莉 +##莊 +##莎 +##莒 +##莓 +##莖 +##莘 +##莞 +##莠 +##莢 +##莧 +##莪 +##莫 +##莱 +##莲 +##莴 +##获 +##莹 +##莺 +##莽 +##莿 +##菀 +##菁 +##菅 +##菇 +##菈 +##菊 +##菌 +##菏 +##菓 +##菖 +##菘 +##菜 +##菟 +##菠 +##菡 +##菩 +##華 +##菱 +##菲 +##菸 +##菽 +##萁 +##萃 +##萄 +##萊 +##萋 +##萌 +##萍 +##萎 +##萘 +##萝 +##萤 +##营 +##萦 +##萧 +##萨 +##萩 +##萬 +##萱 +##萵 +##萸 +##萼 +##落 +##葆 +##葉 +##著 +##葚 +##葛 +##葡 +##董 +##葦 +##葩 +##葫 +##葬 +##葭 +##葯 +##葱 +##葳 +##葵 +##葷 +##葺 +##蒂 +##蒋 +##蒐 +##蒔 +##蒙 +##蒜 +##蒞 +##蒟 +##蒡 +##蒨 +##蒲 +##蒸 +##蒹 +##蒻 +##蒼 +##蒿 +##蓁 +##蓄 +##蓆 +##蓉 +##蓋 +##蓑 +##蓓 +##蓖 +##蓝 +##蓟 +##蓦 +##蓬 +##蓮 +##蓼 +##蓿 +##蔑 +##蔓 +##蔔 +##蔗 +##蔘 +##蔚 +##蔡 +##蔣 +##蔥 +##蔫 +##蔬 +##蔭 +##蔵 +##蔷 +##蔺 +##蔻 +##蔼 +##蔽 +##蕁 +##蕃 +##蕈 +##蕉 +##蕊 +##蕎 +##蕙 +##蕤 +##蕨 +##蕩 +##蕪 +##蕭 +##蕲 +##蕴 +##蕻 +##蕾 +##薄 +##薅 +##薇 +##薈 +##薊 +##薏 +##薑 +##薔 +##薙 +##薛 +##薦 +##薨 +##薩 +##薪 +##薬 +##薯 +##薰 +##薹 +##藉 +##藍 +##藏 +##藐 +##藓 +##藕 +##藜 +##藝 +##藤 +##藥 +##藩 +##藹 +##藻 +##藿 +##蘆 +##蘇 +##蘊 +##蘋 +##蘑 +##蘚 +##蘭 +##蘸 +##蘼 +##蘿 +##虎 +##虏 +##虐 +##虑 +##虔 +##處 +##虚 +##虛 +##虜 +##虞 +##號 +##虢 +##虧 +##虫 +##虬 +##虱 +##虹 +##虻 +##虽 +##虾 +##蚀 +##蚁 +##蚂 +##蚊 +##蚌 +##蚓 +##蚕 +##蚜 +##蚝 +##蚣 +##蚤 +##蚩 +##蚪 +##蚯 +##蚱 +##蚵 +##蛀 +##蛆 +##蛇 +##蛊 +##蛋 +##蛎 +##蛐 +##蛔 +##蛙 +##蛛 +##蛟 +##蛤 +##蛭 +##蛮 +##蛰 +##蛳 +##蛹 +##蛻 +##蛾 +##蜀 +##蜂 +##蜃 +##蜆 +##蜇 +##蜈 +##蜊 +##蜍 +##蜒 +##蜓 +##蜕 +##蜗 +##蜘 +##蜚 +##蜜 +##蜡 +##蜢 +##蜥 +##蜱 +##蜴 +##蜷 +##蜻 +##蜿 +##蝇 +##蝈 +##蝉 +##蝌 +##蝎 +##蝕 +##蝗 +##蝙 +##蝟 +##蝠 +##蝦 +##蝨 +##蝴 +##蝶 +##蝸 +##蝼 +##螂 +##螃 +##融 +##螞 +##螢 +##螨 +##螯 +##螳 +##螺 +##蟀 +##蟄 +##蟆 +##蟋 +##蟎 +##蟑 +##蟒 +##蟠 +##蟬 +##蟲 +##蟹 +##蟻 +##蟾 +##蠅 +##蠍 +##蠔 +##蠕 +##蠛 +##蠟 +##蠡 +##蠢 +##蠣 +##蠱 +##蠶 +##蠹 +##蠻 +##血 +##衄 +##衅 +##衆 +##行 +##衍 +##術 +##衔 +##街 +##衙 +##衛 +##衝 +##衞 +##衡 +##衢 +##衣 
+##补 +##表 +##衩 +##衫 +##衬 +##衮 +##衰 +##衲 +##衷 +##衹 +##衾 +##衿 +##袁 +##袂 +##袄 +##袅 +##袈 +##袋 +##袍 +##袒 +##袖 +##袜 +##袞 +##袤 +##袪 +##被 +##袭 +##袱 +##裁 +##裂 +##装 +##裆 +##裊 +##裏 +##裔 +##裕 +##裘 +##裙 +##補 +##裝 +##裟 +##裡 +##裤 +##裨 +##裱 +##裳 +##裴 +##裸 +##裹 +##製 +##裾 +##褂 +##複 +##褐 +##褒 +##褓 +##褔 +##褚 +##褥 +##褪 +##褫 +##褲 +##褶 +##褻 +##襁 +##襄 +##襟 +##襠 +##襪 +##襬 +##襯 +##襲 +##西 +##要 +##覃 +##覆 +##覇 +##見 +##規 +##覓 +##視 +##覚 +##覦 +##覧 +##親 +##覬 +##観 +##覷 +##覺 +##覽 +##觀 +##见 +##观 +##规 +##觅 +##视 +##览 +##觉 +##觊 +##觎 +##觐 +##觑 +##角 +##觞 +##解 +##觥 +##触 +##觸 +##言 +##訂 +##計 +##訊 +##討 +##訓 +##訕 +##訖 +##託 +##記 +##訛 +##訝 +##訟 +##訣 +##訥 +##訪 +##設 +##許 +##訳 +##訴 +##訶 +##診 +##註 +##証 +##詆 +##詐 +##詔 +##評 +##詛 +##詞 +##詠 +##詡 +##詢 +##詣 +##試 +##詩 +##詫 +##詬 +##詭 +##詮 +##詰 +##話 +##該 +##詳 +##詹 +##詼 +##誅 +##誇 +##誉 +##誌 +##認 +##誓 +##誕 +##誘 +##語 +##誠 +##誡 +##誣 +##誤 +##誥 +##誦 +##誨 +##說 +##説 +##読 +##誰 +##課 +##誹 +##誼 +##調 +##諄 +##談 +##請 +##諏 +##諒 +##論 +##諗 +##諜 +##諡 +##諦 +##諧 +##諫 +##諭 +##諮 +##諱 +##諳 +##諷 +##諸 +##諺 +##諾 +##謀 +##謁 +##謂 +##謄 +##謊 +##謎 +##謐 +##謔 +##謗 +##謙 +##講 +##謝 +##謠 +##謨 +##謬 +##謹 +##謾 +##譁 +##證 +##譎 +##譏 +##識 +##譙 +##譚 +##譜 +##警 +##譬 +##譯 +##議 +##譲 +##譴 +##護 +##譽 +##讀 +##變 +##讓 +##讚 +##讞 +##计 +##订 +##认 +##讥 +##讧 +##讨 +##让 +##讪 +##讫 +##训 +##议 +##讯 +##记 +##讲 +##讳 +##讴 +##讶 +##讷 +##许 +##讹 +##论 +##讼 +##讽 +##设 +##访 +##诀 +##证 +##诃 +##评 +##诅 +##识 +##诈 +##诉 +##诊 +##诋 +##词 +##诏 +##译 +##试 +##诗 +##诘 +##诙 +##诚 +##诛 +##话 +##诞 +##诟 +##诠 +##诡 +##询 +##诣 +##诤 +##该 +##详 +##诧 +##诩 +##诫 +##诬 +##语 +##误 +##诰 +##诱 +##诲 +##说 +##诵 +##诶 +##请 +##诸 +##诺 +##读 +##诽 +##课 +##诿 +##谀 +##谁 +##调 +##谄 +##谅 +##谆 +##谈 +##谊 +##谋 +##谌 +##谍 +##谎 +##谏 +##谐 +##谑 +##谒 +##谓 +##谔 +##谕 +##谗 +##谘 +##谙 +##谚 +##谛 +##谜 +##谟 +##谢 +##谣 +##谤 +##谥 +##谦 +##谧 +##谨 +##谩 +##谪 +##谬 +##谭 +##谯 +##谱 +##谲 +##谴 +##谶 +##谷 +##豁 +##豆 +##豇 +##豈 +##豉 +##豊 +##豌 +##豎 +##豐 +##豔 +##豚 +##象 +##豢 +##豪 +##豫 +##豬 +##豹 +##豺 +##貂 +##貅 +##貌 +##貓 +##貔 +##貘 +##貝 +##貞 +##負 +##財 +##貢 +##貧 +##貨 +##販 +##貪 +##貫 +##責 +##貯 +##貰 +##貳 +##貴 +##貶 +##買 +##貸 +##費 +##貼 +##貽 +##貿 +##賀 +##賁 +##賂 +##賃 +##賄 +##資 +##賈 +##賊 +##賑 +##賓 +##賜 +##賞 +##賠 +##賡 +##賢 +##賣 +##賤 +##賦 +##質 +##賬 +##賭 +##賴 +##賺 +##購 +##賽 +##贅 +##贈 +##贊 +##贍 +##贏 +##贓 +##贖 +##贛 +##贝 +##贞 +##负 +##贡 +##财 +##责 +##贤 +##败 +##账 +##货 +##质 +##贩 +##贪 +##贫 +##贬 +##购 +##贮 +##贯 +##贰 +##贱 +##贲 +##贴 +##贵 +##贷 +##贸 +##费 +##贺 +##贻 +##贼 +##贾 +##贿 +##赁 +##赂 +##赃 +##资 +##赅 +##赈 +##赊 +##赋 +##赌 +##赎 +##赏 +##赐 +##赓 +##赔 +##赖 +##赘 +##赚 +##赛 +##赝 +##赞 +##赠 +##赡 +##赢 +##赣 +##赤 +##赦 +##赧 +##赫 +##赭 +##走 +##赳 +##赴 +##赵 +##赶 +##起 +##趁 +##超 +##越 +##趋 +##趕 +##趙 +##趟 +##趣 +##趨 +##足 +##趴 +##趵 +##趸 +##趺 +##趾 +##跃 +##跄 +##跆 +##跋 +##跌 +##跎 +##跑 +##跖 +##跚 +##跛 +##距 +##跟 +##跡 +##跤 +##跨 +##跩 +##跪 +##路 +##跳 +##践 +##跷 +##跹 +##跺 +##跻 +##踉 +##踊 +##踌 +##踏 +##踐 +##踝 +##踞 +##踟 +##踢 +##踩 +##踪 +##踮 +##踱 +##踴 +##踵 +##踹 +##蹂 +##蹄 +##蹇 +##蹈 +##蹉 +##蹊 +##蹋 +##蹑 +##蹒 +##蹙 +##蹟 +##蹣 +##蹤 +##蹦 +##蹩 +##蹬 +##蹭 +##蹲 +##蹴 +##蹶 +##蹺 +##蹼 +##蹿 +##躁 +##躇 +##躉 +##躊 +##躋 +##躍 +##躏 +##躪 +##身 +##躬 +##躯 +##躲 +##躺 +##軀 +##車 +##軋 +##軌 +##軍 +##軒 +##軟 +##転 +##軸 +##軼 +##軽 +##軾 +##較 +##載 +##輒 +##輓 +##輔 +##輕 +##輛 +##輝 +##輟 +##輩 +##輪 +##輯 +##輸 +##輻 +##輾 +##輿 +##轄 +##轅 +##轆 +##轉 +##轍 +##轎 +##轟 +##车 +##轧 +##轨 +##轩 +##转 +##轭 +##轮 +##软 +##轰 +##轲 +##轴 +##轶 +##轻 +##轼 +##载 +##轿 +##较 +##辄 +##辅 +##辆 +##辇 +##辈 +##辉 +##辊 +##辍 +##辐 +##辑 +##输 +##辕 +##辖 +##辗 +##辘 +##辙 +##辛 +##辜 +##辞 +##辟 +##辣 +##辦 +##辨 +##辩 +##辫 +##辭 +##辮 +##辯 +##辰 +##辱 +##農 +##边 +##辺 +##辻 +##込 +##辽 +##达 +##迁 +##迂 +##迄 +##迅 +##过 +##迈 +##迎 +##运 +##近 +##返 +##还 +##这 +##进 +##远 +##违 +##连 +##迟 +##迢 +##迤 +##迥 +##迦 +##迩 +##迪 +##迫 +##迭 +##述 +##迴 +##迷 +##迸 +##迹 +##迺 +##追 +##退 +##送 
+##适 +##逃 +##逅 +##逆 +##选 +##逊 +##逍 +##透 +##逐 +##递 +##途 +##逕 +##逗 +##這 +##通 +##逛 +##逝 +##逞 +##速 +##造 +##逢 +##連 +##逮 +##週 +##進 +##逵 +##逶 +##逸 +##逻 +##逼 +##逾 +##遁 +##遂 +##遅 +##遇 +##遊 +##運 +##遍 +##過 +##遏 +##遐 +##遑 +##遒 +##道 +##達 +##違 +##遗 +##遙 +##遛 +##遜 +##遞 +##遠 +##遢 +##遣 +##遥 +##遨 +##適 +##遭 +##遮 +##遲 +##遴 +##遵 +##遶 +##遷 +##選 +##遺 +##遼 +##遽 +##避 +##邀 +##邁 +##邂 +##邃 +##還 +##邇 +##邈 +##邊 +##邋 +##邏 +##邑 +##邓 +##邕 +##邛 +##邝 +##邢 +##那 +##邦 +##邨 +##邪 +##邬 +##邮 +##邯 +##邰 +##邱 +##邳 +##邵 +##邸 +##邹 +##邺 +##邻 +##郁 +##郅 +##郊 +##郎 +##郑 +##郜 +##郝 +##郡 +##郢 +##郤 +##郦 +##郧 +##部 +##郫 +##郭 +##郴 +##郵 +##郷 +##郸 +##都 +##鄂 +##鄉 +##鄒 +##鄔 +##鄙 +##鄞 +##鄢 +##鄧 +##鄭 +##鄰 +##鄱 +##鄲 +##鄺 +##酉 +##酊 +##酋 +##酌 +##配 +##酐 +##酒 +##酗 +##酚 +##酝 +##酢 +##酣 +##酥 +##酩 +##酪 +##酬 +##酮 +##酯 +##酰 +##酱 +##酵 +##酶 +##酷 +##酸 +##酿 +##醃 +##醇 +##醉 +##醋 +##醍 +##醐 +##醒 +##醚 +##醛 +##醜 +##醞 +##醣 +##醪 +##醫 +##醬 +##醮 +##醯 +##醴 +##醺 +##釀 +##釁 +##采 +##釉 +##释 +##釋 +##里 +##重 +##野 +##量 +##釐 +##金 +##釗 +##釘 +##釜 +##針 +##釣 +##釦 +##釧 +##釵 +##鈀 +##鈉 +##鈍 +##鈎 +##鈔 +##鈕 +##鈞 +##鈣 +##鈦 +##鈪 +##鈴 +##鈺 +##鈾 +##鉀 +##鉄 +##鉅 +##鉉 +##鉑 +##鉗 +##鉚 +##鉛 +##鉤 +##鉴 +##鉻 +##銀 +##銃 +##銅 +##銑 +##銓 +##銖 +##銘 +##銜 +##銬 +##銭 +##銮 +##銳 +##銷 +##銹 +##鋁 +##鋅 +##鋒 +##鋤 +##鋪 +##鋰 +##鋸 +##鋼 +##錄 +##錐 +##錘 +##錚 +##錠 +##錢 +##錦 +##錨 +##錫 +##錮 +##錯 +##録 +##錳 +##錶 +##鍊 +##鍋 +##鍍 +##鍛 +##鍥 +##鍰 +##鍵 +##鍺 +##鍾 +##鎂 +##鎊 +##鎌 +##鎏 +##鎔 +##鎖 +##鎗 +##鎚 +##鎧 +##鎬 +##鎮 +##鎳 +##鏈 +##鏖 +##鏗 +##鏘 +##鏞 +##鏟 +##鏡 +##鏢 +##鏤 +##鏽 +##鐘 +##鐮 +##鐲 +##鐳 +##鐵 +##鐸 +##鐺 +##鑄 +##鑊 +##鑑 +##鑒 +##鑣 +##鑫 +##鑰 +##鑲 +##鑼 +##鑽 +##鑾 +##鑿 +##针 +##钉 +##钊 +##钎 +##钏 +##钒 +##钓 +##钗 +##钙 +##钛 +##钜 +##钝 +##钞 +##钟 +##钠 +##钡 +##钢 +##钣 +##钤 +##钥 +##钦 +##钧 +##钨 +##钩 +##钮 +##钯 +##钰 +##钱 +##钳 +##钴 +##钵 +##钺 +##钻 +##钼 +##钾 +##钿 +##铀 +##铁 +##铂 +##铃 +##铄 +##铅 +##铆 +##铉 +##铎 +##铐 +##铛 +##铜 +##铝 +##铠 +##铡 +##铢 +##铣 +##铤 +##铨 +##铩 +##铬 +##铭 +##铮 +##铰 +##铲 +##铵 +##银 +##铸 +##铺 +##链 +##铿 +##销 +##锁 +##锂 +##锄 +##锅 +##锆 +##锈 +##锉 +##锋 +##锌 +##锏 +##锐 +##锑 +##错 +##锚 +##锟 +##锡 +##锢 +##锣 +##锤 +##锥 +##锦 +##锭 +##键 +##锯 +##锰 +##锲 +##锵 +##锹 +##锺 +##锻 +##镀 +##镁 +##镂 +##镇 +##镉 +##镌 +##镍 +##镐 +##镑 +##镕 +##镖 +##镗 +##镛 +##镜 +##镣 +##镭 +##镯 +##镰 +##镳 +##镶 +##長 +##长 +##門 +##閃 +##閉 +##開 +##閎 +##閏 +##閑 +##閒 +##間 +##閔 +##閘 +##閡 +##関 +##閣 +##閥 +##閨 +##閩 +##閱 +##閲 +##閹 +##閻 +##閾 +##闆 +##闇 +##闊 +##闌 +##闍 +##闔 +##闕 +##闖 +##闘 +##關 +##闡 +##闢 +##门 +##闪 +##闫 +##闭 +##问 +##闯 +##闰 +##闲 +##间 +##闵 +##闷 +##闸 +##闹 +##闺 +##闻 +##闽 +##闾 +##阀 +##阁 +##阂 +##阅 +##阆 +##阇 +##阈 +##阉 +##阎 +##阐 +##阑 +##阔 +##阕 +##阖 +##阙 +##阚 +##阜 +##队 +##阡 +##阪 +##阮 +##阱 +##防 +##阳 +##阴 +##阵 +##阶 +##阻 +##阿 +##陀 +##陂 +##附 +##际 +##陆 +##陇 +##陈 +##陋 +##陌 +##降 +##限 +##陕 +##陛 +##陝 +##陞 +##陟 +##陡 +##院 +##陣 +##除 +##陨 +##险 +##陪 +##陰 +##陲 +##陳 +##陵 +##陶 +##陷 +##陸 +##険 +##陽 +##隅 +##隆 +##隈 +##隊 +##隋 +##隍 +##階 +##随 +##隐 +##隔 +##隕 +##隘 +##隙 +##際 +##障 +##隠 +##隣 +##隧 +##隨 +##險 +##隱 +##隴 +##隶 +##隸 +##隻 +##隼 +##隽 +##难 +##雀 +##雁 +##雄 +##雅 +##集 +##雇 +##雉 +##雋 +##雌 +##雍 +##雎 +##雏 +##雑 +##雒 +##雕 +##雖 +##雙 +##雛 +##雜 +##雞 +##離 +##難 +##雨 +##雪 +##雯 +##雰 +##雲 +##雳 +##零 +##雷 +##雹 +##電 +##雾 +##需 +##霁 +##霄 +##霆 +##震 +##霈 +##霉 +##霊 +##霍 +##霎 +##霏 +##霑 +##霓 +##霖 +##霜 +##霞 +##霧 +##霭 +##霰 +##露 +##霸 +##霹 +##霽 +##霾 +##靂 +##靄 +##靈 +##青 +##靓 +##靖 +##静 +##靚 +##靛 +##靜 +##非 +##靠 +##靡 +##面 +##靥 +##靦 +##革 +##靳 +##靴 +##靶 +##靼 +##鞅 +##鞋 +##鞍 +##鞏 +##鞑 +##鞘 +##鞠 +##鞣 +##鞦 +##鞭 +##韆 +##韋 +##韌 +##韓 +##韜 +##韦 +##韧 +##韩 +##韬 +##韭 +##音 +##韵 +##韶 +##韻 +##響 +##頁 +##頂 +##頃 +##項 +##順 +##須 +##頌 +##預 +##頑 +##頒 +##頓 +##頗 +##領 +##頜 +##頡 +##頤 +##頫 +##頭 +##頰 +##頷 +##頸 +##頹 +##頻 +##頼 +##顆 +##題 +##額 +##顎 +##顏 +##顔 +##願 +##顛 +##類 +##顧 +##顫 +##顯 +##顱 +##顴 +##页 +##顶 +##顷 
+##项 +##顺 +##须 +##顼 +##顽 +##顾 +##顿 +##颁 +##颂 +##预 +##颅 +##领 +##颇 +##颈 +##颉 +##颊 +##颌 +##颍 +##颐 +##频 +##颓 +##颔 +##颖 +##颗 +##题 +##颚 +##颛 +##颜 +##额 +##颞 +##颠 +##颡 +##颢 +##颤 +##颦 +##颧 +##風 +##颯 +##颱 +##颳 +##颶 +##颼 +##飄 +##飆 +##风 +##飒 +##飓 +##飕 +##飘 +##飙 +##飚 +##飛 +##飞 +##食 +##飢 +##飨 +##飩 +##飪 +##飯 +##飲 +##飼 +##飽 +##飾 +##餃 +##餅 +##餉 +##養 +##餌 +##餐 +##餒 +##餓 +##餘 +##餚 +##餛 +##餞 +##餡 +##館 +##餮 +##餵 +##餾 +##饅 +##饈 +##饋 +##饌 +##饍 +##饑 +##饒 +##饕 +##饗 +##饞 +##饥 +##饨 +##饪 +##饬 +##饭 +##饮 +##饯 +##饰 +##饱 +##饲 +##饴 +##饵 +##饶 +##饷 +##饺 +##饼 +##饽 +##饿 +##馀 +##馁 +##馄 +##馅 +##馆 +##馈 +##馋 +##馍 +##馏 +##馒 +##馔 +##首 +##馗 +##香 +##馥 +##馨 +##馬 +##馭 +##馮 +##馳 +##馴 +##駁 +##駄 +##駅 +##駆 +##駐 +##駒 +##駕 +##駛 +##駝 +##駭 +##駱 +##駿 +##騁 +##騎 +##騏 +##験 +##騙 +##騨 +##騰 +##騷 +##驀 +##驅 +##驊 +##驍 +##驒 +##驕 +##驗 +##驚 +##驛 +##驟 +##驢 +##驥 +##马 +##驭 +##驮 +##驯 +##驰 +##驱 +##驳 +##驴 +##驶 +##驷 +##驸 +##驹 +##驻 +##驼 +##驾 +##驿 +##骁 +##骂 +##骄 +##骅 +##骆 +##骇 +##骈 +##骊 +##骋 +##验 +##骏 +##骐 +##骑 +##骗 +##骚 +##骛 +##骜 +##骞 +##骠 +##骡 +##骤 +##骥 +##骧 +##骨 +##骯 +##骰 +##骶 +##骷 +##骸 +##骼 +##髂 +##髅 +##髋 +##髏 +##髒 +##髓 +##體 +##髖 +##高 +##髦 +##髪 +##髮 +##髯 +##髻 +##鬃 +##鬆 +##鬍 +##鬓 +##鬚 +##鬟 +##鬢 +##鬣 +##鬥 +##鬧 +##鬱 +##鬼 +##魁 +##魂 +##魄 +##魅 +##魇 +##魍 +##魏 +##魔 +##魘 +##魚 +##魯 +##魷 +##鮑 +##鮨 +##鮪 +##鮭 +##鮮 +##鯉 +##鯊 +##鯖 +##鯛 +##鯨 +##鯰 +##鯽 +##鰍 +##鰓 +##鰭 +##鰲 +##鰻 +##鰾 +##鱈 +##鱉 +##鱔 +##鱗 +##鱷 +##鱸 +##鱼 +##鱿 +##鲁 +##鲈 +##鲍 +##鲑 +##鲛 +##鲜 +##鲟 +##鲢 +##鲤 +##鲨 +##鲫 +##鲱 +##鲲 +##鲶 +##鲷 +##鲸 +##鳃 +##鳄 +##鳅 +##鳌 +##鳍 +##鳕 +##鳖 +##鳗 +##鳝 +##鳞 +##鳥 +##鳩 +##鳳 +##鳴 +##鳶 +##鴉 +##鴕 +##鴛 +##鴦 +##鴨 +##鴻 +##鴿 +##鵑 +##鵜 +##鵝 +##鵡 +##鵬 +##鵰 +##鵲 +##鶘 +##鶩 +##鶯 +##鶴 +##鷗 +##鷲 +##鷹 +##鷺 +##鸚 +##鸞 +##鸟 +##鸠 +##鸡 +##鸢 +##鸣 +##鸥 +##鸦 +##鸨 +##鸪 +##鸭 +##鸯 +##鸳 +##鸵 +##鸽 +##鸾 +##鸿 +##鹂 +##鹃 +##鹄 +##鹅 +##鹈 +##鹉 +##鹊 +##鹌 +##鹏 +##鹑 +##鹕 +##鹘 +##鹜 +##鹞 +##鹤 +##鹦 +##鹧 +##鹫 +##鹭 +##鹰 +##鹳 +##鹵 +##鹹 +##鹼 +##鹽 +##鹿 +##麂 +##麋 +##麒 +##麓 +##麗 +##麝 +##麟 +##麥 +##麦 +##麩 +##麴 +##麵 +##麸 +##麺 +##麻 +##麼 +##麽 +##麾 +##黃 +##黄 +##黍 +##黎 +##黏 +##黑 +##黒 +##黔 +##默 +##黛 +##黜 +##黝 +##點 +##黠 +##黨 +##黯 +##黴 +##鼋 +##鼎 +##鼐 +##鼓 +##鼠 +##鼬 +##鼹 +##鼻 +##鼾 +##齁 +##齊 +##齋 +##齐 +##齒 +##齡 +##齢 +##齣 +##齦 +##齿 +##龄 +##龅 +##龈 +##龊 +##龋 +##龌 +##龍 +##龐 +##龔 +##龕 +##龙 +##龚 +##龛 +##龜 +##龟 +##︰ +##︱ +##︶ +##︿ +##﹁ +##﹂ +##﹍ +##﹏ +##﹐ +##﹑ +##﹒ +##﹔ +##﹕ +##﹖ +##﹗ +##﹙ +##﹚ +##﹝ +##﹞ +##﹡ +##﹣ +##! +##" +### +##$ +##% +##& +##' +##( +##) +##* +##, +##- +##. +##/ +##: +##; +##< +##? 
+##@ +##[ +##\ +##] +##^ +##_ +##` +##f +##h +##j +##u +##w +##z +##{ +##} +##。 +##「 +##」 +##、 +##・ +##ッ +##ー +##イ +##ク +##シ +##ス +##ト +##ノ +##フ +##ラ +##ル +##ン +##゙ +##゚ +## ̄ +##¥ +##👍 +##🔥 +##😂 +##😎 +[unused0] +[unused100] +[unused101] +[unused102] +[unused103] +[unused104] +[unused105] +[unused106] +[unused107] +[unused108] +[unused109] +[unused110] +[unused111] +[unused112] +[unused113] +[unused114] +[unused115] +[unused116] +[unused117] +[unused118] +[unused119] +[unused120] +[unused121] +[unused122] +[unused123] +[unused124] +[unused125] +[unused126] +[unused127] +[unused128] +[unused129] +[unused130] +[unused131] +[unused132] +[unused133] +[unused134] +[unused135] +[unused136] +[unused137] +[unused138] +[unused139] +[unused140] +[unused141] +[unused142] +[unused143] +[unused144] +[unused145] +[unused146] +[unused147] +[unused148] +[unused149] +[unused150] +[unused151] +[unused152] +[unused153] +[unused154] +[unused155] +[unused156] +[unused157] +[unused158] +[unused159] +[unused160] +[unused161] +[unused162] +[unused163] +[unused164] +[unused165] +[unused166] +[unused167] +[unused168] +[unused169] +[unused170] +[unused171] +[unused172] +[unused173] +[unused174] +[unused175] +[unused176] +[unused177] +[unused178] +[unused179] +[unused180] +[unused181] +[unused182] +[unused183] +[unused184] +[unused185] +[unused186] +[unused187] +[unused188] +[unused189] +[unused190] +[unused191] +[unused192] +[unused193] +[unused194] +[unused195] +[unused196] +[unused197] +[unused198] +[unused199] +[unused200] +[unused201] +[unused202] +[unused203] +[unused204] +[unused205] +[unused206] +[unused207] +[unused208] +[unused209] +[unused210] +[unused211] +[unused212] +[unused213] +[unused214] +[unused215] +[unused216] +[unused217] +[unused218] +[unused219] +[unused220] +[unused221] +[unused222] +[unused223] +[unused224] +[unused225] +[unused226] +[unused227] +[unused228] +[unused229] +[unused230] +[unused231] +[unused232] +[unused233] +[unused234] +[unused235] +[unused236] +[unused237] +[unused238] +[unused239] +[unused240] +[unused241] +[unused242] +[unused243] +[unused244] +[unused245] +[unused246] +[unused247] +[unused248] +[unused249] +[unused250] +[unused251] +[unused252] +[unused253] +[unused254] +[unused255] +[unused256] +[unused257] +[unused258] +[unused259] +[unused260] +[unused261] +[unused262] +[unused263] +[unused264] +[unused265] +[unused266] +[unused267] +[unused268] +[unused269] +[unused270] +[unused271] +[unused272] +[unused273] +[unused274] +[unused275] +[unused276] +[unused277] +[unused278] +[unused279] +[unused280] +[unused281] +[unused282] +[unused283] +[unused284] +[unused285] +[unused286] +[unused287] +[unused288] +[unused289] +[unused290] +[unused291] +[unused292] +[unused293] +[unused294] +[unused295] +[unused296] +[unused297] +[unused298] +[unused299] +[unused300] +[unused301] +[unused302] +[unused303] +[unused304] +[unused305] +[unused306] +[unused307] +[unused308] +[unused309] +[unused310] +[unused311] +[unused312] +[unused313] +[unused314] +[unused315] +[unused316] +[unused317] +[unused318] +[unused319] +[unused320] +[unused321] +[unused322] +[unused323] +[unused324] +[unused325] +[unused326] +[unused327] +[unused328] +[unused329] +[unused330] +[unused331] +[unused332] +[unused333] +[unused334] +[unused335] +[unused336] +[unused337] +[unused338] +[unused339] +[unused340] +[unused341] +[unused342] +[unused343] +[unused344] +[unused345] +[unused346] +[unused347] +[unused348] +[unused349] +[unused350] +[unused351] +[unused352] +[unused353] +[unused354] +[unused355] +[unused356] 
+[unused357] +[unused358] +[unused359] +[unused360] +[unused361] +[unused362] +[unused363] +[unused364] +[unused365] +[unused366] +[unused367] +[unused368] +[unused369] +[unused370] +[unused371] +[unused372] +[unused373] +[unused374] +[unused375] +[unused376] +[unused377] +[unused378] +[unused379] +[unused380] +[unused381] +[unused382] +[unused383] +[unused384] +[unused385] +[unused386] +[unused387] +[unused388] +[unused389] +[unused390] +[unused391] +[unused392] +[unused393] +[unused394] +[unused395] +[unused396] +[unused397] +[unused398] +[unused399] +[unused400] +[unused401] +[unused402] +[unused403] +[unused404] +[unused405] +[unused406] +[unused407] +[unused408] +[unused409] +[unused410] +[unused411] +[unused412] +[unused413] +[unused414] +[unused415] +[unused416] +[unused417] +[unused418] +[unused419] +[unused420] +[unused421] +[unused422] +[unused423] +[unused424] +[unused425] +[unused426] +[unused427] +[unused428] +[unused429] +[unused430] +[unused431] +[unused432] +[unused433] +[unused434] +[unused435] +[unused436] +[unused437] +[unused438] +[unused439] +[unused440] +[unused441] +[unused442] +[unused443] +[unused444] +[unused445] +[unused446] +[unused447] +[unused448] +[unused449] +[unused450] +[unused451] +[unused452] +[unused453] +[unused454] +[unused455] +[unused456] +[unused457] +[unused458] +[unused459] +[unused460] +[unused461] +[unused462] +[unused463] +[unused464] +[unused465] +[unused466] +[unused467] +[unused468] +[unused469] +[unused470] +[unused471] +[unused472] +[unused473] +[unused474] +[unused475] +[unused476] +[unused477] +[unused478] +[unused479] +[unused480] +[unused481] +[unused482] +[unused483] +[unused484] +[unused485] +[unused486] +[unused487] +[unused488] +[unused489] +[unused490] +[unused491] +[unused492] +[unused493] +[unused494] +[unused495] +[unused496] +[unused497] +[unused498] +[unused499] +[unused500] +[unused501] +[unused502] +[unused503] +[unused504] +[unused505] +[unused506] +[unused507] +[unused508] +[unused509] +[unused510] +[unused511] +[unused512] +[unused513] +[unused514] +[unused515] +[unused516] +[unused517] +[unused518] +[unused519] +[unused520] +[unused521] +[unused522] +[unused523] +[unused524] +[unused525] +[unused526] +[unused527] +[unused528] +[unused529] +[unused530] +[unused531] +[unused532] +[unused533] +[unused534] +[unused535] +[unused536] +[unused537] +[unused538] +[unused539] +[unused540] +[unused541] +[unused542] +[unused543] +[unused544] +[unused545] +[unused546] +[unused547] +[unused548] +[unused549] +[unused550] +[unused551] +[unused552] +[unused553] +[unused554] +[unused555] +[unused556] +[unused557] +[unused558] +[unused559] +[unused560] +[unused561] +[unused562] +[unused563] +[unused564] +[unused565] +[unused566] +[unused567] +[unused568] +[unused569] +[unused570] +[unused571] +[unused572] +[unused573] +[unused574] +[unused575] +[unused576] +[unused577] +[unused578] +[unused579] +[unused580] +[unused581] +[unused582] +[unused583] +[unused584] +[unused585] +[unused586] +[unused587] +[unused588] +[unused589] +[unused590] +[unused591] +[unused592] +[unused593] +[unused594] +[unused595] +[unused596] +[unused597] +[unused598] +[unused599] +[unused600] +[unused601] +[unused602] +[unused603] +[unused604] +[unused605] +[unused606] +[unused607] +[unused608] +[unused609] +[unused610] +[unused611] +[unused612] +[unused613] +[unused614] +[unused615] +[unused616] +[unused617] +[unused618] +[unused619] +[unused620] +[unused621] +[unused622] +[unused623] +[unused624] +[unused625] +[unused626] +[unused627] +[unused628] +[unused629] 
+[unused630] +[unused631] +[unused632] +[unused633] +[unused634] +[unused635] +[unused636] +[unused637] +[unused638] +[unused639] +[unused640] +[unused641] +[unused642] +[unused643] +[unused644] +[unused645] +[unused646] +[unused647] +[unused648] +[unused649] +[unused650] +[unused651] +[unused652] +[unused653] +[unused654] +[unused655] +[unused656] +[unused657] +[unused658] +[unused659] +[unused660] +[unused661] +[unused662] +[unused663] +[unused664] +[unused665] +[unused666] +[unused667] +[unused668] +[unused669] +[unused670] +[unused671] +[unused672] +[unused673] +[unused674] +[unused675] +[unused676] +[unused677] +[unused678] +[unused679] +[unused680] +[unused681] +[unused682] +[unused683] +[unused684] +[unused685] +[unused686] +[unused687] +[unused688] +[unused689] +[unused690] +[unused691] +[unused692] +[unused693] +[unused694] +[unused695] +[unused696] +[unused697] +[unused698] +[unused699] +[unused700] +[unused701] +[unused702] +[unused703] +[unused704] +[unused705] +[unused706] +[unused707] +[unused708] +[unused709] +[unused710] +[unused711] +[unused712] +[unused713] +[unused714] +[unused715] +[unused716] +[unused717] +[unused718] +[unused719] +[unused720] +[unused721] +[unused722] +[unused723] +[unused724] +[unused725] +[unused726] +[unused727] +[unused728] +[unused729] +[unused730] +[unused731] +[unused732] +[unused733] +[unused734] +[unused735] +[unused736] +[unused737] +[unused738] +[unused739] +[unused740] +[unused741] +[unused742] +[unused743] +[unused744] +[unused745] +[unused746] +[unused747] +[unused748] +[unused749] +[unused750] +[unused751] +[unused752] +[unused753] +[unused754] +[unused755] +[unused756] +[unused757] +[unused758] +[unused759] +[unused760] +[unused761] +[unused762] +[unused763] +[unused764] +[unused765] +[unused766] +[unused767] +[unused768] +[unused769] +[unused770] +[unused771] +[unused772] +[unused773] +[unused774] +[unused775] +[unused776] +[unused777] +[unused778] +[unused779] +[unused780] +[unused781] +[unused782] +[unused783] +[unused784] +[unused785] +[unused786] +[unused787] +[unused788] +[unused789] +[unused790] +[unused791] +[unused792] +[unused793] +[unused794] +[unused795] +[unused796] +[unused797] +[unused798] +[unused799] +[unused800] +[unused801] +[unused802] +[unused803] +[unused804] +[unused805] +[unused806] +[unused807] +[unused808] +[unused809] +[unused810] +[unused811] +[unused812] +[unused813] +[unused814] +[unused815] +[unused816] +[unused817] +[unused818] +[unused819] +[unused820] +[unused821] +[unused822] +[unused823] +[unused824] +[unused825] +[unused826] +[unused827] +[unused828] +[unused829] +[unused830] +[unused831] +[unused832] +[unused833] +[unused834] +[unused835] +[unused836] +[unused837] +[unused838] +[unused839] +[unused840] +[unused841] +[unused842] +[unused843] +[unused844] +[unused845] +[unused846] +[unused847] +[unused848] +[unused849] +[unused850] +[unused851] +[unused852] +[unused853] +[unused854] +[unused855] +[unused856] +[unused857] +[unused858] +[unused859] +[unused860] +[unused861] +[unused862] +[unused863] +[unused864] +[unused865] +[unused866] +[unused867] +[unused868] +[unused869] +[unused870] +[unused871] +[unused872] +[unused873] +[unused874] +[unused875] +[unused876] +[unused877] +[unused878] +[unused879] +[unused880] +[unused881] +[unused882] +[unused883] +[unused884] +[unused885] +[unused886] +[unused887] +[unused888] +[unused889] +[unused890] +[unused891] +[unused892] +[unused893] +[unused894] +[unused895] +[unused896] +[unused897] +[unused898] +[unused899] +[unused900] +[unused901] +[unused902] 
+[unused903] +[unused904] +[unused905] +[unused906] +[unused907] +[unused908] +[unused909] +[unused910] +[unused911] +[unused912] +[unused913] +[unused914] +[unused915] +[unused916] +[unused917] +[unused918] +[unused919] +[unused920] +[unused921] +[unused922] +[unused923] +[unused924] +[unused925] +[unused926] +[unused927] +[unused928] +[unused929] +[unused930] +[unused931] +[unused932] +[unused933] +[unused934] +[unused935] +[unused936] +[unused937] +[unused938] +[unused939] +[unused940] +[unused941] +[unused942] +[unused943] +[unused944] +[unused945] +[unused946] +[unused947] +[unused948] +[unused949] +[unused950] +[unused951] +[unused952] +[unused953] +[unused954] +[unused955] +[unused956] +[unused957] +[unused958] +[unused959] +[unused960] +[unused961] +[unused962] +[unused963] +[unused964] +[unused965] +[unused966] +[unused967] +[unused968] +[unused969] +[unused970] +[unused971] +[unused972] +[unused973] +[unused974] +[unused975] +[unused976] +[unused977] +[unused978] +[unused979] +[unused980] +[unused981] +[unused982] +[unused983] +[unused984] +[unused985] +[unused986] +[unused987] +[unused988] +[unused989] +[unused990] +[unused991] +[unused992] +[unused993] +` +¡ +¢ +¦ +¨ +ª +¬ +´ +¶ +½ +¾ +¿ +ð +þ +ħ +ı +ł +œ +ƒ +ɐ +ɑ +ɒ +ɕ +ɛ +ɣ +ɨ +ɪ +ɫ +ɬ +ɯ +ɲ +ɴ +ɹ +ɾ +ʀ +ʁ +ʂ +ʃ +ʉ +ʊ +ʋ +ʌ +ʎ +ʐ +ʑ +ʒ +ʔ +ʲ +ʳ +ʷ +ʸ +ʻ +ʼ +ʾ +ʿ +ˡ +ˣ +ˤ +ζ +ξ +щ +ъ +э +ю +ђ +є +ј +љ +њ +ћ +ӏ +ա +բ +գ +դ +ե +թ +ի +լ +կ +հ +մ +յ +ն +ո +պ +ս +վ +տ +ր +ւ +ք +־ +א +ב +ג +ד +ה +ו +ז +ח +ט +י +ך +כ +ל +ם +מ +ן +נ +ס +ע +ף +פ +ץ +צ +ק +ר +ש +ת +، +ء +ث +ج +ح +خ +ذ +ز +ش +ص +ض +ط +ظ +غ +ـ +ف +ق +ك +ى +ٹ +پ +چ +ک +گ +ں +ھ +ہ +ی +ے +अ +आ +उ +ए +क +ख +ग +च +ज +ट +ड +ण +त +थ +द +ध +न +प +ब +भ +म +य +र +ल +व +श +ष +स +ह +ा +ि +ी +ो +। +॥ +ং +অ +আ +ই +উ +এ +ও +ক +খ +গ +চ +ছ +জ +ট +ড +ণ +ত +থ +দ +ধ +ন +প +ব +ভ +ম +য +র +ল +শ +ষ +স +হ +া +ি +ী +ে +க +ச +ட +த +ந +ன +ப +ம +ய +ர +ல +ள +வ +ா +ி +ு +ே +ை +ನ +ರ +ಾ +ක +ය +ර +ල +ව +ා +ต +ท +พ +ล +ว +ส +། +ག +ང +ད +ན +པ +བ +མ +འ +ར +ལ +ས +မ +ა +ბ +გ +დ +ე +ვ +თ +ი +კ +ლ +მ +ნ +ო +რ +ს +ტ +უ +ᄊ +ᴬ +ᴮ +ᴰ +ᴵ +ᴺ +ᵀ +ᵇ +ᵈ +ᵖ +ᵗ +ᵢ +ᵣ +ᵤ +ᵥ +ᶜ +ᶠ +‐ +‑ +‒ +– +— +― +‘ +’ +‚ +“ +” +‡ +… +⁰ +⁴ +⁵ +⁶ +⁷ +⁸ +⁹ +⁻ +₀ +₅ +₆ +₇ +₈ +₉ +₊ +₍ +₎ +ₐ +ₑ +ₒ +ₓ +ₕ +ₖ +ₗ +ₘ +ₙ +ₚ +ₛ +ₜ +₤ +₩ +₱ +₹ +ℓ +ℝ +⅓ +⅔ +↦ +⇄ +⇌ +∂ +∅ +∆ +∇ +∈ +∗ +∘ +∧ +∨ +∪ +⊂ +⊆ +⊕ +⊗ +☉ +♭ +♯ +⟨ +⟩ +ⱼ +⺩ +⺼ +⽥ +亻 +宀 +彳 +忄 +扌 +氵 +疒 +糹 +訁 +辶 +阝 +龸 +fi +fl +had +were +which +him +their +been +would +then +them +could +during +through +between +while +later +around +did +such +being +used +against +many +both +these +known +until +even +didn +because +born +since +still +became +any +including +took +same +each +called +much +however +four +another +found +won +going +away +hand +several +following +released +played +began +district +those +held +own +early +league +government +came +based +thought +looked +along +went +few +father +former +located +got +though +every +century +without +within +building +large +named +started +once +should +built +british +death +moved +door +need +president +wasn +although +due +major +died +third +knew +asked +turned +wanted +together +received +son +served +different +behind +himself +felt +members +football +near +having +saw +mother +army +front +late +hands +put +division +across +told +often +ever +french +six +include +tell +among +species +really +according +half +original +gave +making +enough +opened +must +included +given +german +woman +community +might +million +court +short +round +seen +always +become +sure +almost +director +council +career +things +using +couldn +better +students 
+married +nothing +worked +others +record +anything +continued +give +military +established +returned +does +written +thing +feet +far +already +championship +western +department +role +various +production +television +produced +working +region +present +period +looking +least +total +england +wife +per +brother +soon +political +taken +created +further +able +reached +joined +upon +done +important +either +appeared +position +ground +lead +election +arms +police +instead +words +moment +someone +announced +less +wrote +past +followed +founded +finally +india +taking +records +considered +northern +toward +european +outside +described +track +playing +heard +professional +australia +miles +yet +trying +blood +southern +maybe +everything +mouth +race +recorded +above +daughter +points +middle +move +tried +elected +closed +ten +minister +chief +person +similar +brought +rest +formed +floor +doing +killed +training +needed +turn +finished +railway +rather +sent +example +ran +term +coming +currently +forces +despite +areas +fact +dead +originally +germany +probably +developed +pulled +stood +signed +songs +child +eventually +met +average +teams +minutes +current +kind +decided +usually +eastern +seemed +episode +bed +added +indian +route +available +throughout +addition +appointed +eight +construction +mean +remained +schools +sometimes +events +possible +australian +forward +debut +seat +performance +committee +features +character +herself +lot +russian +range +hours +sold +quickly +directed +guitar +performed +players +smile +myself +placed +province +towards +wouldn +leading +whole +designed +census +europe +attack +japanese +getting +alone +lower +wide +hospital +believe +changed +sister +gone +hadn +ship +studies +academy +shot +below +involved +kept +largest +especially +beginning +movement +section +female +professor +lord +longer +walked +actually +civil +families +thus +aircraft +completed +includes +captain +fight +vocals +featured +fourth +officer +hear +means +medical +groups +lips +competition +entire +lived +leaving +federal +tournament +passed +independent +kingdom +spent +fine +doesn +reported +fall +raised +itself +replaced +leader +theatre +whose +parents +spanish +canadian +degree +writing +awarded +higher +coast +provided +senior +organization +stopped +onto +countries +parts +conference +interest +saying +allowed +earlier +matter +winning +try +happened +moving +los +breath +nearly +mid +certain +italian +african +standing +fell +artist +shows +deal +mine +industry +everyone +republic +provide +student +primary +owned +older +heavy +1st +makes +attention +anyone +africa +stated +length +ended +fingers +command +staff +foreign +opening +governor +okay +medal +kill +introduced +chest +hell +feeling +success +meet +reason +meeting +novel +trade +buildings +guy +goal +native +husband +previously +entered +producer +operations +takes +covered +forced +roman +complete +successful +texas +cold +traditional +films +clear +approximately +nine +prince +question +tracks +ireland +regional +personal +operation +economic +holding +twenty +additional +hour +regular +historic +places +whom +shook +km² +secretary +prior +scored +units +ask +property +ready +immediately +month +listed +contract +themselves +lines +navy +writer +meant +runs +practice +championships +singer +commission +required +starting +generally +giving +attended +couple +stand +catholic +caught +executive +thinking +chair +quite +shoulder +hope +decision +plays +defeated +municipality +whether +offered +slowly +pain 
+direction +mission +mostly +noted +individual +managed +lives +plant +helped +except +studied +computer +figure +relationship +issue +significant +loss +smiled +gun +highest +male +bring +goals +mexico +problem +distance +commercial +completely +location +annual +famous +neck +caused +italy +understand +greek +highway +wrong +comes +appearance +issues +musical +companies +castle +income +assembly +bass +initially +parliament +artists +experience +particular +walk +foot +engineering +talking +dropped +boys +stars +remember +carried +train +stadium +angeles +evidence +becoming +assistant +soviet +upper +youth +reach +actor +numerous +nodded +arrived +minute +believed +complex +victory +associated +temple +chance +perhaps +bishop +launched +particularly +retired +subject +prize +contains +yeah +theory +empire +suddenly +waiting +trust +recording +terms +champion +religious +zealand +names +2nd +ancient +corner +represented +legal +justice +cause +watched +brothers +material +changes +simply +response +answer +historical +stories +straight +feature +increased +administration +virginia +activities +cultural +overall +winner +programs +basketball +legs +guard +cast +doctor +flight +results +remains +cost +effect +winter +larger +islands +problems +chairman +grew +commander +isn +failed +selected +hurt +fort +regiment +majority +plans +shown +pretty +irish +characters +directly +scene +likely +operated +allow +matches +looks +houses +fellow +marriage +rules +florida +expected +nearby +congress +peace +recent +wait +subsequently +variety +serving +agreed +poor +attempt +wood +democratic +rural +mile +appears +township +soldiers +##ized +pennsylvania +closer +fighting +claimed +score +physical +filled +genus +specific +sitting +mom +therefore +supported +status +fear +cases +meaning +wales +minor +spain +vice +parish +separate +horse +fifth +remaining +branch +presented +stared +uses +forms +baseball +exactly +choice +discovered +composed +truth +russia +dad +ring +referred +numbers +greater +metres +slightly +direct +increase +responsible +crew +rule +trees +troops +broke +goes +individuals +hundred +weight +creek +sleep +defense +provides +ordered +jewish +safe +judge +whatever +corps +realized +growing +cities +gaze +lies +spread +letter +showed +situation +mayor +transport +watching +workers +extended +expression +normal +chart +multiple +border +mrs +walls +piano +heat +cannot +earned +products +drama +era +authority +seasons +join +grade +difficult +territory +mainly +stations +squadron +stepped +iron +19th +serve +appear +speak +broken +charge +knowledge +kilometres +removed +ships +campus +pushed +britain +leaves +recently +boston +latter +acquired +poland +quality +officers +presence +planned +nations +mass +broadcast +influence +wild +emperor +electric +headed +ability +promoted +yellow +ministry +throat +smaller +politician +latin +spoke +cars +males +lack +acting +seeing +consists +estate +pressure +newspaper +olympics +conditions +beat +elements +walking +vote +needs +carolina +featuring +levels +francisco +purpose +females +dutch +duke +ahead +gas +safety +serious +turning +highly +lieutenant +firm +amount +mixed +proposed +perfect +agreement +affairs +3rd +seconds +contemporary +paid +prison +label +administrative +intended +constructed +academic +teacher +races +formerly +nation +issued +shut +drums +housing +seems +graduated +mentioned +picked +recognized +shortly +protection +picture +notable +elections +1980s +loved +percent +racing +elizabeth +volume +hockey +beside +settled 
+competed +replied +drew +actress +marine +scotland +steel +glanced +farm +risk +tonight +positive +singles +effects +gray +screen +residents +sides +none +secondary +literature +polish +destroyed +flying +founder +households +lay +reserve +industrial +younger +approach +appearances +ones +finish +powerful +fully +growth +honor +jersey +projects +revealed +infantry +pair +equipment +visit +evening +grant +effort +treatment +buried +republican +primarily +bottom +owner +1970s +israel +gives +remain +spot +produce +champions +accepted +ways +##ally +losing +split +capacity +basis +trial +questions +20th +guess +officially +memorial +naval +initial +##ization +whispered +median +engineer +sydney +columbia +strength +tears +senate +asian +draw +warm +supposed +transferred +leaned +candidate +escape +mountains +potential +activity +seem +traffic +murder +slow +orchestra +haven +agency +taught +website +comedy +unable +storm +planning +albums +rugby +environment +scientific +grabbed +protect +boat +typically +damage +principal +divided +dedicated +ohio +pick +fought +driver +empty +shoulders +sort +thank +berlin +prominent +account +freedom +necessary +efforts +headquarters +follows +alongside +suggested +operating +steps +technical +begin +easily +teeth +speaking +settlement +scale +renamed +enemy +semi +joint +compared +scottish +leadership +analysis +offers +georgia +pieces +captured +animal +deputy +organized +combined +method +challenge +1960s +huge +wants +battalion +sons +rise +crime +types +facilities +telling +platform +sit +1990s +tells +assigned +pull +commonly +alive +letters +concept +conducted +wearing +happen +bought +becomes +holy +gets +defeat +languages +purchased +occurred +titled +declared +applied +sciences +concert +sounds +jazz +brain +painting +fleet +tax +michigan +animals +leaders +episodes +birth +clubs +palace +critical +refused +fair +leg +laughed +returning +surrounding +participated +formation +lifted +pointed +connected +rome +medicine +laid +powers +tall +shared +focused +knowing +yards +entrance +falls +calling +sources +chosen +beneath +resources +yard +nominated +silence +defined +gained +thirty +bodies +adopted +christmas +widely +register +apart +iran +premier +serves +unknown +parties +generation +continues +fields +brigade +quiet +teaching +clothes +impact +weapons +partner +flat +theater +relations +plants +suffered +begins +seats +armed +models +worth +laws +communities +classes +background +knows +thanks +quarter +reaching +humans +carry +killing +format +setting +architecture +disease +railroad +possibly +arthur +thoughts +doors +density +crowd +illinois +stomach +tone +unique +reports +anyway +liberal +vehicle +thick +dry +drug +faced +largely +facility +theme +holds +creation +strange +colonel +revolution +politics +turns +silent +rail +relief +independence +combat +shape +determined +sales +learned +4th +finger +providing +heritage +fiction +situated +designated +allowing +hosted +sight +interview +estimated +reduced +toronto +footballer +keeping +guys +damn +claim +motion +sixth +stayed +rear +receive +handed +twelve +dress +audience +granted +brazil +spirit +##ated +noticed +olympic +representative +tight +trouble +reviews +drink +vampire +missing +roles +ranked +newly +household +finals +critics +phase +massachusetts +pilot +unlike +philadelphia +bright +guns +crown +organizations +roof +respectively +clearly +tongue +marked +circle +bronze +expanded +sexual +supply +yourself +inspired +labour +reference +draft +connection +reasons +driving 
+jesus +cells +entry +neither +trail +claims +atlantic +orders +labor +nose +afraid +identified +intelligence +calls +cancer +attacked +passing +positions +imperial +grey +swedish +avoid +extra +uncle +covers +allows +surprise +materials +fame +hunter +citizens +figures +environmental +confirmed +shit +titles +performing +difference +acts +attacks +existing +votes +opportunity +nor +entirely +trains +opposite +pakistan +develop +resulted +representatives +actions +reality +pressed +barely +conversation +faculty +northwest +ends +documentary +nuclear +stock +sets +eat +alternative +resulting +creating +surprised +cemetery +drop +finding +cricket +streets +tradition +ride +ear +explained +composer +injury +apartment +municipal +educational +occupied +netherlands +clean +billion +constitution +learn +maximum +classical +lose +opposition +ontario +hills +rolled +ending +drawn +permanent +lewis +sites +chamber +scoring +height +lyrics +staring +officials +snow +oldest +qualified +interior +apparently +succeeded +thousand +dinner +lights +existence +heavily +greatest +conservative +send +bowl +catch +duty +speech +authorities +princess +performances +versions +shall +graduate +pictures +effective +remembered +poetry +desk +crossed +starring +starts +passenger +sharp +acres +ass +weather +falling +rank +fund +supporting +adult +heads +southeast +lane +condition +transfer +prevent +regions +earl +federation +relatively +answered +besides +obtained +portion +reaction +liked +peak +counter +religion +chain +rare +convention +aid +lie +vehicles +perform +squad +wonder +lying +crazy +sword +attempted +centuries +weren +philosophy +interested +sweden +wolf +frequently +abandoned +literary +alliance +task +entitled +threw +promotion +tiny +soccer +visited +achieved +defence +internal +persian +methods +arrested +otherwise +programming +villages +elementary +districts +rooms +criminal +conflict +worry +trained +attempts +waited +signal +truck +subsequent +programme +communist +faith +sector +carrying +laugh +controlled +korean +showing +origin +fuel +evil +brief +identity +darkness +pool +missed +publication +wings +invited +briefly +standards +kissed +ideas +climate +causing +walter +worse +albert +winners +desire +aged +northeast +dangerous +gate +doubt +wooden +poet +rising +funding +communications +communication +violence +copies +prepared +investigation +skills +pulling +containing +ultimately +offices +singing +understanding +tomorrow +christ +ward +pope +stands +5th +flow +studios +aired +commissioned +contained +exist +americans +wrestling +approved +kid +employed +respect +suit +asking +increasing +frame +angry +selling +1950s +thin +finds +temperature +statement +ali +explain +inhabitants +towns +extensive +narrow +flowers +promise +somewhere +closely +bureau +cape +weekly +presidential +legislative +launch +founding +artillery +strike +un +institutions +roll +writers +landing +chose +anymore +attorney +billboard +receiving +agricultural +breaking +sought +dave +admitted +lands +mexican +##bury +specifically +hole +moscow +roads +accident +proved +struck +guards +stuff +slid +expansion +melbourne +opposed +sub +southwest +architect +failure +plane +tank +listen +regarding +wet +introduction +metropolitan +fighter +inch +grown +gene +anger +fixed +khan +domestic +worldwide +chapel +mill +functions +examples +developing +turkey +hits +pocket +antonio +papers +grow +unless +circuit +18th +concerned +attached +journalist +selection +journey +converted +provincial +painted +hearing +aren +bands 
+negative +aside +wondered +knight +lap +noise +billy +shooting +bedroom +priest +resistance +motor +homes +sounded +giant +scenes +equal +comic +patients +hidden +solid +actual +bringing +afternoon +touched +funds +consisted +marie +canal +treaty +turkish +recognition +residence +cathedral +broad +knees +incident +shaped +fired +norwegian +handle +cheek +contest +represent +representing +birds +advantage +emergency +wrapped +drawing +notice +broadcasting +somehow +bachelor +seventh +collected +registered +establishment +assumed +chemical +personnel +retirement +portuguese +wore +tied +device +threat +progress +advance +##ised +banks +hired +manchester +nfl +teachers +structures +forever +tennis +helping +saturday +applications +junction +incorporated +neighborhood +dressed +ceremony +influenced +hers +stairs +decades +inner +kansas +hung +hoped +gain +scheduled +downtown +engaged +austria +clock +norway +certainly +pale +victor +employees +plate +putting +surrounded +##ists +finishing +blues +tropical +minnesota +consider +philippines +accept +retrieved +concern +anderson +properties +institution +gordon +successfully +vietnam +backing +outstanding +muslim +crossing +folk +producing +usual +demand +occurs +observed +lawyer +educated +pleasure +budget +items +quietly +colorado +philip +typical +##worth +derived +survived +asks +mental +jake +jews +distinguished +sri +extremely +athletic +loud +thousands +worried +transportation +horses +weapon +arena +importance +users +objects +contributed +douglas +aware +senator +johnny +sisters +engines +flag +investment +samuel +shock +capable +clark +row +wheel +refers +familiar +biggest +wins +hate +maintained +drove +hamilton +expressed +injured +underground +churches +wars +tunnel +passes +stupid +agriculture +softly +cabinet +regarded +joining +indiana +dates +spend +behavior +woods +protein +gently +chase +morgan +mention +burning +wake +combination +occur +mirror +leads +indeed +impossible +paintings +covering +soldier +locations +attendance +sell +historian +wisconsin +invasion +argued +painter +diego +changing +egypt +experienced +inches +missouri +grounds +spoken +switzerland +reform +rolling +forget +massive +resigned +burned +tennessee +locked +values +improved +wounded +universe +sick +dating +facing +purchase +##pur +moments +merged +anniversary +coal +brick +understood +causes +dynasty +queensland +establish +stores +crisis +promote +hoping +cards +referee +extension +raise +arizona +improve +colonial +formal +charged +palm +hide +rescue +faces +feelings +candidates +juan +6th +courses +weekend +luke +cash +fallen +delivered +affected +installed +carefully +tries +hollywood +costs +lincoln +responsibility +shore +proper +normally +maryland +assistance +constant +offering +friendly +waters +persons +realize +contain +trophy +partnership +factor +musicians +bound +oregon +indicated +houston +medium +consisting +somewhat +cycle +beer +moore +frederick +gotten +worst +weak +approached +arranged +chin +loan +bond +fifteen +pattern +disappeared +translated +##zed +lip +arab +capture +interests +insurance +shifted +cave +prix +warning +sections +courts +coat +plot +smell +golf +favorite +maintain +knife +voted +degrees +finance +quebec +opinion +translation +manner +ruled +operate +productions +choose +musician +confused +tired +separated +stream +techniques +committed +attend +ranking +kings +throw +passengers +measure +horror +mining +sand +danger +salt +calm +decade +dam +require +runner +rush +associate +greece +rivers +consecutive 
+matthew +##ski +sighed +sq +documents +closing +tie +accused +islamic +distributed +directors +organisation +7th +breathing +mad +lit +arrival +concrete +taste +composition +shaking +faster +amateur +adjacent +stating +twin +flew +publications +obviously +ridge +storage +carl +pages +concluded +desert +driven +universities +ages +terminal +sequence +borough +constituency +cousin +economics +dreams +margaret +notably +reduce +montreal +17th +ears +saved +vocal +riding +roughly +threatened +meters +meanwhile +landed +compete +repeated +grass +czech +regularly +charges +sudden +appeal +solution +describes +classification +glad +parking +belt +physics +rachel +hungarian +participate +expedition +damaged +gift +childhood +fifty +mathematics +jumped +letting +defensive +mph +testing +hundreds +shoot +owners +matters +smoke +israeli +kentucky +dancing +mounted +grandfather +designs +profit +argentina +truly +lawrence +cole +begun +detroit +willing +branches +smiling +decide +miami +enjoyed +recordings +##dale +poverty +ethnic +arabic +accompanied +fishing +determine +residential +acid +returns +starred +strategy +forty +businesses +equivalent +commonwealth +distinct +ill +seriously +##ped +harris +replace +rio +imagine +formula +ensure +additionally +scheme +conservation +occasionally +purposes +feels +favor +1930s +contrast +hanging +hunt +movies +instruments +victims +danish +christopher +busy +demon +sugar +earliest +colony +studying +duties +belgium +slipped +carter +visible +stages +iraq +commune +forming +continuing +talked +counties +legend +bathroom +option +tail +clay +daughters +afterwards +severe +jaw +visitors +devices +aviation +entering +subjects +temporary +swimming +forth +smooth +bush +operates +rocks +movements +signs +eddie +voices +honorary +memories +dallas +measures +racial +promised +harvard +16th +parliamentary +indicate +benefit +flesh +dublin +louisiana +patient +sleeping +membership +coastal +medieval +wanting +element +scholars +rice +limit +survive +makeup +rating +definitely +collaboration +obvious +baron +birthday +linked +soil +diocese +ncaa +offensive +shouldn +waist +plain +ross +organ +resolution +manufacturing +adding +relative +kennedy +whilst +moth +gardens +crash +heading +partners +credited +carlos +moves +cable +marshall +depending +bottle +represents +rejected +responded +existed +denmark +##ating +treated +graham +routes +talent +commissioner +drugs +secure +tests +reign +restored +photography +contributions +oklahoma +designer +disc +grin +seattle +robin +paused +atlanta +unusual +praised +las +laughing +satellite +hungary +visiting +interesting +factors +deck +poems +norman +##water +stuck +speaker +rifle +premiered +comics +actors +reputation +eliminated +8th +ceiling +prisoners +leather +austin +mississippi +rapidly +admiral +parallel +charlotte +guilty +tools +gender +divisions +fruit +laboratory +nelson +marry +rapid +aunt +tribe +requirements +aspects +suicide +amongst +adams +bone +ukraine +kick +sees +edinburgh +clothing +column +rough +gods +hunting +broadway +gathered +concerns +spending +ty +12th +snapped +requires +solar +bones +cavalry +iowa +drinking +waste +franklin +charity +thompson +stewart +tip +landscape +enjoy +singh +poem +listening +eighth +fred +differences +adapted +bomb +ukrainian +surgery +corporate +masters +anywhere +waves +odd +portugal +orleans +dick +debate +kent +eating +puerto +cleared +expect +cinema +guitarist +blocks +electrical +agree +involving +depth +dying +panel +struggle +peninsula +adults +novels +emerged 
+vienna +debuted +shoes +tamil +songwriter +meets +prove +beating +instance +heaven +scared +sending +marks +artistic +passage +superior +significantly +retained +##izing +technique +cheeks +warren +maintenance +destroy +extreme +allied +appearing +fill +advice +alabama +qualifying +policies +cleveland +hat +battery +authors +10th +soundtrack +acted +dated +lb +glance +equipped +coalition +funny +outer +ambassador +roy +possibility +couples +campbell +loose +ethan +supplies +gonna +monster +shake +agents +frequency +springs +dogs +practices +gang +plastic +easier +suggests +gulf +blade +exposed +colors +industries +markets +nervous +electoral +charts +legislation +ownership +##idae +appointment +shield +assault +socialist +abbey +monument +license +throne +employment +replacement +charter +suffering +accounts +oak +connecticut +strongly +wright +colour +13th +context +welsh +networks +voiced +gabriel +forehead +manage +schedule +totally +remix +forests +occupation +print +nicholas +brazilian +strategic +vampires +engineers +roots +seek +correct +instrumental +und +alfred +backed +stanley +robinson +traveled +wayne +austrian +achieve +exit +rates +strip +whereas +sing +deeply +adventure +bobby +jamie +careful +components +cap +useful +personality +knee +pushing +hosts +protest +ottoman +symphony +boundary +processes +considering +considerable +tons +cooper +trading +conduct +illegal +revolutionary +definition +harder +jacob +circumstances +destruction +popularity +grip +classified +liverpool +baltimore +flows +seeking +honour +approval +mechanical +till +happening +statue +critic +increasingly +immediate +describe +commerce +stare +indonesia +meat +rounds +boats +baker +orthodox +depression +formally +worn +naked +muttered +sentence +11th +document +criticism +wished +vessel +spiritual +bent +virgin +minimum +murray +lunch +danny +printed +compilation +keyboards +blow +belonged +raising +cutting +pittsburgh +9th +shadows +hated +indigenous +jon +15th +barry +scholar +oliver +stick +susan +meetings +attracted +spell +romantic +ye +demanded +customers +logan +revival +keys +modified +commanded +jeans +upset +phil +detective +hiding +resident +##bly +experiences +diamond +defeating +coverage +lucas +external +parks +franchise +helen +bible +successor +percussion +celebrated +lift +clan +romania +##ied +mills +nobody +achievement +shrugged +fault +rhythm +initiative +breakfast +carbon +lasted +violent +wound +killer +gradually +filmed +°c +processing +remove +criticized +guests +sang +chemistry +legislature +##bridge +uniform +escaped +integrated +proposal +purple +denied +liquid +influential +morris +nights +stones +intense +experimental +twisted +pace +nazi +mitchell +ny +blind +reporter +newspapers +14th +centers +burn +basin +forgotten +surviving +filed +collections +monastery +losses +manual +couch +description +appropriate +merely +missions +sebastian +restoration +replacing +triple +elder +julia +warriors +benjamin +julian +convinced +stronger +amazing +declined +versus +merchant +happens +output +finland +bare +barbara +absence +ignored +dawn +injuries +producers +luis +##ities +kw +admit +expensive +electricity +exception +symbol +ladies +shower +sheriff +characteristics +##je +aimed +button +ratio +effectively +summit +angle +jury +bears +foster +vessels +pants +executed +evans +dozen +advertising +kicked +patrol +competitions +lifetime +principles +athletics +birmingham +sponsored +rob +nomination +acoustic +creature +longest +credits +harbor +dust +josh +territories +milk 
+infrastructure +completion +thailand +indians +leon +archbishop +assist +pitch +blake +arrangement +girlfriend +serbian +operational +hence +sad +scent +fur +sessions +refer +rarely +exists +1892 +scientists +dirty +penalty +burst +portrait +seed +pole +limits +rival +stable +grave +constitutional +alcohol +arrest +flower +mystery +devil +architectural +relationships +greatly +habitat +##istic +larry +progressive +remote +cotton +preserved +reaches +cited +vast +scholarship +decisions +teach +editions +knocked +eve +searching +partly +participation +animated +fate +excellent +alternate +saints +youngest +climbed +suggest +discussion +staying +choir +lakes +jacket +revenue +nevertheless +peaked +instrument +wondering +annually +managing +neil +1891 +signing +terry +apply +clinical +brooklyn +aim +catherine +fuck +farmers +figured +ninth +pride +hugh +ordinary +involvement +comfortable +shouted +encouraged +representation +sharing +panic +exact +cargo +competing +fat +cried +1920s +occasions +cabin +borders +utah +marcus +##isation +badly +muscles +victorian +transition +warner +bet +permission +slave +terrible +similarly +shares +seth +uefa +possession +medals +benefits +colleges +lowered +perfectly +transit +##kar +publisher +##ened +harrison +deaths +elevation +asleep +machines +sigh +ash +hardly +argument +occasion +parent +decline +contribution +concentration +opportunities +hispanic +guardian +extent +emotions +hips +mason +volumes +bloody +controversy +diameter +steady +mistake +phoenix +identify +violin +departure +richmond +spin +funeral +enemies +1864 +literally +connor +random +sergeant +grab +confusion +1865 +transmission +informed +leaning +sacred +suspended +thinks +gates +portland +luck +agencies +yours +hull +expert +muscle +layer +practical +sculpture +jerusalem +latest +lloyd +statistics +deeper +recommended +warrior +arkansas +mess +supports +greg +eagle +recovered +rated +concerts +rushed +stops +eggs +premiere +keith +delhi +turner +pit +affair +belief +paint +##zing +victim +withdrew +bonus +styles +fled +glasgow +technologies +funded +adaptation +portrayed +cooperation +supporters +judges +bernard +hallway +ralph +graduating +controversial +distant +continental +spider +bite +recognize +intention +mixing +egyptian +bow +tourism +suppose +claiming +dominated +participants +nurse +partially +tape +psychology +essential +touring +duo +voting +civilian +emotional +channels +apparent +hebrew +1887 +tommy +carrier +intersection +beast +hudson +bench +discuss +costa +##ered +detailed +behalf +drivers +unfortunately +obtain +rocky +##dae +siege +friendship +1861 +hang +governments +collins +respond +wildlife +preferred +operator +laura +pregnant +videos +dennis +suspected +boots +instantly +weird +automatic +businessman +alleged +placing +throwing +mood +1862 +perry +venue +jet +remainder +passion +biological +boyfriend +1863 +dirt +buffalo +ron +segment +abuse +genre +thrown +stroke +colored +stress +exercise +displayed +struggled +abroad +dramatic +wonderful +thereafter +madrid +component +widespread +##sed +tale +citizen +todd +vancouver +overseas +forcing +crying +descent +discussed +substantial +ranks +regime +provinces +drum +zane +tribes +proof +researchers +volunteer +manor +silk +milan +donated +allies +venture +principle +delivery +enterprise +bars +traditionally +witch +reminded +copper +pete +inter +colin +grinned +elsewhere +competitive +frequent +scream +tension +texts +submarine +finnish +defending +defend +pat +detail +affiliated +stuart +themes +periods +tool 
+belgian +ruling +crimes +answers +folded +licensed +demolished +hans +lucy +1881 +lion +traded +photographs +writes +craig +trials +generated +beth +noble +debt +percentage +yorkshire +erected +viewed +grades +confidence +ceased +islam +telephone +retail +chile +m² +roberts +sixteen +commented +hampshire +innocent +dual +pounds +checked +regulations +afghanistan +sung +rico +liberty +assets +bigger +options +angels +relegated +tribute +wells +attending +leaf +romanian +monthly +patterns +gmina +madison +hurricane +rev +##ians +bristol +elite +valuable +disaster +democracy +awareness +germans +freyja +loop +absolutely +paying +populations +maine +sole +prayer +spencer +releases +doorway +bull +lover +midnight +conclusion +thirteen +mediterranean +nhl +proud +sample +##hill +drummer +guinea +murphy +climb +instant +attributed +horn +ain +railways +autumn +ferry +opponent +traveling +secured +corridor +stretched +tales +sheet +trinity +cattle +helps +indicates +manhattan +murdered +fitted +gentle +grandmother +mines +shocked +vegas +produces +caribbean +belong +continuous +desperate +drunk +historically +trio +waved +raf +dealing +nathan +murmured +interrupted +residing +scientist +pioneer +harold +aaron +delta +attempting +minority +believes +chorus +tend +lots +eyed +indoor +load +shots +updated +jail +concerning +connecting +wealth +slaves +arrive +rangers +sufficient +rebuilt +##wick +cardinal +flood +muhammad +whenever +relation +runners +moral +repair +viewers +arriving +revenge +punk +assisted +bath +fairly +breathe +lists +innings +illustrated +whisper +nearest +voters +clinton +ties +ultimate +screamed +beijing +lions +andre +fictional +gathering +comfort +radar +suitable +dismissed +hms +ban +pine +wrist +atmosphere +voivodeship +bid +timber +##ned +giants +cameron +recovery +uss +identical +categories +switched +serbia +laughter +noah +ensemble +therapy +peoples +touching +##off +locally +pearl +platforms +everywhere +ballet +tables +lanka +herbert +outdoor +toured +derek +1883 +spaces +contested +swept +1878 +exclusive +slight +connections +winds +prisoner +collective +bangladesh +tube +publicly +wealthy +isolated +insisted +fortune +ticket +spotted +reportedly +animation +enforcement +tanks +decides +wider +lowest +owen +nod +hitting +gregory +furthermore +magazines +fighters +solutions +pointing +requested +peru +reed +chancellor +knights +mask +worker +eldest +flames +reduction +volunteers +reporting +wire +advisory +endemic +origins +settlers +pursue +knock +consumer +1876 +eu +compound +creatures +mansion +sentenced +ivan +deployed +guitars +frowned +involves +mechanism +kilometers +perspective +shops +terminus +duncan +alien +fist +bridges +##pers +heroes +derby +swallowed +patent +sara +illness +characterized +adventures +slide +hawaii +jurisdiction +organised +adelaide +walks +biology +rogers +swing +tightly +boundaries +prepare +implementation +stolen +certified +colombia +edwards +garage +recalled +rage +harm +nigeria +breast +furniture +pupils +settle +cuba +balls +alaska +21st +linear +thrust +celebration +latino +genetic +terror +##ening +lightning +fee +witness +lodge +establishing +skull +earning +hood +rebellion +sporting +warned +missile +devoted +activist +porch +worship +fourteen +package +decorated +##shire +housed +chess +sailed +doctors +oscar +joan +treat +garcia +harbour +jeremy +traditions +dominant +jacques +##gon +relocated +1879 +amendment +sized +companion +simultaneously +volleyball +spun +acre +increases +stopping +loves +belongs +affect +drafted 
+tossed +scout +battles +1875 +filming +shoved +munich +tenure +vertical +romance +argue +craft +ranging +opens +honest +tyler +yesterday +muslims +reveal +snake +immigrants +radical +screaming +speakers +firing +saving +belonging +ease +lighting +prefecture +blame +farmer +hungry +grows +rubbed +beam +sur +subsidiary +armenian +dropping +conventional +qualify +spots +sweat +festivals +immigration +physician +discover +exposure +sandy +explanation +isaac +implemented +##fish +hart +initiated +stakes +presents +heights +householder +pleased +tourist +regardless +slip +closest +surely +sultan +brings +riley +preparation +aboard +slammed +baptist +experiment +ongoing +interstate +organic +playoffs +1877 +hindu +tours +tier +plenty +arrangements +talks +trapped +excited +sank +athens +1872 +denver +welfare +suburb +athletes +trick +diverse +belly +exclusively +yelled +conversion +1874 +internationally +computers +conductor +abilities +sensitive +dispute +measured +globe +rocket +prices +amsterdam +flights +tigers +municipalities +emotion +references +explains +airlines +manufactured +archaeological +1873 +interpretation +devon +##ites +settlements +kissing +absolute +improvement +impressed +barcelona +sullivan +jefferson +towers +jesse +julie +grandson +gauge +regard +rings +interviews +trace +raymond +thumb +departments +burns +serial +bulgarian +scores +demonstrated +1866 +kyle +alberta +underneath +romanized +relieved +acquisition +phrase +cliff +reveals +cuts +merger +custom +nee +gilbert +graduation +assessment +difficulty +demands +swung +democrat +commons +1940s +grove +completing +focuses +sum +substitute +bearing +stretch +reception +reflected +essentially +destination +pairs +##ched +survival +resource +##bach +promoting +doubles +messages +tear +##fully +parade +florence +harvey +incumbent +partial +pedro +frozen +procedure +olivia +controls +shelter +personally +temperatures +brisbane +tested +sits +marble +comprehensive +oxygen +leonard +##kov +inaugural +iranian +referring +quarters +attitude +mainstream +lined +mars +dakota +norfolk +unsuccessful +explosion +helicopter +congressional +##sing +inspector +bitch +seal +departed +divine +coaching +examination +punishment +manufacturer +sink +columns +unincorporated +signals +nevada +squeezed +dylan +dining +martial +manuel +eighteen +elevator +brushed +plates +ministers +congregation +slept +specialized +taxes +restricted +negotiations +likes +statistical +arnold +inspiration +execution +bold +intermediate +significance +margin +ruler +wheels +gothic +intellectual +dependent +listened +eligible +buses +widow +syria +earn +cincinnati +collapsed +recipient +secrets +accessible +philippine +maritime +goddess +clerk +surrender +breaks +playoff +ideal +beetle +aspect +soap +regulation +strings +expand +anglo +shorter +crosses +retreat +tough +coins +wallace +directions +pressing +shipping +locomotives +comparison +topics +nephew +distinction +honors +travelled +sierra +ibn +fortress +recognised +carved +1869 +clients +intent +coaches +describing +bread +##ington +beaten +northwestern +merit +collapse +challenges +historians +objective +submitted +virus +attacking +drake +assume +diseases +stem +leeds +farming +glasses +visits +nowhere +fellowship +relevant +carries +restaurants +experiments +constantly +bases +targets +shah +tenth +opponents +verse +territorial +writings +corruption +instruction +inherited +reverse +emphasis +employee +arch +keeps +rabbi +watson +payment +uh +nancy +##tre +venice +fastest +sexy +banned +adrian +properly 
+ruth +touchdown +dollar +boards +metre +circles +edges +favour +travels +liberation +scattered +firmly +holland +permitted +diesel +kenya +den +originated +demons +resumed +dragged +rider +servant +blinked +extend +torn +##sey +input +meal +everybody +cylinder +kinds +camps +bullet +logic +croatian +evolved +healthy +fool +wise +preserve +pradesh +respective +artificial +gross +corresponding +convicted +cage +caroline +dialogue +##dor +narrative +stranger +mario +christianity +failing +trent +commanding +buddhist +1848 +maurice +focusing +yale +bike +altitude +mouse +revised +##sley +veteran +pulls +theology +crashed +campaigns +legion +##ability +drag +excellence +customer +cancelled +intensity +excuse +liga +participating +contributing +printing +##burn +variable +curious +legacy +renaissance +symptoms +binding +vocalist +dancer +grammar +gospel +democrats +enters +diplomatic +hitler +clouds +mathematical +quit +defended +oriented +##heim +fundamental +hardware +impressive +equally +convince +confederate +guilt +chuck +sliding +magnetic +narrowed +petersburg +bulgaria +otto +phd +skill +hopes +pitcher +reservoir +hearts +automatically +expecting +mysterious +bennett +extensively +imagined +seeds +monitor +fix +##ative +journalism +struggling +signature +ranch +encounter +photographer +observation +protests +influences +calendar +cruz +croatia +locomotive +hughes +naturally +shakespeare +basement +hook +uncredited +faded +theories +approaches +dare +phillips +filling +fury +obama +efficient +arc +deliver +breeding +inducted +leagues +efficiency +axis +montana +eagles +##ked +supplied +instructions +karen +picking +indicating +trap +anchor +practically +christians +tomb +vary +occasional +electronics +lords +readers +newcastle +faint +innovation +collect +situations +engagement +claude +mixture +##feld +peer +tissue +lean +°f +floors +architects +reducing +rope +1859 +ottawa +##har +samples +banking +declaration +proteins +resignation +francois +saudi +advocate +exhibited +armor +twins +divorce +##ras +abraham +reviewed +temporarily +matrix +physically +pulse +curled +difficulties +bengal +usage +##ban +riders +certificate +holes +warsaw +distinctive +mutual +1857 +customs +circular +eugene +removal +loaded +mere +vulnerable +depicted +generations +dame +heir +enormous +lightly +climbing +pitched +lessons +pilots +nepal +preparing +brad +louise +renowned +liam +##ably +shaw +brilliant +bills +##nik +fucking +mainland +pleasant +seized +veterans +jerked +fail +brush +radiation +stored +warmth +southeastern +nate +sin +raced +berkeley +joke +athlete +designation +trunk +roland +qualification +heels +artwork +receives +judicial +reserves +##bed +woke +installation +abu +floating +fake +lesser +excitement +interface +concentrated +addressed +characteristic +amanda +saxophone +monk +releasing +egg +dies +interaction +defender +outbreak +glory +loving +sequel +consciousness +awake +ski +enrolled +handling +rookie +brow +somebody +biography +warfare +amounts +contracts +presentation +fabric +dissolved +challenged +meter +psychological +elevated +rally +accurate +##tha +hospitals +undergraduate +specialist +venezuela +exhibit +shed +nursing +protestant +fluid +structural +footage +jared +consistent +prey +##ska +succession +reflect +exile +lebanon +wiped +suspect +shanghai +resting +integration +preservation +marvel +variant +pirates +sheep +rounded +capita +sailing +colonies +manuscript +deemed +variations +clarke +functional +emerging +boxing +relaxed +curse +azerbaijan +heavyweight +nickname 
+editorial +rang +grid +tightened +earthquake +flashed +miguel +rushing +##ches +improvements +boxes +brooks +consumption +molecular +felix +societies +repeatedly +variation +aids +civic +graphics +professionals +realm +autonomous +receiver +delayed +workshop +militia +chairs +canyon +harsh +extending +lovely +happiness +##jan +stake +eyebrows +embassy +wellington +hannah +corners +bishops +swear +cloth +contents +namely +commenced +1854 +stanford +nashville +courage +graphic +commitment +garrison +hamlet +clearing +rebels +attraction +literacy +cooking +ruins +temples +jenny +humanity +celebrate +hasn +freight +sixty +rebel +bastard +newton +deer +##ges +##ching +smiles +delaware +singers +approaching +assists +flame +boulevard +barrel +planted +pursuit +consequences +shallow +invitation +rode +depot +ernest +kane +rod +concepts +preston +topic +chambers +striking +blast +arrives +descendants +montgomery +ranges +worlds +chaos +praise +fewer +1855 +sanctuary +mud +programmes +maintaining +harper +bore +handsome +closure +tournaments +nebraska +linda +facade +puts +satisfied +argentine +dale +cork +dome +panama +##yl +1858 +tasks +experts +##ates +feeding +equation +engage +bryan +um +quartet +disbanded +sheffield +blocked +gasped +delay +kisses +connects +##non +sts +poured +creator +publishers +guided +ellis +extinct +hug +gaining +##ord +complicated +poll +clenched +investigate +thereby +quantum +spine +cdp +humor +kills +administered +semifinals +encountered +ignore +commentary +##maker +bother +roosevelt +plains +halfway +flowing +cultures +crack +imprisoned +neighboring +airline +gather +wolves +marathon +transformed +cruise +organisations +punch +exhibitions +numbered +alarm +ratings +daddy +silently +##stein +queens +colours +impression +guidance +tactical +##rat +marshal +della +arrow +rested +feared +tender +owns +bitter +advisor +escort +##ides +spare +farms +grants +dragons +encourage +colleagues +cameras +sucked +pile +spirits +prague +statements +suspension +landmark +fence +torture +recreation +bags +permanently +survivors +pond +spy +predecessor +bombing +coup +protecting +transformation +glow +##lands +dug +priests +andrea +feat +barn +jumping +##ologist +casualties +stern +auckland +pipe +serie +revealing +trevor +mercy +spectrum +consist +governing +collaborated +possessed +epic +comprises +blew +shane +lopez +honored +magical +sacrifice +judgment +perceived +hammer +baronet +tune +das +missionary +sheets +neutral +oral +threatening +attractive +shade +aims +seminary +estates +1856 +michel +wounds +refugees +manufacturers +mercury +syndrome +porter +##iya +##din +hamburg +identification +upstairs +purse +widened +pause +cared +breathed +affiliate +santiago +prevented +celtic +fisher +recruited +byzantine +reconstruction +farther +diet +sake +spite +sensation +blank +separation +##hon +vladimir +armies +anime +accommodate +orbit +cult +sofia +##ify +founders +sustained +disorder +honours +northeastern +mia +crops +violet +threats +blanket +fires +canton +followers +southwestern +prototype +voyage +assignment +altered +moderate +protocol +pistol +questioned +brass +lifting +1852 +math +authored +doug +dimensional +dynamic +1851 +pronounced +grateful +quest +uncomfortable +boom +presidency +stevens +relating +politicians +barrier +quinn +diana +mosque +tribal +palmer +portions +sometime +chester +treasure +bend +millions +reforms +registration +consequently +monitoring +ate +preliminary +brandon +invented +eaten +exterior +intervention +ports +documented +displays +lecture 
+sally +favourite +vermont +invisible +isle +breed +journalists +relay +speaks +backward +explore +midfielder +actively +stefan +procedures +cannon +blond +kenneth +centered +servants +chains +libraries +malcolm +essex +henri +slavery +##hal +facts +fairy +coached +cassie +cats +washed +cop +announcement +2000s +vinyl +activated +marco +frontier +growled +curriculum +##das +loyal +accomplished +leslie +ritual +kenny +vii +napoleon +hollow +hybrid +jungle +stationed +friedrich +counted +##ulated +platinum +theatrical +seated +col +rubber +glen +diversity +healing +extends +provisions +administrator +columbus +tributary +assured +##uous +prestigious +examined +lectures +grammy +ronald +associations +bailey +allan +essays +flute +believing +consultant +proceedings +travelling +1853 +kerala +yugoslavia +buddy +methodist +burial +centres +batman +discontinued +dock +stockholm +lungs +severely +citing +manga +steal +mumbai +iraqi +robot +celebrity +bride +broadcasts +abolished +pot +joel +overhead +franz +packed +reconnaissance +johann +acknowledged +introduce +handled +doctorate +developments +drinks +alley +palestine +##aki +proceeded +recover +bradley +grain +patch +afford +infection +nationalist +legendary +interchange +virtually +gen +gravity +exploration +amber +vital +wishes +powell +doctrine +elbow +screenplay +##bird +contribute +indonesian +creates +enzyme +kylie +discipline +drops +manila +hunger +layers +suffer +fever +bits +monica +keyboard +manages +##hood +searched +appeals +##bad +testament +grande +reid +##war +beliefs +congo +requiring +casey +1849 +regret +streak +rape +depends +syrian +sprint +pound +tourists +upcoming +pub +tense +##els +practiced +nationwide +guild +motorcycle +liz +##zar +chiefs +desired +elena +precious +absorbed +relatives +booth +pianist +##mal +citizenship +exhausted +wilhelm +##ceae +##hed +noting +quarterback +urge +hectares +##gue +holly +blonde +davies +parked +sustainable +stepping +twentieth +airfield +nest +chip +##nell +shaft +paulo +requirement +paradise +tobacco +trans +renewed +vietnamese +suggesting +catching +holmes +enjoying +trips +colt +holder +butterfly +nerve +reformed +cherry +bowling +trailer +carriage +goodbye +appreciate +toy +joshua +interactive +enabled +involve +##kan +collar +determination +bunch +recall +shorts +superintendent +episcopal +frustration +giovanni +nineteenth +laser +privately +array +circulation +##ovic +armstrong +deals +painful +permit +discrimination +aires +retiring +cottage +horizon +ellen +jamaica +ripped +fernando +chapters +patron +lecturer +behaviour +genes +georgian +export +solomon +rivals +seventeen +rodriguez +princeton +independently +sox +1847 +arguing +entity +casting +hank +criteria +oakland +geographic +milwaukee +reflection +expanding +conquest +dubbed +halt +brave +brunswick +arched +curtis +divorced +predominantly +somerset +streams +ugly +zoo +horrible +curved +buenos +fierce +dictionary +vector +theological +unions +handful +stability +punjab +segments +altar +ignoring +gesture +monsters +pastor +thighs +unexpected +operators +abruptly +coin +compiled +associates +improving +migration +compact +collegiate +quarterfinals +roster +restore +assembled +hurry +oval +##cies +1846 +flags +martha +victories +sharply +##rated +argues +deadly +drawings +symbols +performer +griffin +restrictions +editing +andrews +journals +arabia +compositions +dee +pierce +removing +hindi +casino +runway +civilians +minds +##zation +refuge +rent +retain +potentially +conferences +suburban +conducting +descended 
+massacre +ammunition +terrain +fork +souls +counts +chelsea +durham +drives +cab +perth +realizing +palestinian +finn +simpson +##dal +betty +moreover +particles +cardinals +tent +evaluation +extraordinary +inscription +wednesday +chloe +maintains +panels +ashley +trucks +##nation +cluster +sunlight +strikes +zhang +dialect +tucked +collecting +##mas +##sville +quoted +evan +franco +aria +buying +cleaning +closet +provision +apollo +clinic +rat +necessarily +##ising +venues +flipped +cent +spreading +trustees +checking +authorized +disappointed +##ado +notion +duration +trumpet +hesitated +topped +brussels +rolls +theoretical +hint +define +aggressive +repeat +wash +peaceful +optical +width +allegedly +mcdonald +strict +##illa +investors +jam +witnesses +sounding +miranda +michelle +hugo +harmony +valid +lynn +glared +nina +headquartered +diving +boarding +gibson +albanian +marsh +routine +dealt +enhanced +intelligent +substance +targeted +enlisted +discovers +spinning +observations +pissed +smoking +capitol +varied +costume +seemingly +indies +compensation +surgeon +thursday +arsenal +westminster +suburbs +rid +anglican +##ridge +knots +foods +alumni +lighter +fraser +whoever +portal +scandal +gavin +advised +instructor +flooding +terrorist +teenage +interim +senses +duck +teen +thesis +abby +eager +overcome +newport +glenn +rises +shame +prompted +priority +forgot +bomber +nicolas +protective +cartoon +katherine +breeze +lonely +trusted +henderson +richardson +relax +palms +remarkable +legends +cricketer +essay +ordained +edmund +rifles +trigger +##uri +##away +sail +alert +1830 +audiences +penn +sussex +siblings +pursued +indianapolis +resist +rosa +consequence +succeed +avoided +1845 +##ulation +inland +##tie +##nna +counsel +profession +chronicle +hurried +##una +eyebrow +eventual +bleeding +innovative +cure +committees +accounting +scope +hardy +heather +tenor +gut +herald +codes +tore +scales +wagon +luxury +tin +prefer +fountain +triangle +bonds +darling +convoy +dried +traced +beings +troy +accidentally +slam +findings +smelled +joey +lawyers +outcome +steep +bosnia +configuration +shifting +toll +brook +performers +lobby +philosophical +construct +shrine +aggregate +cox +phenomenon +savage +insane +solely +reynolds +nationally +holdings +consideration +enable +edgar +fights +relegation +chances +atomic +hub +conjunction +awkward +reactions +currency +finale +kumar +underwent +steering +elaborate +gifts +comprising +melissa +veins +reasonable +sunshine +solve +trails +inhabited +elimination +ethics +huh +ana +molly +consent +apartments +layout +marines +hunters +bulk +##oma +hometown +##wall +##mont +cracked +reads +neighbouring +withdrawn +admission +wingspan +damned +anthology +lancashire +brands +batting +forgive +cuban +awful +##lyn +dimensions +imagination +dante +tracking +desperately +goalkeeper +##yne +groaned +workshops +confident +burton +gerald +milton +circus +uncertain +slope +copenhagen +sophia +fog +philosopher +portraits +accent +cycling +varying +gripped +larvae +garrett +specified +scotia +mature +luther +kurt +rap +##kes +aerial +ferdinand +heated +transported +##shan +safely +nonetheless +##orn +##gal +motors +demanding +##sburg +startled +##brook +ally +generate +caps +ghana +stained +mentions +beds +afterward +##bling +utility +##iro +richards +1837 +conspiracy +conscious +shining +footsteps +observer +cyprus +urged +loyalty +developer +probability +olive +upgraded +gym +miracle +insects +graves +1844 +ourselves +hydrogen +katie +tickets +poets +planes 
+prevention +witnessed +dense +jin +randy +tang +warehouse +monroe +archived +elderly +investigations +alec +granite +mineral +conflicts +controlling +aboriginal +mechanics +stan +stark +rhode +skirt +est +bombs +respected +##horn +imposed +limestone +deny +nominee +memphis +grabbing +disabled +amusement +frankfurt +corn +referendum +varies +slowed +disk +firms +unconscious +incredible +clue +sue +##zhou +twist +##cio +joins +idaho +chad +developers +computing +destroyer +mortal +tucker +kingston +choices +carson +whitney +geneva +pretend +dimension +staged +plateau +maya +##une +freestyle +rovers +##ids +tristan +classroom +prospect +##hus +honestly +diploma +lied +thermal +auxiliary +feast +unlikely +iata +morocco +pounding +treasury +lithuania +considerably +1841 +dish +1812 +geological +matching +stumbled +destroying +marched +brien +advances +nicole +settling +measuring +directing +##mie +tuesday +bassist +capabilities +stunned +fraud +torpedo +##phone +anton +wisdom +surveillance +ruined +##ulate +lawsuit +healthcare +theorem +halls +trend +aka +horizontal +dozens +acquire +lasting +swim +hawk +gorgeous +fees +vicinity +decrease +adoption +tactics +##ography +pakistani +##ole +draws +##hall +willie +burke +heath +algorithm +integral +powder +elliott +brigadier +jackie +tate +varieties +darker +##cho +lately +cigarette +specimens +adds +##ensis +##inger +exploded +finalist +murders +wilderness +arguments +nicknamed +acceptance +onwards +manufacture +robertson +jets +tampa +enterprises +loudly +composers +nominations +1838 +malta +inquiry +automobile +hosting +viii +rays +tilted +grief +museums +strategies +furious +euro +equality +cohen +poison +surrey +wireless +governed +ridiculous +moses +##esh +vanished +barnes +attract +morrison +istanbul +##iness +absent +rotation +petition +janet +##logical +satisfaction +custody +deliberately +observatory +comedian +surfaces +pinyin +novelist +strictly +canterbury +oslo +monks +embrace +jealous +photograph +continent +dorothy +marina +excess +holden +allegations +explaining +stack +avoiding +lance +storyline +majesty +poorly +spike +bradford +raven +travis +classics +proven +voltage +pillow +fists +butt +1842 +interpreted +1839 +gage +telegraph +lens +promising +expelled +casual +collector +zones +silly +nintendo +##kh +downstairs +chef +suspicious +afl +flies +vacant +uganda +pregnancy +condemned +lutheran +estimates +cheap +decree +saxon +proximity +stripped +idiot +deposits +contrary +presenter +magnus +glacier +offense +edwin +##ori +upright +##long +bolt +##ois +toss +geographical +##izes +environments +delicate +marking +abstract +xavier +nails +windsor +plantation +occurring +equity +saskatchewan +fears +drifted +sequences +vegetation +revolt +##stic +1843 +sooner +fusion +opposing +nato +skating +1836 +secretly +ruin +lease +flora +anxiety +##ological +##mia +bout +taxi +emmy +frost +rainbow +compounds +foundations +rainfall +assassination +nightmare +dominican +achievements +deserve +orlando +intact +armenia +##nte +calgary +valentine +marion +proclaimed +theodore +bells +courtyard +thigh +gonzalez +console +troop +minimal +everyday +supporter +terrorism +buck +openly +presbyterian +activists +carpet +##iers +rubbing +uprising +cute +conceived +legally +##cht +millennium +cello +velocity +rescued +cardiff +1835 +rex +concentrate +senators +beard +rendered +glowing +battalions +scouts +competitors +sculptor +catalogue +arctic +ion +raja +bicycle +glancing +lawn +##woman +gentleman +lighthouse +publish +predicted +calculated +variants 
+##gne +strain +winston +deceased +touchdowns +brady +caleb +sinking +echoed +crush +hon +blessed +protagonist +hayes +endangered +magnitude +editors +##tine +estimate +responsibilities +##mel +backup +laying +consumed +sealed +zurich +lovers +frustrated +##eau +ahmed +kicking +treasurer +1832 +biblical +refuse +terrified +pump +agrees +genuine +imprisonment +refuses +plymouth +lou +##nen +tara +trembling +antarctic +ton +learns +##tas +crap +crucial +faction +atop +##borough +wrap +lancaster +odds +hopkins +erik +lyon +##eon +bros +snap +locality +empress +crowned +cal +acclaimed +chuckled +clara +sends +mild +towel +wishing +assuming +interviewed +##bal +interactions +eden +cups +helena +indie +beck +##fire +batteries +filipino +wizard +parted +traces +##born +rows +idol +albany +delegates +##ees +##sar +discussions +notre +instructed +belgrade +highways +suggestion +lauren +possess +orientation +alexandria +abdul +beats +salary +reunion +ludwig +alright +wagner +intimate +pockets +slovenia +hugged +brighton +merchants +cruel +stole +trek +slopes +repairs +enrollment +politically +underlying +promotional +counting +boeing +isabella +naming +keen +bacteria +listing +separately +belfast +ussr +lithuanian +anybody +ribs +sphere +martinez +cock +embarrassed +proposals +fragments +nationals +##wski +premises +fin +alpine +matched +freely +bounded +jace +sleeve +pier +populated +evident +##like +frances +flooded +##dle +frightened +pour +trainer +framed +visitor +challenging +pig +wickets +##fold +infected +##pes +arose +reward +ecuador +oblast +vale +shuttle +##usa +bach +rankings +forbidden +cornwall +accordance +salem +consumers +bruno +fantastic +toes +machinery +resolved +julius +remembering +propaganda +iceland +bombardment +tide +contacts +wives +##rah +concerto +macdonald +albania +implement +daisy +tapped +sudan +helmet +mistress +crop +sunk +finest +##craft +hostile +boxer +fr +paths +adjusted +habit +ballot +supervision +soprano +bullets +wicked +sunset +regiments +disappear +lamp +performs +##gia +rabbit +digging +incidents +entries +##cion +dishes +introducing +##ati +##fied +freshman +slot +jill +tackles +baroque +backs +##iest +lone +sponsor +destiny +altogether +convert +##aro +consensus +shapes +demonstration +basically +feminist +auction +artifacts +##bing +strongest +halifax +allmusic +mighty +smallest +precise +alexandra +viola +##los +##ille +manuscripts +##illo +dancers +ari +managers +monuments +blades +barracks +springfield +maiden +consolidated +electron +berry +airing +wheat +nobel +inclusion +blair +payments +geography +bee +eleanor +react +##hurst +afc +manitoba +lineup +fitness +recreational +investments +airborne +disappointment +##dis +edmonton +viewing +renovation +infant +bankruptcy +roses +aftermath +pavilion +carpenter +withdrawal +ladder +discussing +popped +reliable +agreements +rochester +##abad +curves +bombers +rao +reverend +decreased +choosing +stiff +consulting +naples +crawford +tracy +ribbon +cops +crushed +deciding +unified +teenager +accepting +flagship +poles +sanchez +inspection +revived +skilled +induced +exchanged +flee +locals +tragedy +swallow +hanna +demonstrate +##ela +salvador +flown +contestants +civilization +##ines +wanna +rhodes +fletcher +hector +knocking +considers +nash +mechanisms +sensed +mentally +walt +unclear +##eus +renovated +madame +crews +governmental +undertaken +monkey +##ben +##ato +fatal +armored +copa +caves +governance +grasp +perception +certification +froze +damp +tugged +wyoming +##rg +##ero +newman +nerves 
+curiosity +graph +##ami +withdraw +tunnels +dull +meredith +moss +exhibits +neighbors +communicate +accuracy +explored +raiders +republicans +secular +kat +superman +penny +criticised +freed +conviction +ham +likewise +delegation +gotta +doll +promises +technological +myth +nationality +resolve +convent +sharon +dig +sip +coordinator +entrepreneur +fold +##dine +capability +councillor +synonym +blown +swan +cursed +1815 +jonas +haired +sofa +canvas +keeper +rivalry +##hart +rapper +speedway +swords +postal +maxwell +estonia +potter +recurring +errors +##oni +cognitive +1834 +claws +nadu +roberto +bce +wrestler +ellie +infinite +ink +##tia +presumably +finite +staircase +noel +patricia +nacional +chill +eternal +tu +preventing +prussia +fossil +limbs +##logist +ernst +frog +perez +rene +prussian +##ios +molecules +regulatory +answering +opinions +sworn +lengths +supposedly +hypothesis +upward +habitats +seating +ancestors +drank +yield +synthesis +researcher +modest +##var +mothers +peered +voluntary +homeland +acclaim +##igan +static +valve +luxembourg +alto +carroll +receptor +norton +ambulance +##tian +johnston +catholics +depicting +jointly +elephant +gloria +mentor +badge +ahmad +distinguish +remarked +councils +precisely +allison +advancing +detection +crowded +cooperative +ankle +mercedes +dagger +surrendered +pollution +commit +subway +jeffrey +lesson +sculptures +provider +##fication +membrane +timothy +rectangular +fiscal +heating +teammate +basket +particle +anonymous +deployment +missiles +courthouse +proportion +shoe +sec +complaints +forbes +blacks +abandon +remind +sizes +overwhelming +autobiography +natalie +##awa +risks +contestant +countryside +babies +scorer +invaded +enclosed +proceed +hurling +disorders +##cu +reflecting +continuously +cruiser +graduates +freeway +investigated +ore +deserved +maid +blocking +phillip +jorge +shakes +dove +mann +variables +lacked +burden +accompanying +que +consistently +organizing +provisional +complained +endless +tubes +juice +georges +krishna +mick +thriller +laps +arcade +sage +snail +shannon +laurence +seoul +vacation +presenting +hire +churchill +surprisingly +prohibited +savannah +technically +##oli +##lessly +testimony +suited +speeds +toys +romans +flowering +measurement +talented +kay +settings +charleston +expectations +shattered +achieving +triumph +ceremonies +portsmouth +lanes +mandatory +loser +stretching +cologne +realizes +seventy +cornell +careers +webb +##ulating +americas +budapest +ava +suspicion +yo +conrad +sterling +jessie +rector +##az +1831 +transform +organize +loans +christine +volcanic +warrant +slender +summers +subfamily +newer +danced +dynamics +rhine +proceeds +heinrich +gastropod +commands +sings +facilitate +easter +positioned +responses +expense +fruits +yanked +imported +25th +velvet +vic +primitive +tribune +baldwin +neighbourhood +donna +rip +hay +##uro +1814 +espn +welcomed +##aria +qualifier +glare +highland +timing +##cted +shells +eased +geometry +louder +exciting +slovakia +##iz +savings +prairie +marching +rafael +tonnes +##lled +curtain +preceding +shy +heal +greene +worthy +##pot +detachment +bury +sherman +##eck +reinforced +seeks +bottles +contracted +duchess +outfit +walsh +mickey +geoffrey +archer +squeeze +dawson +eliminate +invention +##enberg +neal +##eth +stance +dealer +coral +maple +retire +simplified +1833 +hid +watts +backwards +jules +##oke +genesis +frames +rebounds +burma +woodland +moist +santos +whispers +drained +subspecies +streaming +ulster +burnt +correspondence 
+maternal +gerard +denis +stealing +genius +duchy +##oria +inaugurated +momentum +suits +placement +sovereign +clause +thames +##hara +confederation +reservation +sketch +yankees +lets +rotten +charm +hal +verses +commercially +dot +salon +citation +adopt +winnipeg +mist +allocated +cairo +jenkins +interference +objectives +##wind +1820 +portfolio +armoured +sectors +initiatives +integrity +exercises +robe +tap +gazed +##tones +distracted +rulers +favorable +jerome +tended +cart +factories +##eri +diplomat +valued +gravel +charitable +calvin +exploring +shepherd +terrace +pupil +##ural +reflects +##rch +governors +shelf +depths +##nberg +trailed +crest +tackle +##nian +hatred +##kai +clare +makers +ethiopia +longtime +detected +embedded +lacking +slapped +rely +thomson +anticipation +morton +successive +agnes +screenwriter +straightened +philippe +playwright +haunted +licence +iris +intentions +sutton +logical +correctly +##weight +branded +licked +tipped +silva +ricky +narrator +requests +##ents +greeted +supernatural +cow +##wald +lung +refusing +employer +strait +gaelic +liner +##piece +zoe +sabha +##mba +driveway +harvest +prints +bates +reluctantly +threshold +algebra +ira +wherever +coupled +assumption +picks +designers +raids +gentlemen +roller +blowing +leipzig +locks +screw +dressing +strand +##lings +scar +dwarf +depicts +##nu +nods +differ +boris +##eur +yuan +flip +##gie +mob +invested +questioning +applying +shout +##sel +gameplay +blamed +illustrations +bothered +weakness +rehabilitation +##zes +envelope +rumors +miners +leicester +subtle +kerry +ferguson +premiership +bengali +prof +catches +remnants +dana +##rily +shouting +presidents +baltic +ought +ghosts +dances +sailors +shirley +fancy +dominic +##bie +madonna +##rick +bark +buttons +gymnasium +ashes +liver +toby +oath +providence +doyle +evangelical +nixon +cement +carnegie +embarked +hatch +surroundings +guarantee +needing +pirate +essence +filter +crane +hammond +projected +immune +percy +twelfth +regent +doctoral +damon +mikhail +##ichi +critically +elect +realised +abortion +acute +screening +mythology +steadily +frown +nottingham +kirk +wa +minneapolis +##rra +module +algeria +nautical +encounters +surprising +statues +availability +shirts +pie +alma +brows +munster +mack +soup +crater +tornado +sanskrit +cedar +explosive +bordered +dixon +planets +stamp +exam +happily +##bble +carriers +kidnapped +accommodation +emigrated +##met +knockout +correspondent +violation +profits +peaks +lang +specimen +agenda +ancestry +pottery +spelling +equations +obtaining +ki +linking +1825 +debris +asylum +buddhism +##ants +gazette +dental +eligibility +fathers +averaged +zimbabwe +francesco +coloured +hissed +translator +lynch +mandate +humanities +mackenzie +uniforms +##iana +asset +fitting +samantha +genera +rim +beloved +shark +riot +entities +expressions +indo +carmen +slipping +owing +abbot +neighbor +sidney +rats +recommendations +encouraging +squadrons +anticipated +commanders +conquered +donations +diagnosed +divide +##iva +guessed +decoration +vernon +auditorium +revelation +conversations +##kers +##power +herzegovina +dash +alike +protested +lateral +herman +accredited +##gent +freeman +mel +fiji +crow +crimson +##rine +livestock +##pped +humanitarian +bored +oz +whip +##lene +##ali +legitimate +alter +grinning +spelled +anxious +oriental +wesley +##nin +##hole +carnival +controller +detect +##ssa +bowed +educator +kosovo +macedonia +##sin +occupy +mastering +stephanie +janeiro +para +unaware +nurses +noon +hopefully 
+ranger +combine +sociology +polar +rica +##eer +neill +##sman +holocaust +doubled +lust +1828 +decent +cooling +unveiled +1829 +nsw +homer +chapman +meyer +dive +mae +reagan +expertise +##gled +darwin +brooke +sided +prosecution +investigating +comprised +petroleum +genres +reluctant +differently +trilogy +johns +vegetables +corpse +highlighted +lounge +pension +unsuccessfully +elegant +aided +ivory +beatles +amelia +cain +dubai +immigrant +babe +underwater +combining +mumbled +atlas +horns +accessed +ballad +physicians +homeless +gestured +rpm +freak +louisville +corporations +patriots +prizes +rational +warn +modes +decorative +overnight +din +troubled +phantom +monarch +sheer +##dorf +generals +guidelines +organs +addresses +enhance +curling +parishes +cord +##kie +caesar +deutsche +bavaria +coleman +cyclone +##eria +bacon +petty +##yama +##old +hampton +diagnosis +1824 +throws +complexity +rita +disputed +pablo +marketed +trafficking +##ulus +examine +plague +formats +vault +faithful +##bourne +webster +highlights +##ient +phones +vacuum +sandwich +modeling +##gated +bolivia +clergy +qualities +isabel +##nas +##ars +wears +screams +reunited +annoyed +bra +##ancy +##rate +differential +transmitter +tattoo +container +poker +##och +excessive +resides +cowboys +##tum +augustus +trash +providers +statute +retreated +balcony +reversed +void +storey +preceded +masses +leap +laughs +neighborhoods +wards +schemes +falcon +santo +battlefield +ronnie +lesbian +venus +##dian +beg +sandstone +daylight +punched +gwen +analog +stroked +wwe +acceptable +measurements +toxic +##kel +adequate +surgical +economist +parameters +varsity +##sberg +quantity +##chy +##rton +countess +generating +precision +diamonds +expressway +##ı +1821 +uruguay +talents +galleries +expenses +scanned +colleague +outlets +ryder +lucien +##ila +paramount +syracuse +dim +fangs +gown +sweep +##sie +missionaries +websites +sentences +adviser +val +trademark +spells +##plane +patience +starter +slim +##borg +toe +incredibly +shoots +elliot +nobility +##wyn +cowboy +endorsed +gardner +tendency +persuaded +organisms +emissions +kazakhstan +amused +boring +chips +themed +##hand +constantinople +chasing +systematic +guatemala +borrowed +erin +carey +##hard +highlands +struggles +1810 +##ifying +##ced +exceptions +develops +enlarged +kindergarten +castro +##rina +leigh +zombie +juvenile +##most +consul +sailor +hyde +clarence +intensive +pinned +nasty +useless +jung +clayton +stuffed +exceptional +ix +apostolic +transactions +exempt +swinging +cove +religions +shields +dairy +bypass +pursuing +joyce +bombay +chassis +southampton +chat +interact +redesignated +##pen +nascar +pray +salmon +rigid +regained +malaysian +grim +publicity +constituted +capturing +toilet +delegate +purely +tray +drift +loosely +striker +weakened +trinidad +mitch +itv +defines +transmitted +scarlet +nodding +fitzgerald +narrowly +tooth +standings +virtue +##wara +##cting +chateau +gloves +lid +hurting +conservatory +##pel +sinclair +reopened +sympathy +nigerian +strode +advocated +optional +chronic +discharge +suck +compatible +laurel +stella +fails +wage +dodge +informal +sorts +levi +buddha +villagers +chronicles +heavier +summoned +gateway +eleventh +jewelry +translations +accordingly +seas +##ency +fiber +pyramid +cubic +dragging +##ista +caring +##ops +contacted +lunar +lisbon +patted +1826 +sacramento +theft +madagascar +subtropical +disputes +holidays +piper +willow +mare +cane +newfoundland +benny +companions +dong +raj +observe +roar +charming +plaque 
+tibetan +fossils +enacted +manning +bubble +tanzania +##eda +##hir +funk +swamp +deputies +cloak +ufc +scenario +par +scratch +metals +anthem +guru +engaging +specially +##boat +dialects +nineteen +cecil +duet +disability +unofficial +##lies +defunct +moonlight +drainage +surname +puzzle +switching +conservatives +mammals +knox +broadcaster +sidewalk +cope +##ried +benson +princes +peterson +##sal +bedford +sharks +eli +wreck +alberto +gasp +archaeology +lgbt +teaches +securities +madness +compromise +waving +coordination +davidson +visions +leased +possibilities +eighty +fernandez +enthusiasm +assassin +sponsorship +reviewer +kingdoms +estonian +laboratories +##fy +##nal +applies +verb +celebrations +##zzo +rowing +lightweight +sadness +submit +balanced +dude +explicitly +metric +magnificent +mound +brett +mohammad +mistakes +irregular +sanders +betrayed +shipped +surge +##enburg +reporters +termed +georg +pity +verbal +bulls +abbreviated +enabling +appealed +sicily +sting +heel +sweetheart +bart +spacecraft +brutal +monarchy +aberdeen +cameo +diane +survivor +clyde +##aries +complaint +##makers +clarinet +delicious +chilean +karnataka +coordinates +1818 +panties +##rst +pretending +dramatically +kiev +tends +distances +catalog +launching +instances +telecommunications +portable +lindsay +vatican +##eim +angles +aliens +marker +stint +screens +bolton +##rne +judy +wool +benedict +plasma +europa +imaging +filmmaker +swiftly +contributor +opted +stamps +apologize +financing +butter +gideon +sophisticated +alignment +avery +chemicals +yearly +speculation +prominence +professionally +immortal +institutional +inception +wrists +identifying +tribunal +derives +gains +papal +preference +linguistic +vince +operative +brewery +##ont +unemployment +boyd +##ured +##outs +albeit +prophet +1813 +##rad +quarterly +asteroid +cleaned +radius +temper +##llen +telugu +jerk +viscount +##ote +glimpse +##aya +yacht +hawaiian +baden +laptop +readily +##gu +monetary +offshore +scots +watches +##yang +##arian +upgrade +needle +lea +encyclopedia +flank +fingertips +delight +teachings +confirm +roth +beaches +midway +winters +##iah +teasing +daytime +beverly +gambling +##backs +regulated +clement +hermann +tricks +knot +##shing +##uring +##vre +detached +ecological +owed +specialty +byron +inventor +bats +stays +screened +unesco +midland +trim +affection +##ander +jess +thoroughly +feedback +chennai +strained +heartbeat +wrapping +overtime +pleaded +##sworth +leisure +oclc +##tate +##ele +feathers +angelo +thirds +nuts +surveys +clever +gill +commentator +##dos +darren +rides +gibraltar +dissolution +dedication +shin +meals +saddle +elvis +reds +chaired +taller +appreciation +functioning +niece +favored +advocacy +robbie +criminals +suffolk +yugoslav +passport +constable +congressman +hastings +##rov +consecrated +sparks +ecclesiastical +confined +##ovich +muller +floyd +nora +1822 +paved +1827 +cumberland +ned +saga +spiral +appreciated +collaborative +treating +similarities +feminine +finishes +##ib +jade +import +##hot +champagne +mice +securing +celebrities +helsinki +attributes +##gos +cousins +phases +ache +lucia +gandhi +submission +vicar +spear +shine +tasmania +biting +detention +constitute +tighter +seasonal +##gus +terrestrial +matthews +effectiveness +parody +philharmonic +##onic +1816 +strangers +encoded +consortium +guaranteed +regards +shifts +tortured +collision +supervisor +inform +broader +insight +theaters +armour +emeritus +blink +incorporates +mapping +handball +flexible +##nta +substantially 
+generous +thief +carr +loses +1793 +prose +ucla +romeo +generic +metallic +realization +damages +commissioners +zach +default +helicopters +lengthy +stems +partnered +spectators +rogue +indication +penalties +teresa +1801 +sen +##tric +dalton +##wich +irving +photographic +##vey +deaf +peters +excluded +unsure +##vable +patterson +crawled +##zio +resided +whipped +latvia +slower +ecole +pipes +employers +maharashtra +comparable +textile +pageant +##gel +alphabet +binary +irrigation +chartered +choked +antoine +offs +waking +supplement +quantities +demolition +regain +locate +urdu +folks +scary +andreas +whites +##ava +classrooms +mw +aesthetic +publishes +valleys +guides +cubs +johannes +bryant +conventions +affecting +##itt +drain +awesome +isolation +prosecutor +ambitious +apology +captive +downs +atmospheric +lorenzo +aisle +beef +foul +##onia +kidding +composite +disturbed +illusion +natives +##ffer +rockets +riverside +wartime +painters +adolf +melted +uncertainty +simulation +hawks +progressed +meantime +builder +spray +breach +unhappy +regina +russians +determining +tram +1806 +##quin +aging +1823 +garion +rented +mister +diaz +terminated +clip +1817 +depend +nervously +disco +owe +defenders +shiva +notorious +disbelief +shiny +worcester +##gation +##yr +trailing +undertook +islander +belarus +limitations +watershed +fuller +overlooking +utilized +raphael +1819 +synthetic +breakdown +klein +##nate +moaned +memoir +lamb +practicing +##erly +cellular +arrows +exotic +witches +charted +rey +hut +hierarchy +subdivision +freshwater +giuseppe +aloud +reyes +qatar +marty +sideways +utterly +sexually +jude +prayers +mccarthy +softball +blend +damien +##gging +##metric +wholly +erupted +lebanese +negro +revenues +tasted +comparative +teamed +transaction +labeled +maori +sovereignty +parkway +trauma +gran +malay +advancement +descendant +buzz +salvation +inventory +symbolic +##making +antarctica +mps +##bro +mohammed +myanmar +holt +submarines +tones +##lman +locker +patriarch +bangkok +emerson +remarks +predators +kin +afghan +confession +norwich +rental +emerge +advantages +##zel +rca +##hold +shortened +storms +aidan +##matic +autonomy +compliance +##quet +dudley +##osis +1803 +motto +documentation +summary +professors +spectacular +christina +archdiocese +flashing +innocence +remake +##dell +psychic +reef +scare +employ +sticks +meg +gus +leans +accompany +bergen +tomas +doom +wages +pools +##bes +breasts +scholarly +alison +outline +brittany +breakthrough +willis +realistic +##cut +##boro +competitor +##stan +pike +picnic +designing +commercials +washing +villain +skiing +costumes +auburn +halted +executives +logistics +cycles +vowel +applicable +barrett +exclaimed +eurovision +eternity +ramon +##umi +modifications +sweeping +disgust +torch +aviv +ensuring +rude +dusty +sonic +donovan +outskirts +cu +pathway +##band +##gun +disciplines +acids +cadet +paired +sketches +##sive +marriages +folding +peers +slovak +implies +admired +##beck +1880s +leopold +instinct +attained +weston +megan +horace +##ination +dorsal +ingredients +evolutionary +complications +deity +lethal +brushing +levy +deserted +institutes +posthumously +delivering +telescope +coronation +motivated +rapids +luc +flicked +pays +volcano +tanner +weighed +##nica +crowds +frankie +gifted +addressing +granddaughter +winding +##rna +constantine +gomez +##front +landscapes +rudolf +anthropology +slate +werewolf +astronomy +circa +rouge +dreaming +sack +knelt +drowned +naomi +prolific +tracked +freezing +herb +agony +randall 
+twisting +wendy +deposit +touches +vein +wheeler +##bbled +batted +retaining +tire +presently +compare +specification +daemon +nigel +##grave +merry +recommendation +czechoslovakia +sandra +roma +##sts +lambert +inheritance +sheikh +winchester +cries +examining +##yle +comeback +cuisine +nave +##iv +retrieve +tomatoes +barker +polished +defining +irene +lantern +personalities +begging +tract +swore +1809 +##gic +omaha +brotherhood +haiti +##ots +exeter +##ete +##zia +steele +dumb +pearson +surveyed +elisabeth +trends +fritz +bugs +fraction +calmly +viking +##birds +tug +inserted +unusually +##ield +confronted +distress +crashing +brent +turks +resign +##olo +cambodia +gabe +sauce +##kal +evelyn +extant +clusters +quarry +teenagers +luna +##lers +##ister +affiliation +drill +##ashi +panthers +scenic +libya +anita +strengthen +inscriptions +##cated +lace +sued +judith +riots +##uted +mint +##eta +preparations +midst +dub +challenger +##vich +mock +displaced +wicket +breaths +enables +schmidt +analyst +##lum +highlight +automotive +axe +josef +newark +sufficiently +resembles +50th +##pal +flushed +mum +traits +##ante +commodore +incomplete +warming +titular +ceremonial +ethical +celebrating +eighteenth +cao +lima +medalist +mobility +strips +snakes +miniature +zagreb +barton +escapes +umbrella +automated +doubted +differs +cooled +georgetown +dresden +cooked +fade +wyatt +jacobs +carlton +abundant +stereo +madras +inning +spur +malayalam +begged +osaka +groan +escaping +charging +dose +##aj +bud +papa +communists +advocates +edged +tri +resemble +peaking +necklace +fried +montenegro +saxony +goose +glances +stuttgart +curator +recruit +grocery +sympathetic +##tting +##fort +lotus +randolph +ancestor +##rand +succeeding +jupiter +1798 +macedonian +##heads +hiking +1808 +handing +fischer +##itive +garbage +##pies +prone +singular +papua +inclined +attractions +italia +pouring +motioned +grandma +garnered +jacksonville +corp +ego +ringing +aluminum +##hausen +ordering +##foot +drawer +traders +synagogue +##kawa +resistant +wandering +fragile +fiona +teased +hardcore +soaked +jubilee +decisive +exposition +mercer +poster +valencia +hale +kuwait +1811 +##ises +##wr +##eed +tavern +gamma +johan +##uer +airways +amino +gil +vocational +domains +torres +generator +folklore +outcomes +##keeper +canberra +shooter +fl +beams +confrontation +##gram +aligned +forestry +pipeline +jax +motorway +conception +decay +coffin +##cott +stalin +1805 +escorted +minded +##nam +sitcom +purchasing +twilight +veronica +additions +passive +tensions +straw +frequencies +1804 +refugee +cultivation +##iate +christie +clary +bulletin +crept +disposal +##rich +##zong +processor +crescent +##rol +emphasized +whale +nazis +aurora +dwelling +hauled +sponsors +toledo +ideology +theatres +tessa +cerambycidae +saves +turtle +cone +suspects +kara +rusty +yelling +greeks +mozart +shades +cocked +participant +shire +spit +freeze +necessity +##cos +inmates +nielsen +councillors +loaned +uncommon +omar +peasants +botanical +offspring +daniels +formations +jokes +1794 +pioneers +sigma +licensing +##sus +wheelchair +polite +1807 +liquor +pratt +trustee +##uta +forewings +balloon +kilometre +camping +explicit +casually +shawn +foolish +teammates +nm +hassan +carrie +judged +satisfy +vanessa +knives +selective +flowed +##lice +stressed +eliza +mathematician +cease +cultivated +##roy +commissions +browns +##ania +destroyers +sheridan +meadow +##rius +minerals +##cial +downstream +clash +gram +memoirs +ventures +baha +seymour +archie 
+midlands +edith +fare +flynn +invite +canceled +tiles +stabbed +boulder +incorporate +amended +camden +facial +mollusk +unreleased +descriptions +grabs +raises +ramp +shiver +##rose +coined +pioneering +tunes +qing +warwick +tops +melanie +giles +##rous +wandered +##inal +annexed +30th +unnamed +##ished +organizational +airplane +normandy +stoke +whistle +blessing +violations +chased +holders +shotgun +##ctic +reactor +##vik +tires +tearing +shores +fortified +mascot +constituencies +columnist +productive +tibet +##rta +lineage +hooked +tapes +judging +cody +##gger +hansen +kashmir +triggered +##eva +solved +cliffs +##tree +resisted +anatomy +protesters +transparent +implied +##iga +injection +mattress +excluding +##mbo +defenses +helpless +devotion +##elli +growl +liberals +weber +phenomena +atoms +plug +##iff +mortality +apprentice +howe +convincing +swimmer +barber +leone +promptly +sodium +def +nowadays +arise +##oning +gloucester +corrected +dignity +norm +erie +##ders +elders +evacuated +compression +##yar +hartford +backpack +reasoning +accepts +24th +wipe +millimetres +marcel +##oda +dodgers +albion +1790 +overwhelmed +aerospace +oaks +1795 +showcase +acknowledge +recovering +nolan +ashe +hurts +geology +fashioned +disappearance +farewell +swollen +shrug +marquis +wimbledon +rue +1792 +commemorate +reduces +experiencing +inevitable +calcutta +##court +murderer +sticking +fisheries +imagery +bloom +##inus +gustav +hesitation +memorable +viral +beans +accidents +tunisia +antenna +spilled +consort +treatments +aye +perimeter +##gard +donation +hostage +migrated +banker +addiction +apex +lil +trout +##ously +conscience +##nova +rams +sands +genome +passionate +troubles +##lets +amid +##ibility +##ret +higgins +exceed +vikings +##vie +payne +##zan +muscular +defendant +sucking +##wal +ibrahim +fuselage +claudia +vfl +europeans +snails +interval +##garh +preparatory +statewide +tasked +lacrosse +viktor +##lation +angola +##hra +flint +implications +employs +teens +patrons +stall +weekends +barriers +scrambled +nucleus +tehran +jenna +parsons +lifelong +robots +displacement +##bles +precipitation +knuckles +clutched +1802 +marrying +ecology +marx +accusations +declare +scars +kolkata +mat +meadows +bermuda +skeleton +finalists +vintage +crawl +coordinate +affects +subjected +orchestral +mistaken +mirrors +dipped +relied +arches +candle +##nick +incorporating +wildly +fond +basilica +owl +fringe +rituals +whispering +stirred +feud +tertiary +slick +goat +honorable +whereby +ricardo +stripes +parachute +adjoining +submerged +synthesizer +##gren +intend +positively +ninety +phi +beaver +partition +fellows +alexis +prohibition +carlisle +bizarre +fraternity +doubts +icy +aquatic +sneak +sonny +combines +airports +crude +supervised +spatial +merge +alfonso +##bic +corrupt +scan +undergo +##ams +disabilities +colombian +comparing +dolphins +perkins +reprinted +unanimous +bounced +hairs +underworld +midwest +semester +bucket +paperback +miniseries +coventry +demise +##leigh +demonstrations +sensor +rotating +yan +##hler +arrange +soils +##idge +hyderabad +labs +brakes +grandchildren +##nde +negotiated +rover +ferrari +continuation +directorate +augusta +stevenson +counterpart +gore +##rda +nursery +rican +ave +collectively +broadly +pastoral +repertoire +asserted +discovering +nordic +styled +fiba +cunningham +harley +middlesex +survives +tumor +tempo +zack +aiming +lok +urgent +##nto +devils +contractor +turin +##wl +bliss +repaired +simmons +moan +astronomical +negotiate +lyric +1890s +lara 
+bred +clad +angus +pbs +engineered +posed +hernandez +possessions +elbows +psychiatric +strokes +confluence +electorate +lifts +campuses +lava +alps +##ution +##date +physicist +woody +##ographic +##itis +juliet +reformation +sparhawk +complement +suppressed +jewel +##½ +floated +##kas +continuity +sadly +##ische +inability +melting +scanning +paula +flour +judaism +safer +vague +solving +curb +##stown +financially +gable +bees +expired +miserable +cassidy +dominion +1789 +cupped +robbery +facto +amos +warden +resume +tallest +marvin +pounded +declaring +gasoline +##aux +darkened +sophomore +##mere +erection +gossip +televised +risen +dial +##eu +pillars +passages +profound +arabian +ashton +silicon +nail +##lated +##hardt +fleming +firearms +ducked +circuits +blows +waterloo +titans +fireplace +cheshire +financed +activation +algorithms +constituent +catcher +cherokee +partnerships +sexuality +platoon +tragic +vivian +guarded +whiskey +meditation +poetic +##nga +porto +listeners +dominance +kendra +mona +chandler +factions +22nd +salisbury +attitudes +derivative +##ido +##haus +intake +paced +javier +illustrator +barrels +bias +cockpit +burnett +dreamed +ensuing +receptors +someday +hawkins +mattered +##lal +slavic +1799 +jesuit +cameroon +wasted +wax +lowering +victorious +freaking +outright +hancock +librarian +sensing +bald +calcium +myers +tablet +announcing +barack +shipyard +pharmaceutical +greenwich +flush +medley +patches +wolfgang +speeches +acquiring +exams +nikolai +hayden +kannada +reilly +waitress +abdomen +devastated +capped +pseudonym +pharmacy +fulfill +paraguay +1796 +clicked +##trom +archipelago +syndicated +##hman +lumber +orgasm +rejection +clifford +lorraine +advent +mafia +rodney +brock +##used +##elia +cassette +chamberlain +despair +mongolia +sensors +developmental +upstream +##alis +spanning +trombone +basque +seeded +interred +renewable +rhys +leapt +revision +molecule +##ages +chord +vicious +nord +shivered +23rd +arlington +debts +corpus +sunrise +bays +blackburn +centimetres +##uded +shuddered +strangely +gripping +cartoons +isabelle +orbital +##ppa +seals +proving +refusal +strengthened +bust +assisting +baghdad +batsman +portrayal +mara +pushes +spears +og +##cock +reside +nathaniel +brennan +1776 +confirmation +caucus +##worthy +markings +yemen +nobles +ku +lazy +viewer +catalan +encompasses +sawyer +##fall +sparked +substances +patents +braves +arranger +evacuation +sergio +persuade +dover +tolerance +penguin +cum +jockey +insufficient +townships +occupying +declining +plural +processed +projection +puppet +flanders +introduces +liability +##yon +gymnastics +antwerp +hobart +candles +jeep +wes +observers +chaplain +bundle +glorious +##hine +hazel +flung +sol +excavations +dumped +stares +bangalore +triangular +icelandic +intervals +expressing +turbine +##vers +songwriting +crafts +##igo +jasmine +ditch +rite +entertaining +comply +sorrow +wrestlers +basel +emirates +marian +rivera +helpful +##some +caution +downward +networking +##atory +##tered +darted +genocide +emergence +replies +specializing +spokesman +convenient +unlocked +fading +augustine +concentrations +resemblance +elijah +investigator +andhra +##uda +promotes +##rrell +fleeing +simone +announcer +lydia +weaver +residency +modification +##fest +stretches +alternatively +nat +lowe +lacks +##ented +pam +tile +concealed +inferior +abdullah +residences +tissues +vengeance +##ided +moisture +peculiar +groove +bologna +jennings +ninja +oversaw +zombies +pumping +batch +livingston +emerald 
+installations +1797 +peel +nitrogen +rama +##fying +schooling +strands +responding +werner +lime +casa +accurately +targeting +##rod +underway +##uru +hemisphere +lester +##yard +occupies +griffith +angrily +reorganized +##owing +courtney +deposited +estadio +##ifies +dunn +exiled +##ying +checks +##combe +successes +unexpectedly +blu +assessed +##flower +observing +sacked +spiders +kn +nodes +prosperity +audrey +divisional +broncos +tangled +adjust +feeds +erosion +paolo +surf +directory +snatched +humid +admiralty +screwed +reddish +##nese +modules +trench +lamps +bind +leah +bucks +competes +##nz +transcription +isles +violently +clutching +pga +cyclist +inflation +flats +ragged +unnecessary +##hian +stubborn +coordinated +harriet +baba +disqualified +insect +wolfe +##fies +reinforcements +rocked +duel +winked +embraced +bricks +##raj +hiatus +defeats +pending +brightly +jealousy +##xton +##uki +lena +colorful +##dley +stein +kidney +##shu +underwear +wanderers +##haw +##icus +guardians +m³ +roared +habits +##wise +permits +uranium +punished +disguise +bundesliga +elise +dundee +erotic +partisan +collectors +float +individually +rendering +behavioral +bucharest +ser +hare +valerie +corporal +nutrition +proportional +immense +##kis +pavement +##zie +##eld +sutherland +crouched +1775 +suzuki +trades +endurance +operas +crosby +prayed +priory +rory +socially +gujarat +walton +cube +pasha +privilege +lennon +floods +thorne +waterfall +nipple +scouting +approve +##lov +minorities +voter +dwight +extensions +assure +ballroom +slap +dripping +privileges +rejoined +confessed +demonstrating +patriotic +yell +investor +##uth +pagan +slumped +squares +confront +bert +embarrassment +aston +urging +sweater +starr +yuri +brains +williamson +commuter +mortar +structured +selfish +exports +##jon +cds +##him +unfinished +##rre +mortgage +destinations +##nagar +canoe +solitary +buchanan +delays +magistrate +fk +##pling +motivation +##lier +##vier +recruiting +assess +##mouth +malik +antique +1791 +pius +rahman +reich +tub +zhou +smashed +airs +galway +xii +conditioning +honduras +discharged +dexter +##pf +lionel +debates +lemon +volunteered +dioxide +procession +devi +sic +tremendous +advertisements +colts +transferring +verdict +hanover +decommissioned +utter +relate +pac +racism +beacon +limp +similarity +terra +occurrence +ant +becky +capt +updates +armament +richie +pal +##graph +halloween +mayo +##ssen +##bone +cara +serena +fcc +dolls +obligations +##dling +violated +lafayette +jakarta +exploitation +infamous +iconic +##lah +##park +moody +reginald +dread +spill +crystals +olivier +modeled +bluff +equilibrium +separating +notices +ordnance +extinction +onset +cosmic +attachment +sammy +expose +privy +anchored +##bil +abbott +admits +bending +baritone +emmanuel +policeman +vaughan +winged +climax +dresses +denny +polytechnic +mohamed +burmese +authentic +nikki +genetics +grandparents +homestead +gaza +postponed +metacritic +una +##sby +unstable +dissertation +##cian +curls +obscure +uncovered +bronx +praying +disappearing +##hoe +prehistoric +coke +turret +mutations +nonprofit +pits +monaco +##usion +prominently +dispatched +podium +##mir +uci +##uation +fortifications +birthplace +kendall +##lby +##oll +preacher +rack +goodman +persistent +##ott +countless +jaime +recorder +lexington +persecution +jumps +renewal +wagons +crushing +##holder +decorations +##lake +abundance +wrath +laundry +£1 +garde +jeanne +beetles +peasant +splitting +caste +sergei +##rer +##ema +scripts +##ively +rub +satellites 
+[... diff continues: several thousand additional entries of a BERT-style WordPiece vocabulary file (one token per line; a leading "##" marks a subword continuation, e.g. "##vor", "##culture"), flattened in extraction and elided here ...]
+infirmary +nearing +roberta +boyer +mutter +trillion +joanne +##fine +##oked +sinks +vortex +uruguayan +clasp +sirius +##block +accelerator +prohibit +sunken +byu +chronological +diplomats +ochreous +symmetrical +1644 +maia +##tology +salts +reigns +atrocities +##ия +hess +bared +issn +##vyn +cater +saturated +##cycle +##isse +sable +voyager +dyer +yusuf +##inge +fountains +wolff +##nni +engraving +rollins +atheist +ominous +##ault +herr +chariot +martina +strung +##fell +##farlane +horrific +sahib +gazes +saetan +erased +ptolemy +##olic +flushing +lauderdale +analytic +##ices +navarro +beak +gorilla +herrera +broom +guadalupe +raiding +sykes +bsc +deliveries +1720 +invasions +carmichael +tajikistan +thematic +ecumenical +sentiments +onstage +##rians +##brand +##sume +catastrophic +flanks +molten +##arns +waller +aimee +terminating +##icing +alternately +##oche +nehru +printers +outraged +##eving +empires +template +banners +repetitive +za +##oise +vegetarian +##tell +guiana +opt +cavendish +lucknow +synthesized +##hani +##mada +finalized +##ctable +fictitious +mayoral +unreliable +##enham +embracing +peppers +rbis +##chio +##neo +inhibition +slashed +togo +orderly +embroidered +salty +barron +benito +totaled +##dak +pubs +simulated +caden +devin +tolkien +momma +welding +sesame +##ept +gottingen +hardness +shaman +temeraire +adequately +pediatric +assertion +radicals +composure +cadence +seafood +beaufort +lazarus +mani +warily +cunning +kurdistan +cantata +##kir +ares +##clusive +nape +townland +geared +insulted +flutter +boating +violate +draper +dumping +malmo +##hh +##romatic +firearm +alta +bono +obscured +##clave +exceeds +panorama +unbelievable +##train +preschool +##essed +disconnected +installing +rescuing +secretaries +accessibility +##castle +##ifice +##film +bouts +slug +waterway +mindanao +##buro +##ratic +halves +calming +liter +maternity +adorable +bragg +electrification +mcc +##dote +roxy +schizophrenia +munoz +kaye +whaling +mil +tingling +tolerant +##ago +unconventional +volcanoes +##finder +deportivo +##llie +robson +kaufman +neuroscience +wai +deportation +masovian +scraping +converse +##bh +hacking +bulge +##oun +administratively +yao +mammoth +booster +claremont +hooper +nomenclature +pursuits +mclaughlin +melinda +##sul +catfish +barclay +substrates +taxa +zee +kimberly +packets +padma +##ality +borrowing +ostensibly +solvent +##bri +##genesis +##mist +lukas +shreveport +veracruz +##lou +##wives +cheney +anatolia +hobbs +##zyn +cyclic +radiant +alistair +greenish +siena +dat +independents +##bation +conform +pieter +hyper +applicant +bradshaw +spores +telangana +vinci +inexpensive +nuclei +jang +nme +spd +cradled +receptionist +pow +##rika +fascism +##ifer +experimenting +##ading +##iec +##region +jocelyn +maris +stair +nocturnal +toro +constabulary +elgin +##kker +msc +##giving +##schen +##rase +doherty +doping +sarcastically +batter +maneuvers +##cano +##apple +##gai +##git +intrinsic +##nst +##stor +1753 +showtime +cafes +gasps +lviv +ushered +##thed +fours +restart +astonishment +transmitting +flyer +shrugs +##sau +intriguing +cones +dictated +mushrooms +medial +##kovsky +##elman +escorting +gaped +godfather +##door +##sell +djs +recaptured +timetable +vila +1710 +aerodrome +mortals +scientology +##orne +angelina +mag +convection +unpaid +insertion +intermittent +lego +##nated +endeavor +kota +pereira +##lz +bwv +glamorgan +insults +agatha +fey +##cend +fleetwood +mahogany +protruding +steamship +zeta +##arty +mcguire +suspense +##sphere +advising +urges 
+##wala +hurriedly +meteor +gilded +inline +arroyo +stalker +##oge +excitedly +revered +##cure +earle +introductory +##break +##ilde +mutants +puff +pulses +reinforcement +##haling +curses +lizards +stalk +correlated +##fixed +fallout +macquarie +##unas +bearded +denton +heaving +##ocation +winery +assign +dortmund +##lkirk +everest +invariant +charismatic +susie +##elling +bled +lesley +telegram +sumner +bk +##ogen +wilcox +needy +colbert +duval +##iferous +##mbled +allotted +attends +imperative +##hita +replacements +hawker +##inda +insurgency +##zee +##eke +casts +##yla +ives +transitioned +##pack +##powering +authoritative +baylor +flex +cringed +plaintiffs +woodrow +##skie +drastic +ape +aroma +unfolded +commotion +preoccupied +theta +routines +lasers +privatization +wand +domino +ek +clenching +nsa +strategically +showered +bile +handkerchief +pere +storing +christophe +insulting +nakamura +romani +asiatic +magdalena +palma +cruises +stripping +konstantin +soaring +##berman +colloquially +forerunner +havilland +incarcerated +parasites +sincerity +##utus +disks +plank +saigon +##ining +corbin +homo +ornaments +powerhouse +##tlement +chong +fastened +feasibility +idf +morphological +usable +##nish +##zuki +aqueduct +jaguars +keepers +##flies +aleksandr +faust +assigns +ewing +bacterium +hurled +tricky +hungarians +integers +wallis +yamaha +##isha +hushed +oblivion +aviator +evangelist +friars +##eller +monograph +ode +##nary +airplanes +labourers +charms +##nee +1661 +hagen +tnt +rudder +fiesta +transcript +dorothea +ska +inhibitor +maccabi +retorted +raining +encompassed +clauses +menacing +1642 +lineman +##gist +vamps +##dick +gloom +##rera +dealings +easing +seekers +##nut +##pment +helens +unmanned +##anu +##isson +basics +##amy +##ckman +adjustments +1688 +brutality +horne +##zell +##mable +aggregator +##thal +rhino +##drick +##vira +counters +##rting +mn +montenegrin +packard +##unciation +##♭ +##kki +reclaim +scholastic +thugs +pulsed +##icia +syriac +quan +saddam +banda +kobe +blaming +buddies +dissent +##lusion +##usia +corbett +jaya +delle +erratic +lexie +##hesis +amiga +hermes +##pressing +##leen +chapels +gospels +jamal +##uating +compute +revolving +warp +##sso +##thes +armory +##eras +##gol +antrim +loki +##kow +##asian +##good +##zano +braid +handwriting +subdistrict +funky +pantheon +##iculate +concurrency +estimation +improper +juliana +##his +newcomers +johnstone +staten +communicated +##oco +##alle +sausage +stormy +##stered +##tters +superfamily +##grade +acidic +collateral +tabloid +##oped +##rza +bladder +austen +##ellant +mcgraw +##hay +hannibal +mein +aquino +lucifer +wo +badger +boar +cher +christensen +greenberg +interruption +##kken +jem +mocked +bottoms +cambridgeshire +##lide +sprawling +##bbly +eastwood +ghent +synth +##buck +advisers +##bah +nominally +hapoel +qu +daggers +estranged +fabricated +towels +vinnie +wcw +misunderstanding +anglia +nothin +unmistakable +##dust +##lova +chilly +marquette +truss +##edge +##erine +reece +##lty +##chemist +##connected +41st +bash +raion +waterfalls +##ump +##main +labyrinth +queue +theorist +##istle +bharatiya +flexed +soundtracks +rooney +leftist +patrolling +wharton +plainly +alleviate +eastman +schuster +topographic +engages +immensely +unbearable +fairchild +1620 +dona +lurking +parisian +oliveira +ia +indictment +hahn +bangladeshi +##aster +##uming +##ential +antonia +expects +indoors +kildare +harlan +##logue +##ogenic +##sities +forgiven +##wat +childish +tavi +##mide +##orra +plausible +grimm +successively 
+scooted +##bola +##rith +spartans +emery +flatly +epilogue +##wark +flourish +##iny +##tracted +##overs +##oshi +bestseller +distressed +receipt +spitting +hermit +topological +##cot +drilled +subunit +francs +##layer +eel +##fk +##itas +octopus +footprint +petitions +##say +##foil +interfering +leaking +palo +##metry +thistle +valiant +##pic +narayan +mcpherson +##fast +gonzales +##enne +dustin +novgorod +solos +##zman +doin +##patient +##meyer +soluble +ashland +cuffs +carole +pendleton +whistling +vassal +##river +deviation +revisited +constituents +rallied +rotate +loomed +##eil +##nting +amateurs +augsburg +auschwitz +crowns +skeletons +##cona +bonnet +dummy +globalization +simeon +sleeper +mandal +differentiated +##crow +##mare +milne +bundled +exasperated +talmud +owes +segregated +##feng +##uary +dentist +piracy +props +##rang +devlin +##torium +malicious +paws +##laid +dependency +##ergy +##fers +##enna +pistons +rourke +jed +grammatical +tres +maha +wig +ghostly +jayne +##achal +##creen +##ilis +##lins +designate +##with +arrogance +cambodian +clones +showdown +throttle +twain +##ception +lobes +metz +nagoya +braking +##furt +roaming +##minster +amin +crippled +##llary +indifferent +hoffmann +idols +intimidating +1751 +influenza +memo +onions +1748 +bandage +consciously +##landa +##rage +clandestine +observes +swiped +tangle +##ener +##jected +##trum +##bill +##lta +hugs +congresses +josiah +spirited +##dek +humanist +managerial +filmmaking +inmate +rhymes +debuting +grimsby +ur +##laze +duplicate +vigor +republished +bolshevik +refurbishment +antibiotics +martini +methane +newscasts +royale +horizons +levant +iain +visas +##ischen +paler +##around +manifestation +snuck +alf +chop +futile +pedestal +rehab +##kat +bmg +kerman +res +fairbanks +jarrett +abstraction +saharan +##zek +1746 +procedural +clearer +kincaid +sash +luciano +##ffey +crunch +helmut +##vara +revolutionaries +##tute +creamy +leach +##mmon +1747 +permitting +nes +plight +wendell +##lese +contra +clancy +ipa +mach +staples +autopsy +disturbances +nueva +karin +pontiac +##uding +proxy +venerable +haunt +leto +bergman +expands +##helm +wal +##pipe +canning +celine +cords +obesity +##enary +intrusion +planner +##phate +reasoned +sequencing +harrow +##chon +##dora +marred +mcintyre +repay +tarzan +darting +harrisburg +margarita +repulsed +##lding +belinda +hamburger +novo +compliant +runways +bingham +registrar +skyscraper +cuthbert +improvisation +livelihood +##corp +##elial +admiring +##dened +sporadic +believer +casablanca +popcorn +asha +shovel +##bek +##dice +coiled +tangible +##dez +casper +elsie +resin +tenderness +rectory +##ivision +avail +sonar +##mori +boutique +##dier +guerre +bathed +upbringing +vaulted +sandals +blessings +##naut +##utnant +1680 +foxes +pia +corrosion +hesitantly +confederates +crystalline +footprints +shapiro +tirana +valentin +drones +45th +microscope +shipments +texted +inquisition +wry +guernsey +unauthorized +resigning +ripple +schubert +stu +reassure +felony +##ardo +brittle +koreans +##havan +##ives +dun +implicit +tyres +##aldi +##lth +magnolia +##ehan +##puri +##poulos +aggressively +fei +gr +familiarity +##poo +indicative +##trust +fundamentally +jimmie +overrun +anchors +moans +##opus +britannia +armagh +purposely +seizing +##vao +bewildered +mundane +avoidance +cosmopolitan +geometridae +quartermaster +caf +chatter +engulfed +gleam +purge +##icate +juliette +jurisprudence +guerra +revisions +##bn +casimir +brew +##jm +1749 +clapton +cloudy +conde +hermitage +simulations 
+torches +vincenzo +matteo +##rill +hidalgo +booming +westbound +accomplishment +tentacles +unaffected +##sius +annabelle +flopped +sloping +##litz +dreamer +interceptor +vu +##loh +consecration +copying +messaging +breaker +climates +hospitalized +1752 +torino +afternoons +winfield +witnessing +##teacher +breakers +choirs +sawmill +coldly +##ege +sipping +haste +uninhabited +conical +bibliography +pamphlets +severn +edict +##oca +deux +illnesses +grips +rehearsals +sis +thinkers +tame +##keepers +1690 +acacia +reformer +##osed +##rys +shuffling +##iring +##shima +eastbound +ionic +rhea +flees +littered +##oum +rocker +vomiting +groaning +champ +overwhelmingly +civilizations +paces +sloop +adoptive +##tish +skaters +##vres +aiding +nikola +shriek +##ignon +pharmaceuticals +tuna +calvert +gustavo +stocked +yearbook +##urai +##mana +computed +subsp +riff +hanoi +kelvin +hamid +moors +pastures +summons +jihad +nectar +##ctors +bayou +untitled +pleasing +vastly +republics +intellect +##ulio +##tou +crumbling +stylistic +##ی +consolation +frequented +h₂o +walden +widows +##iens +##ignment +chunks +improves +grit +recited +##dev +snarl +sociological +##arte +##gul +inquired +##held +bruise +clube +consultancy +homogeneous +hornets +multiplication +pasta +prick +savior +##grin +##kou +##phile +yoon +##gara +grimes +vanishing +cheering +reacting +bn +distillery +##quisite +##vity +coe +dockyard +massif +##jord +escorts +voss +##valent +byte +chopped +hawke +illusions +workings +floats +##koto +##vac +kv +annapolis +madden +##onus +alvaro +noctuidae +##cum +##scopic +avenge +steamboat +forte +illustrates +erika +##trip +dew +nationalities +bran +manifested +thirsty +diversified +muscled +reborn +##standing +arson +##lessness +##dran +##logram +##boys +##kushima +##vious +willoughby +##phobia +alsace +dashboard +yuki +##chai +granville +myspace +publicized +tricked +##gang +adjective +##ater +relic +reorganisation +enthusiastically +indications +saxe +##lassified +consolidate +iec +padua +helplessly +ramps +renaming +regulars +pedestrians +accents +convicts +inaccurate +lowers +mana +##pati +barrie +bjp +outta +someplace +berwick +flanking +invoked +marrow +sparsely +excerpts +clothed +rei +##ginal +wept +##straße +##vish +##ptive +membranes +aquitaine +creeks +cutler +sheppard +implementations +##dur +fragrance +budge +concordia +magnesium +marcelo +##antes +gladly +vibrating +##rral +##ggles +montrose +##omba +lew +seamus +1630 +cocky +##ament +##uen +bjorn +##rrick +fielder +fluttering +##lase +methyl +kimberley +mcdowell +reductions +barbed +##jic +##tonic +aeronautical +condensed +distracting +##promising +huffed +##cala +##sle +claudius +invincible +missy +pious +balthazar +##lang +butte +combo +orson +##dication +myriad +1707 +silenced +##fed +##rh +netball +yourselves +##oza +clarify +heller +peg +durban +etudes +offender +roast +blackmail +curvature +##woods +vile +illicit +suriname +##linson +overture +1685 +bubbling +gymnast +tucking +##mming +##ouin +maldives +##bala +gurney +##dda +##eased +##oides +backside +pinto +jars +racehorse +tending +##rdial +baronetcy +wiener +duly +##rke +barbarian +cupping +flawed +##thesis +bertha +pleistocene +puddle +swearing +##nob +##tically +fleeting +prostate +amulet +educating +##mined +##tler +75th +jens +respondents +cavaliers +papacy +raju +##iente +##ulum +##tip +funnel +disneyland +##lley +sociologist +##iam +faulkner +louvre +menon +##dson +##ower +afterlife +mannheim +peptide +referees +comedians +meaningless +##anger +##laise +fabrics +hurley 
+renal +sleeps +##bour +##icle +breakout +kristin +roadside +animator +clover +disdain +unsafe +redesign +##urity +firth +barnsley +portage +reset +narrows +commandos +expansive +speechless +tubular +essendon +eyelashes +smashwords +##yad +##bang +##claim +craved +sprinted +chet +somme +astor +wrocław +orton +bane +##erving +##uing +mischief +##amps +##sund +scaling +terre +##xious +impairment +offenses +undermine +moi +soy +contiguous +arcadia +inuit +seam +##tops +macbeth +rebelled +##icative +##iot +elaborated +frs +uniformed +##dberg +powerless +priscilla +stimulated +qc +arboretum +frustrating +trieste +bullock +##nified +enriched +glistening +intern +##adia +locus +nouvelle +ollie +ike +lash +starboard +tapestry +headlined +hove +rigged +##vite +pollock +##yme +thrive +clustered +cas +roi +gleamed +olympiad +##lino +pressured +regimes +##hosis +##lick +ripley +##ophone +kickoff +gallon +rockwell +##arable +crusader +glue +revolutions +scrambling +1714 +grover +##jure +englishman +aztec +contemplating +coven +preach +triumphant +tufts +##esian +rotational +##phus +falkland +##brates +strewn +clarissa +rejoin +environmentally +glint +banded +drenched +moat +albanians +johor +rr +maestro +malley +nouveau +shaded +taxonomy +adhere +bunk +airfields +##ritan +1741 +encompass +remington +tran +##erative +amelie +mazda +friar +morals +passions +##zai +breadth +vis +##hae +argus +burnham +caressing +insider +rudd +##imov +##rso +italianate +murderous +textual +wainwright +armada +bam +weave +timer +##taken +##nh +fra +##crest +ardent +salazar +taps +tunis +##ntino +allegro +gland +philanthropic +##chester +implication +##optera +esq +judas +noticeably +wynn +##dara +inched +indexed +crises +villiers +bandit +royalties +patterned +cupboard +interspersed +accessory +isla +kendrick +entourage +stitches +##esthesia +headwaters +##ior +interlude +distraught +draught +1727 +##basket +biased +sy +transient +triad +subgenus +adapting +kidd +shortstop +##umatic +dimly +spiked +mcleod +reprint +nellie +pretoria +windmill +##cek +singled +##mps +reunite +##orous +bankers +outlying +##omp +##ports +##tream +apologies +cosmetics +patsy +##deh +##ocks +##yson +bender +nantes +serene +##nad +lucha +mmm +##cius +##gli +cmll +coinage +nestor +juarez +##rook +smeared +sprayed +twitching +sterile +irina +embodied +juveniles +enveloped +miscellaneous +cancers +dq +gulped +luisa +crested +swat +donegal +ref +##anov +##acker +hearst +mercantile +##lika +doorbell +vicki +##alla +##som +bilbao +psychologists +stryker +sw +horsemen +turkmenistan +wits +##national +anson +mathew +screenings +##umb +rihanna +##agne +##nessy +aisles +##iani +##osphere +hines +kenton +saskatoon +tasha +truncated +##champ +##itan +mildred +advises +fredrik +interpreting +inhibitors +##athi +spectroscopy +##hab +##kong +karim +panda +##oia +##nail +conqueror +kgb +leukemia +##dity +arrivals +cheered +pisa +phosphorus +shielded +##riated +mammal +unitarian +urgently +chopin +sanitary +##mission +spicy +drugged +hinges +##tort +tipping +trier +impoverished +westchester +##caster +epoch +nonstop +##gman +##khov +aromatic +centrally +cerro +##tively +##vio +billions +modulation +sedimentary +facilitating +outrageous +goldstein +##eak +##kt +ld +maitland +penultimate +pollard +##dance +fleets +spaceship +vertebrae +##nig +alcoholism +als +recital +##bham +##omics +##bm +trois +##tropical +commemorates +##meric +marge +##raction +1643 +cosmetic +ravaged +##ige +catastrophe +eng +##shida +albrecht +arterial +bellamy +decor +harmon +##rde +bulbs 
+synchronized +vito +easiest +shetland +shielding +wnba +##glers +##ssar +##riam +brianna +cumbria +##aceous +##rard +cores +thayer +##nsk +brood +hilltop +luminous +carts +keynote +larkin +logos +##cta +##mund +##quay +lilith +tinted +wrestle +mobilization +##uses +sequential +siam +bloomfield +takahashi +##ieving +presenters +ringo +blazed +witty +##oven +##ignant +devastation +haydn +harmed +newt +therese +##peed +gershwin +molina +rabbis +sudanese +innate +restarted +##sack +##fus +slices +wb +##shah +enroll +hypothetical +hysterical +1743 +fabio +indefinite +warped +exchanging +unsuitable +##sboro +gallo +1603 +bret +cobalt +homemade +##hunter +operatives +##dhar +terraces +durable +latch +pens +whorls +##ctuated +##eaux +billing +ligament +succumbed +##gly +regulators +spawn +##brick +##stead +filmfare +rochelle +##nzo +1725 +circumstance +saber +supplements +##nsky +##tson +crowe +wellesley +carrot +##9th +##movable +primate +drury +sincerely +topical +##mad +##rao +callahan +kyiv +smarter +tits +undo +##yeh +announcements +anthologies +barrio +nebula +##islaus +##shaft +##tyn +bodyguards +assassinate +barns +emmett +scully +##yd +##eland +##tino +##itarian +demoted +gorman +lashed +prized +adventist +writ +##gui +alla +invertebrates +##ausen +1641 +amman +1742 +align +healy +redistribution +##gf +##rize +insulation +##drop +adherents +hezbollah +vitro +ferns +yanking +registering +uppsala +cheerleading +confines +mischievous +tully +##ross +49th +docked +roam +stipulated +pumpkin +##bry +prompt +##ezer +blindly +shuddering +craftsmen +frail +scented +katharine +scramble +shaggy +sponge +helix +zaragoza +43rd +backlash +fontaine +seizures +posse +cowan +nonfiction +telenovela +wwii +hammered +undone +##gpur +encircled +irs +##ivation +artefacts +oneself +searing +smallpox +##belle +##osaurus +shandong +breached +upland +blushing +rankin +infinitely +psyche +tolerated +docking +evicted +##col +unmarked +##lving +gnome +lettering +litres +musique +##oint +benevolent +##jal +blackened +##anna +mccall +racers +tingle +##ocene +##orestation +introductions +radically +##hiff +##باد +1610 +1739 +munchen +plead +##nka +condo +scissors +##sight +##tens +apprehension +##cey +##yin +hallmark +watering +formulas +sequels +##llas +aggravated +bae +commencing +##building +enfield +prohibits +marne +vedic +civilized +euclidean +jagger +beforehand +blasts +dumont +##arney +##nem +conversions +hierarchical +rios +simulator +##dya +##lellan +hedges +oleg +thrusts +shadowed +darby +maximize +1744 +gregorian +##nded +##routed +sham +unspecified +##hog +emory +factual +##smo +fooled +##rger +ortega +wellness +marlon +##oton +##urance +casket +keating +ley +enclave +##ayan +char +influencing +jia +##chenko +ammonia +erebidae +incompatible +violins +cornered +##arat +grooves +astronauts +columbian +rampant +fabrication +kyushu +mahmud +vanish +##dern +mesopotamia +##lete +##rgen +caspian +kenji +pitted +##vered +grimace +roanoke +tchaikovsky +twinned +##analysis +##awan +xinjiang +arias +clemson +kazakh +sizable +1662 +##khand +##vard +plunge +tatum +vittorio +##nden +cholera +##dana +bracing +indifference +projectile +superliga +##chee +realises +upgrading +porte +retribution +##vies +nk +stil +##resses +ama +bureaucracy +blackberry +bosch +testosterone +collapses +greer +##pathic +ioc +fifties +malls +##erved +bao +baskets +adolescents +siegfried +##osity +##tosis +mantra +detecting +existent +fledgling +##cchi +dissatisfied +gan +telecommunication +mingled +sobbed +controversies +outdated +taxis 
+##raus +fright +slams +##lham +##fect +##tten +detectors +fetal +tanned +##uw +fray +goth +olympian +skipping +mandates +scratches +sheng +unspoken +hyundai +tracey +hotspur +restrictive +##buch +americana +mundo +##bari +burroughs +diva +vulcan +##6th +distinctions +thumping +##ngen +mikey +sheds +fide +rescues +springsteen +vested +valuation +##ece +##ely +pinnacle +rake +sylvie +##edo +almond +quivering +##irus +alteration +faltered +##wad +51st +hydra +ticked +##kato +recommends +##dicated +antigua +arjun +stagecoach +wilfred +trickle +pronouns +##pon +aryan +nighttime +##anian +gall +pea +stitch +##hei +leung +milos +##dini +eritrea +starved +snowfall +kant +parasitic +cot +discus +hana +strikers +appleton +kitchens +##erina +##partisan +##itha +##vius +disclose +metis +##channel +1701 +##vera +fitch +1735 +blooded +##tila +decimal +##tang +##bai +cyclones +eun +bottled +peas +pensacola +basha +bolivian +crabs +boil +lanterns +partridge +roofed +1645 +necks +##phila +opined +patting +##kla +##lland +chuckles +volta +whereupon +##nche +devout +euroleague +suicidal +##dee +inherently +involuntary +knitting +nasser +##hide +puppets +colourful +courageous +southend +stills +miraculous +hodgson +richer +rochdale +ethernet +greta +uniting +prism +umm +##haya +##itical +##utation +deterioration +pointe +prowess +##ropriation +lids +scranton +billings +subcontinent +##koff +##scope +brute +kellogg +psalms +degraded +##vez +stanisław +##ructured +ferreira +pun +astonishing +gunnar +##yat +arya +prc +gottfried +##tight +excursion +##ographer +dina +##quil +##nare +huffington +illustrious +wilbur +verandah +##zard +naacp +##odle +constructive +fjord +kade +##naud +generosity +thrilling +baseline +cayman +frankish +plastics +accommodations +zoological +##fting +cedric +qb +motorized +##dome +##otted +squealed +tackled +canucks +budgets +situ +asthma +dail +gabled +grasslands +whimpered +writhing +judgments +minnie +##carbon +bananas +grille +domes +monique +odin +maguire +markham +tierney +##estra +##chua +libel +poke +speedy +atrium +laval +notwithstanding +##edly +fai +kala +##sur +robb +##sma +listings +luz +supplementary +tianjin +##acing +enzo +jd +ric +scanner +croats +transcribed +arden +##hair +##raphy +##lver +seventies +staggering +alam +horticultural +hs +regression +timbers +blasting +##ounded +montagu +manipulating +##cit +catalytic +1550 +troopers +##meo +condemnation +fitzpatrick +##oire +##roved +inexperienced +1670 +castes +##lative +outing +dubois +flicking +quarrel +ste +learners +1625 +whistled +##class +classify +tariffs +temperament +folly +liszt +##yles +immersed +jordanian +ceasefire +apparel +extras +maru +fished +##bio +harta +stockport +assortment +craftsman +paralysis +transmitters +##cola +blindness +##wk +fatally +proficiency +solemnly +##orno +repairing +amore +groceries +ultraviolet +##chase +schoolhouse +##tua +resurgence +nailed +##otype +ruse +saliva +diagrams +##tructing +albans +rann +thirties +antennas +hilarious +cougars +paddington +stats +##eger +breakaway +reza +authorship +prohibiting +scoffed +##etz +##ttle +conscription +defected +trondheim +##fires +ivanov +keenan +##adan +##ciful +##fb +##slow +locating +##ials +##tford +cadiz +basalt +blankly +interned +rags +rattling +##tick +carpathian +reassured +bum +guildford +iss +staunch +##onga +astronomers +sera +sofie +emergencies +susquehanna +##heard +duc +mastery +vh1 +williamsburg +bayer +buckled +craving +##khan +##rdes +bloomington +##write +alton +barbecue +##bians +justine +##hri +##ndt 
+delightful +smartphone +newtown +photon +retrieval +peugeot +hissing +##monium +##orough +flavors +lighted +relaunched +tainted +##games +##lysis +anarchy +microscopic +hopping +adept +evade +evie +##beau +inhibit +sinn +adjustable +hurst +intuition +wilton +44th +lawful +lowlands +stockings +thierry +##dalen +##hila +##nai +fates +prank +maison +lobbied +provocative +1724 +utopia +##qual +carbonate +gujarati +purcell +##rford +curtiss +##mei +overgrown +arenas +mediation +swallows +##rnik +respectful +turnbull +##hedron +##hope +alyssa +ozone +##ʻi +ami +gestapo +johansson +snooker +canteen +cuff +declines +empathy +stigma +##ags +##raine +taxpayers +volga +##wright +##copic +lifespan +overcame +tattooed +enactment +giggles +##ador +##camp +barrington +bribe +obligatory +orbiting +peng +##enas +elusive +sucker +##vating +cong +hardship +empowered +anticipating +estrada +cryptic +greasy +detainees +planck +sudbury +plaid +dod +kayla +##ears +##vb +##zd +mortally +##hein +cognition +radha +liechtenstein +meade +richly +argyle +harpsichord +liberalism +trumpets +lauded +tyrant +salsa +tiled +lear +promoters +reused +slicing +trident +##chuk +##gami +##lka +cantor +checkpoint +##points +gaul +leger +mammalian +##tov +##aar +##schaft +doha +frenchman +nirvana +##vino +delgado +headlining +##eron +##iography +jug +tko +1649 +naga +intersections +benfica +nawab +##suka +ashford +gulp +##deck +##vill +##rug +brentford +frazier +pleasures +dunne +potsdam +shenzhen +dentistry +##tec +flanagan +##dorff +##hear +chorale +dinah +prem +quezon +##rogated +relinquished +sutra +terri +##pani +flaps +##rissa +poly +##rnet +homme +aback +##eki +linger +womb +##kson +##lewood +doorstep +orthodoxy +threaded +westfield +##rval +dioceses +fridays +subsided +##gata +loyalists +##biotic +##ettes +letterman +lunatic +prelate +tenderly +invariably +souza +thug +winslow +##otide +furlongs +gogh +jeopardy +##runa +pegasus +##umble +humiliated +standalone +tagged +##roller +freshmen +klan +##bright +attaining +initiating +transatlantic +logged +viz +##uance +1723 +combatants +intervening +stephane +chieftain +despised +grazed +cdc +galveston +godzilla +macro +simulate +##planes +parades +##esses +##ductive +##unes +equator +overdose +##cans +##hosh +##lifting +joshi +epstein +sonora +treacherous +aquatics +manchu +responsive +##sation +supervisory +##christ +##llins +##ibar +##balance +##uso +kimball +karlsruhe +mab +##emy +ignores +phonetic +spaghetti +almighty +danzig +rumbling +tombstone +designations +lured +outset +##felt +supermarkets +grupo +kei +kraft +susanna +##blood +comprehension +genealogy +##aghan +##verted +redding +##ythe +1722 +bowing +##pore +##roi +lest +sharpened +fulbright +valkyrie +sikhs +##unds +swans +bouquet +merritt +##tage +##venting +commuted +redhead +clerks +leasing +cesare +dea +hazy +##vances +fledged +greenfield +servicemen +##gical +armando +blackout +sagged +downloadable +intra +potion +pods +##4th +##mism +attendants +gambia +stale +##ntine +plump +asteroids +rediscovered +buds +flea +hive +##neas +1737 +classifications +debuts +##eles +olympus +scala +##eurs +##gno +##mute +hummed +sigismund +visuals +wiggled +await +pilasters +clench +sulfate +##ances +bellevue +enigma +trainee +snort +##sw +clouded +denim +##rank +churning +hartman +lodges +riches +sima +##missible +accountable +socrates +regulates +mueller +1702 +avoids +solids +himalayas +nutrient +pup +##jevic +squat +fades +nec +##lates +##pina +##rona +##ου +privateer +tequila +##gative +##mpton +hornet +immortals +##dou 
+asturias +cleansing +dario +##rries +##anta +etymology +servicing +zhejiang +##venor +##nx +horned +erasmus +rayon +relocating +£10 +##bags +escalated +promenade +stubble +2010s +artisans +axial +liquids +mora +sho +yoo +##tsky +bundles +oldies +##nally +notification +bastion +##ths +sparkle +##lved +1728 +leash +pathogen +highs +##hmi +immature +gonzaga +ignatius +mansions +monterrey +sweets +bryson +##loe +polled +regatta +brightest +pei +rosy +squid +hatfield +payroll +addict +meath +cornerback +heaviest +lodging +##mage +capcom +rippled +##sily +barnet +mayhem +ymca +snuggled +rousseau +##cute +blanchard +fragmented +leighton +chromosomes +risking +##strel +##utter +corinne +coyotes +cynical +hiroshi +yeomanry +##ractive +ebook +grading +mandela +plume +agustin +magdalene +##rkin +bea +femme +trafford +##coll +##lun +##tance +52nd +fourier +upton +##mental +camilla +gust +iihf +islamabad +longevity +##kala +feldman +netting +##rization +endeavour +foraging +mfa +orr +##open +greyish +contradiction +graz +##ruff +handicapped +marlene +tweed +oaxaca +spp +campos +miocene +pri +configured +cooks +pluto +cozy +pornographic +##entes +70th +fairness +glided +jonny +lynne +rounding +sired +##emon +##nist +remade +uncover +##mack +complied +lei +newsweek +##jured +##parts +##enting +##pg +finer +guerrillas +athenian +deng +disused +stepmother +accuse +gingerly +seduction +confronting +##going +gora +nostalgia +sabres +virginity +wrenched +##minated +syndication +wielding +eyre +##gnon +##igny +behaved +taxpayer +sweeps +##growth +childless +gallant +##ywood +amplified +geraldine +scrape +##ffi +babylonian +fresco +##rdan +##kney +##position +1718 +restricting +tack +fukuoka +osborn +selector +partnering +##dlow +kia +tak +whitley +gables +##mania +mri +softness +immersion +##bots +##evsky +1713 +chilling +insignificant +pcs +##uis +elites +lina +purported +supplemental +teaming +##americana +##dding +##inton +proficient +rouen +##nage +##rret +niccolo +selects +##bread +fluffy +1621 +gruff +knotted +mukherjee +polgara +thrash +nicholls +secluded +smoothing +thru +corsica +loaf +whitaker +inquiries +##rrier +##kam +indochina +marlins +myles +peking +##tea +extracts +pastry +superhuman +connacht +vogel +##ditional +##het +##udged +##lash +gloss +quarries +refit +teaser +##alic +##gaon +20s +materialized +sling +camped +pickering +tung +tracker +pursuant +##cide +cranes +##cini +##typical +##viere +anhalt +overboard +workout +chores +fares +orphaned +stains +##logie +fenton +surpassing +joyah +triggers +##itte +grandmaster +##lass +##lists +clapping +fraudulent +ledger +nagasaki +##cor +##nosis +##tsa +eucalyptus +tun +##icio +##rney +##tara +dax +heroism +ina +wrexham +onboard +unsigned +##dates +moshe +galley +winnie +droplets +exiles +praises +watered +noodles +##aia +fein +leland +multicultural +stink +bingo +comets +erskine +modernized +canned +constraint +domestically +chemotherapy +featherweight +stifled +##mum +darkly +irresistible +refreshing +hasty +isolate +##oys +kitchener +planners +##wehr +cages +yarn +implant +toulon +elects +childbirth +yue +##lind +rightful +sportsman +junctions +remodeled +specifies +##rgh +##oons +complimented +##urgent +lister +ot +##logic +bequeathed +cheekbones +fontana +gabby +##dial +amadeus +corrugated +maverick +resented +triangles +##hered +##usly +nazareth +tyrol +1675 +assent +poorer +sectional +aegean +##cous +nylon +ghanaian +##egorical +##weig +cushions +forbid +fusiliers +obstruction +somerville +##scia +dime +earrings +elliptical +leyte +oder 
+polymers +timmy +midtown +piloted +settles +continual +externally +mayfield +##uh +enrichment +henson +keane +persians +1733 +benji +braden +pep +##efe +contenders +pepsi +valet +##isches +##asse +##earing +goofy +stroll +##amen +authoritarian +occurrences +adversary +ahmedabad +tangent +toppled +dorchester +1672 +modernism +marxism +islamist +charlemagne +exponential +racks +brunette +pic +skirmish +##bund +##lad +##powered +##yst +hoisted +messina +shatter +##ctum +jedi +vantage +##music +##neil +clemens +mahmoud +corrupted +authentication +lowry +nils +##washed +omnibus +wounding +jillian +##itors +##opped +serialized +narcotics +handheld +##arm +##plicity +intersecting +stimulating +##onis +crate +fellowships +hemingway +casinos +climatic +fordham +copeland +drip +beatty +leaflets +robber +brothel +madeira +##hedral +sphinx +ultrasound +##vana +valor +forbade +leonid +villas +##aldo +duane +marquez +##cytes +disadvantaged +forearms +kawasaki +reacts +consular +lax +uncles +uphold +##hopper +concepcion +dorsey +lass +##izan +arching +passageway +1708 +researches +tia +internationals +##graphs +##opers +distinguishes +javanese +divert +##uven +plotted +##listic +##rwin +##erik +##tify +affirmative +signifies +validation +##bson +kari +felicity +georgina +zulu +##eros +##rained +##rath +overcoming +argyll +##rbin +1734 +chiba +ratification +windy +earls +parapet +##marks +hunan +pristine +astrid +punta +##gart +brodie +##kota +##oder +malaga +minerva +rouse +##phonic +bellowed +pagoda +portals +reclamation +##gur +##odies +##⁄₄ +parentheses +quoting +allergic +palette +showcases +benefactor +heartland +nonlinear +##tness +bladed +cheerfully +scans +##ety +1666 +girlfriends +pedersen +hiram +sous +##liche +##nator +1683 +##nery +##orio +##umen +bobo +primaries +smiley +##cb +unearthed +uniformly +fis +metadata +1635 +ind +##oted +recoil +##titles +##tura +##ια +hilbert +jamestown +mcmillan +tulane +seychelles +##frid +antics +coli +fated +stucco +##grants +1654 +bulky +accolades +arrays +caledonian +carnage +optimism +puebla +##tative +##cave +enforcing +rotherham +dunlop +aeronautics +chimed +incline +zoning +archduke +hellenistic +##oses +##sions +candi +thong +##ople +magnate +rustic +##rsk +projective +slant +##offs +danes +hollis +vocalists +##ammed +congenital +contend +gesellschaft +##ocating +##pressive +douglass +quieter +##kshi +howled +salim +spontaneously +townsville +buena +southport +##bold +kato +1638 +faerie +stiffly +##vus +##rled +flawless +realising +taboo +##7th +straightening +jena +##hid +cartwright +berber +bertram +soloists +noses +coping +fission +hardin +inca +##cen +1717 +mobilized +vhf +##raf +biscuits +curate +##anial +gaunt +neighbourhoods +1540 +##abas +blanca +bypassed +sockets +behold +coincidentally +##bane +nara +shave +splinter +terrific +##arion +##erian +commonplace +juris +redwood +waistband +boxed +caitlin +fingerprints +jennie +naturalized +##ired +balfour +craters +jody +bungalow +hugely +quilt +glitter +pigeons +undertaker +bulging +constrained +##sil +##akh +assimilation +reworked +##person +persuasion +##pants +felicia +##cliff +##ulent +1732 +explodes +##dun +##inium +##zic +lyman +vulture +hog +overlook +begs +northwards +ow +spoil +##urer +fatima +favorably +accumulate +sargent +sorority +corresponded +dispersal +kochi +toned +##imi +##lita +internacional +newfound +##agger +##lynn +##rigue +booths +peanuts +##eborg +medicare +muriel +nur +##uram +crates +millennia +pajamas +worsened +##breakers +jimi +vanuatu +yawned +##udeau +carousel 
+##hony +hurdle +##ccus +##mounted +##pod +rv +##eche +airship +ambiguity +compulsion +recapture +##claiming +arthritis +##osomal +1667 +asserting +ngc +sniffing +dade +discontent +glendale +ported +##amina +defamation +rammed +##scent +fling +livingstone +##fleet +875 +apocalyptic +comrade +##lowe +cessna +eine +persecuted +subsistence +demi +hoop +reliefs +coptic +progressing +stemmed +perpetrators +1665 +priestess +##nio +dobson +ebony +rooster +itf +tortricidae +##bbon +##jian +cleanup +##jean +##øy +1721 +eighties +taxonomic +holiness +##hearted +##spar +antilles +showcasing +stabilized +##nb +gia +mascara +michelangelo +dawned +##uria +##vinsky +extinguished +fitz +grotesque +£100 +##fera +##loid +##mous +barges +neue +throbbed +cipher +johnnie +##mpt +outburst +##swick +spearheaded +administrations +heartbreak +pixels +pleasantly +##enay +lombardy +plush +##nsed +bobbie +##hly +reapers +tremor +xiang +minogue +substantive +hitch +barak +##wyl +kwan +##encia +910 +obscene +elegance +indus +surfer +bribery +conserve +##hyllum +##masters +horatio +##fat +apes +rebound +psychotic +##pour +iteration +##mium +##vani +botanic +horribly +antiques +dispose +paxton +##hli +##wg +timeless +1704 +disregard +engraver +hounds +##bau +##version +looted +uno +facilitates +groans +masjid +rutland +antibody +disqualification +decatur +footballers +quake +slacks +48th +rein +scribe +stabilize +commits +exemplary +tho +##hort +##chison +pantry +traversed +##hiti +disrepair +identifiable +vibrated +baccalaureate +csa +interviewing +##iensis +##raße +greaves +wealthiest +classed +jogged +£5 +##atal +illuminating +knicks +respecting +##uno +scrubbed +##iji +##dles +kruger +moods +growls +raider +silvia +chefs +kam +cree +percival +##terol +gunter +counterattack +defiant +henan +ze +##rasia +##riety +equivalence +submissions +##fra +##thor +bautista +mechanically +##heater +cornice +herbal +templar +##mering +outputs +ruining +ligand +renumbered +extravagant +mika +blockbuster +eta +insurrection +##ilia +darkening +ferocious +pianos +strife +kinship +##aer +melee +##anor +##iste +##oue +decidedly +weep +##jad +##missive +##ppel +puget +unease +##gnant +1629 +hammering +kassel +wessex +##lga +bromwich +egan +paranoia +utilization +##atable +##idad +contradictory +provoke +##ols +##ouring +##tangled +knesset +##very +##lette +plumbing +##sden +greensboro +occult +sniff +zev +beaming +gamer +haggard +mahal +##olt +##pins +mendes +utmost +briefing +gunnery +##gut +##pher +##zh +##rok +1679 +khalifa +sonya +##boot +principals +urbana +wiring +##liffe +##minating +##rrado +dahl +nyu +skepticism +townspeople +ithaca +lobster +somethin +##fur +##arina +##−1 +freighter +zimmerman +biceps +contractual +##herton +amend +hurrying +subconscious +##anal +meng +clermont +spawning +##eia +##lub +dignitaries +impetus +snacks +spotting +twigs +##bilis +##cz +##ouk +libertadores +nic +skylar +##aina +gustave +asean +##anum +dieter +legislatures +flirt +bromley +trolls +umar +##bbies +##tyle +blah +parc +bridgeport +crank +negligence +##nction +46th +constantin +molded +bandages +seriousness +00pm +siegel +carpets +compartments +upbeat +statehood +##dner +##edging +marko +platt +##hane +paving +##iy +1738 +abbess +impatience +limousine +nbl +lucille +mojo +nightfall +robbers +##nais +karel +brisk +calves +replicate +ascribed +telescopes +##olf +intimidated +ballast +specialization +aerodynamic +caliphate +visionary +##arded +epsilon +##aday +##onte +aggregation +auditory +boosted +reunification +kathmandu +loco +robyn 
+acknowledges +appointing +humanoid +newell +redeveloped +restraints +##tained +barbarians +chopper +1609 +italiana +##lez +##lho +investigates +wrestlemania +##anies +##bib +##falls +creaked +dragoons +gravely +minions +stupidity +volley +##harat +##week +musik +##eries +##uously +fungal +massimo +semantics +malvern +##ahl +##pee +discourage +embryo +imperialism +1910s +profoundly +##ddled +jiangsu +sparkled +stat +##holz +sweatshirt +tobin +##iction +sneered +##cheon +##oit +brit +causal +smyth +##neuve +diffuse +perrin +silvio +##ipes +##recht +detonated +iqbal +selma +##nism +##zumi +roasted +##riders +tay +##ados +##mament +##mut +##rud +completes +nipples +flavour +hirsch +##laus +calderon +sneakers +moravian +##ksha +1622 +##imeters +bodo +##isance +##pre +##ronia +anatomical +excerpt +##lke +dh +kunst +##tablished +##scoe +biomass +panted +unharmed +gael +housemates +montpellier +coa +rodents +tonic +hickory +singleton +##taro +1719 +aldo +breaststroke +dempsey +och +rocco +##cuit +merton +dissemination +midsummer +serials +##idi +haji +polynomials +enoch +prematurely +shutter +taunton +£3 +##grating +##inates +archangel +harassed +##asco +archway +dazzling +##ecin +1736 +sumo +wat +##kovich +1086 +honneur +##ently +##nostic +##ttal +##idon +1605 +1716 +rents +##gnan +hires +##ikh +##dant +howie +##rons +handler +retracted +shocks +1632 +arun +duluth +kepler +trumpeter +##lary +peeking +seasoned +trooper +##mara +laszlo +##iciencies +##rti +heterosexual +##inatory +indira +jogging +##inga +##lism +beit +dissatisfaction +malice +##ately +nedra +peeling +##rgeon +47th +stadiums +vertigo +##ains +iced +restroom +##plify +##tub +illustrating +pear +##chner +##sibility +inorganic +rappers +receipts +watery +##kura +lucinda +##oulos +reintroduced +##8th +##tched +gracefully +saxons +nutritional +wastewater +rained +favourites +bedrock +fisted +hallways +likeness +upscale +##lateral +1580 +blinds +prequel +##pps +##tama +deter +humiliating +restraining +tn +vents +1659 +laundering +recess +rosary +tractors +coulter +federer +##ifiers +##plin +persistence +##quitable +geschichte +pendulum +quakers +##beam +bassett +pictorial +koln +##sitor +drills +reciprocal +shooters +##cton +##tees +converge +pip +dmitri +donnelly +yamamoto +aqua +azores +demographics +hypnotic +spitfire +suspend +wryly +roderick +##rran +sebastien +##asurable +mavericks +##fles +himalayan +prodigy +##iance +transvaal +demonstrators +handcuffs +dodged +mcnamara +sublime +1726 +crazed +##efined +##till +ivo +pondered +reconciled +shrill +sava +##duk +bal +heresy +jaipur +goran +##nished +lux +shelly +whitehall +##hre +israelis +peacekeeping +##wled +1703 +demetrius +ousted +##arians +##zos +beale +anwar +backstroke +raged +shrinking +cremated +##yck +benign +towing +wadi +darmstadt +landfill +parana +soothe +colleen +sidewalks +mayfair +tumble +hepatitis +ferrer +superstructure +##gingly +##urse +##wee +anthropological +translators +##mies +closeness +hooves +##pw +mondays +##roll +##vita +landscaping +##urized +purification +sock +thorns +thwarted +jalan +tiberius +##taka +saline +##rito +confidently +khyber +sculptors +##ij +brahms +hammersmith +inspectors +battista +fivb +fragmentation +hackney +##uls +arresting +exercising +antoinette +bedfordshire +##zily +dyed +##hema +1656 +racetrack +variability +##tique +1655 +austrians +deteriorating +madman +theorists +aix +lehman +weathered +1731 +decreed +eruptions +1729 +flaw +quinlan +sorbonne +flutes +nunez +1711 +adored +downwards +fable +rasped +1712 +moritz +mouthful 
+renegade +shivers +stunts +dysfunction +restrain +translit +pancakes +##avio +##cision +##tray +vial +##lden +bain +##maid +##oxide +chihuahua +malacca +vimes +##rba +##rnier +1664 +donnie +plaques +##ually +bangs +floppy +huntsville +loretta +nikolay +##otte +eater +handgun +ubiquitous +##hett +eras +zodiac +1634 +##omorphic +1820s +##zog +cochran +##bula +##lithic +warring +##rada +dalai +excused +blazers +mcconnell +reeling +este +##abi +geese +hoax +taxon +##bla +guitarists +condemning +hunts +inversion +moffat +taekwondo +##lvis +1624 +stammered +##rest +##rzy +sousa +fundraiser +marylebone +navigable +uptown +cabbage +daniela +salman +shitty +whimper +##kian +##utive +programmers +protections +##rmi +##rued +forceful +##enes +fuss +##tao +##wash +brat +oppressive +reykjavik +spartak +ticking +##inkles +##kiewicz +adolph +horst +maui +protege +straighten +cpc +landau +concourse +clements +resultant +##ando +imaginative +joo +reactivated +##rem +##ffled +##uising +consultative +##guide +flop +kaitlyn +mergers +parenting +somber +##vron +supervise +vidhan +##imum +courtship +exemplified +harmonies +medallist +refining +##rrow +##ка +amara +##hum +goalscorer +sited +overshadowed +rohan +displeasure +secretive +multiplied +osman +##orth +engravings +padre +##kali +##veda +miniatures +mis +##yala +clap +pali +rook +##cana +1692 +57th +antennae +astro +oskar +1628 +bulldog +crotch +hackett +yucatan +##sure +amplifiers +brno +ferrara +migrating +##gree +thanking +turing +##eza +mccann +ting +andersson +onslaught +gaines +ganga +incense +standardization +##mation +sentai +scuba +stuffing +turquoise +waivers +alloys +##vitt +regaining +vaults +##clops +##gizing +digger +furry +memorabilia +probing +##iad +payton +rec +deutschland +filippo +opaque +seamen +zenith +afrikaans +##filtration +disciplined +inspirational +##merie +banco +confuse +grafton +tod +##dgets +championed +simi +anomaly +biplane +##ceptive +electrode +##para +1697 +cleavage +crossbow +swirl +informant +##lars +##osta +afi +bonfire +spec +##oux +lakeside +slump +##culus +##lais +##qvist +##rrigan +1016 +facades +borg +inwardly +cervical +pointedly +stabilization +##odon +chests +1699 +hacked +ctv +orthogonal +suzy +##lastic +gaulle +jacobite +rearview +##erted +ashby +##drik +##igate +##mise +##zbek +affectionately +canine +disperse +latham +##istles +##ivar +spielberg +##orin +##idium +ezekiel +cid +##sg +durga +middletown +##cina +customized +frontiers +harden +##etano +##zzy +1604 +bolsheviks +coloration +yoko +##bedo +briefs +slabs +debra +liquidation +plumage +##oin +blossoms +dementia +subsidy +1611 +proctor +relational +jerseys +parochial +ter +##ici +esa +peshawar +cavalier +loren +idiots +shamrock +1646 +dutton +malabar +mustache +##endez +##ocytes +referencing +terminates +marche +yarmouth +##sop +acton +mated +seton +subtly +baptised +beige +extremes +jolted +kristina +telecast +##actic +safeguard +waldo +##baldi +##bular +endeavors +sloppy +subterranean +##ensburg +##itung +delicately +pigment +tq +##scu +1626 +collisions +coveted +herds +##personal +##meister +##nberger +chopra +##ricting +abnormalities +defective +galician +lucie +##dilly +alligator +likened +##genase +burundi +clears +complexion +derelict +deafening +diablo +fingered +champaign +dogg +enlist +isotope +labeling +mrna +##erre +brilliance +marvelous +##ayo +1652 +crawley +ether +footed +dwellers +deserts +hamish +rubs +warlock +skimmed +##lizer +buick +embark +heraldic +irregularities +##ajan +kiara +##kulam +##ieg +antigen +kowalski +##lge 
+oakley +visitation +##mbit +vt +##suit +1570 +murderers +##miento +##rites +chimneys +##sling +condemn +custer +exchequer +havre +##ghi +fluctuations +##rations +dfb +hendricks +vaccines +##tarian +nietzsche +biking +juicy +##duced +brooding +scrolling +selangor +##ragan +annum +boomed +seminole +sugarcane +##dna +departmental +dismissing +innsbruck +arteries +ashok +batavia +daze +kun +overtook +##rga +##tlan +beheaded +gaddafi +holm +electronically +faulty +galilee +fractures +kobayashi +##lized +gunmen +magma +aramaic +mala +eastenders +inference +messengers +bf +##qu +bathrooms +##vere +1658 +flashbacks +ideally +misunderstood +##jali +##weather +mendez +##grounds +uncanny +##iii +1709 +friendships +##nbc +sacrament +accommodated +reiterated +logistical +pebbles +thumped +##escence +administering +decrees +drafts +##flight +##cased +##tula +futuristic +picket +intimidation +winthrop +##fahan +interfered +afar +francoise +morally +uta +cochin +croft +dwarfs +##bruck +##dents +##nami +biker +##hner +##meral +##isen +##ometric +##pres +##ан +brightened +meek +parcels +securely +gunners +##jhl +##zko +agile +hysteria +##lten +##rcus +bukit +champs +chevy +cuckoo +leith +sadler +theologians +welded +##section +1663 +plurality +xander +##rooms +##formed +shredded +temps +intimately +pau +tormented +##lok +##stellar +1618 +charred +essen +##mmel +alarms +spraying +ascot +blooms +twinkle +##abia +##apes +internment +obsidian +##chaft +snoop +##dav +##ooping +malibu +##tension +quiver +##itia +hays +mcintosh +travers +walsall +##ffie +1623 +beverley +schwarz +plunging +structurally +rosenthal +vikram +##tsk +ghz +##onda +##tiv +chalmers +groningen +pew +reckon +unicef +##rvis +55th +##gni +1651 +sulawesi +avila +cai +metaphysical +screwing +turbulence +##mberg +augusto +samba +56th +baffled +momentary +toxin +##urian +##wani +aachen +condoms +dali +steppe +##oed +##year +adolescence +dauphin +electrically +inaccessible +microscopy +nikita +##ega +atv +##enter +##oles +##oteric +accountants +punishments +wrongly +bribes +adventurous +clinch +flinders +southland +##hem +##kata +gough +##ciency +lads +soared +##ה +undergoes +deformation +outlawed +rubbish +##arus +##mussen +##nidae +##rzburg +arcs +##ingdon +##tituted +1695 +wheelbase +wheeling +bombardier +campground +zebra +##lices +##oj +##bain +lullaby +##ecure +donetsk +wylie +grenada +##arding +##ης +squinting +eireann +opposes +##andra +maximal +runes +##broken +##cuting +##iface +##ror +##rosis +additive +britney +adultery +triggering +##drome +detrimental +aarhus +containment +jc +swapped +vichy +##ioms +madly +##oric +##rag +brant +##ckey +1560 +1612 +broughton +rustling +##stems +##uder +asbestos +mentoring +##nivorous +finley +leaps +##isan +apical +pry +slits +substitutes +##dict +intuitive +fantasia +insistent +unreasonable +##igen +##vna +domed +hannover +margot +ponder +##zziness +impromptu +jian +rampage +stemming +##eft +andrey +gerais +whichever +amnesia +appropriated +anzac +clicks +modifying +ultimatum +cambrian +maids +verve +yellowstone +##mbs +conservatoire +##scribe +adherence +dinners +spectra +imperfect +mysteriously +sidekick +tatar +tuba +##aks +##ifolia +distrust +##athan +##zle +ronin +zac +##pse +celaena +instrumentalist +scents +skopje +##mbling +comical +compensated +vidal +condor +intersect +jingle +wavelengths +##urrent +mcqueen +##izzly +carp +weasel +militias +postdoctoral +eugen +gunslinger +##ɛ +faux +hospice +##for +appalled +derivation +dwarves +##elis +dilapidated +##folk +astoria +philology +##lwyn 
+##otho +##saka +inducing +philanthropy +##bf +##itative +geek +markedly +##yce +bessie +indices +##flict +frowns +resolving +weightlifting +tugs +cleric +contentious +1653 +mania +rms +##miya +##reate +##ruck +##tucket +bien +eels +marek +##ayton +##cence +discreet +unofficially +##ife +leaks +##bber +1705 +dung +compressor +hillsborough +pandit +shillings +distal +##skin +##tat +nosed +##nir +mangrove +undeveloped +##idia +textures +##inho +##rise +irritating +nay +amazingly +bancroft +apologetic +compassionate +kata +symphonies +##lovic +airspace +##lch +gifford +precautions +fulfillment +sevilla +vulgar +martinique +##urities +looting +piccolo +tidy +##dermott +quadrant +armchair +incomes +mathematicians +stampede +nilsson +##inking +##scan +foo +quarterfinal +##ostal +shang +shouldered +squirrels +##owe +vinegar +##bner +##rchy +##systems +delaying +##trics +ars +dwyer +rhapsody +sponsoring +##gration +bipolar +cinder +starters +##olio +##urst +signage +##nty +aground +figurative +mons +acquaintances +duets +erroneously +soyuz +elliptic +recreated +##cultural +##quette +##ssed +##tma +##zcz +moderator +scares +##itaire +##stones +##udence +juniper +sighting +##just +##nsen +britten +calabria +ry +bop +cramer +forsyth +stillness +airmen +gathers +unfit +##umber +##upt +taunting +seeker +streamlined +##bution +holster +schumann +tread +vox +##gano +##onzo +strive +dil +reforming +covent +newbury +predicting +##orro +decorate +tre +##puted +andover +asahi +dept +dunkirk +gills +##tori +buren +huskies +##stis +##stov +abstracts +bets +loosen +##opa +1682 +yearning +##glio +##sir +berman +effortlessly +enamel +napoli +persist +##peration +##uez +attache +elisa +invitations +##kic +accelerating +reindeer +boardwalk +clutches +nelly +polka +##kei +adamant +huey +lough +unbroken +adventurer +embroidery +inspecting +stanza +##ducted +naia +taluka +##pone +##roids +chases +deprivation +florian +##ppet +earthly +##lib +##ssee +colossal +foreigner +vet +freaks +patrice +rosewood +triassic +upstate +##pkins +dominates +ata +chants +ks +vo +##bley +##raya +##rmed +agra +infiltrate +##ailing +##ilation +##tzer +##uppe +##werk +binoculars +enthusiast +fujian +squeak +##avs +abolitionist +almeida +boredom +hampstead +marsden +rations +##ands +inflated +bonuses +rosalie +patna +##rco +detachments +penitentiary +54th +flourishing +woolf +##dion +##etched +papyrus +##lster +##nsor +##toy +bobbed +dismounted +endelle +inhuman +motorola +wince +wreath +##ticus +hideout +inspections +sanjay +disgrace +infused +pudding +stalks +##urbed +arsenic +leases +##hyl +##rrard +collarbone +##waite +##wil +dowry +##bant +##edance +genealogical +nitrate +salamanca +scandals +thyroid +necessitated +##` +##¡ +##¢ +##¦ +##¨ +##ª +##¬ +##´ +##¶ +##¾ +##¿ +##ð +##þ +##ħ +##œ +##ƒ +##ɐ +##ɑ +##ɒ +##ɕ +##ɣ +##ɨ +##ɪ +##ɫ +##ɬ +##ɯ +##ɲ +##ɴ +##ɹ +##ɾ +##ʀ +##ʁ +##ʂ +##ʃ +##ʉ +##ʊ +##ʋ +##ʌ +##ʎ +##ʐ +##ʑ +##ʒ +##ʔ +##ʲ +##ʳ +##ʷ +##ʸ +##ʻ +##ʼ +##ʾ +##ʿ +##ˡ +##ˣ +##ˤ +##ζ +##ξ +##щ +##ъ +##э +##ю +##ђ +##є +##ј +##љ +##њ +##ћ +##ӏ +##ա +##բ +##գ +##դ +##ե +##թ +##ի +##լ +##կ +##հ +##մ +##յ +##ն +##ո +##պ +##ս +##վ +##տ +##ր +##ւ +##ք +##־ +##א +##ב +##ג +##ד +##ו +##ז +##ח +##ט +##י +##ך +##כ +##ל +##ם +##מ +##ן +##נ +##ס +##ע +##ף +##פ +##ץ +##צ +##ק +##ר +##ש +##ת +##، +##ء +##ث +##ج +##ح +##خ +##ذ +##ز +##ش +##ص +##ض +##ط +##ظ +##غ +##ـ +##ف +##ق +##ك +##ى +##ٹ +##پ +##چ +##ک +##گ +##ں +##ھ +##ہ +##ے +##अ +##आ +##उ +##ए +##क +##ख +##ग +##च +##ज +##ट +##ड +##ण +##त +##थ +##द +##ध +##न +##प +##ब +##भ +##म +##य +##र +##ल 
+##व +##श +##ष +##स +##ह +##ा +##ि +##ी +##ो +##। +##॥ +##ং +##অ +##আ +##ই +##উ +##এ +##ও +##ক +##খ +##গ +##চ +##ছ +##জ +##ট +##ড +##ণ +##ত +##থ +##দ +##ধ +##ন +##প +##ব +##ভ +##ম +##য +##র +##ল +##শ +##ষ +##স +##হ +##া +##ি +##ী +##ে +##க +##ச +##ட +##த +##ந +##ன +##ப +##ம +##ய +##ர +##ல +##ள +##வ +##ா +##ி +##ு +##ே +##ை +##ನ +##ರ +##ಾ +##ක +##ය +##ර +##ල +##ව +##ා +##ต +##ท +##พ +##ล +##ว +##ส +##། +##ག +##ང +##ད +##ན +##པ +##བ +##མ +##འ +##ར +##ལ +##ས +##မ +##ა +##ბ +##გ +##დ +##ე +##ვ +##თ +##ი +##კ +##ლ +##მ +##ნ +##ო +##რ +##ს +##ტ +##უ +##ᄊ +##ᴬ +##ᴮ +##ᴰ +##ᴵ +##ᴺ +##ᵀ +##ᵇ +##ᵈ +##ᵖ +##ᵗ +##ᵣ +##ᵤ +##ᵥ +##ᶜ +##ᶠ +##‐ +##‑ +##‒ +##– +##— +##― +##‘ +##’ +##‚ +##“ +##” +##‡ +##… +##⁰ +##⁴ +##⁵ +##⁶ +##⁷ +##⁸ +##⁹ +##⁻ +##₅ +##₆ +##₇ +##₈ +##₉ +##₊ +##₍ +##₎ +##ₐ +##ₑ +##ₒ +##ₓ +##ₕ +##ₖ +##ₗ +##ₘ +##ₚ +##ₛ +##ₜ +##₤ +##₩ +##₱ +##₹ +##ℓ +##ℝ +##⅓ +##⅔ +##↦ +##⇄ +##⇌ +##∂ +##∅ +##∆ +##∇ +##∈ +##∗ +##∘ +##∧ +##∨ +##∪ +##⊂ +##⊆ +##⊕ +##⊗ +##☉ +##♯ +##⟨ +##⟩ +##ⱼ +##⺩ +##⺼ +##⽥ +##亻 +##宀 +##彳 +##忄 +##扌 +##氵 +##疒 +##糹 +##訁 +##辶 +##阝 +##龸 +##fi +##fl diff --git a/src/comfyui/comfy/text_encoders/long_clipl.json b/src/comfyui/comfy/text_encoders/long_clipl.json new file mode 100644 index 0000000000000000000000000000000000000000..5e2056ff37ec907462bac7a557e12bb728a15990 --- /dev/null +++ b/src/comfyui/comfy/text_encoders/long_clipl.json @@ -0,0 +1,25 @@ +{ + "_name_or_path": "openai/clip-vit-large-patch14", + "architectures": [ + "CLIPTextModel" + ], + "attention_dropout": 0.0, + "bos_token_id": 0, + "dropout": 0.0, + "eos_token_id": 49407, + "hidden_act": "quick_gelu", + "hidden_size": 768, + "initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "max_position_embeddings": 248, + "model_type": "clip_text_model", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "pad_token_id": 1, + "projection_dim": 768, + "torch_dtype": "float32", + "transformers_version": "4.24.0", + "vocab_size": 49408 +} diff --git a/src/comfyui/comfy/text_encoders/long_clipl.py b/src/comfyui/comfy/text_encoders/long_clipl.py new file mode 100644 index 0000000000000000000000000000000000000000..b81912cb3d38ca9dafccd11a74ef96b97dfa5839 --- /dev/null +++ b/src/comfyui/comfy/text_encoders/long_clipl.py @@ -0,0 +1,30 @@ +from comfy import sd1_clip +import os + +class LongClipTokenizer_(sd1_clip.SDTokenizer): + def __init__(self, embedding_directory=None, tokenizer_data={}): + super().__init__(max_length=248, embedding_directory=embedding_directory, tokenizer_data=tokenizer_data) + +class LongClipModel_(sd1_clip.SDClipModel): + def __init__(self, *args, **kwargs): + textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "long_clipl.json") + super().__init__(*args, textmodel_json_config=textmodel_json_config, **kwargs) + +class LongClipTokenizer(sd1_clip.SD1Tokenizer): + def __init__(self, embedding_directory=None, tokenizer_data={}): + super().__init__(embedding_directory=embedding_directory, tokenizer_data=tokenizer_data, tokenizer=LongClipTokenizer_) + +class LongClipModel(sd1_clip.SD1ClipModel): + def __init__(self, device="cpu", dtype=None, model_options={}, **kwargs): + super().__init__(device=device, dtype=dtype, model_options=model_options, clip_model=LongClipModel_, **kwargs) + +def model_options_long_clip(sd, tokenizer_data, model_options): + w = sd.get("clip_l.text_model.embeddings.position_embedding.weight", None) + if w is None: + w = sd.get("text_model.embeddings.position_embedding.weight", None) + if w is not None and 
w.shape[0] == 248: + tokenizer_data = tokenizer_data.copy() + model_options = model_options.copy() + tokenizer_data["clip_l_tokenizer_class"] = LongClipTokenizer_ + model_options["clip_l_class"] = LongClipModel_ + return tokenizer_data, model_options diff --git a/src/comfyui/comfy/text_encoders/mt5_config_xl.json b/src/comfyui/comfy/text_encoders/mt5_config_xl.json new file mode 100644 index 0000000000000000000000000000000000000000..092fefd6e32dac566e443fc03eae53f6a8b57400 --- /dev/null +++ b/src/comfyui/comfy/text_encoders/mt5_config_xl.json @@ -0,0 +1,22 @@ +{ + "d_ff": 5120, + "d_kv": 64, + "d_model": 2048, + "decoder_start_token_id": 0, + "dropout_rate": 0.1, + "eos_token_id": 1, + "dense_act_fn": "gelu_pytorch_tanh", + "initializer_factor": 1.0, + "is_encoder_decoder": true, + "is_gated_act": true, + "layer_norm_epsilon": 1e-06, + "model_type": "mt5", + "num_decoder_layers": 24, + "num_heads": 32, + "num_layers": 24, + "output_past": true, + "pad_token_id": 0, + "relative_attention_num_buckets": 32, + "tie_word_embeddings": false, + "vocab_size": 250112 +} diff --git a/src/comfyui/comfy/text_encoders/sa_t5.py b/src/comfyui/comfy/text_encoders/sa_t5.py new file mode 100644 index 0000000000000000000000000000000000000000..7778ce47ad98ed7baabda3ce29dca76e25c6880f --- /dev/null +++ b/src/comfyui/comfy/text_encoders/sa_t5.py @@ -0,0 +1,22 @@ +from comfy import sd1_clip +from transformers import T5TokenizerFast +import comfy.text_encoders.t5 +import os + +class T5BaseModel(sd1_clip.SDClipModel): + def __init__(self, device="cpu", layer="last", layer_idx=None, dtype=None, model_options={}): + textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "t5_config_base.json") + super().__init__(device=device, layer=layer, layer_idx=layer_idx, textmodel_json_config=textmodel_json_config, dtype=dtype, model_options=model_options, special_tokens={"end": 1, "pad": 0}, model_class=comfy.text_encoders.t5.T5, enable_attention_masks=True, zero_out_masked=True) + +class T5BaseTokenizer(sd1_clip.SDTokenizer): + def __init__(self, embedding_directory=None, tokenizer_data={}): + tokenizer_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "t5_tokenizer") + super().__init__(tokenizer_path, pad_with_end=False, embedding_size=768, embedding_key='t5base', tokenizer_class=T5TokenizerFast, has_start_token=False, pad_to_max_length=False, max_length=99999999, min_length=128) + +class SAT5Tokenizer(sd1_clip.SD1Tokenizer): + def __init__(self, embedding_directory=None, tokenizer_data={}): + super().__init__(embedding_directory=embedding_directory, tokenizer_data=tokenizer_data, clip_name="t5base", tokenizer=T5BaseTokenizer) + +class SAT5Model(sd1_clip.SD1ClipModel): + def __init__(self, device="cpu", dtype=None, model_options={}, **kwargs): + super().__init__(device=device, dtype=dtype, model_options=model_options, name="t5base", clip_model=T5BaseModel, **kwargs) diff --git a/src/comfyui/comfy/text_encoders/sd2_clip.py b/src/comfyui/comfy/text_encoders/sd2_clip.py new file mode 100644 index 0000000000000000000000000000000000000000..31fc89869e6e134b6de260d30443654a6cc4ac83 --- /dev/null +++ b/src/comfyui/comfy/text_encoders/sd2_clip.py @@ -0,0 +1,23 @@ +from comfy import sd1_clip +import os + +class SD2ClipHModel(sd1_clip.SDClipModel): + def __init__(self, arch="ViT-H-14", device="cpu", max_length=77, freeze=True, layer="penultimate", layer_idx=None, dtype=None, model_options={}): + if layer == "penultimate": + layer="hidden" + layer_idx=-2 + + textmodel_json_config = 
os.path.join(os.path.dirname(os.path.realpath(__file__)), "sd2_clip_config.json") + super().__init__(device=device, freeze=freeze, layer=layer, layer_idx=layer_idx, textmodel_json_config=textmodel_json_config, dtype=dtype, special_tokens={"start": 49406, "end": 49407, "pad": 0}, return_projected_pooled=True, model_options=model_options) + +class SD2ClipHTokenizer(sd1_clip.SDTokenizer): + def __init__(self, tokenizer_path=None, embedding_directory=None, tokenizer_data={}): + super().__init__(tokenizer_path, pad_with_end=False, embedding_directory=embedding_directory, embedding_size=1024) + +class SD2Tokenizer(sd1_clip.SD1Tokenizer): + def __init__(self, embedding_directory=None, tokenizer_data={}): + super().__init__(embedding_directory=embedding_directory, tokenizer_data=tokenizer_data, clip_name="h", tokenizer=SD2ClipHTokenizer) + +class SD2ClipModel(sd1_clip.SD1ClipModel): + def __init__(self, device="cpu", dtype=None, model_options={}, **kwargs): + super().__init__(device=device, dtype=dtype, model_options=model_options, clip_name="h", clip_model=SD2ClipHModel, **kwargs) diff --git a/src/comfyui/comfy/text_encoders/sd2_clip_config.json b/src/comfyui/comfy/text_encoders/sd2_clip_config.json new file mode 100644 index 0000000000000000000000000000000000000000..00893cfdc9b00f8eb7cf5aaa9c343e7fcd298d82 --- /dev/null +++ b/src/comfyui/comfy/text_encoders/sd2_clip_config.json @@ -0,0 +1,23 @@ +{ + "architectures": [ + "CLIPTextModel" + ], + "attention_dropout": 0.0, + "bos_token_id": 0, + "dropout": 0.0, + "eos_token_id": 49407, + "hidden_act": "gelu", + "hidden_size": 1024, + "initializer_factor": 1.0, + "initializer_range": 0.02, + "intermediate_size": 4096, + "layer_norm_eps": 1e-05, + "max_position_embeddings": 77, + "model_type": "clip_text_model", + "num_attention_heads": 16, + "num_hidden_layers": 24, + "pad_token_id": 1, + "projection_dim": 1024, + "torch_dtype": "float32", + "vocab_size": 49408 +} diff --git a/src/comfyui/comfy/text_encoders/sd3_clip.py b/src/comfyui/comfy/text_encoders/sd3_clip.py new file mode 100644 index 0000000000000000000000000000000000000000..00d7e31ad32d495169e6bf3a7d511714326ca04a --- /dev/null +++ b/src/comfyui/comfy/text_encoders/sd3_clip.py @@ -0,0 +1,167 @@ +from comfy import sd1_clip +from comfy import sdxl_clip +from transformers import T5TokenizerFast +import comfy.text_encoders.t5 +import torch +import os +import comfy.model_management +import logging + +class T5XXLModel(sd1_clip.SDClipModel): + def __init__(self, device="cpu", layer="last", layer_idx=None, dtype=None, attention_mask=False, model_options={}): + textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "t5_config_xxl.json") + t5xxl_scaled_fp8 = model_options.get("t5xxl_scaled_fp8", None) + if t5xxl_scaled_fp8 is not None: + model_options = model_options.copy() + model_options["scaled_fp8"] = t5xxl_scaled_fp8 + + super().__init__(device=device, layer=layer, layer_idx=layer_idx, textmodel_json_config=textmodel_json_config, dtype=dtype, special_tokens={"end": 1, "pad": 0}, model_class=comfy.text_encoders.t5.T5, enable_attention_masks=attention_mask, return_attention_masks=attention_mask, model_options=model_options) + + +def t5_xxl_detect(state_dict, prefix=""): + out = {} + t5_key = "{}encoder.final_layer_norm.weight".format(prefix) + if t5_key in state_dict: + out["dtype_t5"] = state_dict[t5_key].dtype + + scaled_fp8_key = "{}scaled_fp8".format(prefix) + if scaled_fp8_key in state_dict: + out["t5xxl_scaled_fp8"] = state_dict[scaled_fp8_key].dtype + + return out + 
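For orientation, a minimal sketch of what the t5_xxl_detect() helper above reports, assuming ComfyUI's package root is on the import path; the one-key state dict is a stand-in, since real T5-XXL checkpoints carry thousands of keys:

import torch
from comfy.text_encoders.sd3_clip import t5_xxl_detect

# Stand-in state dict: only the key t5_xxl_detect() probes for.
sd = {"encoder.final_layer_norm.weight": torch.zeros(4096, dtype=torch.float16)}
print(t5_xxl_detect(sd))  # {'dtype_t5': torch.float16}; no "scaled_fp8" key present, so fp8 scaling stays off

The detected dtype is what the sd3_clip() factory further down forwards as dtype_t5 when it builds the text encoder.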
+class T5XXLTokenizer(sd1_clip.SDTokenizer): + def __init__(self, embedding_directory=None, tokenizer_data={}): + tokenizer_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "t5_tokenizer") + super().__init__(tokenizer_path, embedding_directory=embedding_directory, pad_with_end=False, embedding_size=4096, embedding_key='t5xxl', tokenizer_class=T5TokenizerFast, has_start_token=False, pad_to_max_length=False, max_length=99999999, min_length=77) + + +class SD3Tokenizer: + def __init__(self, embedding_directory=None, tokenizer_data={}): + clip_l_tokenizer_class = tokenizer_data.get("clip_l_tokenizer_class", sd1_clip.SDTokenizer) + self.clip_l = clip_l_tokenizer_class(embedding_directory=embedding_directory) + self.clip_g = sdxl_clip.SDXLClipGTokenizer(embedding_directory=embedding_directory) + self.t5xxl = T5XXLTokenizer(embedding_directory=embedding_directory) + + def tokenize_with_weights(self, text:str, return_word_ids=False): + out = {} + out["g"] = self.clip_g.tokenize_with_weights(text, return_word_ids) + out["l"] = self.clip_l.tokenize_with_weights(text, return_word_ids) + out["t5xxl"] = self.t5xxl.tokenize_with_weights(text, return_word_ids) + return out + + def untokenize(self, token_weight_pair): + return self.clip_g.untokenize(token_weight_pair) + + def state_dict(self): + return {} + +class SD3ClipModel(torch.nn.Module): + def __init__(self, clip_l=True, clip_g=True, t5=True, dtype_t5=None, t5_attention_mask=False, device="cpu", dtype=None, model_options={}): + super().__init__() + self.dtypes = set() + if clip_l: + clip_l_class = model_options.get("clip_l_class", sd1_clip.SDClipModel) + self.clip_l = clip_l_class(layer="hidden", layer_idx=-2, device=device, dtype=dtype, layer_norm_hidden_state=False, return_projected_pooled=False, model_options=model_options) + self.dtypes.add(dtype) + else: + self.clip_l = None + + if clip_g: + self.clip_g = sdxl_clip.SDXLClipG(device=device, dtype=dtype, model_options=model_options) + self.dtypes.add(dtype) + else: + self.clip_g = None + + if t5: + dtype_t5 = comfy.model_management.pick_weight_dtype(dtype_t5, dtype, device) + self.t5_attention_mask = t5_attention_mask + self.t5xxl = T5XXLModel(device=device, dtype=dtype_t5, model_options=model_options, attention_mask=self.t5_attention_mask) + self.dtypes.add(dtype_t5) + else: + self.t5xxl = None + + logging.debug("Created SD3 text encoder with: clip_l {}, clip_g {}, t5xxl {}:{}".format(clip_l, clip_g, t5, dtype_t5)) + + def set_clip_options(self, options): + if self.clip_l is not None: + self.clip_l.set_clip_options(options) + if self.clip_g is not None: + self.clip_g.set_clip_options(options) + if self.t5xxl is not None: + self.t5xxl.set_clip_options(options) + + def reset_clip_options(self): + if self.clip_l is not None: + self.clip_l.reset_clip_options() + if self.clip_g is not None: + self.clip_g.reset_clip_options() + if self.t5xxl is not None: + self.t5xxl.reset_clip_options() + + def encode_token_weights(self, token_weight_pairs): + token_weight_pairs_l = token_weight_pairs["l"] + token_weight_pairs_g = token_weight_pairs["g"] + token_weight_pairs_t5 = token_weight_pairs["t5xxl"] + lg_out = None + pooled = None + out = None + extra = {} + + if len(token_weight_pairs_g) > 0 or len(token_weight_pairs_l) > 0: + if self.clip_l is not None: + lg_out, l_pooled = self.clip_l.encode_token_weights(token_weight_pairs_l) + else: + l_pooled = torch.zeros((1, 768), device=comfy.model_management.intermediate_device()) + + if self.clip_g is not None: + g_out, g_pooled = 
self.clip_g.encode_token_weights(token_weight_pairs_g) + if lg_out is not None: + cut_to = min(lg_out.shape[1], g_out.shape[1]) + lg_out = torch.cat([lg_out[:,:cut_to], g_out[:,:cut_to]], dim=-1) + else: + lg_out = torch.nn.functional.pad(g_out, (768, 0)) + else: + g_out = None + g_pooled = torch.zeros((1, 1280), device=comfy.model_management.intermediate_device()) + + if lg_out is not None: + lg_out = torch.nn.functional.pad(lg_out, (0, 4096 - lg_out.shape[-1])) + out = lg_out + pooled = torch.cat((l_pooled, g_pooled), dim=-1) + + if self.t5xxl is not None: + t5_output = self.t5xxl.encode_token_weights(token_weight_pairs_t5) + t5_out, t5_pooled = t5_output[:2] + if self.t5_attention_mask: + extra["attention_mask"] = t5_output[2]["attention_mask"] + + if lg_out is not None: + out = torch.cat([lg_out, t5_out], dim=-2) + else: + out = t5_out + + if out is None: + out = torch.zeros((1, 77, 4096), device=comfy.model_management.intermediate_device()) + + if pooled is None: + pooled = torch.zeros((1, 768 + 1280), device=comfy.model_management.intermediate_device()) + + return out, pooled, extra + + def load_sd(self, sd): + if "text_model.encoder.layers.30.mlp.fc1.weight" in sd: + return self.clip_g.load_sd(sd) + elif "text_model.encoder.layers.1.mlp.fc1.weight" in sd: + return self.clip_l.load_sd(sd) + else: + return self.t5xxl.load_sd(sd) + +def sd3_clip(clip_l=True, clip_g=True, t5=True, dtype_t5=None, t5xxl_scaled_fp8=None, t5_attention_mask=False): + class SD3ClipModel_(SD3ClipModel): + def __init__(self, device="cpu", dtype=None, model_options={}): + if t5xxl_scaled_fp8 is not None and "t5xxl_scaled_fp8" not in model_options: + model_options = model_options.copy() + model_options["t5xxl_scaled_fp8"] = t5xxl_scaled_fp8 + super().__init__(clip_l=clip_l, clip_g=clip_g, t5=t5, dtype_t5=dtype_t5, t5_attention_mask=t5_attention_mask, device=device, dtype=dtype, model_options=model_options) + return SD3ClipModel_ diff --git a/src/comfyui/comfy/text_encoders/spiece_tokenizer.py b/src/comfyui/comfy/text_encoders/spiece_tokenizer.py new file mode 100644 index 0000000000000000000000000000000000000000..73739553d47c140ae7b985ed76004a72e6ddbf2b --- /dev/null +++ b/src/comfyui/comfy/text_encoders/spiece_tokenizer.py @@ -0,0 +1,32 @@ +import os +import torch + +class SPieceTokenizer: + add_eos = True + + @staticmethod + def from_pretrained(path): + return SPieceTokenizer(path) + + def __init__(self, tokenizer_path): + import sentencepiece + if torch.is_tensor(tokenizer_path): + tokenizer_path = tokenizer_path.numpy().tobytes() + + if isinstance(tokenizer_path, bytes): + self.tokenizer = sentencepiece.SentencePieceProcessor(model_proto=tokenizer_path, add_eos=self.add_eos) + else: + self.tokenizer = sentencepiece.SentencePieceProcessor(model_file=tokenizer_path, add_eos=self.add_eos) + + def get_vocab(self): + out = {} + for i in range(self.tokenizer.get_piece_size()): + out[self.tokenizer.id_to_piece(i)] = i + return out + + def __call__(self, string): + out = self.tokenizer.encode(string) + return {"input_ids": out} + + def serialize_model(self): + return torch.ByteTensor(list(self.tokenizer.serialized_model_proto())) diff --git a/src/comfyui/comfy/text_encoders/t5.py b/src/comfyui/comfy/text_encoders/t5.py new file mode 100644 index 0000000000000000000000000000000000000000..a1420c6cd2f208c9731d42b09fe077980a952aad --- /dev/null +++ b/src/comfyui/comfy/text_encoders/t5.py @@ -0,0 +1,241 @@ +import torch +import math +from comfy.ldm.modules.attention import optimized_attention_for_device +import comfy.ops 
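A hedged usage sketch of the SPieceTokenizer wrapper added just above; the model path is illustrative and assumes the LFS-tracked SentencePiece model has actually been fetched:

from comfy.text_encoders.spiece_tokenizer import SPieceTokenizer

tok = SPieceTokenizer.from_pretrained("comfy/text_encoders/t5_pile_tokenizer/tokenizer.model")
ids = tok("a photo of a cat")["input_ids"]  # sentencepiece ids, EOS appended because add_eos is True
vocab = tok.get_vocab()                     # piece -> id mapping built from the processor
blob = tok.serialize_model()                # model proto as a torch.ByteTensor, so it can ride along in a state dict

serialize_model() is what lets the tokenizer be round-tripped through a checkpoint: the ByteTensor path in __init__ accepts the serialized proto back.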
+ +class T5LayerNorm(torch.nn.Module): + def __init__(self, hidden_size, eps=1e-6, dtype=None, device=None, operations=None): + super().__init__() + self.weight = torch.nn.Parameter(torch.empty(hidden_size, dtype=dtype, device=device)) + self.variance_epsilon = eps + + def forward(self, x): + variance = x.pow(2).mean(-1, keepdim=True) + x = x * torch.rsqrt(variance + self.variance_epsilon) + return comfy.ops.cast_to_input(self.weight, x) * x + +activations = { + "gelu_pytorch_tanh": lambda a: torch.nn.functional.gelu(a, approximate="tanh"), + "relu": torch.nn.functional.relu, +} + +class T5DenseActDense(torch.nn.Module): + def __init__(self, model_dim, ff_dim, ff_activation, dtype, device, operations): + super().__init__() + self.wi = operations.Linear(model_dim, ff_dim, bias=False, dtype=dtype, device=device) + self.wo = operations.Linear(ff_dim, model_dim, bias=False, dtype=dtype, device=device) + # self.dropout = nn.Dropout(config.dropout_rate) + self.act = activations[ff_activation] + + def forward(self, x): + x = self.act(self.wi(x)) + # x = self.dropout(x) + x = self.wo(x) + return x + +class T5DenseGatedActDense(torch.nn.Module): + def __init__(self, model_dim, ff_dim, ff_activation, dtype, device, operations): + super().__init__() + self.wi_0 = operations.Linear(model_dim, ff_dim, bias=False, dtype=dtype, device=device) + self.wi_1 = operations.Linear(model_dim, ff_dim, bias=False, dtype=dtype, device=device) + self.wo = operations.Linear(ff_dim, model_dim, bias=False, dtype=dtype, device=device) + # self.dropout = nn.Dropout(config.dropout_rate) + self.act = activations[ff_activation] + + def forward(self, x): + hidden_gelu = self.act(self.wi_0(x)) + hidden_linear = self.wi_1(x) + x = hidden_gelu * hidden_linear + # x = self.dropout(x) + x = self.wo(x) + return x + +class T5LayerFF(torch.nn.Module): + def __init__(self, model_dim, ff_dim, ff_activation, gated_act, dtype, device, operations): + super().__init__() + if gated_act: + self.DenseReluDense = T5DenseGatedActDense(model_dim, ff_dim, ff_activation, dtype, device, operations) + else: + self.DenseReluDense = T5DenseActDense(model_dim, ff_dim, ff_activation, dtype, device, operations) + + self.layer_norm = T5LayerNorm(model_dim, dtype=dtype, device=device, operations=operations) + # self.dropout = nn.Dropout(config.dropout_rate) + + def forward(self, x): + forwarded_states = self.layer_norm(x) + forwarded_states = self.DenseReluDense(forwarded_states) + # x = x + self.dropout(forwarded_states) + x += forwarded_states + return x + +class T5Attention(torch.nn.Module): + def __init__(self, model_dim, inner_dim, num_heads, relative_attention_bias, dtype, device, operations): + super().__init__() + + # Mesh TensorFlow initialization to avoid scaling before softmax + self.q = operations.Linear(model_dim, inner_dim, bias=False, dtype=dtype, device=device) + self.k = operations.Linear(model_dim, inner_dim, bias=False, dtype=dtype, device=device) + self.v = operations.Linear(model_dim, inner_dim, bias=False, dtype=dtype, device=device) + self.o = operations.Linear(inner_dim, model_dim, bias=False, dtype=dtype, device=device) + self.num_heads = num_heads + + self.relative_attention_bias = None + if relative_attention_bias: + self.relative_attention_num_buckets = 32 + self.relative_attention_max_distance = 128 + self.relative_attention_bias = operations.Embedding(self.relative_attention_num_buckets, self.num_heads, device=device, dtype=dtype) + + @staticmethod + def _relative_position_bucket(relative_position, bidirectional=True, 
num_buckets=32, max_distance=128): + """ + Adapted from Mesh Tensorflow: + https://github.com/tensorflow/mesh/blob/0cb87fe07da627bf0b7e60475d59f95ed6b5be3d/mesh_tensorflow/transformer/transformer_layers.py#L593 + + Translate relative position to a bucket number for relative attention. The relative position is defined as + memory_position - query_position, i.e. the distance in tokens from the attending position to the attended-to + position. If bidirectional=False, then positive relative positions are invalid. We use smaller buckets for + small absolute relative_position and larger buckets for larger absolute relative_positions. All relative + positions >=max_distance map to the same bucket. All relative positions <=-max_distance map to the same bucket. + This should allow for more graceful generalization to longer sequences than the model has been trained on + + Args: + relative_position: an int32 Tensor + bidirectional: a boolean - whether the attention is bidirectional + num_buckets: an integer + max_distance: an integer + + Returns: + a Tensor with the same shape as relative_position, containing int32 values in the range [0, num_buckets) + """ + relative_buckets = 0 + if bidirectional: + num_buckets //= 2 + relative_buckets += (relative_position > 0).to(torch.long) * num_buckets + relative_position = torch.abs(relative_position) + else: + relative_position = -torch.min(relative_position, torch.zeros_like(relative_position)) + # now relative_position is in the range [0, inf) + + # half of the buckets are for exact increments in positions + max_exact = num_buckets // 2 + is_small = relative_position < max_exact + + # The other half of the buckets are for logarithmically bigger bins in positions up to max_distance + relative_position_if_large = max_exact + ( + torch.log(relative_position.float() / max_exact) + / math.log(max_distance / max_exact) + * (num_buckets - max_exact) + ).to(torch.long) + relative_position_if_large = torch.min( + relative_position_if_large, torch.full_like(relative_position_if_large, num_buckets - 1) + ) + + relative_buckets += torch.where(is_small, relative_position, relative_position_if_large) + return relative_buckets + + def compute_bias(self, query_length, key_length, device, dtype): + """Compute binned relative position bias""" + context_position = torch.arange(query_length, dtype=torch.long, device=device)[:, None] + memory_position = torch.arange(key_length, dtype=torch.long, device=device)[None, :] + relative_position = memory_position - context_position # shape (query_length, key_length) + relative_position_bucket = self._relative_position_bucket( + relative_position, # shape (query_length, key_length) + bidirectional=True, + num_buckets=self.relative_attention_num_buckets, + max_distance=self.relative_attention_max_distance, + ) + values = self.relative_attention_bias(relative_position_bucket, out_dtype=dtype) # shape (query_length, key_length, num_heads) + values = values.permute([2, 0, 1]).unsqueeze(0) # shape (1, num_heads, query_length, key_length) + return values + + def forward(self, x, mask=None, past_bias=None, optimized_attention=None): + q = self.q(x) + k = self.k(x) + v = self.v(x) + if self.relative_attention_bias is not None: + past_bias = self.compute_bias(x.shape[1], x.shape[1], x.device, x.dtype) + + if past_bias is not None: + if mask is not None: + mask = mask + past_bias + else: + mask = past_bias + + out = optimized_attention(q, k * ((k.shape[-1] / self.num_heads) ** 0.5), v, self.num_heads, mask) + return self.o(out), past_bias + 
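The bucketing scheme above is easier to see with concrete numbers; a small sketch, with the expected output hand-computed from the code as written:

import torch
from comfy.text_encoders.t5 import T5Attention

rel = torch.tensor([-200, -5, 0, 5, 200])  # memory_position - query_position
print(T5Attention._relative_position_bucket(rel, bidirectional=True, num_buckets=32, max_distance=128))
# tensor([15,  5,  0, 21, 31]): the 32 buckets are split in half by direction;
# |distance| < 8 gets an exact bucket, 8..127 maps logarithmically, anything
# >= 128 lands in that half's last bucket, and positive offsets shift by 16.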
+class T5LayerSelfAttention(torch.nn.Module): + def __init__(self, model_dim, inner_dim, ff_dim, num_heads, relative_attention_bias, dtype, device, operations): + super().__init__() + self.SelfAttention = T5Attention(model_dim, inner_dim, num_heads, relative_attention_bias, dtype, device, operations) + self.layer_norm = T5LayerNorm(model_dim, dtype=dtype, device=device, operations=operations) + # self.dropout = nn.Dropout(config.dropout_rate) + + def forward(self, x, mask=None, past_bias=None, optimized_attention=None): + normed_hidden_states = self.layer_norm(x) + # reuse the normed states computed above instead of running layer_norm a second time + output, past_bias = self.SelfAttention(normed_hidden_states, mask=mask, past_bias=past_bias, optimized_attention=optimized_attention) + # x = x + self.dropout(attention_output) + x += output + return x, past_bias + +class T5Block(torch.nn.Module): + def __init__(self, model_dim, inner_dim, ff_dim, ff_activation, gated_act, num_heads, relative_attention_bias, dtype, device, operations): + super().__init__() + self.layer = torch.nn.ModuleList() + self.layer.append(T5LayerSelfAttention(model_dim, inner_dim, ff_dim, num_heads, relative_attention_bias, dtype, device, operations)) + self.layer.append(T5LayerFF(model_dim, ff_dim, ff_activation, gated_act, dtype, device, operations)) + + def forward(self, x, mask=None, past_bias=None, optimized_attention=None): + x, past_bias = self.layer[0](x, mask, past_bias, optimized_attention) + x = self.layer[-1](x) + return x, past_bias + +class T5Stack(torch.nn.Module): + def __init__(self, num_layers, model_dim, inner_dim, ff_dim, ff_activation, gated_act, num_heads, relative_attention, dtype, device, operations): + super().__init__() + + self.block = torch.nn.ModuleList( + [T5Block(model_dim, inner_dim, ff_dim, ff_activation, gated_act, num_heads, relative_attention_bias=((not relative_attention) or (i == 0)), dtype=dtype, device=device, operations=operations) for i in range(num_layers)] + ) + self.final_layer_norm = T5LayerNorm(model_dim, dtype=dtype, device=device, operations=operations) + # self.dropout = nn.Dropout(config.dropout_rate) + + def forward(self, x, attention_mask=None, intermediate_output=None, final_layer_norm_intermediate=True, dtype=None): + mask = None + if attention_mask is not None: + mask = 1.0 - attention_mask.to(x.dtype).reshape((attention_mask.shape[0], 1, -1, attention_mask.shape[-1])).expand(attention_mask.shape[0], 1, attention_mask.shape[-1], attention_mask.shape[-1]) + mask = mask.masked_fill(mask.to(torch.bool), float("-inf")) + + intermediate = None + optimized_attention = optimized_attention_for_device(x.device, mask=attention_mask is not None, small_input=True) + past_bias = None + for i, l in enumerate(self.block): + x, past_bias = l(x, mask, past_bias, optimized_attention) + if i == intermediate_output: + intermediate = x.clone() + x = self.final_layer_norm(x) + if intermediate is not None and final_layer_norm_intermediate: + intermediate = self.final_layer_norm(intermediate) + return x, intermediate + +class T5(torch.nn.Module): + def __init__(self, config_dict, dtype, device, operations): + super().__init__() + self.num_layers = config_dict["num_layers"] + model_dim = config_dict["d_model"] + + self.encoder = T5Stack(self.num_layers, model_dim, model_dim, config_dict["d_ff"], config_dict["dense_act_fn"], config_dict["is_gated_act"], config_dict["num_heads"], config_dict["model_type"] != "umt5", dtype, device, operations) + self.dtype = dtype + self.shared = operations.Embedding(config_dict["vocab_size"], model_dim, device=device, dtype=dtype) + + def
get_input_embeddings(self): + return self.shared + + def set_input_embeddings(self, embeddings): + self.shared = embeddings + + def forward(self, input_ids, *args, **kwargs): + x = self.shared(input_ids, out_dtype=kwargs.get("dtype", torch.float32)) + if self.dtype not in [torch.float32, torch.float16, torch.bfloat16]: + x = torch.nan_to_num(x) #Fix for fp8 T5 base + return self.encoder(x, *args, **kwargs) diff --git a/src/comfyui/comfy/text_encoders/t5_config_base.json b/src/comfyui/comfy/text_encoders/t5_config_base.json new file mode 100644 index 0000000000000000000000000000000000000000..71f68327c27280ce150d0c8e92fd61eca0b52a63 --- /dev/null +++ b/src/comfyui/comfy/text_encoders/t5_config_base.json @@ -0,0 +1,22 @@ +{ + "d_ff": 3072, + "d_kv": 64, + "d_model": 768, + "decoder_start_token_id": 0, + "dropout_rate": 0.1, + "eos_token_id": 1, + "dense_act_fn": "relu", + "initializer_factor": 1.0, + "is_encoder_decoder": true, + "is_gated_act": false, + "layer_norm_epsilon": 1e-06, + "model_type": "t5", + "num_decoder_layers": 12, + "num_heads": 12, + "num_layers": 12, + "output_past": true, + "pad_token_id": 0, + "relative_attention_num_buckets": 32, + "tie_word_embeddings": false, + "vocab_size": 32128 +} diff --git a/src/comfyui/comfy/text_encoders/t5_config_xxl.json b/src/comfyui/comfy/text_encoders/t5_config_xxl.json new file mode 100644 index 0000000000000000000000000000000000000000..28283b51a11bed6a874499f82d411c16cc646eb1 --- /dev/null +++ b/src/comfyui/comfy/text_encoders/t5_config_xxl.json @@ -0,0 +1,22 @@ +{ + "d_ff": 10240, + "d_kv": 64, + "d_model": 4096, + "decoder_start_token_id": 0, + "dropout_rate": 0.1, + "eos_token_id": 1, + "dense_act_fn": "gelu_pytorch_tanh", + "initializer_factor": 1.0, + "is_encoder_decoder": true, + "is_gated_act": true, + "layer_norm_epsilon": 1e-06, + "model_type": "t5", + "num_decoder_layers": 24, + "num_heads": 64, + "num_layers": 24, + "output_past": true, + "pad_token_id": 0, + "relative_attention_num_buckets": 32, + "tie_word_embeddings": false, + "vocab_size": 32128 +} diff --git a/src/comfyui/comfy/text_encoders/t5_pile_config_xl.json b/src/comfyui/comfy/text_encoders/t5_pile_config_xl.json new file mode 100644 index 0000000000000000000000000000000000000000..ee4e03f97a5b3a9927fc676816f210a364ee234b --- /dev/null +++ b/src/comfyui/comfy/text_encoders/t5_pile_config_xl.json @@ -0,0 +1,22 @@ +{ + "d_ff": 5120, + "d_kv": 64, + "d_model": 2048, + "decoder_start_token_id": 0, + "dropout_rate": 0.1, + "eos_token_id": 2, + "dense_act_fn": "gelu_pytorch_tanh", + "initializer_factor": 1.0, + "is_encoder_decoder": true, + "is_gated_act": true, + "layer_norm_epsilon": 1e-06, + "model_type": "umt5", + "num_decoder_layers": 24, + "num_heads": 32, + "num_layers": 24, + "output_past": true, + "pad_token_id": 1, + "relative_attention_num_buckets": 32, + "tie_word_embeddings": false, + "vocab_size": 32128 +} diff --git a/src/comfyui/comfy/text_encoders/t5_pile_tokenizer/tokenizer.model b/src/comfyui/comfy/text_encoders/t5_pile_tokenizer/tokenizer.model new file mode 100644 index 0000000000000000000000000000000000000000..6c00c742ce03c627d6cd5b795984876fa49fa899 --- /dev/null +++ b/src/comfyui/comfy/text_encoders/t5_pile_tokenizer/tokenizer.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347 +size 499723 diff --git a/src/comfyui/comfy/text_encoders/t5_tokenizer/special_tokens_map.json b/src/comfyui/comfy/text_encoders/t5_tokenizer/special_tokens_map.json new file mode 
100644 index 0000000000000000000000000000000000000000..17ade346a1042cbe0c1436f5bedcbd85c099d582 --- /dev/null +++ b/src/comfyui/comfy/text_encoders/t5_tokenizer/special_tokens_map.json @@ -0,0 +1,125 @@ +{ + "additional_special_tokens": [
+ "<extra_id_0>", + "<extra_id_1>", + "<extra_id_2>", + "<extra_id_3>", + "<extra_id_4>", + "<extra_id_5>", + "<extra_id_6>", + "<extra_id_7>", + "<extra_id_8>", + "<extra_id_9>",
+ "<extra_id_10>", + "<extra_id_11>", + "<extra_id_12>", + "<extra_id_13>", + "<extra_id_14>", + "<extra_id_15>", + "<extra_id_16>", + "<extra_id_17>", + "<extra_id_18>", + "<extra_id_19>",
+ "<extra_id_20>", + "<extra_id_21>", + "<extra_id_22>", + "<extra_id_23>", + "<extra_id_24>", + "<extra_id_25>", + "<extra_id_26>", + "<extra_id_27>", + "<extra_id_28>", + "<extra_id_29>",
+ "<extra_id_30>", + "<extra_id_31>", + "<extra_id_32>", + "<extra_id_33>", + "<extra_id_34>", + "<extra_id_35>", + "<extra_id_36>", + "<extra_id_37>", + "<extra_id_38>", + "<extra_id_39>",
+ "<extra_id_40>", + "<extra_id_41>", + "<extra_id_42>", + "<extra_id_43>", + "<extra_id_44>", + "<extra_id_45>", + "<extra_id_46>", + "<extra_id_47>", + "<extra_id_48>", + "<extra_id_49>",
+ "<extra_id_50>", + "<extra_id_51>", + "<extra_id_52>", + "<extra_id_53>", + "<extra_id_54>", + "<extra_id_55>", + "<extra_id_56>", + "<extra_id_57>", + "<extra_id_58>", + "<extra_id_59>",
+ "<extra_id_60>", + "<extra_id_61>", + "<extra_id_62>", + "<extra_id_63>", + "<extra_id_64>", + "<extra_id_65>", + "<extra_id_66>", + "<extra_id_67>", + "<extra_id_68>", + "<extra_id_69>",
+ "<extra_id_70>", + "<extra_id_71>", + "<extra_id_72>", + "<extra_id_73>", + "<extra_id_74>", + "<extra_id_75>", + "<extra_id_76>", + "<extra_id_77>", + "<extra_id_78>", + "<extra_id_79>",
+ "<extra_id_80>", + "<extra_id_81>", + "<extra_id_82>", + "<extra_id_83>", + "<extra_id_84>", + "<extra_id_85>", + "<extra_id_86>", + "<extra_id_87>", + "<extra_id_88>", + "<extra_id_89>",
+ "<extra_id_90>", + "<extra_id_91>", + "<extra_id_92>", + "<extra_id_93>", + "<extra_id_94>", + "<extra_id_95>", + "<extra_id_96>", + "<extra_id_97>", + "<extra_id_98>", + "<extra_id_99>"
+ ], + "eos_token": { + "content": "</s>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": { + "content": "<pad>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "unk_token": { + "content": "<unk>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +}
diff --git a/src/comfyui/comfy/text_encoders/t5_tokenizer/tokenizer.json b/src/comfyui/comfy/text_encoders/t5_tokenizer/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..b11c92d7184d265f0dc857ec5d676aa81aa16262 --- /dev/null +++ b/src/comfyui/comfy/text_encoders/t5_tokenizer/tokenizer.json @@ -0,0 +1,129428 @@ +{ + "version": "1.0", + "truncation": null, + "padding": null, + "added_tokens": [
+ { + "id": 0, + "content": "<pad>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 1, + "content": "</s>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 2, + "content": "<unk>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32000, + "content": "<extra_id_99>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32001, + "content": "<extra_id_98>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32002, + "content": "<extra_id_97>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32003, + "content": "<extra_id_96>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32004, + "content": "<extra_id_95>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32005, + "content": "<extra_id_94>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32006, + "content": "<extra_id_93>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32007, + "content": "<extra_id_92>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32008, + "content": "<extra_id_91>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32009, + "content": "<extra_id_90>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32010, + "content": "<extra_id_89>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32011, + "content": "<extra_id_88>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32012, + "content": "<extra_id_87>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32013, + "content": "<extra_id_86>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32014, + "content": "<extra_id_85>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32015, + "content": "<extra_id_84>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32016, + "content": "<extra_id_83>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32017, + "content": "<extra_id_82>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32018, + "content": "<extra_id_81>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32019, + "content": "<extra_id_80>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32020, + "content": "<extra_id_79>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32021, + "content": "<extra_id_78>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32022, + "content": "<extra_id_77>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32023, + "content": "<extra_id_76>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32024, + "content": "<extra_id_75>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32025, + "content": "<extra_id_74>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32026, + "content": "<extra_id_73>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32027, + "content": "<extra_id_72>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32028, + "content": "<extra_id_71>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32029, + "content": "<extra_id_70>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32030, + "content": "<extra_id_69>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32031, + "content": "<extra_id_68>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32032, + "content": "<extra_id_67>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32033, + "content": "<extra_id_66>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32034, + "content": "<extra_id_65>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32035, + "content": "<extra_id_64>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32036, + "content": "<extra_id_63>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32037, + "content": "<extra_id_62>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32038, + "content": "<extra_id_61>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32039, + "content": "<extra_id_60>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32040, + "content": "<extra_id_59>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32041, + "content": "<extra_id_58>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32042, + "content": "<extra_id_57>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32043, + "content": "<extra_id_56>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32044, + "content": "<extra_id_55>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32045, + "content": "<extra_id_54>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32046, + "content": "<extra_id_53>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32047, + "content": "<extra_id_52>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32048, + "content": "<extra_id_51>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32049, + "content": "<extra_id_50>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32050, + "content": "<extra_id_49>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32051, + "content": "<extra_id_48>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32052, + "content": "<extra_id_47>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32053, + "content": "<extra_id_46>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32054, + "content": "<extra_id_45>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32055, + "content": "<extra_id_44>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32056, + "content": "<extra_id_43>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32057, + "content": "<extra_id_42>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32058, + "content": "<extra_id_41>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32059, + "content": "<extra_id_40>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32060, + "content": "<extra_id_39>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32061, + "content": "<extra_id_38>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32062, + "content": "<extra_id_37>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32063, + "content": "<extra_id_36>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32064, + "content": "<extra_id_35>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32065, + "content": "<extra_id_34>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32066, + "content": "<extra_id_33>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32067, + "content": "<extra_id_32>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32068, + "content": "<extra_id_31>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32069, + "content": "<extra_id_30>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32070, + "content": "<extra_id_29>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32071, + "content": "<extra_id_28>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32072, + "content": "<extra_id_27>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32073, + "content": "<extra_id_26>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32074, + "content": "<extra_id_25>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32075, + "content": "<extra_id_24>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32076, + "content": "<extra_id_23>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32077, + "content": "<extra_id_22>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32078, + "content": "<extra_id_21>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32079, + "content": "<extra_id_20>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32080, + "content": "<extra_id_19>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32081, + "content": "<extra_id_18>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32082, + "content": "<extra_id_17>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32083, + "content": "<extra_id_16>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32084, + "content": "<extra_id_15>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32085, + "content": "<extra_id_14>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32086, + "content": "<extra_id_13>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32087, + "content": "<extra_id_12>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32088, + "content": "<extra_id_11>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32089, + "content": "<extra_id_10>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32090, + "content": "<extra_id_9>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32091, + "content": "<extra_id_8>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32092, + "content": "<extra_id_7>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32093, + "content": "<extra_id_6>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32094, + "content": "<extra_id_5>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32095, + "content": "<extra_id_4>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32096, + "content": "<extra_id_3>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32097, + "content": "<extra_id_2>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32098, + "content": "<extra_id_1>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + },
+ { + "id": 32099, + "content": "<extra_id_0>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }
+ ], + "normalizer": { + "type": "Sequence", + "normalizers": [ + { + "type": "Precompiled", + "precompiled_charsmap":
"ALQCAACEAAAAAACAAQAAgMz8AgC4BQAAhyIAgMzkAgC4PQAAeyIAgMzsAgC4BQAAiyIAgMw8AADNvAAAmwkAgJ4JAIChCQCAgx0AAIAZAACBGQAAPR0AgDUdAIBNHQCARR0AgIAxAACBMQAApAkAgIkxAAA9WAMAPEgDAEAKAIA+aAMAAYUAAIQBAQADjQAAAokAAAWVAAAEkQAAB50AAAaZAAAJqQAACKEAAAutAAAKpQAADbkAAAy9AAAPvQAADrkAABHFAAAQwQAAE80AABLJAAAV1QAAFNEAABfdAAAW2QAAGeUAABjhAAAb7QAAGukAAB31AAAc8QAAH/0AAB75AABhOAkAZR0AgGNADgBi8AgAZSgPAGSADgBn2A8AZvAPAGlwDABoMAwAa/AMAGrYDABtSA0AbBwNAG8QEgBubA0ARgoAgHAMEwBzqBMAcuwTAHUoEAB0TBAAd9ARAHYUEAB50BYAePQQAF0dAIB69BYAdR0AgG0dAIB/fQEAhgwAgEGAAgDeCwCAQxgAAELAAABFSAAARGAAAEeQBgBGhAEASSgGAEhsAQBLOAcASvAHAE1wBwBMRAcAT/AEAE7MBACnCQCAUCwFAFOgCgBSEAUAVQAKAFRQCgBX0AgAVhALAFlICABYuAgAhBEAAFo8CACA9QAAgZ0AANgLAIAtHQCAg2kCAIJFAgCBNQIAgDUCAIdtAwCGVQMAgTkAAIRlAgAXDACAigEEAInVAwCI7QMAjwkAAKgLAIApDACAjAkAAC8MAICJMQMAkQkAAMzYAABVHQCAfR0AgL0aAIBMCgCAgGUDAIENAwCGPQAAgx0DAMwQAgDNhAEAgikAAMx0AwCjgQYAxRoAgICxAgCBsQIAzRoAgIEpAAClwQAA1RoAgMzoAwDNYAIAUgoAgKjxAABYCgCAXgoAgGQKAIDdGgCAgWkAAMzcBACCEQEA5RoAgGoKAIDtGgCA/RoAgAUbAID1GgCAswkAgMygBADN3AQAzAgBALYJAIClHQCAhhEBAOEAKwDgfCcA44hIAuIMOAKdHQCAh5EBALUdAICtHQCAgNkBAIE1AADMxAIA6kRkApUdAIANGwCA72hkAoERBwCC8QEA8NCLAolVAACB5QEAFRsAgIfhAQCAbQAAgQ0AAIN5AAB2CgCAgXkAAICVAQDMOAEAzRQBAIzBAQB8CgCAvAkAgKMVAQDDlBcAwpwUAMWEFwDEUBcAx+wXAMaAEgCNHQCAiAoAgMvQFgDK4BYAzRQWADUMAIDPvCAAzpwZANHMJADQ2CUA0+gkALFRAQA7DACAp90HAL0dAIDWvCQA2cgnANjUIgDb+CcALRsAgIftBwCCCgCAzPgEAB0bAIAlHQCAh8kGALAJAICR3QcAuQkAgCUbAIBwCgCANRsAgIUdAICMDACAjPkGAAsMAICA1QYAgcEGAMzEAgDNBAUAglEAAIN1BwCArQYAgbkGAIY1BwCHKQcAhEEAAI4KAICn7QAAPRsAgIjpBwCJzQcAlAoAgI/BBwCM3QcAmgoAgOoLAICnXQYAsJ0AAKAKAICmCgCAo0EGAEUbAIBVGwCAfQwAgE0bAIBdGwCArXEGAGUbAIC/CQCAzPgDAM0sAwDCCQCAo+UAAMUJAICMTQAAsgoAgKfxAAC4CgCAsT0GAIedAACGlQAAqB0HAISJAAC+CgCAgqkAAIHVAACtAQcAygoAgJE9AACCmQEAyAkAgM0MBQDMCAUAgT0AAIeFAQCIvQEAdRsAgMUdAICuCwCAjJEBAEEMAIBHDACAzR0AgID1AQCBhQEAgoEBAIOdAQCEiQEAxAoAgIapAQCHXQAAiG0AAIlNAABtGwCAzBACAIxdAACCDQAA0AoAgI9JAACw6QAAfRsAgPALAICjKQEAgCUBAIFVAQCFGwCApzUBAMykAQDNEAIA1goAgI0bAICBNQAA3AoAgK4JAQDoCgCAzOgBAM0oAgCVGwCAo/EAAIQFAACdGwCA4goAgK0bAICotQAApRsAgIFdAAC1GwCAzPwBAM3AAQC9GwCAxRsAgIGFAwARDACAgeUDAO4KAICH6QMAywkAgIylAwDNGwCA+goAgKoJAIDVGwCAgZkDAIHdAwCMvQMAzSQBAMwgAQDMEAIAzTACAIH5AACHUQAAgFUAAIFZAAD0CgCAg0kAAIxBAADlGwCA3RsAgM4JAICBfQAAgHEAAMwgAwDNsAMAo30DANEJAICjEQMA7R0AgIEtAQCx/QAApzEDAK1BAwDlHQCAo20DAP0dAID1HQCA7RsAgKdtAwCANQAAgR0AALFtAwCILQAAmAwAgKeVAACBcQAAgFkAAINxAACj9QAAgVEAAK2BAAD1GwCAsQkDAIldAACEPQAAzDgBAISdAQCBGQAAgAkAAIRlAAD9GwCAzNAHAMzwBwAFHACAkYkAAMxMBgDNBAYAzHAGAM10BgDMQAcAmy0PAMyoBwDNrAcAhg0AAIdVDwCEQQ8ACQsAgIIBDACDVQ8AgDUBAIHZAQCkDACAj+kAAIztAACSDACA3R0AgIv1AACIbQ8AiQ0AAA8LAIC0CwCAgiUAAE0MAICBQQAAUwwAgBUeAIANHgCAJR4AgB0eAIAtHgCABR4AgIApAACBKQAA/AsAgA0cAICEeQAAFRwAgIFNAQCAoQEAGAsAgKP9DwDMOAIAzUgDAB0cAICBWQAAzXwCAMykDQAkCwCAWQwAgKjJDwCHOQAA1wkAgImhDwADCwCAkREAAJ4MAIDaCQCAmQsAgF8MAICAuQ8AgbkPANUdAICDjQ8A9gsAgCUcAICEBQAALRwAgB4LAIA1HACAKgsAgIGdDwCHIQAAh7UPAMyoAgDN6AIAzLQMAM3cDACmzQAAp8UAAE0cAICPgQ8AjIkPAKPlAAAwCwCAPRwAgDwLAICxyQAAhwUAAFUcAIBFHACAhz0AAF0cAIBxDACANgsAgKMFDwCB+QAAzKgDAGUcAIBICwCAjEkAAKPxAABtHACAdwwAgEILAICnlQAAfRwAgHUcAIDMrAMAzcgAAN0JAICHaQAA4AkAgIG9AACCeQAA4wkAgIe5AQBOCwCAkaUAAIEdAACdHACAVAsAgIgFAAClHACAm5EAAFoLAIDmCQCAjJEBANILAIDGCwCAwAsAgMwLAICDRQAAgrkBAIG5AQCApQEAPR4AgIZxAABgCwCAhEkAAIsVAACKPQAAiTkAAIhFAACP+QAAZgsAgLoLAICMBQAAp1EBAKZJAQBlDACAsHkAAKNZAQCMqQAAgKkAAIGpAACBlQAAgJUAAK1xAQBrDACAogsAgISNAABNHgCARR4AgKMhAABdHgCAVR4AgGUeAICBbQAAgG0AALEFAQCkOQAANR4AgIUcAIBsCwCAqAUAAJUcAICNHACArQkAAMywAQCBvQMAgL0DAIPNAwCtHACAtRwAgL0cAIDMvAEAzYQBAInpAwDMHAEAgdkCAIDFAgDNOAEAzDwBAMxoAgDNRAIAg00AAMUcAICH2QAAhy0AAIBFAACBEQAAggUAAHILAIDVHACAzR
wAgN0cAIDMOAIAiBUAAIjhAACAbQAAgTkAAMyEAgDNUAEAo0UDAIQ5AQDlHACA7RwAgMzcAwDNSAIAbR4AgOkJAIB4CwCAhR4AgKoMAICBbQAA9RwAgH4LAICj0QAAfR4AgHUeAIDMiAQAgXUAAIB1AACBCwCAo7UAAMwABADNVAIA/RwAgIcLAICETQEAjQsAgAUdAIANHQCAzNAOAMwsAQDMAAUAzVwFAOwJAIDvCQCAzJgOAIHBAADMzA8AzDwOAMwIAQDNnA4AzNQPAM14DwDMPA4AzTgOAIHlAQCA5QEAg+UBAILlAQDUCQCAhOUBAIfhAQBBHQCAiaUBAIjZAQCByQcAOR0AgFEdAIBJHQCAzDQBAPUJAICA3QAAgekAAEMKAICD/QAAgM0AAIH5AACBEQcAaR0AgGEdAICJ0QAAzCgBAHkdAIBxHQCA4QsAgMw0AQDbCwCAgF0AAIFlAACjAQEAg2EAAIFxAACASQAAMR0AgBoMAICrCwCAiVUAACwMAIAyDACAWR0AgIEdAIDBGgCATwoAgIIdAACDeQcAgBkHAIEZBwCGIQAAhykAAISRBwDyCQCAimkAALHZBgCIaQAAifUHAEkKAICP3QcAjNkHAIkMAID4CQCAKR0AgPsJAICRoQcAgEEHAIFBBwCHBQAAyRoAgIKRBwDRGgCA2RoAgKOVBgCGhQcAp+0AAMyQAgDN4AUAsekAAKPBAABVCgCAWwoAgGEKAIBnCgCA/gkAgKVlBwDhGgCAzLgDAKhVBwDpGgCAbQoAgPEaAIABGwCACRsAgPkaAIABCgCAo60AAAQKAICMJQYABwoAgIxNAACpHQCAgm0AAIE9BgCCAQYAgWUAAKEdAICHZQAAuR0AgIcRBgCHrQEAsR0AgMxQAgDNxAIAgeEBAIDJAQCD4QEAkYkAAID9AQCB1QEAmR0AgIydAQCJNQAAcwoAgIB1AACBXQAAhi0AAIc1AACEfQAAERsAgIKFAQCDfQAAgJ0BAIGRAQAZGwCAj+kAAIzhAAB5CgCAfwoAgAoKAICIDQAAifkAAKc5AQCRHQCAiwoAgDgMAICjJQEAPgwAgLBZAACJHQCAggUAAMEdAICtFQEAjwwAgDEbAICGBQAAhQoAgCEbAIApGwCAp2kAAIANAQCBAQEAhzEAAKNJAACxGQEAzBACADkbAIAODACAkQoAgK1RAADM1AEAzfgBAKhBAABBGwCAzTgBAMw8AQCB7QMAlwoAgJ0KAICMDQAA7QsAgKMKAICBxQMAzGgCAKkKAICCxQMASRsAgITJAwCHKQAAhjEAAFkbAICCbQAAgAwAgFEbAICHYQAAYRsAgGkbAIAVHQCAzKgDAM2sAgCB+QAAiC0AAA0KAIAQCgCAEwoAgIw1AAC1CgCAuwoAgLHVAADBCgCAeRsAgMkdAICxCwCAzDABAEQMAIBKDACA0R0AgMwEAQDHCgCAcRsAgKelAADTCgCAo40AAMwUAgCAuQAAgbkAAKeFAAAIDACAgmUAAIEbAICMNQAA8wsAgMzsHADN/AMAiRsAgK6tAADZCgCAkRsAgMzABgDN0AYAsL0BAMyQBwDfCgCAgckBAMwYHQDNIAIAhBEAAOsKAIDNuAYAzKwGAKEbAIDlCgCAgSkAALEbAICpGwCAo+0BAMxAHQDNEAIAuRsAgMEbAICBCQAAyRsAgMxAHQDN0AIAqNkBABQMAIDMkAcAzBwBAMxgBgDNZAYA8QoAgBwKAIDRGwCAkSkBAP0KAICBzR8A2RsAgPcKAIDpGwCA4RsAgMzEBgDNwAYAgTEAAIDZAAAfCgCAIgoAgIK5AQCDRQEAgLkBAIG5AQCGXQEA8R0AgIRdAQDpHQCAzcAAAMzwAACIARwAiXkBAAEeAICPVQEAjGEBAPkdAICB3R4AgRUfAJkbAICBXR8AjIEfAIdBHwDMGAMAzWgDAIBNHwCBpR8AJQoAgIOpHwCMFR8AjNEeACgKAICHtR8AgJUfAIGZHwCBEQAAg70fAICFHwCBiR8A8RsAgIQ9AACbDACAiZkfAPkbAICIBQAABgsAgAEcAICADQAAgf0AAAkcAICj2R8Ao3keAKOFAAAMCwCArTUfAKdhHgCnqR8AoQwAgIQNAACnDACAozUfACsKAICtiR8AhHEAAKchHwCxPR4AsYUfAJUMAIDhHQCAEgsAgLcLAIDMtBwAzbAcAFAMAICxQR8AVgwAgJwLAIAZHgCAER4AgCkeAIAhHgCAgLkeAIG5HgCCIQEAgzUBAIRhAQAxHgCAhokBAIe9AQCIkQEAiekBANkdAICL/QEAjOUBAIINAAAJHgCAj90BAIO5AQCRrQEAgb0BAIC9AQCAoQEAgaEBAPkLAID/CwCAhD0AABEcAICJlQEAm4EBAIHNHgCAzR4AzPwCAM3wAgCB5QAAGRwAgIHtAACjpQAAzJABAM1cAgCHHQAAGwsAgKj5AAAhHACAJwsAgFwMAIBiDACAKRwAgIQFAAAxHACAo9UAACELAIA5HACAgVEAAMz0AQDN0AEALQsAgIc9AABRHACAMwsAgEEcAIA/CwCAhwUAAFkcAIBJHACAh/EDAIHZAwCBmQMAgZEAAGEcAIB0DACAjPkDAMwkAQCHuQMAgfkDADkLAIDMZAIAgskDAIyZAwBpHACAh9EDAI+RAwCB3QYAkfUDAMwABADN7AMAh2UAABkdAIBLCwCAcRwAgHoMAIBFCwCAzBgBAIg5AACBHACAeRwAgMxcAwCMJQAALgoAgMwsAQCx/QAAozkDADEKAIA0CgCAoRwAgKdZAwDMdAMAiAkAAKNRAwCpHACAXQsAgINtDQCnnQAApq0AAKOdAACxDQMAzCgBANULAICntQAAprUAAMkLAIDMMAEAgdUHAMMLAIDMKAEAzwsAgEEeAIBjCwCArYkAAGkLAICAzQEAgd0BAMxEAQDNnB4AhPUBAL0LAIDMWAEAzUwBAIDtAQCB/QEAg7UAAGgMAICM3QEAbgwAgMwIHgCM8QYAzDgBAM08AQBRHgCAiREAAIEFBgBJHgCAYR4AgFkeAIBpHgCAgz0AAIAhAACBOQAAgDkAAIEhAAA5HgCAiRwAgMwoAQCB2QYAbwsAgIH9BgDMJAEAmRwAgJEcAICxHACAgCEBAIE1AQCjBQAAuRwAgMEcAIDJHACAzIwFAM1AAgC3HAMAdQsAgIfNBwDZHACA0RwAgB0dAIDNiAAAzJAAAIzdBQCjhQAAFgoAgMzgAgDhHACAiNUHAIFNAACATQAAUQsAgOkcAIBXCwCAkTkHADcKAICIxQcApQsAgIrJBwDxHACAmz0AAIflBwBxHgCAgYUHAICFBwA6CgCAgvkHAILVBgCDRQAAgMkGAIHdBgCG4QYAewsAgIRRAACJHgCAipUGAIuZBgCIeQAAiZ0GAK0MAICPWQcAjG0HAPkcAIDMgAMAzSQCALARBwA9CgCAgR4AgCEdAIB5HgCAhAsAgICNAACBnQAAzOwDAM3oBAABHQCAigsAgKNJBwCQCwCACR0AgKO9BwARHQCAGwAAg
2cgCAenIAgLZlHwB+cgCAgnIAgLVtHwC6IR8AuyEfAIZyAICKcgCAviUfAL8pHwC8MR8AvTEfAKihHwCpoR8AqqEfAKuhHwCsoR8AraEfAK6hHwCvoR8AjnIAgJJyAICWcgCAmnIAgJ5yAICicgCApnIAgKpyAIC4rR8AubUfALq9HwC7tR8AvK0fAL1VHwC+UR8Av00fALChHwCxoR8AsqEfALOhHwC0pR8AtakfALadHwC3lR8AoykeAIIZAACBGQAAgLEBAK5yAICmIR4ApSkeALJyAICrZR4AqmUeAIaIAACH/AEAr20eAK5hHgCtdR4ArHUeALZyAICzmR4AunIAgL5yAIC2XQEAwnIAgMZyAIC1sR4AukkBALtJAQDKcgCAznIAgL49AQC/IQEAvDkBAL01AQCoRR4AqVUeAKpVHgCrZR4ArH0eAK2ZAQCuiQEAr4EBAISsAADScgCA1nIAgNpyAIDecgCA4nIAgOZyAIDqcgCAuK0BALllAQC6bQEAu2UBALx9AQC9ZQEAvm0BAL9lAQCwyQEAsckBALKpAQCzpQEAtL0BALWhAQC2oQEAt5UBALhpHAC5oRwAusEcALvBHAC8wRwAvcEcAL7BHAC/wRwAsIkfALGJHwCyIRwAswUcALQdHAC1fRwAtnUcALdtHACoYR8AqWEfAKphHwCrYR8ArNkfAK3ZHwCuyR8Ar8EfAO5yAIDycgCA9nIAgPpyAID+cgCAAnMAgAZzAIAKcwCADnMAgBJzAIC+AAQAo1EdABZzAICleR0AppUCABpzAIAecwCAInMAgKqBAgCrgQIArPECAK39AgCu9QIAr+kCACpzAIDh9AEALnMAgON8AQCATQAAgXUAAIJ9AAAycwCAhsAEAIekBAA2cwCAOnMAgD5zAIBCcwCARnMAgO+MAgCoSQIAqUkCAKpdAgCrVQIArHkCAK15AgCuvQIAr7UCAISgBQBKcwCATnMAgFJzAIC+vAQAVnMAgFpzAIBecwCAuC0BALk1AQC6PQEAuzUBALwtAQC91QEAvt0BAL/NAQCwzQIAsdUCALLdAgCz1QIAtM0CALUVAQC2HQEAtxUBAOGEHgDjbB8A41wfAOFYHgBicwCAZnMAgGpzAIBucwCAcnMAgHZzAIB6cwCAfnMAgOkAAADv9B4A70weAIJzAICzlQIAhnMAgIpzAICOcwCAknMAgLa5AgC1sQIAmnMAgLtRAgC6SQIAhsgEAIesBAC/kQEAvkkCAL1BAgC8SQIAJnMAgKNRBQCecwCAlnMAgKZ9BQCicwCApnMAgKV1BQCqjQUAq5UFAKpzAICucwCAro0FAK9VBgCsjQUArYUFAICJBwCBiQcAgpkHALORBgCycwCAtbkGALapBgC2cwCAunMAgL5zAIC6TQcAu0UHALxdBwC9QQcAvkEHAL9BBwCoQQYAqU0GAKpVBgCrZQYArH0GAK1lBgCubQYAr2UGAMJzAIDGcwCAynMAgM5zAIDScwCA1nMAgNpzAIDecwCAuFkHALlZBwC6aQcAu2kHALx5BwC9eQcAvmUHAL8ZBwCwxQcAsc0HALLFBwCz2QcAtMkHALXJBwC2aQcAt2kHAKPdBwDicwCA5nMAgOpzAIDucwCApuUHAKX1BwDycwCAqwkGAKoBBgD2cwCA+nMAgK8NBgCuDQYArQ0GAKwRBgCAbQAAgQkAAIIZAAD+cwCAAnQAgISYAQC+kAEABnQAgIbAAACH5AEACnQAgA50AIASdACAFnQAgBp0AIAedACAqF0GAKmNAQCqnQEAq5UBAKy5AQCtuQEArskBAK/BAQCEoAAAInQAgCZ0AIAqdACALnQAgDJ0AIA2dACAOnQAgLh5AQC5eQEAus0AALvFAAC83QAAvcUAAL7FAAC/9QAAsIEBALGBAQCySQEAs0kBALRZAQC1WQEAtkkBALdJAQCzFQIAPnQAgEJ0AIBGdACASnQAgLY5AgC1MQIATnQAgLtFAgC6RQIAUnQAgFZ0AIC/nQIAvp0CAL2dAgC8nQIAhXw+AKNRAgBadACAXnQAgKZ9AgBidACAZnQAgKV1AgCqAQIAqwECAGp0AIBudACArtkCAK/ZAgCs2QIArdkCAIDpAACB6QAAggUAAHJ0AIC+AAwAenQAgIeoAwCGvAwAfnQAgIJ0AICGdACAinQAgI50AICSdACAlnQAgJp0AICedACAonQAgKZ0AICqdACA42ABAK50AIDhoAEAsnQAgO+IAgC2dACAunQAgL50AIDCdACAxnQAgMp0AIDOdACAqGkCAKlpAgCqeQIAq3kCAKxpAgCtaQIArr0CAK+1AgC+rAwA0nQAgNZ0AIDadACAgB0AAIEJAACCqQAA3nQAgLhRAQC5WQEAumEBALthAQC8GQEAvRkBAL4NAQC/BQEAsM0CALHVAgCy3QIAs9UCALTNAgC1cQEAtnEBALdxAQDjxAAA4XwHAOF4BgDjvAYA4nQAgIQYDQCGuAwAhzwNAL4sDwDqdACA7nQAgPJ0AIDvEAAA9nQAgPp0AIDvdAYA/nQAgAJ1AIAGdQCAs70CAAp1AIC1rQIAtqUCAA51AIASdQCAFnUAgLpFAgC7XQIAvEUCAL1NAgC+RQIAv/kBAHZ0AIClfQ0ApnUNAOZ0AIAadQCAHnUAgCJ1AICjbQ0ArJUNAK2dDQCulQ0ArykOACZ1AIAqdQCAqpUNAKuNDQCz5Q4ALnUAgDJ1AIA2dQCAOnUAgLblDgC19Q4APnUAgLuhDgC62Q4AQnUAgEZ1AIC/pQ4AvrkOAL2xDgC8uQ4AqBUOAKklDgCqLQ4AqyUOAKw9DgCtJQ4Ari0OAK8lDgCADQAAgRUAAIIdAABKdQCATnUAgFJ1AICEMAMAVnUAgLgpDgC5KQ4AujkOALs5DgC8KQ4AvSkOAL79DwC/9Q8AsF0OALElDgCyLQ4AsyUOALQ9DgC1IQ4AtiUOALcZDgCjpQ8AWnUAgIYoAQCHTAEAXnUAgKalDwCltQ8AYnUAgKvhDwCqmQ8AZnUAgGp1AICv5Q8ArvkPAK3xDwCs+Q8AbnUAgLPpDgBydQCAdnUAgLaRDgB6dQCAfnUAgLXlDgC6sQ4Au7kOAIJ1AICGdQCAvmEBAL9hAQC8mQ4AvZkOAKglDgCpLQ4AqiUOAKs5DgCsKQ4ArVUOAK5dDgCvVQ4AinUAgI51AICSdQCAlnUAgJp1AICedQCAonUAgKZ1AIC49QEAuYEBALqBAQC7gQEAvIEBAL2JAQC+sQEAv7EBALAxDgCxOQ4AsgkOALMJDgC04QEAteEBALbhAQC3zQEAo60NAKp1AICudQCAsnUAgLZ1AICm1Q0ApaENALp1AICr/Q0AqvUNAL51AIDCdQCAryUCAK4lAgCt3Q0ArN0NAIBdAACBbQAAgmUAALNRAwC+nAMAtXkDALYZAwDKdQCAhOACAM51AIC6PQMAuzUDALwZAwC9GQMAvtkDAL/ZAwCohQMAqZUDAKqVAwCrpQMArL0DAK3VAwCu0QMAr9EDAIYABACHNAMAv6AzANJ1AIDWdQ
CA2nUAgN51AIDidQCAuHEDALlxAwC6cQMAu3EDALzVAAC93QAAvtUAAL/NAACwtQMAsb0DALKBAwCzgQMAtFEDALVRAwC2UQMAt1EDAO+oAwDmdQCA6nUAgO51AICEHAIA8nUAgPZ1AID6dQCAviwFAP51AIACdgCABnYAgONAAwAKdgCA4SgAAA52AICjXQIAEnYAgBZ2AIAadgCAHnYAgKYVAgCldQIAInYAgKs5AgCqMQIAJnYAgCp2AICv1QIArtUCAK0VAgCsFQIA4ygBAOEADwDhCA4A4wgOAID9AACBCQAAgjkAAC52AIAydgCAOnYAgD52AIBCdgCA7+gOAEZ2AIBKdgCA72QOALNtAQBOdgCAhugEAIcMBQBSdgCAtm0BALVtAQBWdgCAu+0AALrtAABadgCAXnYAgL/VAAC+6QAAveEAALzpAACoXQYAqWEGAKqlBgCrvQYArKUGAK2tBgCupQYArxkHADZ2AIBidgCAZnYAgGp2AIBudgCAcnYAgHZ2AIB6dgCAuHUHALl5BwC6DQcAuwUHALwdBwC9BQcAvgUHAL81BwCwaQcAsWkHALJ9BwCzdQcAtG0HALVRBwC2UQcAt1EHAKMtBgB+dgCAgnYAgIZ2AICKdgCApi0GAKUtBgCOdgCAq60HAKqtBwCSdgCAlnYAgK+VBwCuqQcAraEHAKypBwCADQAAgRUAAIIdAACadgCAnnYAgKJ2AICEVAMAvlwAAKZ2AICqdgCAhugAAIdMAwCudgCAsnYAgLZ2AIC6dgCAvnYAgOMEBADCdgCA4bQFAMZ2AIDKdgCAznYAgNJ2AIDWdgCA2nYAgN52AIDidgCA5nYAgO/sBADqdgCA7nYAgLPtBgDydgCA9nYAgPp2AID+dgCAtpEGALXhBgACdwCAu40GALqNBgAGdwCACncAgL9BAQC+WQEAvVEBALxZAQCoJQYAqS0GAKolBgCrOQYArCkGAK1RBgCuSQYAr0EGAIDNAACBCQAAghkAAA53AIASdwCAhCwBAL40AAAadwCAuP0BALlBAQC6QQEAu0EBALxBAQC9SQEAvnEBAL9xAQCwCQYAsQkGALLNAQCzxQEAtN0BALXFAQC2zQEAt8UBAIagPACHRAMAHncAgKOhBQAidwCApa0FAKbdBQAmdwCAKncAgL4oPACqwQUAq8EFAKwVAgCtHQIArhUCAK8NAgC2QQMALncAgDJ3AIC1sQIANncAgLOhAgA6dwCAPncAgL5FAwC/TQMAvHUDAL1NAwC6ZQMAu20DAEJ3AIBGdwCASncAgE53AIDGdQCAUncAgFZ3AIBadwCAXncAgGJ3AICoRQIAqVUCAKpdAgCrVQIArE0CAK21AwCusQMAr60DALDVAwCx3QMAstUDALPtAwC09QMAtf0DALb1AwC37QMAuNkDALnZAwC6rQMAu6UDALy9AwC9pQMAvqUDAL+VAwCj9QMAZncAgGp3AIBudwCAcncAgKYVAgCl5QMAdncAgKs5AgCqMQIAencAgH53AICvGQIArhECAK0ZAgCsIQIAgGkAAIFpAACCBQAAgncAgIp3AICOdwCAkncAgO8cAACEbAIA4ZQBAJZ3AIDjyAAAmncAgJ53AICGWDwAh1A9AKJ3AICmdwCAqncAgISEPQCudwCAsncAgLZ3AIDvuAEAvmw8AOF0BgC6dwCA42QBAL53AIDCdwCAxncAgMp3AICz0QEAzncAgNJ3AIDWdwCA2ncAgLaRAQC1+QEA3ncAgLu9AQC6vQEA4ncAgOZ3AIC/dQEAvnUBAL2FAQC8hQEAqL09AKkNPgCqGT4AqxE+AKwxPgCtUT4ArlE+AK9NPgCGdwCAgh0AAIEdAACAHQAA6ncAgO53AIDydwCA9ncAgLjVPgC53T4AutU+ALtJPwC8WT8AvVk/AL5JPwC/QT8AsDk+ALE5PgCyET4AsxE+ALTxPgC18T4AtvU+ALftPgCjkT4A+ncAgIYoAACHwAMA/ncAgKbRPgCluT4AAngAgKv9PgCq/T4ABngAgAp4AICvNT4ArjU+AK3FPgCsxT4ADngAgLOdPwASeACAFngAgLalPwAaeACAHngAgLWtPwC6aT8Au3U/ACJ4AIAmeACAvlk/AL9FPwC8bT8AvWU/ACp4AIAueACAMngAgDZ4AIDjYDwAOngAgOEAPQA+eACA7/w9AEJ4AIBGeACASngAgE54AIBSeACAVngAgFp4AICjGT4AghkAAIEZAACAcQAAXngAgKYhPgClKT4AYngAgKvxPgCq7T4AhCQBAL4kAQCvwT4Art0+AK3hPgCs6T4AqNE+AKnRPgCq0T4Aq+U+AKzhPgCt4T4Arhk+AK8ZPgCGAAAAh4QAAGp4AIBueACAcngAgHZ4AIB6eACAfngAgLh9PgC5AT4AugE+ALsBPgC8AT4AvQk+AL4xPgC/MT4AsGk+ALF1PgCyfT4As3U+ALRZPgC1RT4Atk0+ALdFPgCohQIAqZUCAKqVAgCrpQIArL0CAK3VAgCu0QIAr9ECAIJ4AICGeACAingAgL8k5gGOeACAkngAgJZ4AICaeACAuFUDALlZAwC6bQMAu2UDALx9AwC9ZQMAvm0DAL9lAwCwtQIAsb0CALKBAgCzgQIAtHEDALVxAwC2cQMAt3EDALMdAgCeeACAongAgKZ4AICEiAMAtlUCALU1AgAWdwCAu3kCALpxAgCqeACArngAgL+1AwC+tQMAvVUCALxVAgCyeACAo1kCALZ4AIC6eACAphECAL54AIDCeACApXECAKo1AgCrPQIAxngAgMp4AICu8QMAr/EDAKwRAgCtEQIAqKkCAKmpAgCquQIAq7kCAKypAgCtqQIArjkBAK85AQCAzQEAgQkAAIIZAADOeACA0ngAgL64BQDaeACA3ngAgLjpAQC56QEAuokBALuFAQC8nQEAvYEBAL6BAQC/tQEAsEkBALFVAQCyXQEAs1UBALRNAQC18QEAtvEBALfxAQDvFAAA4ngAgIaoBQCH3AUA5ngAgIRYBADqeACA78Q+AO54AIDhxD4A8ngAgOMwPgDjyAAA9ngAgOEoAQD6eACAtn0CAP54AIACeQCAtXUCAAZ5AICzZQIACnkAgA55AIC+3QEAv2EBALzdAQC91QEAutkBALvFAQASeQCAFnkAgKOxBQDWeACAGnkAgB55AIAieQCApqkFAKWhBQAmeQCAqxEGAKoNBgAqeQCALnkAgK+1BgCuCQYArQEGAKwJBgAyeQCANnkAgDp5AIA+eQCAgBkAAIEZAACCBQAAQnkAgL5sAwBGeQCAhsgAAIccAwBKeQCATnkAgFJ5AIBWeQCAqLkHAKm5BwCqDQcAqx0HAKwJBwCtNQcArjEHAK8pBwCEqAMAWnkAgF55AIBieQCAZnkAgGp5AIBueQCAcnkAgLjJAAC5yQAAutkAALvRAAC8+QAAvfkAAL6ZAAC/mQAAsF0HALEhBwCyIQcAsz0HALQpBwC1KQcAtgEHALcBBwCzhQYAdnkAgHp5AIB+eQCAgnkAgLa1BgC1gQYAh
nkAgLvlBgC6mQYAinkAgI55AIC/7QYAvu0GAL3pBgC89QYAknkAgJZ5AICaeQCAnnkAgKJ5AICmeQCAqnkAgO+QBACueQCA4dwGALJ5AIDj7AUAgCkAAIEVAACCEQAAvnwBAKMFBgC6eQCAhigAAIdMAQC+eQCApjUGAKUBBgDCeQCAq2UGAKoZBgDGeQCAynkAgK9tBgCubQYArWkGAKx1BgDOeQCAs70BANJ5AIDWeQCAtnkBANp5AIDeeQCAtXkBALpVAQC7XQEA4nkAgOZ5AIC++QAAv/kAALxFAQC9+QAAqHECAKlxAgCqcQIAq3ECAKy1AgCtvQIArrUCAK+tAgCE7AwA6nkAgO55AIDyeQCA9nkAgPp5AID+eQCAAnoAgLhpAwC5aQMAugkDALsJAwC8GQMAvRkDAL4JAwC/CQMAsNUCALHdAgCy1QIAs2kDALR5AwC1eQMAtmkDALdhAwAGegCACnoAgA56AICj9QIAEnoAgKUxAgCmMQIAFnoAgBp6AIAeegCAqh0CAKsVAgCsDQIArbEDAK6xAwCvsQMAgGEAAIFhAACCBQAAInoAgIbwDACHYAMAvhAMACp6AIBmeACALnoAgDJ6AIA2egCAOnoAgD56AIBCegCARnoAgKiFAgCplQIAqpUCAKulAgCsvQIArdUCAK7RAgCv0QIASnoAgE56AIBSegCAVnoAgFp6AIBeegCAYnoAgGZ6AIC4dQEAuX0BALp1AQC7zQEAvNUBAL3dAQC+yQEAv8EBALC1AgCxvQIAsoECALOBAgC0VQEAtV0BALZVAQC3TQEA4RAGAIRIDADjDAYAanoAgISYDABuegCAcnoAgHZ6AIB6egCAfnoAgIJ6AICGegCAgXUAAIB1AADvIAEAgnUAAIp6AICOegCAknoAgL7ADACFtA4A4RACAO9cAADjABYA4ZABAJp6AIDjWAEA7zwHAJ56AICiegCAhgAIAIe4DACznQ0AJnoAgKZ6AICqegCArnoAgLbVDQC1tQ0AsnoAgLv5DQC68Q0AtnoAgLp6AIC/GQ4AvhEOAL3VDQC81Q0AvnoAgKPZDQDCegCAxnoAgKaRDQDKegCAznoAgKXxDQCqtQ0Aq70NANJ6AIDWegCArlUOAK9dDgCskQ0ArZENAKhdDgCpYQ4AqmEOAKthDgCsYQ4ArWEOAK5hDgCvYQ4A2noAgN56AIDiegCA5noAgOp6AIDuegCA8noAgPZ6AIC4TQ8AuVEPALpRDwC7UQ8AvHEPAL1xDwC+cQ8Av3EPALDBDwCxwQ8AssEPALPBDwC0wQ8AtcEPALbBDwC3wQ8As+kPAPp6AIC+gAEA/noAgJZ6AIC24Q8AtekPAAJ7AIC7BQ4AugUOAAp7AIAGewCAvwUOAL4FDgC9FQ4AvBUOAIFNAACAQQAA72gNAIJRAACG8AcAh9QBAA57AIASewCAFnsAgIRwAQAaewCAHnsAgOHgDgAiewCA40gNACZ7AICjaQ8AKnsAgC57AIAyewCANnsAgKZhDwClaQ8AOnsAgKuFDgCqhQ4APnsAgEJ7AICvhQ4AroUOAK2VDgCslQ4ARnsAgLMxDgBKewCATnsAgLbBAQBSewCAVnsAgLXRAQC6zQEAu6UBAFp7AIBeewCAvqUBAL+tAQC8sQEAvbEBAI/dJgCj8Q0AYnsAgGZ7AICmAQIAansAgG57AIClEQIAqg0CAKtlAgByewCAviAEAK5lAgCvbQIArHECAK1xAgCfoQwAnnkKAJ1pCgCc0QgAm7E2AJp1NgCZ0TQAmOEyAJdtMgCWZTIAlTU/AJRhPgCTcT4AkjU7AJFxOgCQeToAgJUAAIGdAACCoQAAensAgO9EAgDhdA8AfnsAgOMcDwDj1AEAgnsAgOHgAQDvXAEAo7UCAKJBAACh3Q4AoLkOALWpAwCGewCAhMAEALahAwCG8AUAh+QEALOFAwCKewCAvXEDALxpAwC/QQMAvnEDAI57AIC2eQCAu3EDALp5AwCC3ScAgwE7AL6EBwC+wAYAhhE/AIcZPwCEETsAhV06AIp9PgCLJTMAknsAgJZ7AICOuTUAjxU3AIw1MwCNgTMAkqE3AJPZCQC+xBkAmnsAgJaxDQCXUQ8AlHkLAJVhCwCaBQ8Am5EBAJ57AICiewCApnsAgN0AAACcfQMAqnsAgOFIDwCuewCA4xwOALJ7AIC2ewCAunsAgL57AIDCewCAsUEXALChFwCzqesBsgHoAbUB7AG0EesB74wOAMZ7AICpxR8AqAEcAKsBEACqkR8ArdkTAKzREwCv2RcArgUTAKHxAgDKewCAo8kHAKLBAgClARgApGUHAKehGwCm+RsAqCkFAKldBQCqVQUAq20FAKx5BQCteQUArm0FAK9hBQB2ewCAznsAgNJ7AIDWewCAgA0AAIGxAACCsQAA2nsAgLiJBQC5iQUAup0FALuVBQC8uQUAvbkFAL5RBgC/UQYAsOUFALHtBQCy5QUAs/0FALTtBQC13QUAttUFALe9BQCj3QUA3nsAgOJ7AICEDAAA5nsAgKb5BQCl8QUA6nsAgKspBQCqIQUAhpgAAIegAACvGQUArikFAK0pBQCsMQUA7nsAgLNhBgDyewCA9nsAgLYhBgD6ewCA/nsAgLUBBgC6rQcAu40HAAJ8AIAGfACAvo0HAL9xBwC8lQcAvY0HAL65BQC/uQUAvLkFAL25BQC6uQUAu7kFALi5BQC5uQUAtkkFALdJBQC0fQUAtXUFALJ5BQCzeQUAsBUFALF9BQCuXQUAr20FAKxFBQCtXQUAqqUKAKtdBQCovQoAqa0KAAp8AIAOfACAEnwAgBZ8AIAafACAHnwAgCJ8AIAmfACAqA0HAKkdBwCqLQcAq0kHAKxNBwCtZQcArrEGAK+xBgAqfACALnwAgDJ8AIA2fACAOnwAgD58AIBCfACARnwAgLhVBgC5XQYAulUGALtxBgC8NQYAvfEBAL7xAQC/8QEAsK0GALGNBgCyhQYAs50GALSNBgC1cQYAtnUGALdtBgCjpQQAgi0AAIEVAACAHQAASnwAgKblBAClxQQATnwAgKtJBQCqaQUAUnwAgFp8AICvtQUArkkFAK1JBQCsUQUAhmAcAIcIAwBefACAs4UCAGJ8AIC1gQIAtoECAGZ8AIBqfACAbnwAgLoJAwC7CQMAvBkDAL0ZAwC+CQMAvwkDAKxVAgCtXQIArmECAK9hAgCoDQIAqVUCAKpRAgCrUQIAhKwDAHJ8AIB2fACAenwAgIT8HQB+fACAgnwAgIZ8AIC8cQMAvXEDAL5xAwC/cQMAuHEDALlxAwC6cQMAu3EDALSRAwC1kQMAtpEDALeRAwCwkQMAsZEDALKRAwCzkQMAinwAgI58AICSfACAlnwAgJp8AIDhpAEAnnwAgOOAAQC+aBwAonwAgKZ8AIDv2AYAqnwAgK58AICyfACAtnwAgKOJAwCCLQAAgRUAAIAdAAC6fACApo0DAKWNAwC+fACAqwUCAKoFAgDCfACAynwAgK8FAgCuBQIArRUCAKwVAgCGIBwAh8Qd
AM58AIDSfACA1nwAgNp8AIDefACA72wGAOJ8AIDhbAcA5nwAgON0BwDqfACA7nwAgPJ8AID2fACAs5EBAPp8AID+fACAAn0AgAZ9AIC2sQEAtbkBAAp9AIC7VQEAukkBAA59AIASfQCAv/UAAL71AAC9RQEAvEUBAKNRHgDGfACAFn0AgBp9AIAefQCApnEeAKV5HgAifQCAq5UeAKqJHgAmfQCAKn0AgK81HwCuNR8ArYUeAKyFHgCAbQAAgRUAAIIdAADv/BkALn0AgDJ9AIA2fQCAOn0AgIbAAACHrAMAPn0AgEJ9AIBGfQCA4SwcAEp9AIDjzBwAqK0eAKnNHgCq2R4Aq9EeAKzxHgCt8R4Arj0eAK81HgCE7AAATn0AgFJ9AIBWfQCAWn0AgF59AIBifQCAZn0AgLjRHwC53R8Auu0fALvlHwC84R8AveEfAL7hHwC/4R8AsE0eALFRHgCyUR4As1EeALTxHwC18R8AtvEfALfxHwCobR4AqY0eAKqFHgCrnR4ArIUeAK2NHgCuuR4Ar7UeAGp9AIBufQCAcn0AgHZ9AIB6fQCAfn0AgIJ9AICGfQCAuJ0eALmtHgC6pR4Au0UBALxdAQC9RQEAvkUBAL91AQCw0R4AsdEeALLRHgCz0R4AtLUeALW9HgC2tR4At60eALMNHgCKfQCAjn0AgJJ9AICWfQCAtg0eALUNHgCafQCAuxUeALoVHgCefQCAon0AgL95HgC+cR4AvQUeALwFHgCCbQAAo0keAIBVAACBZQAApkkeAL6cAQCqfQCApUkeAKpRHgCrUR4Ah3wAAIZMAACuNR4Arz0eAKxBHgCtQR4AqF0CAKltAgCqZQIAq30CAKxpAgCtsQIArrECAK+xAgCE7AQArn0AgLJ9AIC2fQCAun0AgL59AIDCfQCAxn0AgLhxAwC5cQMAunEDALtxAwC81QMAvd0DAL7VAwC/zQMAsNECALHRAgCy0QIAs9ECALRRAwC1UQMAtlEDALdRAwCz7QIAyn0AgM59AIC+gAQA0n0AgLYxAgC14QIA1n0AgLsVAgC6FQIA2n0AgN59AIC/lQMAvpUDAL0FAgC8BQIA4n0AgKOpAgDmfQCA6n0AgKZ1AgDufQCA8n0AgKWlAgCqUQIAq1ECAPZ9AID6fQCArtEDAK/RAwCsQQIArUECAKjZAgCpIQEAqiEBAKshAQCsIQEArSEBAK4hAQCvIQEA/n0AgAJ+AIAGfgCAviAEAAp+AIAOfgCAEn4AgBp+AIC4jQEAuZEBALqRAQC7pQEAvL0BAL11AAC+fQAAv3UAALDlAQCx7QEAsvkBALPxAQC02QEAtdkBALa5AQC3tQEA4RgeAB5+AIDjKB8AIn4AgIGlAACApQAAJn4AgIKlAACGAAQAh/QFACp+AIAufgCAMn4AgDZ+AIDvYB4AOn4AgD5+AIBCfgCAhfD0AUZ+AIBKfgCA42QBAE5+AIDhpAEAUn4AgO/IAABWfgCAWn4AgFZ8AICE/AUAXn4AgGJ+AICzKQYAFn4AgGZ+AIBqfgCAbn4AgLYhBgC1KQYAcn4AgLupBgC6oQYAdn4AgHp+AIC/nQYAvp0GAL2lBgC8rQYA4bQHAH5+AIDjeAQAgn4AgIB9AACBEQAAghUAAIZ+AICGwAAAh1gDAIp+AICOfgCAkn4AgJZ+AIDvDAQAmn4AgKOpBgCefgCAon4AgKZ+AICqfgCApqEGAKWpBgCufgCAqykGAKohBgCyfgCAtn4AgK8dBgCuHQYArSUGAKwtBgC6fgCAs0kHAL5+AIDCfgCAtn0HAMZ+AIDKfgCAtXUHALpdBwC7JQcAzn4AgNJ+AIC+IQcAvy0HALw9BwC9MQcAqD0GAKmBBgCqhQYAq5UGAKy5BgCtuQYArqkGAK+pBgDWfgCA2n4AgN5+AIDifgCA5n4AgIK5AACBsQAAgLkAALitBgC5vQYAurUGALtFAQC8XQEAvUUBAL5FAQC/dQEAsN0GALGlBgCyrQYAs6EGALShBgC1rQYAtpkGALeVBgCjDQYA6n4AgO5+AIDyfgCAhJgCAKY5BgClMQYAvpwBAKthBgCqGQYAhggAAId8AQCvaQYArmUGAK11BgCseQYA+n4AgLO1AQD+fgCAAn8AgLZVAQAGfwCACn8AgLWhAQC6cQEAu3kBAA5/AIASfwCAvjEBAL89AQC8UQEAvVEBAKhpAgCpaQIAqnkCAKt5AgCsbQIArZECAK6RAgCvkQIAFn8AgBp/AIAefwCAIn8AgCZ/AIAqfwCALn8AgDJ/AIC4mQIAua0CALqlAgC7bQMAvHUDAL19AwC+dQMAv20DALDxAgCx+QIAssECALPBAgC0sQIAtb0CALa1AgC3qQIANn8AgDp/AIA+fwCAo/0CAEJ/AICl6QIAph0CAEZ/AIBKfwCATn8AgKo5AgCrMQIArBkCAK0ZAgCueQIAr3UCAFJ/AIBWfwCAWn8AgIQADACAGQAAgQkAAII5AABefwCAYn8AgGp/AIBufwCAvuAMAHJ/AIB2fwCAhlgNAIcMAwCowQIAqc0CAKrFAgCr2QIArMkCAK39AgCu9QIArz0BAHp/AIB+fwCAgn8AgIZ/AICKfwCAjn8AgJJ/AIC+MAwAuMUBALnNAQC62QEAu9EBALzxAQC98QEAvpkBAL+ZAQCwRQEAsU0BALJFAQCzXQEAtEUBALVNAQC2RQEAt/0BAOE4BgCWfwCA42wGAJp/AICefwCAon8AgKZ/AICqfwCAhKgNAK5/AICyfwCAtn8AgL6wDwC6fwCA72wGAL5/AIDCfwCApn0AgMZ/AIDKfwCA41AAAM5/AIDhoAEA0n8AgO+EAADafwCAhyANAIZMDwCAPQAAgSEAAIIlAADefwCAs80NAGZ/AIDWfwCA4n8AgOZ/AIC2/Q0AtcENAOp/AIC7CQ4AugEOAO5/AIDyfwCAvwkOAL4BDgC9CQ4AvBEOAPZ/AIDjmAwA+n8AgOH8DwD+fwCAAoAAgAaAAIAKgACADoAAgBKAAIAWgACAGoAAgB6AAIDvYAwAIoAAgCaAAICjTQ0AKoAAgC6AAIAygACANoAAgKZ9DQClQQ0AOoAAgKuJDgCqgQ4APoAAgEKAAICviQ4AroEOAK2JDgCskQ4Agm0AALM1DgCAVQAAgWUAALb1DwCE3AMARoAAgLX9DwC60Q8Au9EPAIYABACH3AAAvn0PAL9lDwC8wQ8AvXkPAKjlDwCp7Q8AqvkPAKv5DwCsMQ4ArTEOAK4xDgCvMQ4ASoAAgE6AAIBSgACAVoAAgFqAAIBegACAYoAAgGaAAIC43Q4AueEOALrhDgC74Q4AvOUOAL3pDgC+mQ4Av5UOALBRDgCxUQ4AslEOALPpDgC0/Q4AteUOALbtDgC35Q4Ao3EPAGqAAIBugACAcoAAgHaAAICmsQ4ApbkOAHqAAICrlQ4AqpUOAH6AAICCgACAryEOAK45DgCtPQ4ArIUOAIaAAICzyQEAioAAgI6AAIC2+QEAkoAAgJaAAIC1wQEAuqkBALu1AQCagACAnoAAgL6
tAQC/lQEAvK0BAL2lAQCo5Q0AqfkNAKoFAgCrHQIArA0CAK09AgCuNQIAr10CAKKAAICmgACAqoAAgK6AAICAGQAAgRkAAIIFAACygACAuC0CALk1AgC6MQIAuzECALzVAgC93QIAvtUCAL/NAgCwKQIAsTUCALI9AgCzNQIAtC0CALUVAgC2HQIAtxUCALqAAICEnAIAvoAAgKOBAgDCgACApYkCAKaxAgDGgACAhiAEAIfUAwCq4QIAq/0CAKzlAgCt7QIAruUCAK/dAgC29QMAvkQDAIWM/QG1/QMAyoAAgLP9AwDOgACA0oAAgL59AwC/TQMAvGUDAL19AwC6dQMAu30DANaAAIDagACA3oAAgOKAAICEBAIAoyUCAOaAAIClJQIApi0CAOqAAIDugACA8oAAgKqtAgCrpQIArL0CAK2lAgCupQIAr5UCAPaAAID6gACA/oAAgAKBAIAGgQCA48ADAAqBAIDhrAEADoEAgO9YAwASgQCAFoEAgIANAACB5QAAgu0AABqBAIDhYA8A40ABAOM4DgDheA4AHoEAgCKBAIC+lAUAKoEAgIYABACHZAUALoEAgDKBAIA2gQCA7/wOAO98DgA6gQCAs1EBAD6BAID2fgCAQoEAgEaBAIC2DQEAtQkBAEqBAIC74QAAuhkBAE6BAIBSgQCAv9EAAL7pAAC96QAAvPkAALaAAIAmgQCAVoEAgFqBAIBegQCAYoEAgGaBAIBqgQCAqKEGAKmtBgCquQYAq7EGAKzhBgCt7QYAruUGAK/FBgCwvQYAsUUHALJNBwCzXQcAtE0HALV1BwC2fQcAtx0HALglBwC5LQcAuiUHALs9BwC8KQcAvRUHAL4RBwC/EQcAoxEGAG6BAIBygQCAdoEAgHqBAICmTQYApUkGAH6BAICroQcAqlkGAIKBAICGgQCAr5EHAK6pBwCtqQcArLkHAIANAACBFQAAgh0AAIqBAICOgQCAkoEAgISUAwC+lAMAloEAgJqBAICGyAAAh4wAAJ6BAICigQCApoEAgKqBAIConQYAqa0GAKqlBgCrvQYArK0GAK3RBgCu1QYAr80GAK6BAICygQCAtoEAgLqBAIC+gQCAwoEAgMaBAIDKgQCAuF0BALnBAQC6wQEAu8EBALzBAQC9yQEAvvEBAL/xAQCwvQYAsY0GALKFBgCzZQEAtH0BALVlAQC2bQEAt2UBALMtBgDOgQCA0oEAgNaBAIDagQCAtlEGALUlBgDegQCAu0kGALp5BgDigQCA5oEAgL+hAQC+uQEAvbEBALxRBgDqgQCAo2kGAO6BAIDygQCAphUGAPaBAID6gQCApWEGAKo9BgCrDQYA/oEAgAKCAICu/QEAr+UBAKwVBgCt9QEAutUHALvdBwC4wQcAucEHAL4xBAC/MQQAvPEHAL3xBwCyrQcAs7UHALCtBwCxpQcAtp0HALf1BwC0pQcAtZUHAKppBwCraQcAqGkHAKlpBwCuaQcAr2kHAKxpBwCtaQcAgLkDAIGNAwCChQMAhKgDAIZQ/AGHCAMAvjQDAAqCAICoZQIAqXUCAKp9AgCrdQIArG0CAK21AwCuvQMAr7UDAA6CAIASggCAFoIAgBqCAIAeggCAIoIAgCaCAIAqggCAuFEDALlZAwC6YQMAu2EDALwRAwC9HQMAvhUDAL8JAwCwzQMAsdUDALLdAwCz1QMAtM0DALVxAwC2cQMAt3EDAC6CAIAyggCAs/0DADaCAIC17QMAOoIAgD6CAIC2PQIAQoIAgEaCAIC7GQIAugECAL0JAgC8AQIAv70CAL4BAgBKggCAToIAgITE/QG+wPwBUoIAgFaCAIBaggCA79wDAF6CAIDhlAEAYoIAgOMQAwBmggCAgu0AAIHtAACA7QAA4TgGAOE8BwDjQAEA45QGAGqCAIBuggCAcoIAgHqCAICGgPwBh+j9AX6CAICCggCAhoIAgIqCAIDvnAEA79wGAKM1AwCOggCAkoIAgJaCAICaggCApvUCAKUlAwCeggCAq9ECAKrJAgCiggCApoIAgK91AgCuyQIArcECAKzJAgB2ggCAqoIAgK6CAICyggCA76T9AbaCAIC6ggCAvoIAgON4/QHCggCA4UD8AcaCAIDKggCAzoIAgNKCAIDWggCAs+X+AYItAACBFQAAgB0AANqCAIC25f4BtfX+Ad6CAIC7Yf8Butn+AeKCAICE5AMAv2n/Ab5h/wG9df8BvHn/Aaj9/gGpJf4Bqi3+Aasl/gGsPf4BrSX+Aa4t/gGvJf4BviwAAOaCAICGiAAAh+wAAOqCAIDuggCA8oIAgPaCAIC4gf8BuYH/AbqZ/wG7mf8BvIn/Ab21/wG+sf8Bv63/AbBd/gGx5f8Bsu3/AbPh/wG05f8Bte3/AbbZ/wG32f8Bo6X/AfqCAID+ggCAAoMAgAaDAICmpf8BpbX/AQqDAICrIf4Bqpn/AQ6DAIASgwCAryn+Aa4h/gGtNf4BrDn+ARaDAICz6f4BGoMAgB6DAIC2lf4BIoMAgCaDAIC16f4BurH+Abu5/gEqgwCALoMAgL51AQC/fQEAvJH+Ab2R/gGoHf4BqS3+Aaol/gGrPf4BrCX+Aa1R/gGuUf4Br1H+ATKDAIA2gwCAOoMAgD6DAIBCgwCARoMAgEqDAIBOgwCAuNkBALnZAQC67QEAu+EBALzhAQC94QEAvuEBAL/hAQCwMf4BsTn+AbIB/gGzAf4BtPUBALX9AQC29QEAt+kBAKOt/QFSgwCAvkwDAFqDAIBegwCAptH9AaWt/QFigwCAq/39Aar1/QFmgwCAaoMAgK85AgCuMQIArdX9AazV/QGA+QMAgfkDAIJNAACFdCAAboMAgITYAwCE1AQAcoMAgIZABACHVAMAdoMAgHqDAIB+gwCAgoMAgIaDAIC+8AUAqDECAKkxAgCqMQIAqzECAKyVAwCtnQMArpUDAK+NAwCKgwCAjoMAgJKDAICWgwCAhHwHAJqDAICegwCAooMAgLipAwC5qQMAumkDALtpAwC8eQMAvXkDAL5pAwC/aQMAsP0DALHNAwCyxQMAs60DALS5AwC1uQMAtq0DALelAwCmgwCAqoMAgK6DAICygwCAtoMAgLqDAIDv6AMAvoMAgOGQAQDCgwCA42wDAMqDAICAJQAAgSkAAIIdAADOgwCAs/kDANKDAICGaAcAh1wFANaDAIC2XQIAtV0CANqDAIC7SQIAunkCAN6DAIDigwCAvz0CAL49AgC9OQIAvFECAOaDAIDhPP4BvkAGAOPwAQDqgwCA7oMAgPKDAID2gwCA+oMAgP6DAIAChACABoIAgAaEAIAKhACADoQAgO/kAQAShACAFoQAgKNxAwAahACApdUCAB6EAIAihACAptUCACaEAIAqhACAq8ECAKrxAgCtsQIArNkCAK+1AgCutQIA4dz8AcaDAIDjUAQA74gEAID1BwCBCQAAgj0AAC6EAICEJAEAMoQAgDaEAIA6hACAPoQAgOFMBADv5BwA43QEALNdBgBChACAhgAMAIfgAwBGhACAtgUGALV1Bg
BKhACAuxEGALoJBgBOhACAUoQAgL/VBgC+1QYAvQEGALwJBgCojQYAqZUGAKqVBgCrpQYArL0GAK3FBgCuxQYAr/UGAFaEAIBahACAXoQAgGKEAIBmhACAaoQAgG6EAIByhACAuHUGALl9BgC6dQYAu80HALzVBwC93QcAvtUHAL/NBwCwjQYAsZUGALKdBgCzlQYAtFEGALVRBgC2UQYAt1EGAKMdBwCPFewBdoQAgHqEAIB+hACApkUHAKU1BwCChACAq1EHAKpJBwCGhACAioQAgK+VBwCulQcArUEHAKxJBwCeRfkBn6X5AZyR/QGdTfkBmlX9AZtd/QGYBfEBmZX+AZal8gGXYfEBlG31AZU19QGS4ekBk4X2AZBV7AGRXekBsbEdALClHQCziRkAskEcALUBJAC09RkAjoQAgJKEAICWhACAgqkDAIGhAwCAaQAAohUFAKMFAgCgFQYAob0FAKHFAQCahACAo80NAKLlAQClAQgApN0NAKfRCQCm2QkAqQEUAKilCACrxRQAqs0VAK3REQCsARAArwEcAK51EQCCEe8BgynvAZ6EAICihACAhuH1AYcR9gGEOeoBhY3qAYp59gGL4fEBvqQMAKqEAICO+f0BjzH+AYw98gGNYfIBkkn+AZOd/gGHCAwAhmwMAJax+gGX+QUAlFn6AZVZ+gGaYQYAm8EGAK6EAICyhACAtoQAgLqEAICcyQEAvoQAgKitBQCpuQUAqs0FAKvdBQCszQUArf0FAK71BQCvHQUAwoQAgMaEAIDKhACAzoQAgNKEAIDWhACA2oQAgN6EAIC4dQUAuX0FALoJBQC7CQUAvB0FAL0BBQC+AQUAvz0FALBxBQCxcQUAsnEFALNxBQC0UQUAtVEFALZRBQC3TQUAs0UEAOKEAIDmhACA6oQAgO6EAIC2fQQAtUUEAPKEAIC7tQQAurUEAPaEAID6hACAv5UEAL6VBAC9pQQAvKUEAP6EAICjAQQAAoUAgAaFAICmOQQACoUAgA6FAIClAQQAqvEEAKvxBAAShQCAhOwNAK7RBACv0QQArOEEAK3hBADh0AYAhAwMAOMoBwC+AAwAGoUAgO9EAwCGuAwAhywNAB6FAIDjlAEAIoUAgOH8AQBWgwCAJoUAgO/IBgAqhQCALoUAgDKFAICzjQMANoUAgLWNAwA6hQCAPoUAgLa1AwBChQCARoUAgLtBAwC6SQMAvUEDALxZAwC/QQMAvkkDAKNFDACmhACAFoUAgEqFAIBOhQCApn0MAKVFDABShQCAq4kMAKqBDABWhQCAWoUAgK+JDACugQwArYkMAKyRDACAFQ8AgR0PAIIhDwCzIQ4AXoUAgLUhDgC2JQ4AYoUAgGaFAIBqhQCAusEOALvBDgC8wQ4AvcEOAL7BDgC/wQ4AqK0OAKntDgCq5Q4Aq/0OAKzlDgCt6Q4ArjkOAK85DgBuhQCAcoUAgHaFAIB6hQCAgB0AAIEJAACCvQEAfoUAgLjNDwC51Q8AutUPALvlDwC8/Q8AvZUPAL6RDwC/kQ8AsEkOALFJDgCyWQ4As1kOALRJDgC1SQ4Atv0PALf1DwCjbQ8AgoUAgL6EAQCKhQCAjoUAgKZpDwClbQ8AkoUAgKuNDwCqjQ8AhogAAIdsAQCvjQ8Aro0PAK2NDwCsjQ8AloUAgLPtDgCahQCAnoUAgLaRDgCihQCApoUAgLXhDgC6tQ4Au70OAKqFAICuhQCAvn0BAL9lAQC8mQ4AvZkOAKgRDgCpJQ4AqiEOAKs5DgCsLQ4ArVUOAK5dDgCvUQ4AhKgAALKFAIC2hQCAuoUAgL6FAIDChQCAxoUAgMqFAIC47QEAuZUBALqVAQC7rQEAvLUBAL11AQC+fQEAv3UBALA1DgCxPQ4AsgkOALMJDgC0/QEAteUBALblAQC31QEAo6kNAM6FAIDShQCA1oUAgNqFAICm1Q0ApaUNAN6FAICr+Q0AqvENAOKFAIDmhQCAryECAK45AgCt3Q0ArN0NAIANAACBFQAAgh0AAOqFAIDuhQCA8oUAgIeQAwCGfAQAvuwEAPqFAID+hQCAAoYAgAaGAIAKhgCADoYAgBKGAICyLQ4AszUOALAtDgCxJQ4Ati0OALedDwC0LQ4AtSUOALq9DwC7jQ8AuKUPALm9DwC+LQ8AvxUPALyVDwC9JQ8AFoYAgBqGAIAehgCAIoYAgCaGAIAqhgCALoYAgDKGAICqpQ4Aq7UOAKjFDgCp3Q4Arp0OAK9VDgCspQ4ArZUOAKgNAgCpFQIAqhUCAKtNAgCsWQIArVkCAK5NAgCvRQIAhKgFADaGAIA6hgCAPoYAgIS4BABChgCARoYAgEqGAIC4/QIAuUEBALpBAQC7QQEAvEEBAL1JAQC+cQEAv3EBALAJAgCxCQIAss0CALPFAgC03QIAtcUCALbNAgC3xQIA4dQPAOMQDgDj9A4A4QwOAE6GAIBShgCAVoYAgFqGAIBehgCAYoYAgL4kBABqhgCA7AAAAO9EAADvzA4AboYAgIJlAACz2QIAgFUAAIFtAAC2nQIAcoYAgHaGAIC1lQIAuokCALuJAgCGqAQAh+AEAL5dAgC/RQIAvF0CAL1VAgCjHQUA9oUAgGaGAIB6hgCAfoYAgKZZBQClUQUAgoYAgKtNBQCqTQUAhoYAgIqGAICvgQUArpkFAK2RBQCsmQUAjoYAgLMpBgCShgCAloYAgLYpBgCahgCAnoYAgLUpBgC6pQYAu60GAKKGAICmhgCAvqUGAL+tBgC8tQYAva0GAKjlBgCp7QYAquUGAKv9BgCs5QYAre0GAK7lBgCvXQYAqoYAgK6GAICyhgCAtoYAgLqGAIC+hgCAwoYAgMaGAIC46QcAuekHALr9BwC79QcAvO0HAL1FBwC+TQcAv0UHALAlBgCxLQYAsiUGALM9BgC0JQYAtS0GALYlBgC32QcAo20HAIItAACBFQAAgB0AAMqGAICmbQcApW0HAM6GAICr6QcAquEHANKGAIC+oAEAr+kHAK7hBwCt6QcArPEHANaGAICzkQYAhugAAIcsAQC2QQEA2oYAgN6GAIC1UQEAuk0BALslAQDihgCA5oYAgL4lAQC/LQEAvDEBAL0xAQCwrQEAscUBALLBAQCzwQEAtMUBALXNAQC28QEAt/EBALgBAQC5AQEAugEBALsBAQC8AQEAvQEBAL4BAQC/AQEA6oYAgO6GAIDyhgCA9oYAgIaFAID6hgCA/oYAgAKHAICoTQYAqVkGAKo9BgCrNQYArP0BAK3lAQCu5QEAr9UBAKPVBQAGhwCACocAgA6HAIAShwCApgUCAKUVAgAWhwCAq2ECAKoJAgAahwCAHocAgK9pAgCuYQIArXUCAKx1AgAihwCAJocAgCqHAIAuhwCAMocAgOFkBQA2hwCA4+wFAIARAACBEQAAghEAAO/0BgA6hwCAPocAgEKHAIC+MAMAhMQCAEqHAICz4QMAhMAcALVRAwBOhwCAUocAgLZZAwBWhwCAWocAgLtxAwC6eQMAvbUAALxpAwC/tQAAvrUAAF6HAIDhl
AEAYocAgONcAgCGcBwAh0QDAGaHAIBqhwCAbocAgHKHAIB2hwCAeocAgH6HAICChwCAhocAgO94AgCoVQIAqV0CAKphAgCrYQIArNECAK3RAgCu0QIAr9ECAIqHAICOhwCAkocAgJaHAICahwCAnocAgKKHAICmhwCAuGkBALlpAQC6CQEAuwkBALwZAQC9GQEAvgkBAL8FAQCwtQIAsb0CALK1AgCzaQEAtHkBALV5AQC2aQEAt2EBAOHEBwDjpAYA47gGAOF8BgCADQAAgTUAAII9AACqhwCArocAgLKHAIC+4B0AuocAgL6HAIDvYAAA7+gGAMKHAICjqQIAxocAgMqHAIDOhwCA0ocAgKYRAgClGQIA1ocAgKs5AgCqMQIAhkgcAIfMHACv/QEArv0BAK39AQCsIQIAqIUeAKmRHgCqkR4Aq60eAKy1HgCt1R4ArtEeAK/FHgC2hwCA2ocAgN6HAIDihwCA5ocAgOqHAIDuhwCA8ocAgLhhHwC5YR8AumEfALthHwC8YR8AvWEfAL5hHwC/YR8AsL0eALGFHgCyjR4As4UeALSdHgC1hR4Ato0eALeFHgCzGR4A9ocAgPqHAID+hwCAAogAgLZVHgC1PR4ABogAgLtBHgC6eR4ACogAgA6IAIC/QR4AvlkeAL1RHgC8WR4AEogAgKNdHgAWiACAGogAgKYRHgAeiACAIogAgKV5HgCqPR4AqwUeAISkAwC+qAMArh0eAK8FHgCsHR4ArRUeAKitHgCptR4AqrUeAKvJHgCs2R4ArdkeAK7JHgCvwR4AgO0BAIHxAQCC8QEAJogAgIaQAACHdAEAKogAgC6IAIC4yQEAuckBALrZAQC70QEAvPkBAL35AQC+mQEAv5UBALBFAQCxTQEAskUBALNdAQC0RQEAtU0BALZFAQC3+QEAsz0eADKIAIA2iACAOogAgD6IAIC2WR4AtVEeAEKIAIC7iQEAuoEBAEaIAIBKiACAv4kBAL6BAQC9iQEAvJEBAE6IAIBSiACAo3UeAFaIAIClGR4AWogAgF6IAICmER4ARocAgGKIAICrwQEAqskBAK3BAQCs2QEAr8EBAK7JAQBmiACAaogAgG6IAIByiACAdogAgIQYAgB6iACAfogAgIKIAICGiACAiogAgI6IAICSiACAmogAgJ6IAIC+cAMAgGkAAIFpAACCeQAAhAAEAIbwBACHdAMAoogAgO8MHwCmiACA4aweAKqIAIDj8B4ArogAgLKIAIC2iACAuogAgL6IAIDCiACAxogAgMqIAIDvVAIAzogAgNKIAIDWiACA46QCANqIAIDhgAEA3ogAgOKIAIDmiACA6ogAgO6IAICzRQMA8ogAgPaIAID6iACA/ogAgLZFAwC1VQMAAokAgLshAwC6SQMAvqAEAAqJAIC/KQMAviEDAL01AwC8OQMAqDkCAKk5AgCqjQIAq4UCAKydAgCthQIAroUCAK+1AgCA7QEAgfUBAIL1AQAOiQCAhpAEAIcEBQASiQCAFokAgLhFAQC5TQEAukUBALtdAQC8SQEAvUkBAL55AQC/eQEAsM0CALGlAgCyrQIAs6ECALSlAgC1rQIAtp0CALd9AQAaiQCAHokAgCKJAIAmiQCAKokAgC6JAIAyiQCA74gBAITsBADhVB4ANokAgONUAQA6iQCAPokAgEKJAIBGiQCAo0UCAEqJAIBOiQCAUokAgFaJAICmRQIApVUCAFqJAICrIQIAqkkCAF6JAIBiiQCArykCAK4hAgCtNQIArDkCAKg1BgCpPQYAqlEGAKttBgCseQYArWUGAK5tBgCvZQYABokAgGaJAIBqiQCAbokAgIAZAACBGQAAggUAAHKJAIC45QYAuekGALr5BgC7+QYAvOkGAL3pBgC+nQYAv5UGALAdBgCx5QYAsu0GALPlBgC0/QYAteEGALbhBgC34QYAs9kGAL7QAwB2iQCAeokAgH6JAIC25QYAtfEGAIKJAIC7IQYAutkGAIaYAACHeAMAvyUGAL45BgC9MQYAvDkGAIaJAICjnQYAiokAgI6JAICmoQYAkokAgJaJAICltQYAqp0GAKtlBgCaiQCAnokAgK59BgCvYQYArH0GAK11BgCo7QcAqSkGAKoxBgCrMQYArJEGAK2RBgCukQYAr5EGAKKJAICmiQCAqokAgK6JAICyiQCAtokAgLqJAIC+iQCAuIUGALmNBgC6hQYAu50GALyNBgC9vQYAvrUGAL95AQCw8QYAsfEGALLxBgCzxQYAtMEGALXBBgC2wQYAt8EGALO5BgDCiQCAxokAgMqJAIDOiQCAthEGALUZBgDSiQCAuzUGALo1BgDWiQCA2okAgL8FBgC+BQYAvREGALwlBgClQQYA3okAgOKJAICmSQYAgRUAAIB5AACj4QYAghUAAK1JBgCsfQYAr10GAK5dBgCENAEAlogAgKttBgCqbQYAvswDAOqJAICzlQIA7okAgLXZAgDyiQCA9okAgLbRAgCGgAwAhzgDALvFAgC6xQIAvRUDALwVAwC/FQMAvhUDAPqJAID+iQCA71gGAIRAAwACigCABooAgAqKAIAOigCAEooAgBaKAIAaigCAHooAgOE4BgAiigCA4yQGAL5wDACsSQIArUkCAK5dAgCvVQIAqB0CAKkFAgCqBQIAq10CAISoDAAmigCAKooAgC6KAIC+vA0AMooAgDaKAIA6igCAvE0DAL1VAwC+VQMAv2UDALjpAwC56QMAul0DALtVAwC0yQMAtckDALbZAwC32QMAsBkCALEZAgCy2QMAs9kDAD6KAIDj5AAAQooAgOG8AQBGigCAgj0AAIE9AACAPQAASooAgE6KAIBSigCAWooAgF6KAIDvzAMAYooAgGaKAICj3QMAaooAgIboDACHYA0AbooAgKaZAwClkQMAcooAgKuNAwCqjQMAdooAgHqKAICvXQIArl0CAK1dAgCsXQIAfooAgIKKAICGigCAiooAgI6KAICSigCAlooAgO/gAQCEvAwA4YwGAJqKAIDjHAYAnooAgKKKAICmigCAqooAgLPVAQCuigCAsooAgLaKAIC6igCAtpEBALWZAQC+igCAu70BALq9AQDCigCAyooAgL+dAQC+nQEAvZ0BALydAQCoBQ4AqQkOAKodDgCrFQ4ArFEOAK1RDgCuSQ4Ar0kOAFaKAICCzQ8AgfUPAID9DwDGigCAzooAgIYcAACHsAMAuOkOALnpDgC6/Q4Au/UOALztDgC9VQ8AvlEPAL9NDwCwOQ4AsTkOALIJDgCzCQ4AtBkOALUZDgC2DQ4At9kOAKOVDgDSigCA1ooAgNqKAIDeigCAptEOAKXZDgDiigCAq/0OAKr9DgDmigCA6ooAgK/dDgCu3Q4Ard0OAKzdDgDuigCAs/0PAPKKAID2igCAtoEPAPqKAID+igCAtZkPALqNDwC7ZQ8AAosAgAaLAIC+fQ8Av2UPALx9DwC9dQ8AqC0OAKk1DgCqMQ4AqzEOAKxVDgCtRQ4ArkUOAK91DgAKiwCADosAgBKLAIAWiwCA
GosAgB6LAIAiiwCAJosAgLjpDgC59Q4Auv0OALv1DgC87Q4AvZEOAL6RDgC/kQ4AsA0OALHlDgCy7Q4As+UOALT9DgC15Q4Atu0OALflDgCjuQ4Agi0AAIEVAACAHQAAKosAgKbFDgCl3Q4ALosAgKshDgCqyQ4AMosAgL4sAQCvIQ4ArjkOAK0xDgCsOQ4AOosAgLZVAQC1RQEANosAgLNVAQA+iwCAhngAAIdcAAC/OQEAvjEBAL0lAQC8JQEAuzEBALpZAQDmiQCAQosAgEaLAIBKiwCAhAQDAKOJAgBOiwCApZkCAKaJAgBSiwCAvyg5AFaLAICqhQIAq+0CAKz5AgCt+QIAru0CAK/lAgDjWAIA78AOAOGIAQBaiwCAXosAgGKLAIBmiwCAaosAgG6LAIByiwCAdosAgHqLAIDvKAIA4ygOAH6LAIDhRA4AqbUCAKhpDQCrAQIAqgkCAK0BAgCsGQIArzECAK4BAgC+AAQAgosAgIaLAICKiwCAjosAgJKLAICWiwCAmosAgLnlAwC45QMAu+UDALrlAwC95QMAvOUDAL/lAwC+5QMAsSECALBJAgCzJQIAsiUCALUpAgC0IQIAtxUCALYVAgCowQIAqdECAKr1AgCrDQEArBUBAK0FAQCuBQEArzkBAJ6LAICiiwCAqosAgK6LAICyiwCAtosAgLqLAIC+iwCAuC0BALk9AQC67QEAu+UBALz9AQC95QEAvu0BAL/lAQCwLQEAsTUBALI9AQCzNQEAtC0BALUVAQC2HQEAtxUBAIA9AQCBpQAAgq0AAO/YAACGsAUAh9gFAMKLAIDv1A8AhGwEAOH0DgDGiwCA4xwPAMqLAIDhlAEAzosAgOMMDgCzPQIA0osAgNaLAIDaiwCA3osAgLbFAQC13QEA4osAgLuxAQC6qQEA5osAgOqLAIC/kQEAvqkBAL2hAQC8qQEAposAgO6LAICqRQYAq10GAKxFBgCtTQYArkUGAK99BgDyiwCA9osAgPqLAICj0QUA/osAgKUxBgCmKQYAAowAgAaMAICCHQAAgR0AAIAdAAAKjACADowAgBKMAIC+lAMAFowAgBqMAICGSAMAh8wDAB6MAIAijACAJowAgCqMAICoqQcAqakHAKq5BwCruQcArKkHAK2pBwCuAQcArzUHAC6MAIAyjACANowAgDqMAIA+jACAQowAgEaMAIBKjACAuC0HALnBAAC66QAAu+kAALz5AAC95QAAvuUAAL+dAACwUQcAsV0HALItBwCzJQcAtD0HALUlBwC2JQcAtxUHALMxBgBOjACAUowAgFaMAIBajACAtikGALUhBgBejACAu5kGALqVBgBijACAZowAgL/hBgC++QYAvfEGALz5BgBqjACAo3UGAG6MAIByjACApm0GAHaMAIB6jACApWUGAKrRBgCr3QYAfowAgIKMAICuvQYAr6UGAKy9BgCttQYAqOUBAKn1AQCq/QEAq/UBAKztAQCtNQEArj0BAK81AQCA+QAAgc0AAILFAACEYAEAvngBAIqMAICHrAAAhpABALjRAAC52QAAuuEAALvhAAC8kQAAvZ0AAL6VAAC/iQAAsE0BALFVAQCyXQEAs1UBALRNAQC18QAAtvEAALfxAACzdQIAjowAgJKMAICWjACAmowAgLa1AgC1ZQIAnowAgLuRAgC6iQIAoowAgKaMAIC/NQMAvokCAL2BAgC8iQIAqowAgKMxAgCujACAhMADAKbxAgCyjACAtowAgKUhAgCqzQIAq9UCALqMAIC+jACArs0CAK9xAwCszQIArcUCAKuNAACqjQAAqY0AAKg5AwCvvQAArr0AAK2FAACsjQAAqgAAAKsAAADCjACAxowAgMqMAIDOjACA0owAgNaMAIC7fQAAun0AALl9AAC4fQAAv90BAL7dAQC93QEAvN0BALO5AACysQAAsaEAALCtAAC3XQAAtl0AALWVAAC0lQAA2owAgN6MAIDijACA5owAgIE1AACADQAA6owAgII1AAC+rD0A7owAgPKMAICFaD0A+owAgP6MAICGODwAh8ACALNJAQACjQCA0AAAAAaNAIAKjQCAtkkBALVJAQAOjQCAuykBALolAQASjQCAFo0AgL8dAQC+HQEAvSEBALwpAQDjNDYA4QwGAOGwAgDjPAYAGo0AgB6NAIAijQCAJo0AgIQsPwC+oD8AKo0AgC6NAIDvfDcAMo0AgDaNAIDvGAEAOo0AgD6NAICGaD4Ah8w/AEKNAIBGjQCASo0AgO+UAABOjQCA4ZQBAFKNAIDjUAAAVo0AgILpPwCB6T8AgPE/AKMJPgCPASQA9owAgFqNAIBejQCApgk+AKUJPgBijQCAq2k+AKplPgBmjQCAao0AgK9dPgCuXT4ArWE+AKxpPgCeYTgAn3U4AJzBNACdtTkAmqU1AJt1NACYeTAAmXExAJYhLQCXhTEAlG0sAJVlLACSeSgAk6UtAJBRJACReSgAsQ0UALAFFACzARgAslUUALV5GAC0tRgAbo0AgHKNAIB2jQCAeo0AgH6NAICCjQCAotE8AKMlAQCgdTkAob08AKHJAACGjQCAowEEAKLlAAClHQQApPUEAKf5CACmAQgAqQEMAKhtCACrzQwAqs0MAK3REACsARAAr9URAK7ZEACCBSUAgy0lAIqNAICOjQCAhsEsAIcRLQCEHSkAhRUpAIopLQCLZSwAko0AgJaNAICOHTAAj8E0AIzZMACNHTEAkmE1AJPNNQCajQCAno0AgJZhOQCXmTgAlKE4AJV9OQCaYT0AmwU9AKKNAICmjQCAqo0AgK6NAICc6QAAso0AgLaNAIC6jQCAvo0AgMKNAICGjACAxo0AgMqNAIDOjQCAqJE+AKmRPgCq7T4Aq+E+AKzhPgCt6T4ArtE+AK/RPgCwUT4AsVE+ALJRPgCzUT4AtHk+ALV5PgC2bT4At2U+ALghPgC5IT4Aujk+ALs5PgC8KT4AvRU+AL4RPgC/DT4AgJkDAIGZAwCCBQAA0o0AgL5UAwDhsD0A2o0AgONAPgCEOAIA3o0AgOKNAIDv9D8A5o0AgOqNAICGmAQAhxwDALMFPQCECAQA7o0AgPKNAID2jQCAtgk9ALUJPQD6jQCAu/U9ALr1PQD+jQCAAo4AgL/dPQC+3T0AveU9ALzlPQAGjgCACo4AgKPNPQC+xAQApcE9AA6OAIASjgCApsE9ABaOAIAajgCAqz09AKo9PQCtLT0ArC09AK8VPQCuFT0AtmkCAB6OAIAijgCAtWkCACaOAICzSQIAKo4AgC6OAIC+qQMAv6kDALzBAwC9wQMAuvkDALv5AwAyjgCANo4AgKgtAwCpnQMAqpUDAKutAwCstQMArb0DAK61AwCv2QMAgA0AAIEVAACCHQAAOo4AgD6OAIBCjgCAh7QFAIacBAC4MQIAuTECALo1AgC7zQIAvNUCAL3dAgC+1QIAv8kCALBpAgCxaQIAskECALNBAgC0OQIAtTkCALYRAgC3EQIASo4AgOM0PgBOjgCA4aw+AFKOAIDvfAMAVo4
AgFqOAIBejgCA45QDAGKOAIDhfD4AZo4AgO/oPgBqjgCAbo4AgHKOAIB2jgCAo1UDAHqOAICldQMAfo4AgIKOAICmdQMAho4AgIqOAICr5QIAquUCAK3dAgCs3QIAr7UCAK61AgCoGQYAqSEGAKohBgCrPQYArCUGAK1dBgCuVQYAr00GAEaOAICOjgCAko4AgJaOAICajgCAno4AgKKOAICmjgCAuOUGALmBBgC6gQYAu50GALyJBgC9iQYAvqEGAL+hBgCwPQYAsQ0GALIFBgCz7QYAtPUGALXhBgC24QYAt90GALOpBgCCLQAAgRUAAIAdAACqjgCAtt0GALWtBgCujgCAu8kGALr5BgCyjgCAhOADAL8lBgC+MQYAvTkGALzRBgC+iAMAo+0GANaNAIC2jgCAppkGALqOAIC+jgCApekGAKq9BgCrjQYAhkgAAIdsAACudQYAr2EGAKyVBgCtfQYAqIEGAKmNBgCqmQYAq5UGAKyNBgCttQYArrEGAK+tBgDCjgCAxo4AgMqOAIDOjgCA0o4AgNaOAIDajgCA3o4AgLilBgC5YQEAumEBALthAQC8YQEAvWEBAL5hAQC/YQEAsNkGALHZBgCyqQYAs6kGALS9BgC1oQYAtqEGALedBgCzEQYA4o4AgOaOAIDqjgCA7o4AgLY1BgC1BQYA8o4AgLsdBgC6HQYA9o4AgPqOAIC/ZQYAvnkGAL19BgC8fQYA/o4AgKNVBgACjwCABo8AgKZxBgAKjwCADo8AgKVBBgCqWQYAq1kGABKPAIAWjwCArj0GAK8hBgCsOQYArTkGAKjVAgCp3QIAqikDAKspAwCsOQMArTkDAK4pAwCvKQMAGo8AgB6PAIAijwCAKo8AgC6PAIAyjwCAvrgDADaPAIC47QMAuYUDALqBAwC7gQMAvIUDAL2NAwC+sQMAv7EDALBZAwCxWQMAsu0DALPlAwC0/QMAteUDALblAwC31QMAgKEAAIGhAACCoQAAvoAMADqPAICEmAIAPo8AgEKPAICGAAwAh/QDAEaPAIBKjwCATo8AgFKPAIBWjwCAhLADALPhAwBajwCAXo8AgGKPAIBmjwCAtvkDALXxAwBqjwCAu90DALrdAwBujwCAco8AgL9hAwC+eQMAvXEDALx5AwB2jwCAeo8AgH6PAICjLQIAgo8AgKU9AgCmNQIAho8AgIqPAICOjwCAqhECAKsRAgCstQIArb0CAK61AgCvrQIA48QDAOMQBwDhuAEA4WwHAIBxAACBcQAAggUAAJKPAICGwAwAh1QNAJqPAICejwCA77ADAO8ABwCijwCApo8AgKqPAICujwCAso8AgLaPAIC6jwCAvo8AgMKPAIDvpAEAhKANAOGABgDGjwCA4xABAMqPAIDOjwCA0o8AgNaPAICz9QEA2o8AgN6PAIDijwCA5o8AgLZNAQC1SQEA6o8AgLtRAQC6SQEA7o8AgPKPAIC/OQEAvjEBAL1BAQC8SQEAqC0OAKk1DgCqPQ4AqzEOAKyBDgCtjQ4AroUOAK+1DgCWjwCA9o8AgPqPAID+jwCAgBkAAIEZAACCBQAAApAAgLidDgC5rQ4AuqUOALtNDwC8VQ8AvV0PAL5JDwC/QQ8AsM0OALHVDgCy3Q4As9UOALS1DgC1vQ4AtrUOALetDgCjtQ4AvogDAAaQAIAKkACADpAAgKYNDgClCQ4AEpAAgKsRDgCqCQ4AhggAAIdsAwCveQ4ArnEOAK0BDgCsCQ4AFpAAgBqQAIAekACAs7UPACKQAIC1VQ8Atl0PACaPAIAmkACAKpAAgLp5DwC7eQ8AvGkPAL1dDwC+SQ8Av0kPAKhpDgCpaQ4AqnEOAKtxDgCskQ4ArZEOAK6RDgCvkQ4ALpAAgDKQAIA2kACAOpAAgD6QAIBCkACARpAAgEqQAIC4hQ4AuY0OALqFDgC7nQ4AvI0OAL29DgC+tQ4Av3kBALDxDgCx8Q4AsvEOALPFDgC0wQ4AtcEOALbBDgC3wQ4Ao/kOAE6QAIBSkACAVpAAgFqQAICmEQ4ApRkOAF6QAICrNQ4AqjUOAGKQAIBmkACArwUOAK4FDgCtEQ4ArCUOAIANAACBFQAAgh0AAGqQAIBukACAcpAAgISUAQC+lAEAhkAHAIf0AAB6kACAfpAAgIKQAICGkACAipAAgI6QAICojQIAqZUCAKqVAgCrzQIArNUCAK3dAgCuyQIAr/0CAJKQAICWkACAmpAAgJ6QAIC/ABQAopAAgKaQAICqkACAuH0DALnBAwC6wQMAu8EDALzBAwC9yQMAvvEDAL/xAwCwhQIAsUUDALJNAwCzRQMAtF0DALVFAwC2TQMAt0UDALMdAgCukACAspAAgLaQAIC6kACAtl0CALVdAgC+kACAu4EDALpBAgDCkACAxpAAgL+BAwC+mQMAvZEDALyZAwDKkACAo1kCAM6QAIDSkACAphkCANaQAIDakACApRkCAKoFAgCrxQMA3pAAgOKQAICu3QMAr8UDAKzdAwCt1QMA6pAAgOPMAACEBAIA4bwBAIDJAQCB/QEAgvUBAL4QBQDukACAvigEAPKQAID2kACA+pAAgO8QAAD+kACAApEAgIbgBACH9AIABpEAgAqRAIDj/A8ADpEAgOHgDwASkQCA7xQPABaRAIAakQCAHpEAgCKRAIAmkQCAKpEAgC6RAIAykQCANpEAgDqRAIA+kQCAQpEAgEaRAIBKkQCA7+ABAIUEEgDh3A4ATpEAgOMcDgCAKQAAgR0AAIIFAABSkQCAszECAFqRAICEzAUAXpEAgGKRAIC2KQIAtSECAGaRAIC7zQEAus0BAGqRAIBukQCAv3UBAL7JAQC9wQEAvMkBAKjpBQCp6QUAqvkFAKv5BQCs6QUArekFAK45BgCvOQYA5pAAgFaRAICGiAAAhwADAHKRAIB2kQCAepEAgH6RAIC40QYAudkGALrhBgC74QYAvJEGAL2dBgC+lQYAv4kGALBJBgCxSQYAsl0GALNVBgC0TQYAtfEGALbxBgC38QYAo3EFAIKRAICGkQCAipEAgI6RAICmaQUApWEFAJKRAICrjQYAqo0GAJaRAICakQCArzUGAK6JBgCtgQYArIkGAJ6RAICikQCAs+EHAKaRAIC14QcAqpEAgK6RAIC25QcAdpAAgLKRAIC7vQcAuqEHAL2VBwC8qQcAv5UHAL6VBwCoAQYAqSUGAKohBgCrIQYArCEGAK0tBgCuJQYAr1UGALaRAICCHQAAgR0AAIAdAAC6kQCAvpEAgMKRAIC+MAEAuDkGALk5BgC6yQYAu8kGALzZBgC92QYAvskGAL/JBgCwLQYAsTEGALI1BgCzCQYAtBkGALUZBgC2CQYAtwkGAKOpBgCEjAIAhigfAIdEAQDKkQCApq0GAKWpBgDOkQCAq/UGAKrpBgDSkQCA1pEAgK/dBgCu3QYArd0GAKzhBgDakQCAsxUGAN6RAIDikQCAtj0GAOaRAIDqkQCAtTUGALrZAQC72QEA7pEAgPKRAIC+fQEAv2UBALx9AQC9dQEAqMUFAK
nJBQCq2QUAq9EFAKz5BQCt+QUArikCAK8pAgD2kQCA+pEAgP6RAIACkgCAjAAAAAaSAIAKkgCADpIAgLjtAgC5hQIAuo0CALuBAgC8hQIAvY0CAL69AgC/fQMAsFkCALFZAgCy7QIAs+UCALT9AgC15QIAtuUCALfVAgCjUQUAEpIAgBaSAIAakgCAHpIAgKZ5BQClcQUAIpIAgKudAgCqnQIAJpIAgCqSAICvIQIArjkCAK0xAgCsOQIAghEAAC6SAICAZQAAgQkAADKSAIC+mAMAOpIAgD6SAICEJAMAQpIAgIdoAwCGjBwARpIAgEqSAIBOkgCAUpIAgFaSAIBakgCAs6ECAITAHAC10QIAXpIAgGKSAIC21QIAZpIAgGqSAIC7wQIAuvUCAL0RAQC82QIAvxEBAL4ZAQBukgCAcpIAgHaSAIB6kgCAfpIAgIKSAICGkgCA77gGAIqSAIDhnAQAjpIAgON0BgCSkgCAlpIAgJqSAICekgCAgPkAAIH5AACCBQAAopIAgL5YHACEWB8A71wAAO9ABgDhkAEA4fwGAOM8AADjdAYAqpIAgK6SAICGmBwAh/QcAKNpAgC+DB8AspIAgLaSAIC6kgCAph0CAKUZAgC+kgCAqwkCAKo9AgDCkgCAxpIAgK/ZAQCu0QEArdkBAKwRAgCokR0AqZkdAKqhHQCroR0ArNEdAK3dHQCu1R0Ar8kdADaSAICmkgCAypIAgM6SAIDSkgCA1pIAgNqSAIDekgCAuHkeALl5HgC6zR4Au8UeALzdHgC9xR4AvsUeAL/1HgCwuR0AsY0dALKFHQCzTR4AtFUeALVdHgC2VR4At0keALjNHwC51R8Aut0fALvVHwC88R8Avf0fAL7pHwC/6R8AsKUfALGxHwCysR8As40fALSVHwC19R8Atv0fALf1HwCoGR4AqRkeAKotHgCrPR4ArCUeAK0tHgCuJR4Ar90fAOKSAIDmkgCA6pIAgO6SAIDykgCAxpEAgPaSAID6kgCAs+UfAP6SAIACkwCABpMAgAqTAIC27R8Ate0fAA6TAIC7NR4AuiEeABKTAIAWkwCAv3EeAL4RHgC9GR4AvCUeAIJpAACjoR8AgFkAAIFRAACmqR8AGpMAgB6TAIClqR8AqmUeAKtxHgCGAAQAh+wBAK5VHgCvNR4ArGEeAK1dHgCoMR4AqTEeAKpBHgCrQR4ArEEeAK1JHgCucR4Ar3EeACKTAIAmkwCAKpMAgC6TAIAykwCANpMAgDqTAIA+kwCAuCkBALkpAQC6OQEAuzUBALwtAQC90QAAvtEAAL/RAACwyQEAsckBALLZAQCz2QEAtMkBALXJAQC2GQEAtxkBALPJHQBCkwCARpMAgEqTAIBOkwCAtskdALXJHQBSkwCAuw0CALoNAgBWkwCAWpMAgL8NAgC+DQIAvQ0CALwNAgBekwCAo40dAGKTAIBmkwCApo0dAGqTAIBukwCApY0dAKpJAgCrSQIAcpMAgHaTAICuSQIAr0kCAKxJAgCtSQIAgA0AAIERAACCEQAAepMAgO/MAgB+kwCAgpMAgISQAgDjLAIAvigDAOHYAQCKkwCAhhAEAIfUAwCOkwCAkpMAgLNhAwCWkwCAmpMAgJ6TAICikwCAtnkDALVxAwCmkwCAu10DALpdAwCqkwCArpMAgL/hAAC++QAAvfEAALz5AACjoQIAspMAgLaTAIC6kwCAvpMAgKa5AgClsQIAwpMAgKudAgCqnQIAxpMAgMqTAICvIQEArjkBAK0xAQCsOQEAzpMAgNKTAIDvZB8A1pMAgNqTAIDekwCA4pMAgOaTAICADQAAgREAAIIVAADqkwCA4eAcAO6TAIDjiB8A8pMAgISAAgC+jAUAh0gFAIYsBAD6kwCA/pMAgO+kHgDv9B4A4QAeAOFQHwDjLB4A47AeAAKUAIAGlACACpQAgA6UAIASlACAFpQAgISEBACzcQEAGpQAgLUdAQC2FQEAHpQAgCKUAIAmlACAugEBALsBAQC89QAAvf0AAL71AAC/7QAAqK0GAKm9BgCqtQYAq8kGAKzZBgCt2QYArskGAK/BBgAqlACALpQAgDKUAIA2lACAOpQAgD6UAIBClACARpQAgLhtBwC5BQcAug0HALsBBwC8AQcAvQEHAL4BBwC/AQcAsIkGALGJBgCybQcAs2UHALR9BwC1ZQcAtmUHALdVBwCGkwCAozkGAEqUAID2kwCApl0GAE6UAIBSlACApVUGAKpJBgCrSQYAVpQAgFqUAICuvQcAr6UHAKy9BwCttQcAgG0AAIEJAACCGQAAXpQAgGKUAIC+nAMAZpQAgGqUAICGQAAAh2AAAG6UAIBylACAdpQAgHqUAIB+lACAgpQAgKiRBgCpkQYAqrkGAKu5BgCsqQYArakGAK7ZBgCv2QYAhpQAgIqUAICOlACAkpQAgJaUAICalACAnpQAgKKUAIC4cQEAuXEBALpxAQC7cQEAvNkBAL3BAQC+wQEAv/UBALCxBgCxuQYAsokGALOJBgC0UQEAtVEBALZRAQC3UQEAszEGAKaUAICqlACArpQAgLKUAIC2KQYAtSEGALaUAIC7fQYAunUGALqUAIC+lACAv5UBAL6VAQC9XQYAvF0GAMKUAICjdQYAxpQAgMqUAICmbQYAzpQAgNKUAIClZQYAqjEGAKs5BgCErAEAvqABAK7RAQCv0QEArBkGAK0ZBgCo3QIAqe0CAKrlAgCr/QIArOUCAK3tAgCu5QIArz0DANqUAIDelACA4pQAgL5kDADmlACA6pQAgO6UAIDylACAuMkDALnJAwC62QMAu9EDALz5AwC9+QMAvpkDAL+VAwCwRQMAsU0DALJFAwCzXQMAtEUDALVNAwC2RQMAt/kDAIFVAwCASQMAs2UCAIJVAwC1ZQIA9pQAgPqUAIC2ZQIAhgAMAIfkAwC7gQMAuokDAL2BAwC8mQMAv4EDAL6JAwCjLQIA/pQAgAKVAIAGlQCACpUAgKYtAgClLQIADpUAgKvJAwCqwQMAEpUAgBaVAICvyQMArsEDAK3JAwCs0QMA49gGAOGsBwDhnAYA45wGABqVAICEWA0AHpUAgCKVAIAmlQCAKpUAgC6VAIAylQCA7xwBADaVAIA6lQCA70AGAIB5AACBFQAAghEAAIQADAA+lQCA46wAAEKVAIDhpAEASpUAgO9wAACGyAwAh6QNAE6VAIBSlQCAVpUAgFqVAIC6yQUAu8kFALilBQC5zQUAvvkFAL/5BQC8zQUAvcUFALKlBQCzrQUAsBEGALERBgC2rQUAt50FALS1BQC1rQUAqmEGAKthBgConQYAqZUGAK5hBgCvYQYArHEGAK1xBgBelQCAYpUAgGaVAIBqlQCAbpUAgHKVAIC+sAwAdpUAgKghDgCpIQ4AqiEOAKs9DgCsJQ4ArS0OAK4lDgCviQ4ARpUAgHqVAIB+lQCAgpUAgIaVAICKlQCAjpUAgJKVAIC4UQ8AuV0PALpVDwC7bQ8AvHUPAL19DwC+dQ8Av2kPALD5DgCxoQ4AsqEOALOhD
gC0oQ4AtakOALaRDgC3kQ4As6kOAJaVAIDWlACAmpUAgJ6VAIC2rQ4Ata0OAKKVAIC7ZQ4Auj0OAKaVAICqlQCAv20OAL5lDgC9dQ4AvHUOAIIZAACj7Q4AgGUAAIEZAACm6Q4ArpUAgLKVAICl6Q4AqnkOAKshDgC2lQCAupUAgK4hDgCvKQ4ArDEOAK0xDgCoYQ4AqXUOAKp9DgCrdQ4ArG0OAK31DgCu/Q4Ar/UOAIaAAQCHpAEAvpUAgMKVAIDGlQCAypUAgM6VAIDSlQCAuHUBALl9AQC6dQEAu8kBALzdAQC9xQEAvsUBAL/1AQCwjQ4AsZUOALKdDgCzkQ4AtFUBALVdAQC2VQEAt00BALP1DgDWlQCA2pUAgN6VAIDilQCAtnUOALXlDgDmlQCAu1EOALpJDgDqlQCA7pUAgL+ZAQC+kQEAvUUOALxJDgDylQCAo7EOAPaVAID6lQCApjEOAP6VAIAClgCApaEOAKoNDgCrFQ4ABpYAgAqWAICu1QEAr90BAKwNDgCtAQ4AqO0CAKktAwCqJQMAqz0DAKwlAwCtLQMAriUDAK+ZAwAOlgCAEpYAgBaWAIAalgCAHpYAgCKWAIC+dAIAKpYAgLiNAwC5kQMAupEDALulAwC8vQMAvXUAAL59AAC/dQAAsOkDALHpAwCy+QMAs/EDALTZAwC12QMAtrkDALe1AwCArQAAgbUAAIK9AACzoQMALpYAgLWhAwC2oQMAMpYAgITgAgA2lgCAuiEDALshAwC8IQMAvSkDAL4RAwC/EQMAo+0DAIXABACFtG8AOpYAgD6WAICm7QMApe0DAEKWAICrbQMAqm0DAIZIBQCHbAMAr10DAK5dAwCtZQMArG0DAEaWAIDjAA4A71hsAOG0DwBKlgCATpYAgFKWAIBWlgCAoakDAKD9DwCjwQMAog0DAOHgAwDv4A8A4+QDAFqWAIBelgCAYpYAgIQEBAC+BAQAZpYAgO+UAwBqlgCAbpYAgHKWAIDj1AMAdpYAgOFUAAB6lgCAfpYAgIKWAICGlgCAgA0AAIEVAACCHQAAipYAgI6WAICSlgCAj5EbAO+cDgCE4AcA4dQOAJqWAIDj8A4AnpYAgKKWAICGGAcAh5AEAJnlFwCY5RcAm+kLAJo5CwCd/QoAnPELAJ9VDwCeXQ8AkSkfAJDNGwCTJR8Aks0fAJXREwCUKRMAlxkXAJZ1EwCM4RAAjSUQAI4tEACP+QwAJpYAgJaWAICKORQAi5UUAITpGACFBRgAhuUYAIfxFACmlgCAqpYAgIIxHACDFRwAnKkEAK6WAICylgCAtpYAgLqWAIC+lgCAmtEEAJt9BACUTQ0AleUIAJblCACXtQgAwpYAgMaWAICSWQwAk1kMAKGRAADKlgCAowF8AKKZAACluXwApJF8AKeZeACm4X0AqYF5AKiheACriXQAqgF0AK0BcACsWXQAr4VwAK6dcACx4WwAsAFsALMBaACyHWwAtfVoALT1aADOlgCA0pYAgNaWAIDalgCA3pYAgOKWAIDmlgCA6pYAgO6WAIDylgCAqD0HAKmVBwCqlQcAq6kHAKzdBwCtxQcArsUHAK8dBgD2lgCAgh0AAIEdAACAHQAA+pYAgP6WAIAClwCAvmABALgZBgC5GQYAuikGALslBgC8IQYAvSEGAL4hBgC/IQYAsHEGALFxBgCycQYAs3EGALRNBgC1NQYAtj0GALctBgCzHQcACpcAgIYoAACHqAAADpcAgLZFBwC1VQcAEpcAgLu1BgC6tQYAFpcAgBqXAIC/8QYAvokGAL2lBgC8pQYAHpcAgKNZBwAilwCAJpcAgKYBBwAqlwCALpcAgKURBwCq8QYAq/EGADKXAIA2lwCArs0GAK+1BgCs4QYAreEGAKipBQCptQUAqr0FAKs9AgCsJQIArVECAK5RAgCvUQIAOpcAgD6XAIBClwCARpcAgIQ8AwBKlwCATpcAgFKXAIC4pQIAua0CALqlAgC7vQIAvKUCAL2tAgC+pQIAv30DALAxAgCxMQIAshkCALMZAgC09QIAta0CALalAgC3nQIAVpcAgFqXAIBelwCAszkFAGKXAIC1oQIAtt0CAGaXAIBqlwCAbpcAgLr5AgC7+QIAvMECAL3BAgC+PQIAv2UCAHKXAICmgQIApf0CAHqXAICjZQUAvlh8AIbYfACHnHwArzkCAK5hAgCtnQIArJ0CAKulAgCqpQIAfpcAgIKXAICohQIAqZUCAKqVAgCrpQIArL0CAK3VAgCu0QIAr9ECAIGFAQCAhQEAhpcAgILtAQCKlwCAjpcAgJKXAICWlwCAuHUBALl9AQC6dQEAu80BALzVAQC93QEAvskBAL/BAQCwtQIAsb0CALKBAgCzgQIAtFEBALVRAQC2UQEAt1EBAJqXAICelwCAopcAgKaXAIDhMAYA4WQHAOMoBgDjxAYAhCB9AKqXAIDvbAAA7xgGAK6XAICylwCAtpcAgLqXAICzXQIAvkh8AL6XAIDClwCAxpcAgLYVAgC1dQIAypcAgLs5AgC6MQIAzpcAgNKXAIC/1QEAvtUBAL0VAgC8FQIAo519AHaXAIDWlwCA2pcAgN6XAICm1X0ApbV9AOKXAICr+X0AqvF9AOaXAIDqlwCArxV+AK4VfgCt1X0ArNV9AIBNAACBVQAAglUAALOxfgDulwCAtWV/ALZtfwDylwCAhkADAIcEAwC66X8Au+l/ALz5fwC9+X8Avt1/AL/NfwD2lwCA+pcAgAaXAID+lwCAApgAgAaYAIAKmACADpgAgKhtfgCpXX4AqlV+AKuFfwCsgX8ArYF/AK6BfwCvgX8AsEF/ALFBfwCyQX8As0F/ALR1fwC1ZX8Atm1/ALdlfwC4XX8AuS1/ALolfwC7PX8AvC1/AL0dfwC+FX8Av/UAAKP9fwASmACAFpgAgBqYAIAemACApiF+AKUpfgAimACAq6V+AKqlfgAmmACAKpgAgK+BfgCukX4ArbV+AKy1fgAumACAMpgAgDaYAIA6mACAPpgAgEKYAIBGmACASpgAgIA9AACBCQAAghkAAE6YAIBSmACAhLgBAL6wAQBWmACAqK0BAKnVAQCq1QEAqw0BAKwVAQCtGQEArgkBAK8JAQCGAAQAhwQBAFqYAIBemACAYpgAgGaYAIBqmACAbpgAgLjtAAC5hQAAuo0AALuFAAC8nQAAvYUAAL6NAAC/hQAAsHkBALF5AQCy7QAAs+UAALT9AAC15QAAtuUAALfVAACzXQIAcpgAgHaYAIB6mACAfpgAgLaZAgC1nQIAgpgAgLu9AgC6vQIAhpgAgIqYAIC/IQMAvjkDAL0xAwC8OQMAvigDAKMZAgCOmACAkpgAgKbdAgCWmACAmpgAgKXZAgCq+QIAq/kCAJ6YAICimACArn0DAK9lAwCsfQMArXUDAL7IBACmmACAqpgAgL7EBQCumACAspgAgLaYAIC6mACAgD0AAIEJAACCGQAAvpgAgMKYAICEOAMAypgAgM6YAIDveAIA0pgAgIZIBACHVAMA1pgAgNqYAIDe
mACA4pgAgOaYAIDqmACA7pgAgPKYAIDjVAIA9pgAgOFAAQD6mACA/pgAgOMkfwACmQCA4Zx8AAaZAIAKmQCADpkAgBKZAICEbAUAFpkAgBqZAIAemQCAIpkAgO8YfwAmmQCAKpkAgLPxAgAumQCAMpkAgDqZAIA+mQCAtukCALXhAgBCmQCAu3EBALppAQCHoAUAhswEAL85AQC+WQEAvVEBALxhAQDhQH8ARpkAgOM4fgCEwAQAgtkAAO8UAACApQAAgdkAAEqZAIDjwAAATpkAgOHUAQBSmQCAVpkAgO+EfgBamQCAqs0BAKvVAQBemQCAYpkAgK79AQCvnQEArMUBAK31AQBmmQCAo1UCAGqZAIBumQCApk0CAHKZAIB2mQCApUUCAMaYAIA2mQCAepkAgH6ZAICCmQCAhpkAgIqZAICOmQCAqJkGAKmZBgCq7QYAq/0GAKzlBgCt7QYAruUGAK/dBgCwpQYAsa0GALKlBgCzuQYAtK0GALVVBwC2UQcAt00HALh1BwC5fQcAunUHALtJBwC8WQcAvVkHAL5JBwC/RQcAs0UGAJKZAICWmQCAmpkAgJ6ZAIC2TQYAtU0GAKKZAIC7SQYAukEGAIYIAACHjAAAv7EHAL5JBgC9TQYAvFEGAIJdAACjAQYAgEUAAIFdAACmCQYAqpkAgK6ZAIClCQYAqgUGAKsNBgCymQCAtpkAgK4NBgCv9QcArBUGAK0JBgCoTQYAqVUGAKpVBgCriQYArLEGAK29BgCuqQYAr6kGAKaZAIC6mQCAvpkAgMKZAIDGmQCAypkAgM6ZAIDSmQCAuEkBALlJAQC6WQEAu1kBALxJAQC9SQEAvt0BAL/VAQCw3QYAsa0GALKlBgCzjQYAtJkGALWZBgC2jQYAt4UGALPdBgDWmQCA2pkAgN6ZAIDimQCAtj0GALU5BgDmmQCAu2kGALoZBgDqmQCA7pkAgL9dBgC+XQYAvVkGALxxBgDymQCAo5kGAPaZAID6mQCApnkGAP6ZAIACmgCApX0GAKpdBgCrLQYABpoAgAqaAICuGQYArxkGAKw1BgCtHQYAqNUCAKndAgCq4QIAq+ECAKw1AwCtPQMArjUDAK8tAwCAzQMAgQkAAIIZAAAOmgCAEpoAgIQYAgC+dAMAGpoAgLjpAwC56QMAuokDALuFAwC8nQMAvYEDAL6BAwC/tQMAsFUDALFdAwCyVQMAs+kDALT5AwC1+QMAtukDALfhAwCGIAwAhxADAB6aAIAimgCAJpoAgCqaAIAumgCA71wCADKaAIDhFAAANpoAgOOIAgC++AwAOpoAgD6aAIBCmgCAu/kDALrxAwC+gA0ARpoAgL9dAwC+XQMAvV0DALzhAwCzCQIASpoAgE6aAIBSmgCAVpoAgLbdAwC13QMAWpoAgKipBgCpqQYAqrkGAKu5BgCsqQYArakGAK4dBQCvFQUAXpoAgGKaAIBmmgCAapoAgG6aAIBymgCAdpoAgHqaAIC4GQUAuS0FALolBQC7yQUAvNkFAL3FBQC+zQUAv8UFALBtBQCxdQUAsnUFALNFBQC0XQUAtT0FALY1BQC3KQUA4fQGAOFUBwDjFAYA47wGAIEJAACAqQAAfpoAgII5AACE7A0AgpoAgIeIDACGDAwAipoAgI6aAIDvzAcA78QHAKMpAwCSmgCAlpoAgJqaAICemgCApv0CAKX9AgCimgCAq9kCAKrRAgCmmgCAqpoAgK99AgCufQIArX0CAKzBAgCoPQ4AqY0OAKqFDgCrnQ4ArIUOAK2NDgCuuQ4Ar7UOAIaaAICumgCAspoAgLaaAIC6mgCAvpoAgMKaAIDGmgCAuL0OALllDwC6bQ8Au2UPALx9DwC9ZQ8Avm0PAL9lDwCw1Q4Asd0OALLVDgCzoQ4AtJUOALWdDgC2lQ4At40OALMNDgDKmgCAzpoAgNKaAIDWmgCAtg0OALUNDgDamgCAuxkOALoRDgDemgCAFpoAgL9ZDgC+UQ4AvXUOALwBDgDimgCAo0kOAOaaAIDqmgCApkkOAO6aAIDymgCApUkOAKpVDgCrXQ4AhKQDAPaaAICuFQ4Arx0OAKxFDgCtMQ4AqLEOAKmxDgCqzQ4Aq8UOAKzdDgCtxQ4ArsUOAK/1DgCA7QEAgfEBAILxAQD6mgCAhpABAIe0AQD+mgCAApsAgLjFAQC5zQEAusUBALvdAQC8zQEAvf0BAL6ZAQC/lQEAsI0OALFBAQCyQQEAs0EBALRBAQC1QQEAtkEBALdBAQCzRQ4ABpsAgAqbAIAOmwCAEpsAgLZFDgC1VQ4AFpsAgLuFAQC6SQ4AGpsAgB6bAIC/hQEAvoUBAL2VAQC8lQEAIpsAgKMBDgAmmwCAKpsAgKYBDgAumwCAMpsAgKURDgCqDQ4Aq8EBADabAIA6mwCArsEBAK/BAQCs0QEArdEBAKgtAwCpPQMAqjUDAKuJAwCsmQMArZkDAK6JAwCvgQMAPpsAgEKbAIBGmwCASpsAgE6bAIBSmwCAVpsAgFqbAIC4rQMAuWUAALptAAC7ZQAAvH0AAL1lAAC+bQAAv2UAALDJAwCxyQMAsqkDALOlAwC0vQMAtaEDALahAwC3lQMAgL0AAIEJAACCGQAAXpsAgGKbAIC+2AMAapsAgG6bAICErAIAcpsAgIfoAwCGDAQAdpsAgHqbAIB+mwCAgpsAgLP9AwCGmwCAipsAgI6bAICSmwCAtlkDALVRAwCWmwCAu00DALpNAwCamwCAnpsAgL8lAwC+OQMAvTEDALw9AwCimwCAppsAgKqbAICumwCA71gPALKbAIC2mwCAupsAgOOQDgC+mwCA4bAPAMKbAIDGmwCAypsAgM6bAIDSmwCAgHUAAIF9AACCdQAAhBgFAO88AwDamwCAvhQFAN6bAIDj0AMA4psAgOFAAADmmwCAhtAEAIdYBQDqmwCA7psAgPKbAID2mwCA+psAgP6bAIACnACABpwAgAqcAIDvrA8AhOwEAOEQDgAOnACA41QBABKcAIAWnACAGpwAgB6cAICj/QIAIpwAgCacAIAqnACALpwAgKZZAgClUQIAMpwAgKtNAgCqTQIANpwAgDqcAICvJQIArjkCAK0xAgCsPQIAqJkGAKmZBgCqrQYAq70GAKylBgCtrQYArqUGAK/ZBgDWmwCAghEAAIEZAACAwQcAPpwAgEKcAIC+cAMARpwAgLhJBwC5SQcAul0HALtVBwC8TQcAvXEHAL51BwC/bQcAsKkGALGpBgCyuQYAs7EGALSZBgC1mQYAtnkHALd5BwC1NQYASpwAgE6cAIC2NQYAhjAAAIdcAwCzPQYAUpwAgL19BgC8dQYAv0UGAL5FBgBmmwCAVpwAgLt1BgC6dQYAo2UGAFqcAIBenACAYpwAgGacAICmbQYApW0GAGqcAICrLQYAqi0GAG6cAIBynACArx0GAK4dBgCtJQYArC0GAKhVBgCpWQYAqm0GAKthBgCsaQYArWkGAK6ZBgCvmQYAdpwAgHqcAIB+nACAgpwAgIacAICKnACAjpwAgJKcAIC4+QY
AufkGALqNBgC7hQYAvJ0GAL2FBgC+hQYAv7UGALDpBgCx6QYAsvkGALP5BgC06QYAtd0GALbJBgC3yQYAs+UGAJacAICanACAnpwAgKKcAIC26QYAteEGAKacAIC7LQYAui0GAKqcAICunACAvxkGAL4tBgC9LQYAvC0GAIIVAACjoQYAgGEAAIFhAACmrQYAspwAgL6QAQClpQYAqmkGAKtpBgCEpAEAupwAgK5pBgCvXQYArGkGAK1pBgCohQIAqY0CAKqVAgCruQIArNUCAK3dAgCu1QIAr80CAIaAHACHZAMAvpwAgL5gAwDCnACAxpwAgMqcAIDOnACAuHUDALl9AwC6dQMAu8kDALzZAwC92QMAvskDAL/BAwCwvQIAsY0CALKFAgCzTQMAtFUDALVdAwC2VQMAt00DALMdAgDSnACAhAgDANacAIDanACAtl0CALVdAgDenACAu0kCALp5AgDinACA5pwAgL+ZAwC+kQMAvZkDALxRAgCwAAAAo1kCAOqcAIDunACAphkCAPKcAID2nACApRkCAKo9AgCrDQIA+pwAgP6cAICu1QMAr90DAKwVAgCt3QMAAp0AgAadAIAKnQCA76wGAA6dAIASnQCAFp0AgBqdAIC+6BwAHp0AgCKdAIAqnQCALp0AgOGABwAynQCA42AGAIBdAACBYQAAgmEAALN9AQA2nQCAtW0BALZlAQA6nQCAhiAdAIdYHQC6+QEAu/EBALzZAQC92QEAvrEBAL+xAQDvoAAAPp0AgEKdAIBGnQCASp0AgE6dAIBSnQCA71wBAIRsHADhzAYAVp0AgOMcBgDjSAAAWp0AgOEwAQBenQCAo/EBAGKdAICFABQAZp0AgGqdAICm6QEApeEBAG6dAICrfQEAqnUBAHKdAIB2nQCArz0BAK49AQCtVQEArFUBAKjtHQCpLR4AqjkeAKs5HgCsKR4ArSkeAK6dHgCvkR4AJp0AgHqdAIB+nQCAgp0AgIadAICC+QAAgfEAAID9AAC4qR4AuakeALpJHwC7SR8AvFkfAL1FHwC+TR8Av0UfALDxHgCx+R4AssEeALPBHgC0uR4AtbkeALatHgC3pR4AsBEfALERHwCyER8AsyUfALQlHwC1KR8Atl0fALdRHwC4cR8AuXkfALpBHwC7QR8AvJUAAL2dAAC+lQAAv40AAIqdAIC2nACAjp0AgJKdAICWnQCAmp0AgIb4AwCH0AAAqM0fAKnVHwCq0R8Aq70fAKytHwCtcR8ArnEfAK9xHwCzOR4Anp0AgKKdAICmnQCAqp0AgLaRHgC1RR4Arp0AgLu1HgC6tR4Asp0AgLadAIC/jR4AvoEeAL2RHgC8pR4Aup0AgKN9HgC+nQCAwp0AgKbVHgDGnQCAyp0AgKUBHgCq8R4Aq/EeAM6dAIDSnQCArsUeAK/JHgCs4R4ArdUeAKhVAQCpgQAAqoEAAKuBAACsgQAArYkAAK6xAACvsQAA1p0AgNqdAIDenQCA4p0AgOadAIDqnQCA7p0AgPKdAIC4ZQAAuW0AALplAAC7fQAAvGUAAL1tAAC+ZQAAv90DALChAACxrQAAsqUAALO5AAC0qQAAtZ0AALaVAAC3XQAA9p0AgIIdAACBHQAAgB0AAPqdAID+nQCAAp4AgL4UAgAKngCAhKgCAA6eAIASngCAFp4AgBqeAIAengCAjwAAALNJAwAingCAhugEAIesAgAmngCAtkkDALVJAwAqngCAuykDALolAwAungCAMp4AgL8ZAwC+LQMAvS0DALwxAwA2ngCAo40DADqeAIA+ngCApo0DAEKeAIBGngCApY0DAKrhAwCr7QMASp4AgE6eAICu6QMAr90DAKz1AwCt6QMAvoQDAFKeAIBWngCAWp4AgF6eAIBingCAZp4AgGqeAICAPQAAgQkAAIIZAABungCAcp4AgHqeAICENAMAfp4AgLMtAQCCngCAh8wCAIZMBQCGngCAti0BALUtAQCKngCAu0kBALp5AQCOngCAkp4AgL+9AQC+vQEAvbkBALxRAQDheB8Alp4AgOPQHwCangCAnp4AgOGUAQCingCA42gDAKaeAICqngCArp4AgO+IAwCyngCAtp4AgO+sHwC6ngCAvp4AgMKeAIDGngCAyp4AgM6eAIDSngCA1p4AgO9EHgDangCA4dweAN6eAIDjHB4A4p4AgOqeAIDungCA8p4AgIFpAACAZQAAo+UBAIJ9AACl5QEA9p4AgIQUBACm5QEAvigEAPqeAICrgQEAqrEBAK1xAQCsmQEAr3UBAK51AQCoIQYAqS0GAKolBgCrPQYArCUGAK0tBgCuXQYAr00GAHaeAIDmngCAhggDAIeMAwD+ngCAAp8AgAafAIAKnwCAuOkGALnpBgC6jQYAu4UGALydBgC9hQYAvo0GAL+FBgCwPQYAsQ0GALIFBgCz7QYAtPkGALX5BgC27QYAt+UGALDNBwCx1QcAstEHALPtBwC09QcAtf0HALbpBwC36QcAuN0HALklBwC6LQcAuyUHALw9BwC9JQcAvi0HAL8lBwAOnwCAEp8AgAaeAIAWnwCAGp8AgB6fAIAinwCAJp8AgKgVBgCpGQYAqu0HAKv9BwCs7QcArd0HAK7VBwCvuQcAswUGACqfAIAunwCAMp8AgDafAIC2PQYAtQUGADqfAIC7cQYAumkGAD6fAIBCnwCAv1kGAL5RBgC9WQYAvGUGAEafAICjQQYASp8AgE6fAICmeQYAUp8AgIS0AQClQQYAqi0GAKs1BgC+gAEAWp8AgK4VBgCvHQYArCEGAK0dBgCoNQYAqT0GAKo1BgCrWQYArHUGAK2lAQCurQEAr6UBAIDpAACB6QAAgv0AAL8kAQCGMA8Ah+QAAF6fAIBinwCAuMUAALnNAAC6xQAAu90AALzNAAC9/QAAvvUAAL+dAACw3QEAsSUBALItAQCzIQEAtCEBALUhAQC2IQEAtyEBALvBAgC6OQIAZp8AgGqfAIC/xQIAvsUCAL3VAgC82QIAs50FAG6fAIBynwCAdp8AgIwAAAC2BQIAtd0FAHqfAICqfQIAq4UCAH6fAICCnwCAroECAK+BAgCsnQIArZECAIafAICj2QUAip8AgI6fAICmQQIAkp8AgJafAIClmQUAgpFqAIORagCanwCAnp8AgIa5FgCH6RcAhBEWAIWZFgCKoRIAi6ESAKKfAICmnwCAjpEeAI9ZHgCMmRMAjREeAJJxGgCT5RoAqp8AgO/oJACW8QYAlwUGAJTlGgCVGQYAmikCAJvFAgCunwCAsp8AgLafAIDhKBsAnN0CAOMgDwCfIQcAnsEHAJ01GwCcLRsAm6EbAJr5HwCZOR8AmLEfAJcBEgCWIRMAlSkTAJRRFgCTGRcAkjEXAJGxFwCQKWsAj1FrAOOsBwCEBA0A4RwHAIANAACBNQAAgj0AALqfAIC+nwCAwp8AgL4gDQDKnwCAzp8AgO9MBwCGWAwAh2ANANKfAIDWnwCA2p8AgN6fAICEXA8A4p8AgO8IAADvhAYA4ZABAOGwBgDj4AAA42
QGAOafAIDqnwCA7p8AgPKfAID2nwCA+p8AgL4ADwCEQA4A/p8AgAKgAIAGoACACqAAgA6gAIASoACAFqAAgBqgAICj1QMAotUDAKExAwCgLQcAVp8AgMafAIAeoACAIqAAgCagAICCmQAAgZEAAICZAACoTQ0AqZ0NAKqVDQCrJQ4ArD0OAK0RDgCuEQ4ArxEOALB9DgCxDQ4AsgUOALMtDgC0OQ4AtTkOALYtDgC3JQ4AuOkOALnpDgC6wQ4Au8EOALy5DgC9nQ4AvpUOAL+NDgCzPQ0AKqAAgC6gAIAyoACANqAAgLaxDgC1lQ4AOqAAgLvpDgC6mQ4AhogAAIfkAAC/3Q4Avt0OAL3ZDgC88Q4APqAAgKN5DQC+hAEAhIAGAKb1DgBCoACARqAAgKXRDgCq3Q4Aq60OAEqgAIBOoACArpkOAK+ZDgCstQ4ArZ0OALIFNQCzGTQAsG0wALENNQBSoACAVqAAgLQBKAC1PSkAWqAAgF6gAIBioACAZqAAgGqgAIBuoACAcqAAgHagAICiRQEAo9UBAHqgAIChTQEAps0FAKcBOACkAQQApX0FAKoBPACrRT0AqEk5AKnlOQCudTEAr30xAKxdPQCtATAAqO0OAKn1DgCqCQ4AqwkOAKwZDgCtGQ4Arg0OAK8tDgB+oACAgqAAgIagAICKoACAjqAAgJKgAICWoACAmqAAgLgdDgC5JQ4Aui0OALslDgC8PQ4Avd0BAL7VAQC/zQEAsFUOALFdDgCyVQ4Asy0OALQ1DgC1JQ4Ati0OALclDgCzgQ0AnqAAgKKgAICqoACArqAAgLaZDQC1kQ0AvlQEALuZDQC6kQ0AhogEAIe8AwC/4Q0AvvENAL35DQC8gQ0AgkkAAKPFDQCA9QMAgUkAAKbdDQCyoACAtqAAgKXVDQCq1Q0Aq90NALqgAIC+oACArrUNAK+lDQCsxQ0Arb0NAKgdAgCpRQIAql0CAKtVAgCseQIArXkCAK6JAwCviQMAwqAAgMagAIDKoACAzqAAgIT8BQDSoACA1qAAgNqgAIC4iQMAuWUDALptAwC7ZQMAvH0DAL1lAwC+bQMAv2UDALDBAwCxwQMAssEDALPBAwC0wQMAtcEDALbBAwC3wQMA3qAAgOKgAIDmoACA6qAAgO6gAIDhpAEA8qAAgOPADgC+aAQA9qAAgPqgAIDvHAEA/qAAgAKhAIAGoQCACqEAgLOVAwAOoQCAEqEAgBqhAIAeoQCAtrkDALWxAwAioQCAu0UCALpFAgCGqAQAh6QFAL9FAgC+RQIAvVUCALxVAgDh4A4A4SwMAOMIDgDj1A4AgK0AAIHRAACC0QAAJqEAgCqhAIAuoQCAMqEAgDahAIA6oQCAPqEAgO+IDgDvLA4AoxUDAEKhAICFxCsARqEAgEqhAICmOQMApTEDAE6hAICrxQIAqsUCAFKhAIBWoQCAr8UCAK7FAgCt1QIArNUCAKgNBgCpFQYAql0GAKtVBgCseQYArXkGAK65BgCvuQYAFqEAgFqhAIBeoQCAYqEAgGahAIBqoQCAbqEAgHKhAIC4TQcAuVUHALpRBwC7aQcAvHkHAL1lBwC+bQcAv2UHALDJBgCxyQYAst0GALPVBgC0zQYAtXUHALZ9BwC3dQcAs9UGAHahAIB6oQCAfqEAgIKhAIC2+QYAtfEGAIahAIC7DQYAug0GAIYIAACHLAAAv7EHAL4JBgC9AQYAvAkGAIJRAACjkQYAgEEAAIFBAACmvQYAiqEAgI6hAICltQYAqkkGAKtJBgCSoQCAlqEAgK5NBgCv9QcArE0GAK1FBgCwsQYAsbEGALLNBgCzwQYAtMEGALXJBgC28QYAt/EGALgFAQC5DQEAugUBALsdAQC8BQEAvQ0BAL4FAQC/uQEAmqEAgJ6hAICioQCApqEAgKqhAICuoQCApqAAgLKhAICoLQYAqTUGAKo1BgCr8QYArNEGAK3RBgCu0QYAr9EGALPdBgC2oQCAuqEAgL6hAIDCoQCAtjEGALU5BgDGoQCAuxUGALoVBgDKoQCAzqEAgL9tBgC+ZQYAvXUGALx5BgDSoQCAo5kGANahAIDaoQCApnUGAN6hAIDioQCApX0GAKpRBgCrUQYA5qEAgOqhAICuIQYArykGAKw9BgCtMQYAqNUCAKndAgCq4QIAq+ECAKxRAwCtUQMArlEDAK9RAwDuoQCA8qEAgL7sAwD6oQCA/qEAgAKiAIAGogCACqIAgLjpAwC56QMAuokDALuFAwC8nQMAvYEDAL6BAwC/tQMAsDEDALExAwCyNQMAs+kDALT5AwC1+QMAtukDALfhAwCAbQMAgaUAAIKtAACzZQIADqIAgLXVAwC23QMAEqIAgITgAgAWogCAuvkDALv5AwC87QMAvTEDAL4xAwC/MQMAh+wDAIZkPACyAAAAGqIAgB6iAIDjCAQAIqIAgOHsBgAmogCA7wAGACqiAIAuogCAMqIAgDaiAIA6ogCAPqIAgEKiAIBGogCASqIAgE6iAIDjoAMAUqIAgOGoAQBWogCA7/ADAIIdAACBHQAAgB0AAFqiAIBeogCAYqIAgGqiAIC+TD0AbqIAgKOhAwC+QDwApRECAHKiAIB2ogCAphkCAIRsAgB6ogCAqz0CAKo9AgCt9QIArCkCAK/1AgCu9QIAhkA8AIe0PQB+ogCAgqIAgIaiAICKogCAjqIAgO9EBgCSogCA4dQGAJaiAIDjDAcAmqIAgJ6iAICiogCApqIAgLP1AQCqogCArqIAgLKiAIC2ogCAtkUBALXlAQC6ogCAuzEBALopAQC+ogCAwqIAgL8dAQC+HQEAvRkBALwlAQCoLT4AqTU+AKo9PgCrNT4ArC0+AK2FPgCuhT4Ar7k+AGaiAIDGogCAyqIAgM6iAICAGQAAgRkAAIIFAADSogCAuLk+ALm5PgC6ST8Au0k/ALxZPwC9WT8Avk0/AL9BPwCwrT4AsbU+ALKxPgCzjT4AtJk+ALWZPgC2iT4At4k+AKO1PgCEjAIA1qIAgNqiAIDeogCApgU+AKWlPgDiogCAq3E+AKppPgCGCAAAh2gDAK9dPgCuXT4ArVk+AKxlPgDmogCAs5E/AOqiAIDuogCAtlk/APKiAID2ogCAtbk/ALp1PwC7fT8A+qIAgP6iAIC+QT8Av0E/ALxZPwC9VT8AsJU+ALGdPgCyqT4As6U+ALShPgC1oT4AtqE+ALehPgC45T4Aue0+ALrlPgC7/T4AvO0+AL3dPgC+1T4AvxkBAAKjAIAGowCACqMAgA6jAIASowCA9qEAgBajAIAaowCAqF0+AKkhPgCqPT4AqzU+AKwVPgCt/T4ArvU+AK/tPgCj1T4AHqMAgCKjAIAmowCAKqMAgKYdPgCl/T4ALqMAgKs5PgCqMT4AMqMAgDajAICvBT4ArgU+AK0RPgCsHT4AgREAAIANAAA6owCAghkAAD6jAIBCowCAhJQBAL4QAACGQAcAhwABAEqjAIBOowCAUqMAgFajAIBaowCAXqMAgKiNAgCplQIAqpUCAKvNAgCs2QIArdkCA
[binary payload omitted: ~18 lines of headerless base64-encoded data; the diff framing (file path, `GIT binary patch`/`literal` markers, `+` prefixes) did not survive extraction, so the blob cannot be attributed to a file and is not reproduced here]
c4gCAptkGAKXRBgBg4gCAqyEGAKrNBgBk4gCAaOIAgK8pBgCuIQYArSkGAKw1BgBs4gCAs70BAHDiAIB04gCAtnkBAHjiAIB84gCAtXkBALpVAQC7XQEAgOIAgITiAIC++QAAv/kAALxFAQC9+QAAqHECAKlxAgCqcQIAq3ECAKy1AgCtvQIArrUCAK+tAgC+rDwAiOIAgIziAICQ4gCAlOIAgJjiAICc4gCAoOIAgLhpAwC5aQMAugkDALsJAwC8HQMAvQUDAL4NAwC/BQMAsNUCALHdAgCy1QIAs2kDALR5AwC1eQMAtmkDALdhAwCk4gCAqOIAgKziAICj9QIAsOIAgKUxAgCmMQIAtOIAgLjiAIC84gCAqh0CAKsVAgCsDQIArbEDAK6xAwCvsQMA7xgCAIIVAACBbQAAgG0AAMDiAIDI4gCAhvg8AIcYAwDM4gCA0OIAgNTiAIDY4gCA42wHAAThAIDhaAEA3OIAgKiFAgCplQIAqpUCAKulAgCsvQIArdUCAK7RAgCv0QIA4OIAgOTiAIDo4gCA7OIAgPDiAID04gCA+OIAgPziAIC4dQEAuX0BALp1AQC7zQEAvNUBAL3dAQC+yQEAv8EBALC1AgCxvQIAsoECALOBAgC0VQEAtV0BALZVAQC3TQEA4bQGAADjAIDj9AYABOMAgIQYPQAI4wCADOMAgBDjAIAU4wCAGOMAgBzjAIAg4wCAJOMAgCjjAIDvWAYALOMAgIF9AACAcQAAMOMAgIIFAAA44wCAPOMAgO+AAQC+VDwA4ZABAEDjAIDjfAYAROMAgEjjAIBM4wCAhtg8AIf0PACjnT0AxOIAgDTjAIBQ4wCAVOMAgKbVPQCltT0AWOMAgKv5PQCq8T0AXOMAgGDjAICvGT4ArhE+AK3VPQCs1T0AZOMAgLOhPgBo4wCAbOMAgLatPgBw4wCAdOMAgLWxPgC6ST8Au0k/AHjjAIB84wCAvkk/AL9JPwC8ST8AvUk/AKhVPgCpZT4Aqm0+AKtlPgCsfT4ArWk+AK65PwCvuT8AgOMAgITjAICI4wCAjOMAgJDjAICU4wCAmOMAgJzjAIC4VT8AuV0/ALpVPwC7bT8AvHU/AL19PwC+dT8Av20/ALDJPwCxyT8Astk/ALPZPwC0yT8Atck/ALZ9PwC3cT8AghUAAKPhPwCAsQEAgbEBAKbtPwCg4wCAvtABAKXxPwCqCT4Aqwk+AITkAQCk4wCArgk+AK8JPgCsCT4ArQk+ALPdPACo4wCAhugAAIfMAQCs4wCAtpU8ALX1PACw4wCAu7k8ALqxPAC04wCAuOMAgL9ZPwC+UT8AvZU8ALyVPACoUT4AqVE+AKptPgCrYT4ArGE+AK1hPgCulQEAr40BAISgAQC84wCAwOMAgMTjAIDI4wCAzOMAgNDjAIDU4wCAuKkBALmpAQC6aQEAu2kBALx5AQC9eQEAvmkBAL9pAQCw/QEAsc0BALLFAQCzrQEAtLkBALW5AQC2rQEAt6UBALPlPQDY4wCA3OMAgODjAIDk4wCAtuE9ALXpPQDo4wCAuwkCALo5AgDs4wCA8OMAgL99AgC+fQIAvXkCALwRAgD04wCAo6E9APjjAID84wCApqU9AADkAIAE5ACApa09AKp9AgCrTQIACOQAgAzkAICuOQIArzkCAKxVAgCtPQIAgOkAAIHpAACCHQAAvsADAO/kAgAQ5ACAh1QDAIY8BADjEAEAGOQAgOH4AQAc5ACAIOQAgCTkAIAo5ACALOQAgDDkAIA05ACAOOQAgLORAwA85ACAtbkDALZ9AwBA5ACAROQAgEjkAIC6WQMAu1kDALxJAwC9SQMAvv0AAL/1AACoRQIAqVUCAKpVAgCrZQIArH0CAK2xAgCusQIAr7ECAIRsBQBM5ACAUOQAgFTkAIBY5ACAXOQAgL5wBQBg5ACAuF0BALltAQC6ZQEAuw0BALwZAQC9GQEAvg0BAL8FAQCw0QIAsdECALLRAgCz0QIAtHUBALV9AQC2dQEAt20BAOFAPwDjvAAA4wg+AOFsPgBk5ACAaOQAgGzkAIBw5ACAdOQAgHjkAIB85ACAgOQAgL5sBwDvVAAA75w+AIjkAICjnQIAgmkAAIFhAACAaQAAjOQAgKZxAgCltQIAkOQAgKtVAgCqVQIAhsgEAIfsBACv+QEArvEBAK1FAgCsRQIAqKUGAKmpBgCquQYAq7kGAKypBgCtqQYArtkGAK/ZBgCE5ACAlOQAgJjkAICc5ACAoOQAgKTkAICo5ACArOQAgLhxBwC5cQcAunUHALvdBwC8xQcAvc0HAL7FBwC//QcAsKkGALG1BgCytQYAs40GALSVBgC1UQcAtlEHALdRBwCzMQYAsOQAgLTkAIC45ACAvOQAgLYpBgC1IQYAwOQAgLtxBgC6bQYAxOQAgMjkAIC/lQcAvlEGAL1ZBgC8YQYAzOQAgKN1BgDQ5ACA1OQAgKZtBgDY5ACA3OQAgKVlBgCqKQYAqzUGAODkAIDk5ACArhUGAK/RBwCsJQYArR0GAIANAACBFQAAgh0AAOjkAIDs5ACA8OQAgITcAQD05ACAhoAAAIcgAQD45ACA/OQAgADlAIAE5QCACOUAgAzlAIAQ5QCA43QEABTlAIDhyAUAGOUAgBzlAIAg5QCAJOUAgCjlAIAs5QCAMOUAgDTlAIA45QCA77QEADzlAIBA5QCAqD0GAKlVBgCqVQYAq6kBAKy5AQCtuQEArqkBAK+pAQCErAEAROUAgEjlAIBM5QCAUOUAgFTlAIBY5QCAXOUAgLhtAQC5BQEAugEBALsBAQC8BQEAvQ0BAL4xAQC/MQEAsNkBALHZAQCybQEAs2UBALR9AQC1ZQEAtmUBALdVAQCBvQMAgL0DALPVBQCCGQAAtTkCAGDlAIC+VAMAtjECAGjlAIBs5QCAuxUCALoVAgC9uQIAvLECAL+pAgC+sQIAcOUAgKZpAgClYQIAhAAMAKONBQB05QCAhvgMAId8AwCv8QIArukCAK3hAgCs6QIAq00CAKpNAgB45QCAfOUAgIDlAICE5QCAiOUAgIzlAIDjIAEAkOUAgOGgAQCU5QCA70ACAJjlAICc5QCAoOUAgKTlAICo5QCArOUAgLDlAICz8QMAtOUAgBTkAIC45QCAvOUAgLbpAwC14QMAwOUAgLu1AwC6tQMAxOUAgMjlAIC/lQMAvpUDAL2lAwC8pQMAqCkCAKkpAgCqOQIAqzkCAKwpAgCtKQIArlkCAK9VAgCAzQEAgQkAAIIZAADM5QCA0OUAgL58DQCHtA0AhhwMALgxAgC5PQIAujUCALvpAgC8+QIAvfkCAL7pAgC/6QIAsDECALExAgCyMQIAszECALQRAgC1EQIAthECALcRAgDY5QCA3OUAgODlAIDk5QCA6OUAgOzlAIDw5QCA79QGAPTlAIDhVAYA+OUAgOOkAACsDBUA/OUAgADmAIAE5gCAo/ECAAjmAIAM5gCAEOYAgBTmAICm6QIApeECABjmAICrtQIAqrUCABzmAIAg5gCAr5UCAK6VAgCtpQ
IArKUCAKghDgCpIQ4AqkkOAKtZDgCsaQ4ArWkOAK6ZDgCvmQ4A1OUAgCTmAIAo5gCALOYAgDDmAIA05gCAOOYAgDzmAIC49Q4Auf0OALr1DgC7iQ4AvJ0OAL2FDgC+hQ4Av7UOALDpDgCx6Q4Asv0OALPxDgC01Q4Atd0OALbVDgC3zQ4As8EOAIIVAACBtQAAgLUAAEDmAIC26Q4AteEOAL4QAAC7LQ4Aui0OAIRkAwBE5gCAvxkOAL4RDgC9JQ4AvCkOAEjmAICjhQ4AhogAAIdsAwCmrQ4ATOYAgFDmAIClpQ4AqmkOAKtpDgBU5gCAWOYAgK5VDgCvXQ4ArG0OAK1hDgCziQ4AXOYAgGDmAIBk5gCAaOYAgLaBDgC1iQ4AbOYAgLuVDgC6jQ4AcOYAgHTmAIC/+Q4AvvEOAL2FDgC8hQ4AeOYAgHzmAICA5gCAhOYAgOMMDQCI5gCA4RgNAIzmAIDvrAwAkOYAgJTmAICY5gCAnOYAgKDmAICk5gCAqOYAgKgBDgCpAQ4AqgEOAKsBDgCsAQ4ArQEOAK4BDgCvPQ4AgN0AAIEJAACCGQAArOYAgLDmAICEPAEAvnQAALjmAIC4HQ4AuS0OALolDgC76QEAvPkBAL35AQC+6QEAv+kBALBJDgCxUQ4AslEOALNRDgC0NQ4AtT0OALY1DgC3LQ4Ao4kNALzmAICGrAQAhzwDAMDmAICmgQ0ApYkNAMTmAICrlQ0Aqo0NAMjmAIDM5gCAr/kNAK7xDQCthQ0ArIUNANDmAICznQIAhEgDAL5ABAC2VQMA1OYAgNjmAIC1sQIAunEDALt5AwDc5gCA4OYAgL4xAwC/MQMAvFEDAL1RAwCwkQMAsZkDALKhAwCzoQMAtNEDALXRAwC20QMAt9EDALj1AwC5+QMAus0DALvFAwC83QMAvcUDAL7NAwC/xQMA5OYAgOjmAIDs5gCA8OYAgIV8GQD05gCA+OYAgGTlAICoIQIAqTECAKoxAgCrBQIArB0CAK3xAwCu8QMAr/EDAPzmAIAA5wCABOcAgAjnAIDvUAAADOcAgBDnAIAU5wCA44QAABjnAIDh+AEAHOcAgIAVAACBGQAAggUAACDnAICjmQMAKOcAgIZoBACHYAUALOcAgKZRAgCltQMAMOcAgKt9AgCqdQIANOcAgDjnAICvNQIArjUCAK1VAgCsVQIAPOcAgEDnAIBE5wCASOcAgEznAIBQ5wCAVOcAgO/4AQC+bAQA4YAOAFjnAIDjFAEAXOcAgGDnAIBk5wCAaOcAgGznAIBw5wCAdOcAgLPdAQB45wCAtf0BALb1AQB85wCAgOcAgITnAIC6sQEAu4UBALydAQC9NQEAvj0BAL81AQCpBQYAqLkFAKsVBgCqHQYArT0GAKw9BgCvTQYArl0GACTnAICCHQAAgR0AAIAdAACI5wCAjOcAgJDnAICU5wCAuUEHALidBgC7QQcAukkHAL1FBwC8WQcAv0UHAL5FBwCxCQYAsD0GALOpBgCyAQYAtbkGALSxBgC3rQYAtrEGAKORBgCEjAIAhigAAIfAAwCY5wCAprkGAKWxBgCc5wCAq8kGAKr9BgCg5wCApOcAgK95BgCucQYArXkGAKzRBgCo5wCAs5kHAKznAICw5wCAtlEHALTnAIC45wCAtbEHALptBwC7dQcAvOcAgMDnAIC+WQcAv0UHALxtBwC9ZQcAxOcAgMjnAIDM5wCA0OcAgNTnAIDY5wCA3OcAgO+oBQDg5wCA4TQFAOTnAIDjdAUA6OcAgOznAIDw5wCA9OcAgKMdBgCCLQAAgRUAAIAdAAD45wCAptUGAKU1BgD85wCAq/EGAKrpBgAA6ACAhCgBAK/BBgCu3QYAreEGAKzpBgCoxQYAqdUGAKrVBgCr5QYArP0GAK0VBgCuHQYArxUGAL7sAQAI6ACAhggAAIcgAAAM6ACAEOgAgBToAIAY6ACAuH0GALkFBgC6DQYAuwUGALwBBgC9CQYAvjkGAL85BgCwbQYAsXUGALJ9BgCzdQYAtFkGALVFBgC2TQYAt0UGAKiRAgCpmQIAqqECAKuhAgCs0QIArd0CAK7VAgCvyQIAHOgAgCDoAIAk6ACAvyweACjoAIAs6ACAMOgAgDToAIC4VQMAuV0DALppAwC7ZQMAvGEDAL1hAwC+YQMAv2EDALC5AgCxjQIAsoUCALNtAwC0dQMAtX0DALZ1AwC3bQMAOOgAgDzoAICzIQIAQOgAgLVRAgCEiAMAROgAgLZVAgC05gCAvigcALtBAgC6dQIAvbEDALxZAgC/sQMAvrkDAKNpAgBI6ACATOgAgFDoAIBU6ACAph0CAKUZAgBY6ACAqwkCAKo9AgBc6ACAYOgAgK/5AwCu8QMArfkDAKwRAgCopQIAqbUCAKq9AgCrtQIArK0CAK01AQCuPQEArzUBAL4sHABk6ACAaOgAgGzoAIBw6ACAeOgAgIdoHQCGHB0AuIUBALmNAQC6hQEAu50BALyNAQC9vQEAvrUBAL95AACwUQEAsVEBALJRAQCzUQEAtPEBALXxAQC29QEAt+UBAO/YAACCtQAAgaUAAIClAAB86ACAgOgAgIToAIDvxAYAiOgAgOH0BgCM6ACA4zgBAOPMAACQ6ACA4SgBAJToAICY6ACAtuUBALV1AgCEQBwAs2UCAJzoAICg6ACApOgAgL9lAQC+ZQEAvdUBALzVAQC7xQEAusUBAKjoAICs6ACAo7UdAHToAICw6ACAtOgAgLjoAICmNR4ApaUdALzoAICrFR4AqhUeAMDoAIDE6ACAr7UeAK61HgCtBR4ArAUeAMjoAIDM6ACA0OgAgNToAICADQAAgTUAAII9AADY6ACA3OgAgODoAIC1BQAAcRoAgOG0AgCs2AIAtQUAAHUaAICotR8AqRUfAKodHwCrFR8ArDEfAK09HwCuLR8AryEfAOG0AgCs2AIAtQUAAHkaAIDhtAIArNgCALUFAAB9GgCAuNEAALnZAAC64QAAu+EAALyRAAC9kQAAvpEAAL+RAACwIR8AsTEfALIxHwCzMR8AtAkfALUJHwC28QAAt/EAAOG0AgCs3AIA71QdALUdAACBGgCA4bwCAKzQAgC1KQAAoyUBAKKRAwChFR0AoA0dAOGAHgCFGgCA47wdAOHEAgCz1R4AtQkAAKzYAgCJGgCA4bwCALb9HgC1+R4ArOACALu1HgC6pR4AtQUAAI0aAIC/jR4Avo0eAL2lHgC8pR4AoxUeAOG8AgCs0AIAtREAAI9pJQCmPR4ApTkeAJEaAICrdR4AqmUeAOG0AgCseAEAr00eAK5NHgCtZR4ArGUeAJvdFACa5RUAmQEXAJjhEACfcR8AnnkZAJ35GQCcARsAk+UtAJIRLwCRbSkAkG0pAJf5EQCW8REAlYUsAJSZLQC1JQAA4ZQCAILxJgCDjSoAhJUqAIXhLACGHS4Ah3kuAKy0AgCVGgCAilUvAIspEgCMORIAjRkTAI7xFACPHRYAtQUAAJkaAICSVRcAk5EYAJRxGgCV+RoAlvkcAJd9HgCC4AMAk
wsAgJpVHgCb2QAAnHUCAIMMAICzDACAuIkKAKwBBACthQYAroEGAMwQAgDMfAMAtgwAgJ0aAIDCDACAxQwAgMgMAIAACwCAgaUyArwMAIAE6ACAmpUGAJtVIwK8kQYAvbEAAL6RBgC/rQYAuOkGALmVBgC6kQYAoRoAgLTBBgC1zQYAts0GALfdBgCw/QYAseUGALKdAACz5QYAhVTHA6UaAICH/AAAuAEKAK0aAIDpDACAsRoAgIyRcwCNpAEAzPACAL4NAIDBDQCAiRQAALgZCgCLDAAAGg4AgFMOAIC5DACAvwwAgBkKAICRwAEAywwAgLhtCgDODACA1AwAgNoMAIDdDACA4AwAgLUaAIAoDQCA5gwAgLkaAIDhpB4AKw0AgONUHgCvIXMAzCgCAO8MAIDsDACA8gwAgPUMAID4DACAzIACAJS4AwD7DACAkhQCAO9gHgCQAAIA/gwAgAoNAIC48QoADQ0AgJ8LAIAQDQCAiSkLABMNAICpGgCAvDABAL/EAQC+7AEAFg0AgMzsAgC4xQoAukQBAK0JAIAZDQCAygYAgN8GAIDyBgCAHA0AgPoGAIAfDQCACgcAgC0HAIAYBwCA9gcAgC8HAICpDQCAOgcAgK8NAIBKBwCAtXkAAGcHAIC3cSoCcgcAgLFhAAB0BwCAsw0pAo0HAIC96QAAoAcAgPoHAICtBwCAuRkrAsMHAIC7WRQCHwgAgFoJAIA8CACALw4AgFsIAIA5AACAgQgAgHEAAIDHCACAKwAAgCAJAIA9AACAXAkAgEMAAIBeCQCARQgAgGoIAIBJAACAAAgAgFMAAIB5CQCAWQAAgCINAIBfAACAuw0iAtANAIDMFDYCHwAAgL9lAAC+EQAAvW0AAOUHAICAaQEAgXUBAIJxAQCD3SEChGkHAIWBBwCGgQcAh3EBAIihAQCJrQEAirUHAIuNBwCMlQcAjaUBAE8AAICPpQEAkOEBAJHtBwCSsSECk/0HAJSNBwCVUQYAlvEBAJfZAQCY0QEAmXUGAJp9BgCb1QEAnGkGAJ2ZFAKeUQYAn1EGAKB1FAKhuQYAokkBAKOFLQKkIQEApS0BAKZ1FAKntQYAqKERAqlRFAKqlQYAsSEAgMy8NQLNPDUCbQAAgKoDAICsAwCArwMAgL0hAIDEIQCA2yEAgOIhAIDJAACADwAAgLihBgC6BgCAtwYAgMwAAIDOIQCAtQMAgN0FAIAYBgCAugUCALvVAgC46QUAuf0FAL7JAgC/5RcCvA0CAL0BAgCy4QUAs+EFALCNBQCxnQUAtuUFALfpBQC09QUAte0FAKo9BQCrwQUAqD0FAKk1BQCuzQUAr/UFAKzNBQCtxQUAoj0FAKMFBQCg1QIAoTkFAKYdBQCnBQUApB0FAKUVBQC/BgCAm8EFAD4GAIBVBgCAnt0FAJ8xBACcUQIAndUFAHIGAICJBgCApAMAgDAiAIDbAACAoAMAgI8HAIDuBwCA8gcAgJAJAIACCACABggAgJYLAICUCQCArwoAgG8HAICLBwCAlwcAgKIHAICqBwCAqgkAgPsOAIASDwCAHw8AgMwEMwLNsDACzCAzAs3gMALMEDACzGgwAsxYMALNjDACzGgxAs0UMQLM1DECzRQ2AsxwIALN0CcCzDA2AswkMQLMDDwCzWg/AswYPwLNND8CzBg9As3AMgLMRDwCzBg5Asw4MgLNqDICzIgyAs34MwLMfDMCzUAzAswoMwLNCDMCzMghAs0kJgLMrCYCzEA4AsyYJQLNyDoCzBwkAs0QJALMhDsCzag7AsysJQLNvDoCzKw4Asz4JwLM4DgCzXQ4AicPAID2BgCAYQ0AgIgNAIDNICoCzBwrAqoGAIAsIgCAzKQgAs2gJwLMOCYCygQAgMw4OgLNPDsCzBA5As1gPgLMoAMAvj0NAL3tLALWBACAu1UjAgQJAIC5PSICzwYAgNkHAIClBACAoA0AgLIEAIBvBQCA9AYAgL4EAIB1BQCAr70MAK6ZLgKtpQwAwgUAgKvFIgIDBgCAxAQAgCMGAIDQBACAyAUAgCkGAIBdBgCAowEYAqAEAIAaBwCAHQcAgJ9dDACeUQwAnUUMACcHAICbWSECrwcAgLEHAIC0BwCAuAcAgCoHAIDOBwCA0AcAgJMtJgLTBwCAbAgAgG8IAICPBQwAjnEMAI1lDAB5CACAi0UgAmAJAICJNS8CYwkAgGcJAIB8CACAcAkAgHMJAIC9AwCAACIAgIFdDACAYQwAgAABAIEYAACCAAQABCIAgIQQBwCFFAYAhuQIAIc8AgCILAUAiaQFAIoAeAAIIgCAjCQAAAwiAIAUIgCAECIAgLgRAACRxHsAkkh6AJNMeQAcIgCAzOgCAJbwCQC4OQAAkMAJACQiAICS8AkAzPgCAJS0CQC4DQAAKCIAgMwcAgC4BQAANCIAgMzkAgC4HQAAOCIAgDwiAIBDIgCAWiIAgKiMCACp5HsAYSIAgKvUBgDM5AIAuA0AAGsiAIDMlAIAbyIAgLGAewC4CQAAuBUAAMz8AgC15AgAcyIAgMzYAgB3IgCAuAUAALqcBQC7XAUAvAB8AL30fwC++H0Av/xyAIAJOgKBDToCggE6AoMFOgKEGToChR06AoYROgKHFToCiCk6AoktOgKKIToCiyU6Aow5OgKNPToCjjE6Ao81OgLM8AIAkekPAIMiAIDMzAIAuBkAAH8iAIDM3AIAl+UPALg1AAC4DQAAjyIAgMz8AgC4BQAAkyIAgMwwAgCXIgCAzNACAJsiAICfIgCAzIgCAKQtDwClVQ8Apl0PAMyUAgCoqToCqa06ArjVAACjIgCAuDUAAKciAIDMUAMAr7U6AswsAwCrIgCAzBgDALMFDwC0HQ8AzyIAgLYJDwC3CQ8Avmh9ALhtAAC4RQAAzDgDALwpDwDTIgCAviUPAMxYAwCH5Q4AzOg6Ari9AQC4yQEAzPA1As2kMwLMgCICzXwlAs2UNgLMBCkCzew7AsxkOgK45QEAuMEBAInVDgCI1Q4Al7EOALgNAACvIgCAsyIAgLciAIC4GQAAuyIAgNciAICfaTsC2yIAgL8iAIC4PQAAzMQCAMz4AgDDIgCAxyIAgLjZAADLIgCA3yIAgLjRAADjIgCAuPEAAMzMMwLnIgCAuMkAAMzoMwLrIgCAuNUAAKllAAC4yQAAzNgCAKq5BgC3TQ0Atk0NALU1DgC0NQ4AuFUAABUjAICxGQ8AsCkOAL/1AwC+UQ0AvVkNALw1DAC7XQ0Aul0NALldDQC4XQ0AgL0KAIHFCgCCFQQAg8kKAMx8BQCF3QoAhtUKAIfNCgDMVAUAifEKAIq5CACLDQgAjBEIAI0VCACOtScCj+UKAJBpCACRbQgAknEIAJNtJALMEAUAlR0IAJaFCgDMEAUAzDQFAJk9CACaiQoAmw0IAJwRCACdFQgAzEgFAMwQAgCgZQoAoW0KAKJlCgC4BQcApLEEAMzoAgCmsQQAuA0HAKiBBADM/AIAqpkIAKtdCgCsuQgArakEALglBwCvNQgAsNEIALHxBADMwAIAs40I
ALQpKAK1IQoAtiEKALchCgC4IQsAuSUIALhBBwC7KQsAvA0dAr3dDwC+MQsAvzELAIDdCgAZIwCAnKF9ANADAIDpAwCAhRkJAIaZCQCHlQkAiOEJAIklJQICBACAGwQAgC4EAIBBBACAVAQAgGcEAICQrQoAkUkFAJJtBQCTYQUAlGEFAJVtBQCWZQUAlxEFAJg1BQCZPQUAmjUFAJsNBQCcFQUAnR0FAJ4VBQCfCQUAoKkJAKH9BQCi9QUAowEFAKQFBQClDQUApgUFAKc9BQCoBQUAqQ0FAKoFBQCrGQUArIkJAK2pBQCutQkAr/0JALABCQCxfQUAsnUFALMBBQC0aQkAtQEFALYFBQC3PQUAuAUFALnhJQK6AQUAuwEFALzRJQK9PQkAvnkJAL9dCQCDMAUAoXgHAJ+xfgB6BACApHgHAKVIBwCNBACA8wQAgIt8BADdAACAEwEAgIhIBAAcAQCAIAEAgCQBAIAoAQCALAEAgDABAICyAAcAs/wHADQBAIDhAACAtuQHALfwBwDmAACA6wAAgLrgBwC7nAcAvIgHAL2oBwDwAACAs8F+AKPMBAD1AACA+gAAgIMABAD/AACAhXQEAKUgBAAEAQCAiEwEAAkBAIAOAQCAFwEAgK8tBwCNxAcArSEHAKwpBwDNAwCA8AQAgI8FAICwZQcA4gUAgB0GAIBDBgCAWgYAgHcGAICOBgCA0wMAgOwDAIAFBACAHgQAgDEEAIC8fAQAgt0rAoPlKwKA/QoAgfkrAoaZCQCHmQkAhOEKAIXhCgCKiQkAi4kJAIiJCQCJiQkAjoUJAEQEAICM4QgAjY0JAJK5KwKTQScCkJkrApHFCwCWyQsAl3UnApTFDQCV0SQCmskLAJvZKgKYyQsAmXkHAFcEAIBqBACAnP0LAH0EAICQBACA9gQAgKABAICkAQCAqAEAgONkAgCsAQCAsAEAgLQBAIDvvAcAqBEJALgBAIC8AQCAwAEAgMQBAIDIAQCAzAEAgNABAIDUAQCA2AEAgNwBAIDgAQCA5AEAgOgBAIDsAQCA8AEAgPQBAID4AQCA/AEAgAACAICCnH4ABAIAgKD1VAKh2VQCoulUAqP1dQCk7XUApZ12AKaVdgCnvXYAqIV2AKkpfQCqOX0AqwV9AKwdfQCtBX0Arg19AK8FfQCwfX0AsUl+ALJRfgCzUX4AtHV+ALV9fgC2aX4At2l+ALhZfgC5WX4Auil+ALspfgC8IX4AvSF+AL4ZfgC/GX4AkgcAgDkJAIDXBwCATSIAgLQNAAC1NQAAtj0AAKIGAICsBgCArwYAgAMjAIAJIwCAvSV4ALy1WALGMQCALjoAgJkqAIC9KgCAySoAgNkqAIDhKgCA7SoAgPUqAID9KgCACSsAgF0rAIB1KwCAhSsAgJUrAIClKwCAtSsAgNUrAICAeX8AgYF/AIKBfwCDnX8AhI1/AIWxfwCGsX8Ah7F/AIjhfwCJ4X8AiuF/AIv9fwCM5X8Aje1/AI7lfwCP3X8AkKV/AJGtfwCSpX8Ak71/AJSlfwCVrX8Alm1+AJctfgCYFX4AmRl+AJrpfgCb6X4AnPl+AJ35fgCe6X4An+V+AKAdfgChJX4AoiV+AKM9fgCkJX4ApS1+AKYlfgCnXX4AqGV+AKltfgCqZX4Aq31+AKxlfgCtbX4ArmV+AK9dfgCwJX4AsS1+ALIlfgCzPX4AtCV+ALUpfgC2WXcAt9V1ALj9eQC56XUAuvl1ALvZeQC86XUAvdV1AL7RdQC/2XUAgDF2AIE9dgCCSXYAg0V2AIRBdgCFTXYAhvl0AId9dgCIoQIAiU12AIpZdgCLuXoAjEl2AI2degCOsQIAjx16AJCRVgKRKXYAkoF2AJPNdgCU2XYAlel2AJbJdgCX0VkCmKF2AJllWgKa8XYAm01aApzRdgCdYXoAnoFWAp/VdgCgBQIAoY1aAqI1VwKjCXYApCF2AKUtdgCmiVoCp5laAqi5WgKpdXYAql13ANkrAIDdKwCAESwAgDksAIBJLACAUSwAgFUsAIBhLACAfSwAgIEsAICZLACAnSwAgKUsAIC1LACAUS0AgGUtAIClLQCAuS0AgMEtAIDFLQCA1S0AgJl1CgD4LQCAJC4AgDAuAIBQLgCAXC4AgGAuAIBkLgCAgux6AINkewB8LgCAgC4AgIZ0ewCHvHsArC4AgLguAIDALgCAyC4AgNguAIDnLgCA7y4AgBsvAIAfLwCAJy8AgJJwfAArLwCAMy8AgJFMfAA7LwCASy8AgGcvAIDfLwCA8y8AgKvMfACo5HwAqdx8APcvAIB3MACAezAAgI8wAICiwHwAkzAAgJswAICjMACAzEBJAs0ASQLM/EoCzWhLAqswAIC3MACA7TAAgP0wAIARMQCAjjEAgJoxAICqMQCAsqx8ALNAfAC2MQCAwjEAgMoxAIDOMQCAtGx8ALUEfACAlQcAgZ0HAIKVBwCDqQcAhLkHAIW5BwCG2QcAh9kHAIjpBwCJ6QcAivkHAIv5BwCM6QcAjekHAI7RBwCP0QcAkLEHAJGxBwCSSQEAk0kBAJRZAQCVWQEAlkkBAJdJAQCYeQEAmXkBAJpJAQCbSQEAnFkBAJ1ZAQCeSQEAn0kBAKC5AQChuQEAoskBAKPJAQCk2QEApdkBAKbJAQCnyQEAqPkBAKn5AQCqyQEAq8kBAKzZAQCt2QEArskBAK/JAQCwuQEAsbkBALJJAQCzSQEAtFkBALVZAQC2SQEAt0kBALh5AQC5eQEAukkBALtJAQC8WQEAvVkBAL5JAQC/SQEA0jEAgNYxAIDaMQCAkjIAgNoyAIDmMgCA6jIAgO4yAIDyMgCA+jIAgP4yAIASMwCALjMAgDYzAIB2MwCAejMAgIIzAICGMwCAjjMAgJIzAIC2MwCAujMAgNYzAIDaMwCA3jMAgOIzAID2MwCAGjQAgB40AIAiNACARjQAgIY0AICKNACAqjQAgLo0AIDCNACA4jQAgAY1AIBKNQCAUjUAgGY1AIByNQCAejUAgII1AICGNQCAijUAgKI1AICmNQCAwjUAgMo1AIDSNQCA1jUAgOI1AIDqNQCA7jUAgPI1AID6NQCA/jUAgJ42AICyNgCAnoUMAOY2AIDqNgCA8jYAgIC5AwCBuQMAgskDAIPJAwCE2QMAhdkDAIbJAwCHyQMAiPkDAIn5AwCKyQMAi8kDAIzZAwCN2QMAjs0DAI/FAwCQvQMAkQEMAJJJDgCTSQ4AlFkOAJVZDgCWSQ4Al0kOAJh5DgCZeQ4AmkkOAJtJDgCcWQ4AnVkOAJ5JDgCfSQ4AoLkOAKG5DgCiyQ4Ao8kOAKTZDgCl2Q4ApskOAKfJDgCo+Q4AqfkOAKrJDgCryQ4ArNkOAK3ZDgCuyQ4Ar8kOALC5DgCxuQ4AskkOALNJDgC0WQ4AtVkOALZJDgC3SQ4AuHkOALl5DgC6SQ4Au0kOALxZDgC9WQ4AvkkOAL9JDgC8eQQAvXkEAL6JBAC/nQQAuHUEALl9BAC6aQQAu2kEALRxBAC1cQQAtnEEALdxBACwcQQAsXEEALJ
xBACzcQQArGkEAK1pBACucQQAr3EEAKhBBACpQQQAqkEEAKtBBACknQUApWEEAKZhBACnYQQAoJ0FAKGFBQCijQUAo4UFAJxdBQCdZQUAnm0FAJ9lBQCYXQUAmUUFAJpNBQCbRQUAlB0FAJVlBQCWbQUAl2UFAJAdBQCRBQUAkg0FAJMFBQCMMQcAjTEHAI4xBwCPMQcAiDEHAIkxBwCKMQcAizEHAIQxBwCFMQcAhjEHAIcxBwCAMQcAgTEHAIIxBwCDMQcAJjcAgC43AIA2NwCAcjcAgHY3AIB+NwCAgjcAgIY3AICyNwCAtjcAgL43AIDSNwCA1jcAgPI3AID6NwCA/jcAgCI4AIBCOACAUjgAgFY4AIBeOACAijgAgI44AICeOACAwjgAgM44AIDeOACA9jgAgP44AIACOQCABjkAgAo5AIAWOQCAGjkAgCI5AIA+OQCAQjkAgEY5AIBeOQCAYjkAgGo5AIB+OQCAgjkAgIY5AICOOQCAkjkAgJY5AICaOQCAnjkAgK45AIDGOQCAyjkAgNY5AIDaOQCA3jkAgOI5AIDqOQCA7jkAgPI5AID+OQCABjoAgA46AIASOgCAGjoAgIC5AQCBuQEAgskBAIPJAQCE2QEAhdkBAIbJAQCHyQEAiPkBAIn5AQCKyQEAi8kBAIzZAQCN2QEAjskBAI/JAQCQuQEAkbkBAJIRAACTEQAAlDEAAJUxAAAeOgCAIjoAgCo6AIAyOgCAPSMAgGUsAIBpLACAJSQAgIJgAgCZ4QAAgIAAAIGYAACC5AYAg4gEAITUGwCFlBoAhhgfALMjAICIxB4AiQAQAIqoEwCLrBEAjAAoAI20KwCOuCoAj7wpAOOwAgC+dAIAnlUAAOMUAgCCbAIAtyMAgJkNAAC+RAIAnjUAAIJoAgCZBQAAuyMAgO/MAgC+oAAAgoQAAO/YAgDj7AEA4/QBAL8jAIDjCAMAwyMAgOM4AwDHIwCA44gDAMsjAIDv4AMAzyMAgO+IAwDvPAEA78QDANMjAIDv1AMA4+wDAB43AIDXIwCA4+wDAOPsAwDj5AMA2yMAgOO4AwDvXAMA70wDAN8jAIDvSAMA7/QDAOMjAIDnIwCA7zQDAON8AwDjlAQA6yMAgO8jAIDzIwCA47QEAPcjAID7IwCA/yMAgO9sBAADJACAByQAgO9YBADvUAQACyQAgBYkAIAaJACAvQAAgOP4BADCAACAMSQAgB4kAIBtKQCA45wEAAglAIBrJQCAriUAgO9QBADaJQCABCYAgO88BAApJgCAgAlLAoYcdwC+RAIAgnQCAL5QAgA+JgCAmREBAJkNAQCPrAIAggQCAI1oAQCewQIAi3wBAJ49AQCeKQEAvggCAJfQAgCZXQEAldACAJ5VAQCT0AIAmXUBAJHQAgC+SAIAn7gCAEYmAICdtAIAnk0BAJuwAgCZXQEAmbQCAL6EAgCeqQEApowCAGImAICkgAIAmakBAGomAIChSAIAgqwCAK/kAgCCtAIAglwCAJnlAQC+CAIAgnwCAIIABACopAIAnvkBAL5wAgC1HAQAnoUBAL6oBQCyhAIAtrECAL6sBQC4KQkAuYkCALqZAgCCjAUAu+gEAIKcBQByJgCAuPAEAJ5ZBgCZbQYAnmEGAJl5BgC+fAIAnmEGAIJcAgC+QAIAmVkGAJ5dBgCCYAIAmaUGAL58AgCevQYAghwCAL4UAgCZzQYAvkwCAIJMAgCa3QYAnt0GAJ/FBgDjDAIAgrwCAJn5BgC+ZAIA7/QCAJrxBgCe6QYAn+kGAJ7ZBgCf1QYA4wQCAJklBgCaIQYAgngCAJk9BgDjBAIAgkQCAJolBgC+cAIA75wCAJ4FBgCfFQYA7+gCAJp1BgCZBQYAggQCAL5wAgDjcAIAnnUGAJ8NBgCeAQYAvnwCAOM0AgCZDQYAvmACAIJsAgDv8AIAmTUGAIKQAwDv2AIAniEGAIQmAICbxQcAmeUHAL58AgCe7QcAn8UHAOPsAwCdUAIAnNEHAIJsAgDv1AIAmc0HAIJ8AgC+cAIAmd0HAJ7dBwC+AAIA42gCAJ6tBwCZuQcA42gCAIJ8AgDjDAIAvkgCAJmpBwCCWAIA78QCAJ6ZBwC+bAIA77gCAIKUAgCejQcA77gCALsAAACZeQcAuQwAAJ5xBwC/AAAAglQCAL0EAAC+aAIAs9QDAJmxBgCxcAMAggQCALc4AACeoQYAtTQAAL5wAgCrWAMAnqEGAO9cAgCZqQYArxADAIJQAgCtFAMAmYUHAJlpBgC+WAIAnmEGAL58AgCCaAIApqACAOOQAgCZaQYA43wBAOOYAQDjrAEA49ABAOPoAQC+dAIAno0FAOMwAgDvzAIAgmgCAJnRBQDvlAIA71QBAO9wAQDvJAEA7ygBAL58AgCevQUA4wwCAIJ4AgCZrQIAvnQCAJ6lAgDjNAIAgmACAJkZAAC+YAIA7/wCAJ4NAACClAIA79QCAJAmAIDj/AIAmQkAAL5gAgCYJgCAnh0AAOMAAgCwJSoAglgCAJkNAADv9AIAvmQCAK4mAIDvwAIAnhkAAIIYAgCCOAIA43ACAJkRAACaNQAAmSkBAL50AgDsJgCAnyUAAJ4JAACZ6QEAvrQDAL7gAwCazQEA79gCAJ4RAQCC2AMA/SYAgIHEAgDjsAMAHycAgOP8AwC+/AIAhMQCAIIoAgCGEAIAKicAgIg8AgCeIQAAnw0AAHonAIDvKAMAj3QCAO8sAwCCiAIAmXUAAJoVAACSxAMAldADAJktAACa0QAAjicAgL7IAgCYaAMAm3wDAILEAwCeQQAAnykAALAnAICChAIA45ACAL4IAwC+JwCABigAgJ8ZAACe7QAA49ACAJlxAACaFQAAvhQCAO8wAgCZIQAA71gCABQoAICv7AMAggQCALFMHACwABwAniUAALJMHACeXQAAn2EAAOO8AgCZIQAA+QAAAHEpAIDvlAIAdSkAgL08HACCgB0Av8EfAHkpAIDjtB0AvnQCAJ71HwDj8B0AmQUAAH0pAIC+fAIAngkAAIJgAgCZDQAAiSkAgL5gAgDvzAIAnh0AAOklAIDv3AIA42gCAPkYAIDjPB0AIRoAgP0YAIABGQCAJRoAgCkaAIAtGgCAMRoAgDUaAIA5GgCA76QCAD0aAIDvJB0AQRoAgLHFAAAFGQCAs8UAALLdAAC1yQAAtMEAALcdAAC2wQAAuWUAALhlAAC7zQAAus0AAL3dAAC83QAAv8UAAL7JAAAJGQCADRkAgE0ZAIBhGQCAERkAgBUZAIDvFHgD7wBIA+HYTQPhOKgC41x5A+O0UAOtGQCAsRkAgLUZAIC5GQCAgMkBAIHVAQCC3QEAg20CAITdAQCFcQIAhgEEAIcdBQCIJQUAiTUFAIo9BQCLbQUAjHUFAI1lBQCObQUAj80BAJC1AQCRvQEAkrUBAJNNAwCUVQMAlV0DAJZVAwCXTQMAmHUDAJl9AwCadQMAm00DAJxVAwCdWQMAnkkDAJ9JAwCguQMAobkDAKLBAwCj3QMApMUDAKXNAwCmxQMAp/0DAKjJAw
CpyQMAqtEDAKvRAwCsMQMArTEDAK4xAwCvMQMAsFEDALFRAwCyUQMAs1EDALRxAwC1cQMAtnEDALdxAwC4UQMAuVEDALpRAwC7UQMAvDEDAL0xAwC+MQMAvzEDAL0ZAIDBGQCAxRkAgMkZAIDNGQCA0RkAgNUZAIDZGQCA3RkAgOEZAIDwIAIA5RkAgOkZAIDtGQCA8RkAgPUZAICc9TYAnf02APkZAICRkAIA/RkAgKkZAIBFGQCASRkAgEUaAIC6adgASRoAgE0aAIC4sTYAubE2AFEaAIBVGgCAWRoAgF0aAIBRGQCAYRoAgGUaAIBVGQCAWRkAgF0ZAIBlGQCAaRkAgG0ZAIBxGQCAdRkAgHkZAIB9GQCAgRkAgIUZAICJGQCAjRkAgJEZAICVGQCAglgCAJkZAIBpGgCA8FgCAG0aAICdGQCAoRkAgKUZAIABGgCABRoAgJF0AwDhtDsCCRoAgOPYIgINGgCAERoAgBUaAIAZGgCAHRoAgKUqAIBVLQCAqSoAgMEqAICtKgCAljMAgO/IPwK1KgCA4ZTzAuGY0gLjlPcC4xDGAuGUtgLhkJ0C44SiAuMIhwIZGQCAHRkAgO+4swLvOIsCnSoAgOAtAIDvIJcC7+DgAoLkAgBpLQCACAIAgLrF2QAOAgCAFAIAgBoCAIAgAgCAJgIAgCwCAIAyAgCAOAIAgD4CAIBEAgCASgIAgFACAIDhgHgC8OQGAOMUagKCgAgA4aAPAuEIEwLjhA4C4xgeAlYCAIA0AwCA7zQ7Au8wHwI6AwCAQAMAgO8MEgJGAwCAJRkAgCkZAIBMAwCAUgMAgC0ZAIAxGQCAWAMAgF4DAIB2AwCAggMAgIgDAICOAwCAlAMAgJoDAIB8AwCAZAMAgDUZAIA5GQCAbQMAgFwCAIA9GQCAQRkAgHQCAIBoAgCAvAIAgHoCAICYAgCAYgIAgJICAIBuAgCApAIAgNQCAICAUQYAgV0GAIJVBgCDaQYAhHkGAIV5BgCGaQYAh2kGAIhZBgCJoQcAiqUHAIu9BwCMpQcAja0HAI6lBwDyAgCA7AIAgOACAICSCRQAkxUUAJTxBwCV8QcAlvEHAJfxBwCY0QcAmdEHAJo5FACb0QcAnIEHAJ2BBwCefQcAnx0UAJktAQCYLQEAmz0BAJo9AQCdLQEAnC0BACEZAICeVQEAkd0GAJDRBgCTJQEAkiUBAJUtAQCULQEAlx0BAJYdAQCJ8QYAiOkGAIvxBgCK+QYAjbEGAIzpBgCPqQYAjrkGAIHxBgCA7QYAg/EGAIL5BgCF0QYAhOkGAIfRBgCG2QYAua0DALitAwC7vQMAur0DAL2tAwC8rQMAv90DAL7dAwCxrQMAsK0DALO9AwCyvQMAta0DALStAwC3nQMAtp0DAKm5AQCosQEAq3UBAKqxAQCtFQEArBUBAK/dAwCu3QMAobkBAKCpAQCjiQEAorEBAKWZAQCkkQEAp4kBAKaRAQAuAwCAwgIAgM4CAIDmAgCA2gIAgAQDAICwAgCA+AIAgCIDAIAKAwCAngIAgIACAIC2AgCAyAIAgP4CAICGAgCAKAMAgKoCAIAQAwCAjAIAgBYDAIAcAwCACS0AgOsuAIDKNACAhAcAgAYFAIAVBQCAJAUAgDMFAIBCBQCASwUAgPAsOABUBQCAXQUAgGYFAICSBQCA40huA5sFAIDhTG4DpAUAgO/0AQOnBQCAqgUAgK0FAIBGOgCApkwAgNZVAIA2aACAZnEAgJZ6AID2jACAVp8AgIaoAIDtugCAJMQAgFTNAICE1gCAtN8AgDG7AIA6rgCABqUAgPkqAICJKwCAoSoAgOUqAIBBMQCAATEAgE40AIDVLACABjMAgIo3AIBiNACAHSwAgJI0AICeMwCAEjgAgFkrAICFLACA+jEAgCY5AIAdKwCArSsAgJ4xAIC8LgCAySwAgFksAIA4LgCALC4AgJGgBgDuMwCAGSsAgJ43AIB1LACAzS0AgLAFAIDh1D8D4VgaA+PcLwPjUA4D4RTyA+FA0wPjQOoD40DDA7MFAIC2BQCA73jrA+9c8gO5BQCA5QUAgO9E3gPvmCUD4bSLA+E8lwPjfKID45iLA+EwQQDhUKwD4xx/AOOIRgDoBQCA6wUAgO84ewDv4EEA7gUAgPEFAIDvzIoD7yCHA4DBGACB3RgAgikLAIMpCwCE6Q4AhekOAIYZDwCH8RgAiCUPAIntGgCK5RsAiyEdAIw5HQCN5RsAjmkQAI/VGgCQhRsAkU0PAJJFDwCTXQ8AlEUPAJVNDwCWRQ8Al30PAJhFDwCZTQ8AmkUPAJtpGwCcQQ8AnUEPAJ5BDwCfQQ8AoMEPAKHBDwCiwQ8Ao8EPAKS5CwCluQsApqkLAKfNDwCo9Q8Aqf0PAKr1DwCrzQ8ArNkPAK3ZDwCuyQ8Ar8kPALC5DwCxuQ8AsmkPALNpDwC0YQ8AtWEPALY5DwC3OQ8AuBEPALkRDwC66QEAu+kBALz5AQC9+QEAvukBAL/pAQD0BQCA9wUAgPoFAID9BQCAAAYAgCAGAIDhBACAgAUAgNMFAIAOBgCANAYAgEsGAIBoBgCAfwYAgJYGAIDdAwCA9gMAgA8EAIASBwCAQQgAgD4IAIA/BwCAOSQAgHIkAICjJACAyCQAgLkmAIDEJgCAyCYAgMwmAIDQJgCALygAgG4oAICWKACAmigAgL8oAIDHKACA4ygAgPUoAID5KACA/SgAgLrp0wAVKQCAMCkAgEspAIA9JACASiQAgFckAIBkJACAdiQAgIMkAICVJACApyQAgLckAIDMJACA1iQAgOQkAIDuJACA+yQAgAwlAIAWJQCAbyUAgHYlAIAkJQCAgBkDAIEZAwCCKQMAgykDAIQ5AwCFOQMAhikDAIcpAwCIGQMAiRkDAIppAwCLaQMAjHkDAI15AwCOaQMAj2kDAJAZAwCRGQMAkgEEAJMtAwCUNQMAlVUGAJZdBgCXVQYAmG0GAJl1BgCafQYAm3UGAJxtBgCdNQYAnj0GAJ81BgCgzQYAodUGAKLdBgCj1QYApPkDAKX5AwCm6QMAp+kDAKjZAwCp+QYAqikGAKspBgCsOQYArTkGAK7FAwCvPQMAsEUDALFNAwCyRQMAs10DALRFAwC1TQMAtkUDALd9AwC4SQMAuUkDALpZAwC7fQYAvGUGAL1tBgC+ZQYAgCUAgKkVDwCoAQ8Aq00PAKpNDwCtRQ8ArEUPAK+hDQCuqQ0AoXULAKBhCwCj7QsAoqkLAKXlCwCk5QsApzkPAKZZCAC5oQ0AuJkNALuhDQC6qQ0AvaENALy5DQAxJQCAvqkNALGhDQCw2Q0As6ENALKpDQC1oQ0AtLkNALehDQC2qQ0AOCUAgEglAIBbJQCAsiUAgLwlAICRJQCAoSUAgNAlAICB7Q0AgO0NAIP9DQCC/Q0Ahe0NAITtDQCH2Q0AhiEYAJlNDQCYTQ0Am1ENAJpdDQCdeQ0AnHUNAJ9pDQCecQ0AkYkNAJCBDQCTmQ0AkoENAJWJDQCUgQ0Al30NAJaBDQDgJACAICUAgI0lAIDMJ
QCA3iUAgAgmAIAtJgCAQiYAgPAlAID6JQCADCYAgBkmAIAxJgCATiYAgFgmAIB2JgCASiYAgGYmAIBuJgCAgCYAgIwmAICUJgCAoyYAgN4mAICcJgCAsiYAgKcmAIC9JgCA1CYAgOImAIABJwCAEScAgBsnAIBPJwCAkicAgOcnAIBPKQCAXSkAgGEpAIBlKQCA8CYAgC4nAIA+JwCASCcAgCMnAIBTJwCAYycAgH4nAIBwJwCAlicAgMInAIDJJwCApicAgNMnAIDdJwCAtCcAgBgoAIAKKACA6ycAgCUoAIDyJwCA/CcAgDMoAIBAKACASigAgFQoAIBeKACAcigAgH8oAICGKACAnigAgKUoAICyKACAyygAgNUoAIDnKACAASkAgA4pAIAZKQCAIykAgDQpAIA7KQCAUykAgMMDAIDmBACAhQUAgNgFAIATBgCAOQYAgFAGAIBtBgCAhAYAgJsGAIDjAwCA/AMAgBUEAIAoBACAOwQAgE4EAIBhBACAdAQAgIcEAICaBACAAAUAgA8FAIAeBQCALQUAgDwFAIBjCACAJAgAgMEGAID8BwCAHQkAgOMoEwAzCQCAKggAgC0IAIAxCACAJAcAgNwuAIDKMACA2S0AgLswAIBFMQCAJwkAgO/sEwAGCQCA3A0AgM8IAICDCACAMQcAgEwHAID8BgCACggAgJQIAIAqCQCACQkAgOANAIDsDQCA2wgAgJkIAIAVBwCAhggAgFUHAID/BgCApgcAgJEkAIDwDQCA4ggAgCcIAICcCACAWAgAgBUJAID0DQCA5QgAgBQIAICfCACA6AgAgBcIAIDJCACAoggAgOwIAIAbCACAzAgAgKYIAID3CACA/QgAgIgHAICKCACAWQcAgAMHAIA9CQCAQQkAgEkJAIA2CQCAGAkAgPgNAID0CACALQkAgAwJAIDkDQCA0ggAgI4IAIBdBwCAMAkAgA8JAIDoDQCA1QgAgJEIAIBgBwCArQgAgGMHAIDjSBIA4xQSAOP4EwDjuBMA4+wSAOOgEgDjbBIA43gSAO/ADQDv2A0A73QSAO9QEgDvqBIA79wSAO8oEwDvIBMA6QcAgMwGAIAOCACAEQgAgNgGAIDUBgCAIQgAgAcHAIBnCACADAcAgHYIAIA0BwCANwcAgKoIAIC2CACAuQgAgOPYEADjoBAA46AQAON0EQDjNBAA4wgQAOPkEADj9BAA77wQAO/gEADvzBAA7zgQAO8QEADvcBAA73AQAO9MEADjhBMA4+gTAOMwEADjEBAA42ATAONAEwDjpBMA47QTAO/IEwDvtBMA75gTAO98EwDvXBMA70wTAO8UEwDv6BAAgO08AIH1PACC/TwAg/U8AITtPACFFT0Ahh09AIcVPQCILT0AiTU9AIo9PQCLNT0AjC09AI0VPQCOHT0AjxU9AJBtPQCRdT0Akn09AJN1PQCUbT0AlRU9AJYdPQCXFT0AmC09AJk1PQCaPT0AmzU9AJwtPQCdFT0Anh09AJ8VPQCg7T0AofU9AKL9PQCj9T0ApO09AKUVPQCmHT0ApxU9AKgtPQCpNT0Aqj09AKs1PQCsLT0ArRU9AK4dPQCvFT0AsG09ALF1PQCyfT0As3U9ALRtPQC1FT0AthE9ALcRPQC4MT0AuTE9ALoxPQC7MT0AvBE9AL0RPQC+ET0AvxE9AIDxPACB/TwAgvU8AIMNPwCEFT8AhR0/AIYVPwCHDT8AiDU/AIk9PwCKNT8Aiw0/AIwVPwCNHT8AjhU/AI8NPwCQdT8AkX0/AJJ1PwCTDT8AlBU/AJUZPwCWCT8Alwk/AJg5PwCZOT8Amgk/AJsJPwCcGT8AnRk/AJ4JPwCfCT8AoPk/AKH5PwCiCT8Aowk/AKQZPwClGT8Apgk/AKcJPwCoOT8AqTk/AKoJPwCrCT8ArBk/AK0ZPwCuCT8Arwk/ALB5PwCxeT8Asgk/ALMJPwC0GT8AtRk/ALYJPwC3CT8AuDk/ALk5PwC6CT8Auwk/ALwZPwC9GT8Avgk/AL8JPwCA+TwAgfk8AIJJPQCDST0AhFk9AIVZPQCGST0Ah0k9AIh5PQCJeT0Aikk9AItJPQCMWT0AjVk9AI5JPQCPST0AkDk9AJE5PQCSAQQAk00GAJRVBgCVXQYAllUGAJdNBgCYdQYAmX0GAJp1BgCbTQYAnFUGAJ1dBgCeVQYAn00GAKC1BgChvQYAorUGAKPNBgCk1QYApd0GAKbVBgCnzQYAqPUGAKn9BgCq9QYAq80GAKzVBgCt3QYArtUGAK/NBgCwtQYAsb0GALK1BgCzTQYAtFUGALVdBgC2VQYAt00GALh1BgC5fQYAunUGALtNBgC8VQYAvV0GAL5VBgC/TQYArH0/AK2lPwCurT8Ar6U/AKh9PwCpZT8Aqm0/AKtlPwCkHT8ApUU/AKZNPwCnRT8AoB0/AKEFPwCiDT8AowU/ALydPwC9pT8Avq0/AL+lPwC4nT8AuYU/ALqNPwC7hT8AtN0/ALWlPwC2rT8At6U/ALDdPwCxxT8Ass0/ALPFPwCMZToAjW06AI5lOgCPfToAiEU6AIlNOgCKRToAi306AIRlOgCFbToAhmU6AId9OgCABToAgQ06AIIFOgCDfToAnF04AJ3lPwCe7T8An+U/AJhdOACZRTgAmk04AJtFOACUuTgAlWU4AJZtOACXZTgAkAU6AJENOgCSBToAkwE5AMAIAIDYCACA3ggAgPAIAIB2BwCAIgkAgHkHAICBBwCAVAkAgJ0HAIDLBwCAvQcAgMQGAIDcBACAewUAgM4FAIAJBgCALwYAgEYGAIBjBgCAegYAgJEGAIDXAwCA8AMAgAkEAIAiBACANQQAgEgEAIBbBACAbgQAgIEEAICUBACA+gQAgAkFAIAYBQCAJwUAgDYFAIBFBQCATgUAgFcFAIBgBQCAaQUAgJUFAICeBQCAXQgAgFYOAIBZDgCAOjoAgKwKAIAVCwCANjoAgD46AICcGQAAnRkAAJ45AACfOQAA4wwAgEI6AIB6NwCA8TAAgKI3AIBaMgCAxSoAgLksAICaMDUA7C0AgB0tAIDoLQCA1y8AgJ+ENQDSMwCAnUQpAGI1AICaNgCA1jYAgAo3AIAeOACAdjEAgAIyAICuMgCARjMAgGI2AIBGOACAcjkAgOkqAICNLACAijEAgNIyAICWNgCAwjkAgJQuAIB6MgCAhjYAgBo3AIALMACAvjUAgLSAGgC1hBkAtojmALeM5ACwABwAsZQeALIAGACznBsAvADsAL2k7wC+qO4Av6TtALgA4AC5tOMAurjiALu84QCkwAAApQAMAKbIDgCnAAgA4jYAgAcvAIAFMQCArXwDAKwAEACt5BMArugSAK9gEQCo8AoAqRwJAKr4FgCr/BQAGjIAgB4zAIAqOACAKSsAgMErAIAtLACAczAAgIIxAIDOMgCA8jMAgI42AICmNgCAyjcAgO44AICiOQCAvjkAgC40AIBuNACAvAgAgCY1AIBGNgCAejgAgE43AIChLQCAIy8AgN40AICeNQCAAjMAgDY0AICaNwCA
5jgAgJ0tAIBwLgCAejEAgC4yAIBiMgCAFjUAgD41AICmOACAKSwAgJwAAACqNQCAzSsAgMkrAICaNACAKjUAgF42AICuOACAajcAgA8wAIBaNwCA0SoAgEQuAIB7LwCAMjMAgLIzAIBNLACAPjQAgDkrAIBfLwCAsSoAgO4xAICLMACAEjUAgIDpAwCB6QMAgjkvAIP9AwCE5QMAhe0DAIblAwCHfS4AiEEuAIkhAgCKeS8AiyUCAIw9AgCNJQIAjiECAI8dAgCQZQIAkW0CAJJlAgCTfQIAlGUCAJVtAgCWZQIAlx0CAJglAgCZLQIAmiUCAJs9AgCcJQIAnS0CAJ4lAgCfHQIAoOUCAKHtAgCi5QIAo/0CAKTlAgCl7QIApuUCAKdNAgCodQIAqX0CAKqpAQCrqQEArLkBAK25AQCuqQEAr6kBALDZAQCx2QEAsukBALPpAQC0eSIAtf0BALb1AQC37QEAuNUBALndAQC61QEAu60BALy1AQC9uQEAvqkBAL+pAQChLACAjS0AgP4zAIBmNgCAPjcAgLoxAIDmMQCAHzAAgB42AIA/MACArjMAgAUrAICBKwCAxSsAgFYxAID+NACA9jUAgEo3AIBaOACANSwAgOksAIAXLwCApzAAgH4yAIBCNACAljgAgHo5AIDOOQCA5jkAgOkwAICmMQCA7jcAgOMuAIC/LwCA2y8AgGswAIBuMgCAujIAgGozAICONACAMjUAgJY1AIDeNwCAbjYAgAY4AIB+OACA6SsAgBUsAID9LACAqjIAgPY2AIADLwCAcy8AgDcwAICyMQCA2jQAgCYzAIAVKwCAWS0AgKguAIB/LwCAQjMAgF4zAIBuNQCAgFEBAIEBKgCCXQEAg1UBAIRNAQCFdQEAhn0BAId1AQCITQEAiVUBAIqdKwCLWQEAjEkBAI1JAQCOuQEAj7kBAJDJAQCRyQEAktkBAJPZAQCUyQEAlckBAJb5AQCX+QEAmMkBAJnJAQCa2QEAm9kBAJzJAQCdyQEAnrkBAJ+5AQCgSQEAoZUBAKJFAQCjXQEApEUBAKVNAQCmRQEAp30BAKhFAQCpTQEAqnkPAKtBAQCsQQEArUEBAK5BAQCvQQEAsMEDALHBAwCywQMAs8EDALTBAwC1wQMAtsEDALfBAwC4wQMAucEDALrBAwC7wQMAvMEDAL3BAwC+wQMAv8kMAI41AIBiOACA4jgAgPI4AIAuOQCALSsAgII0AIBOOACAyjgAgJcvAIDxKgCAUSsAgEguAIBoLgCAlzAAgMYyAIDOMwCAejYAgBo4AIDZMACAojgAgA0sAIAlMQCAMTEAgBIyAIBKMgCATjMAgKozAIAqNACADjUAgDo5AIDrLwCAsjgAgEErAICMLgCAMjIAgOI3AIBPLwCAny8AgDkxAIC6OACA8SsAgNksAIB4LgCAwjAAgBUxAIBiMQCA9jEAgEozAIC+MwCAWjUAgPo2AIAGNwCA1jgAgF0sAIBOMgCA3SwAgMoyAIBuMwCAijYAgL44AICqOQCA0jkAgC0xAICxOSMAsBEDALMVAwCyFQMAtTUDALQ1AwC3NQMAtjUDALkVAwC4FQMAuxUDALoVAwC9dQMAvHUDAL91AwC+dQMAoZkNAKCRDQCjqQ0AopENAKW5DQCksQ0Ap6kNAKaxDQCpmQ0AqJENAKtpAwCqkQ0ArXkDAKxxAwCvaQMArnEDAJEZDQCQEQ0Aky0NAJIRDQCVPQ0AlD0NAJctDQCWLQ0AmR0NAJgdDQCbbQ0Amm0NAJ15DQCcgQ4An2kNAJ5xDQCBmQ0AgAkjAIOpDQCCkQ0AhbkNAISxDQCHqQ0AhrENAImZDQCIkQ0Ai2kNAIqRDQCNeQ0AjHENAI9pDQCOcQ0AKjIAgMY1AIDGNACA6jQAgBozAICiMgCAZjcAgA0rAIAuNgCA9SsAgOUrAIDzLgCAEzAAgPY0AIA0LgCABjIAgOUwAIDqNwCAqjgAgA8vAIBhKwCANS0AgIktAIDVMACA0SsAgCIzAIDmMwCASjQAgGY0AIBqNACAfjQAgPo4AIDuNACAkjYAgFY3AIAKOACANjgAgE45AIBSOQCAVjkAgLo5AIAuOACAxjgAgDErAIBVKwCAaSsAgCUsAIAxLACAcSwAgCUtAIBBLQCASS0AgIUtAICRLQCAdC4AgIsvAICzLwCAuy8AgJH4EADTLwCAfzAAgK8wAIDdMACAWjEAgIApAQCBKQEAgjkBAIM5AQCEKQEAhSkBAIZZAQCHWQEAiNkoAIltAQCKKSUAi2EBAIxhAQCNYQEAHjIAgDoyAICQGQEAajIAgJIVAQC+MgCA3jIAgJU1AQCWPQEAlzUBAJgNAQCZFQEAmh0BAJsVAQCcDQEAnfUBAJ7dKABSMwCAoAUBADI0AICiAQEAVjQAgFI0AIClGQEApgkBAFo0AIBeNACAdjQAgKo9AQCrNQEArC0BAK0VAQCuHQEArxUBALBtAQCxdQEAsn0BALN1AQC0bQEAtRUBALYdAQC3FQEAuC0BALk1AQC6PQEAuzUBALzZLgC9KQEAvhkBAL8ZAQC6eR4Au3keALjNAgC5eR4AvpUeAL+dHgC8QQIAvZ0eALJ9HgCzRR4AsH0eALF1HgC2XR4At0UeALRdHgC1VR4AqgUeAKsNHgCodR4AqQ0eAHo0AICeNACArBUeAK0NHgCiSR4Ao0keAKBJHgChSR4ApkkeAKf5AgCkSR4ApUkeAJqNHgCblR4AmI0eAJmFHgCeiR4An4keAJyNHgCdhR4AkgUDAJP1AACQCQMAkY05AJaxHgCXFQYAlO0AAJUBHACKvQMAi0EDAIiFAwCJnQMAjkEDAI9JAwCMyTkAjVEDAIIVAgCDHQIAgAUCAIEdAgCGzQMAh7EDAIQFAgCFxQMAs/kFALLxBQCx+QUAsOEFALeZKgC2EQMAtRkDALThBQC7NQMAujUDALklAwC4JQMAvxUDAL4VAwC9JQMAvCUDAKP9BQCi/QUAof0FAKD9BQCnnQUApp0FAKWdBQCknQUAq7kFAKqxBQCpJScAqL0FAK+ZBQCukQUArZkFAKyhBQCTAQUAkvkFAJF1OQCQ9QUAlwEFAJYZBQCVEQUAlBkFAJt5CQCaOQUAmTEFAJg5BQCfHQUAnh0FAJ0dBQCcHQUAg4kFAIKBBQCBiQUAgPEFAIeFBQCGhQUAhZUFAISBJgCLhQUAioUFAIm1BQCItQUAj4UFAI6FBQCNlQUAjJUFAM40AIA6NQCAQjUAgFY1AIB+NQCAzjUAgAI2AIBqNgCAEjcAgCo3AIBeNwCAYjcAgKY3AICqNwCAAjgAgNo4AIAeOQCANjkAgIMvAICQ6gCA5jUAgLkqAIC9KwCAfSsAgCUrAIBlKwCAkSsAgCEsAIA9LACAES0AgCEtAIA9LQCAmS0AgOQtAIDwLQCADC4AgBwuAIALLwCAEy8AgEMvAIBjLwCAky8AgKsvAICbLwCAry8AgO8vAIBHMACAUzAAgFswAICDMACACTEAgB0xAIBeMgCAVjIAgIYyAIAWNACA4jI
AgBYzAIBiMwCAfjMAgKIzAIDGMwCAyjMAgOozAICAjQEAgZUBAIKdAQCDlQEAhI0BAIW1AQCGvQEAh7UBAIiNAQCJwR0AipkBAIvBHQCMhQEAjY0BAI6FAQCP/QEAkIUBAJEZHQCSkRQAk4UBAJSdAQCViTIAlk0ZAJc9GwCYsQEAmbEBAJotHACbtQEAnD0cAJ2pAQCemQEAn5kBAKDlHQChbQEAomUBAKN9AQCkZQEApW0BAKbxHQCnYQEAqKEDAKmhAwCqoQMAq6EDAKyhAwCttQEArq0DAK+lAwCwYRkAsdkDALLZAQCz7QMAtPUDALX9AwC29QMAt+0DALjFAQC50QMAumEdALvVAwC82QEAvT0XAL7FAwC/0QEA+jMAgA40AIAKNACAOjQAgLY0AIDmNACAHjUAgE41AIAyNgCAWjYAgM42AIAWNwCAIjcAgEI3AIBGNwCAUjcAgG43AIDmNwCAFjgAgEo4AIBqOACAtjgAgA45AIAqOQCAijkAgCfqAIAi6gCAVOoAgOEpAIAJKgCADSoAgNbqAIAD6wCAe+sAgBY6AIAmOgCARwgAgFIIAIBVCACASggAgE4IAIBXCQCA8Q4AgOIOAIDnDgCA9g4AgOwOAICyNACASw8AgMoPAICBDwCALw8AgFoPAIBnDwCAbw8AgJ0PAIDCDwCAuA8AgL0PAICqDwCAsQ8AgP4OAIADDwCACA8AgIBBAQCBMQMAgk0BAINFAQCEXQEAhUUBAIZNAQCHIQMAiF0fAIl9AQCKaQMAi3EBAIx1AwCNVQEAjlk6AI9ZAQCQKQEAkSkBAJI5AQCTOQEAlCkBAJUpAQCW2QEAl9kBAJjpAQCZ6QEAFQ8AgCIPAIAqDwCAMg8AgDwPAIBBDwCARg8AgFAPAIBVDwCAXQ8AgGoPAIByDwCAdw8AgHwPAICEDwCAiQ8AgJMPAICYDwCAoA8AgKUPAIDFDwCANw8AgBoPAIBiDwCAjg8AgA0PAIDdFgCA5hYAgOkWAIDvFgCA4xYAgOwWAIDgFgCAExcAgBYXAID1FgCA8hYAgPgWAICAmQcAgZkHAPsWAICDrQcAhLUHAAQXAICGsQcAh7EHAIiRBwCJkQcAipEHAIuRBwCM8QcAjfEHAI7xBwCP8QcAkJEHAJGVBwCSnQcAk5kHAJSFBwCVgQcAloEHAJeFBwCYuQcAmb0HAJq1BwCbsQcAnK0HAJ2pBwCemQcAn50HAKBhBwChZQcAom0HAKNpBwCkdQcApXEHAKZxBwCndQcAqEkHAKlNBwCqRQcAq0EHAKxdBwCtWQcArkkHAK9NBwCwMQcAsTUHALI9BwCzOQcAtCUHALUhBwC2IQcAtyUHALgZBwC5HQcAuhUHALsRBwC8DQcAvQkHAL7xAAC/9QAAgAkBAIENAQCCHQEAgxkBAITZAACF3QAAhtUAAIfRAACI8QAAifUAAIr9AACL+QAAjOkAAI3tAACO5QAAj+EAAJCdAACRmQAAkq0AAJOpAACUtQAAlbEAAJaxAACXtQAAmIkAAJmNAACahQAAm4EAAJydAACdmQAAnokAAJ+NAACgdQAAoXEAAKJ9AACjeQAApGlQAqVtUAKmYQAAp2UAAKhZAACpXQAAqlUAAKtRAACsTQAArUkAAK49AwCvOQMAsClQArEtUAIBFwCABxcAgP4WAIANFwCAChcAgBkXAIDZXFICHxcAgCUXAIAiFwCAKBcAgCsXAIA0FwCALhcAgKOhAACipQAAoZEAAKCVAACntQAAprEAAKW9AACkuQAAq40AAKqJAACpgQAAqIUAAK+FAACugQAArYkAAKyNAACz/QAAsvkAALHxAACw9QAAt5kAALadAAC1nQAAtJkAALutAAC6qQAAuaUAALilAAC/ZQEAvmEBAL1tAQC8aQEAHBcAgFcXAIBAFwCAPRcAgEgXAIBOFwCAOhcAgNksUQJLFwCAVBcAgHkWAIDhDwCAMRAAgA4QAIAiEACAHRAAgJNBAAAnEACALBAAgBMQAICXWQAAllUAAJVZAACUXQAAm3EAAJppAACZZQAAmGUAAJ9lAACeYQAAnTFTApxtAAC4gQQAuYEEALqBBAC7gQQAvIEEAFEXAIC+jQQA5g8AgLDdBQCxTQQAskUEALNdBAC0RQQAtU0EALZFBADrDwCAqKEFAKntQQCqrQUAq6UFAKy9BQCtpQUArq0FAK+lBQCgqQUAoZFBAKKpQACjoQUApKEFAKWhBQCmoQUAp6EFAP8PAIAYEACAWBAAgF0QAIBpEACAnVUFAH8QAICfWQUAjhAAgJMQAICeEACAkwUFAJQdBQCVBQUAlg0FAJcFBQC4EACAyxAAgO8QAIAhEQCAJhEAgC4RAIA9EQCATBEAgIBxBQCBcQUAgnEFAINxBQCEUQUAhVEFAIZdBQBREQCAWREAgHwRAICjEQCArxEAgM8RAIDUEQCA2REAgBMSAIAmEgCAMhIAgEoSAIDEEgCAGhMAgDMTAIA4EwCASxMAgFwTAIBuEwCAcxMAgJoTAICiEwCAtxMAgN4TAIDjEwCAPRQAgEIUAIBHFACAUxQAgF8UAIBkFACAbBQAgHgUAICSFACAlxQAgJ8UAICkFACAqRQAgK4UAICzFACAuBQAgMsUAIDQFACA7BQAgAYVAIAgFQCALBUAgEQVAIBJFQCAVhUAgHcVAICaFQCAtBUAgMAVAIDFFQCAzRUAgO4VAIAIFgCAFxYAgDQWAIA5FgCAQRYAgEYWAIBZFgCAXhYAgICtAQCBtQEAgr0BAIO1AQCErQEAhdUBAIbdAQCH1QEAiO0BAIn1AQCK/QEAi/UBAIztAQCN1QEAjt0BAI/VAQCQrQEAkbUBAJK9AQCTtQEAlK0BAJVVAwCWXQMAl1UDAJhtAwCZdQMAmn0DAJt1AwCcbQMAnVUDAJ5dAwCfVQMAoK0DAKG1AwCivQMAo7UDAKStAwCl1QMAphkOAKfZAwCobQ8AqSEOAKrhAwCr4QMArCkOAK3lAwCuGQ4ArxkOALCVAwCxnQMAsgEOALORAwC0HQ4AtQUOALa5AwC3uQMAuDkOALmNAwC6NQ4AuxEOALyBAQC9gQEAvnkBAL95AQCEFgCAkBYAgJwWAICrFgCAyBYAgM0WAIDuEQCA/xEAgHwWAICBAACAiwAAgJUAAICfAACAqQAAgLMAAID1DwCA+g8AgAQQAIB1EACAehAAgIQQAIDlEACA6hAAgBcRAIAzEQCAOBEAgEIRAIBRFQCADRYAgBIWAIAqFgCAoRYAgKYWAIC+FgCA8A8AgAkQAICJEACAHBEAgNcSAIA/FQCALxYAgGMWAIDDFgCARxEAgGQSAICfEgCAshIAgBEUAIAdFACAKRQAgI0TAICSEwCA0RMAgNYTAID9EwCAAhQAgGkSAIBuEgCAtxIAgLwSAIDCEQCAxxEAgJYRAICbEQCApD0DAKVFAwCmTQMAp0UDAKA9AwChJQMAoi0DAKMlAwCsfQMArUUDAK5NAwCvRQMAqH0DAKllAwCqbQMAq2UDALQ9AwC1xQMAts0DAL
fFAwCwPQMAsSUDALItAwCzJQMAvP0DAL3FAwC+zQMAv8UDALj9AwC55QMAuu0DALvlAwCEBQwAhQ0MAIYFDACHHQwAgI0MAIGpDACCGQwAg1ENAIxhDACNYQwAjmEMAI9hDACIKQwAiRUMAIodDACLFQwAlD0MAJXFAwCWzQMAl8UDAJABDACRAQwAkgEMAJMBDACc/QMAncUDAJ7NAwCfxQMAmP0DAJnlAwCa7QMAm+UDAIBpBACBaQQAgnEEAINxBACEnQQAhYUEAIaNBACHhQQAiL0EAImNBACKhQQAi50EAIyFBACNqQYAjvkEAI/5BACQiQQAkYkEAJKRBACTkQQAlLEEAJWxBACW+QYAl60EAJiVBACZwQYAmmkGAJtpBgCceQYAnXkGAJ7RBgCf/QsAoA0GAKEdCwCiGQYAo0ULAKQFBgClTQsApjUGAKe1BACoEQYAqREGAKoRBgCrNQQArC0EAK0BBACuXQQArx0GALDNBgCxbQYAsnUGALMNBgC0FQYAtR0GALYVBgC3DQYAuDUGALk9BgC6NQYAuw0GALwVBgC9HQYAvhUGAL8NBgCA9QcAgf0HAIL1BwCD9QAAhO0AAIURAwCGEQMAhxEDAIgxAwCJMQMAijEDAIsxAwCMhQcAjRUDAI4dAwCPFQMAkG0DAJGNBwCShQcAk50HAJSFBwCVjQcAloUHAJe9BwCYhQcAmY0HAJqFBwCbnQcAnIUHAJ2NBwCehQcAn4UAAKB9AAChgQMAooEDAKOBAwCkgQMApYEDAKaBAwCngQMAqBUHAKmFAwCqjQMAq4UDAKydAwCtoQMArqEDAK+hAwCwdQcAsXUHALJxBwCzhQUAtM0FALX1BQC2/QUAt8kDALj5AwC5+QMAuqEFALuhBQC8wQMAvcUDAN4RAIDjEQCAhJz7ACYTAIArEwCAYRMAgGYTAIB2EgCAghIAgJUSAICaEgCARRIAgNwSAIBXEwCASxAAgKMQAIC9EACAxBAAgJB1AACRfQAAknEAAJNxAACUAfwAlVX+AJZd/gCXVf4AmG3+AJlp/gCaef4Am3n+AJxp/gCdaf4Anln+AJ9Z/gCgpf4Aoa3+AKKl/gCjof4ApKH+AKWl/gCmrf4Ap6X+AKiZ/gCpmf4Aqun+AKvt/gCs9f4ArfH+AK7x/gCv8f4AsI3+ALGV/gCymf4As5n+ALSJ/gC1if4Atrn+ALe9/gC4hf4AuY3+ALqF/gC7nf4AvIX+AL2B/gC+gf4Av4H+AKbZCACnBQcApMEIAKWZBQCi0QgAo9EIAKCJBQChtQgArgEHAK8BBwCsMQcArTEHAKo9BwCrJQcAqD0HAKk1BwC2fQcAtwUHALR9BwC1dQcAsskFALNlBwCwcQcAsXEHAL4BBwC/AQcAvDEHAL0xBwC6IQcAuyEHALg9BwC5MQcAhjkHAIc5BwCELQcAhTkHAIINBwCDNQcAgBEHAIEFBwCOSQcAj0kHAIxNBwCN1QUAisEFAIvBBQCI1QUAiXEHAJbVBQCX2QgAlE0FAJXdBQCSUQUAk9kFAJD5BQCRoQUAnnEIAJ99CACcYQgAnWEIAJpxCACbeQUAmMUIAJl1BQD0EACA+xAAgAIRAICBEQCAuxEAgLQRAIArEgCAGBIAgB8SAIBWEgCATxIAgF0SAIDJEgCAHxMAgIcSAIB7EgCApBIAgKsSAIA9EwCAUBMAgHgTAIB/EwCAhhMAgKcTAIC8EwCAwxMAgOgTAID2EwCA7xMAgEwUAIB9FACAhBQAgAsVAIAZFQCAEhUAgPEUAIAlFQCAMRUAgHwVAICDFQCAkxUAgFsVAIBpFQCAnxUAgKYVAIBiFQCASxYAgFIWAIDzFQCA+hUAgNkVAIDgFQCAIxYAgBwWAICwFgCAbhAAgLEQAICqEACA3hAAgNcQAIAQEQCACREAgI8RAIBeEQCAgIEBAIGBAQCCgQEAg4EBAISdAQCFhQEAhokBAIeJAQCItQEAib0BAIq1AQCLjQEAjJUBAI2dAQCOlQEAj40BAIgRAIA3EgCAkv0BAJP1AQCU7QEAlZUBAJadAQCXlQEAmKkBAJmpAQCauQEAm7kBAJypAQCdrQEAnqUBAJ+dAQCgZQEAoW0BAKJlAQCjfQEApGUBAKVtAQCmZQEAp90AAKjlAACppQMAqq0DAKulAwCsvQMAraUDAK6tAwCvpQMAsN0DALHlAwCy7QMAs+UDALSpAQC1VQEAtvUDALftAwC41QMAud0DALrVAwC7rQMAvM0DAL3BAwC+vQMAv7UDANASAICOEgCARBMAgP8UAIA4FQCAlRYAgIkWAIC3FgCAuRUAgIsUAIABFgCAyhMAgMQUAIDSFQCArRUAgPgUAIC9FACAZREAgKgRAIBwFQCA0BAAgFgUAIBiEACAPhIAgOcVAIATEwCAcRQAgEIQAIA5EACAihUAgOESAID2EQCArhMAgGsWAIDqEgCA8RIAgGwRAIAEEgCApgMAgA0jAIARIwCAoAYAgMcAAIC1BgCAqyMAgK8jAIC5IQCAtSEAgOMHAIB7CQCAfwkAgEEjAICnIwCANSMAgDkjAIAdIwCAISMAgCUjAIApIwCALSMAgDEjAIDbBwCA3wcAgNEAAICATQEAgVEBAIJRAQCDTQEAhE0DAIUhAwCGRQEAh30BANcAAICiAwCAqAMAgN0HAIDTAACA1QAAgL0GAIB5AACABxQAgH0AAICHAACAkQAAgAwUAICbAACAGBQAgKUAAIAkFACArwAAgDAUAIC5AACANRQAgM8PAIBVEACAmBAAgJsQAIArEQCAVhEAgKARAIDMEQCA6BEAgOsRAIDzEQCADRIAgBASAIBzEgCAwRIAgDATAIBrEwCAlxMAgJ8TAICwpQEAsa0BALKlAQCzvQEAtKUBALWtAQC2pQEAt10BALhlAQC5bQEAumUBALt9AQC8ZQEA2xMAgDoUAIBpFACAgAW5AIHhBgCC4QYAg+EGAIThBgCoBgCAswYAgIfpBgCI2QYAifmxAIr1sQCL8bEAjO2xAI31BgCO+QYAj/0GAJDZBgCR2QYAkvWxAJwUAICUiZIClfEGAJb1BgCX9QYAmNkGAJnVsgCa3bIAm6kGAJy5BgCduQYAnqkGAJ+BBgCgoQcAoaEHAKIhsgCjpQcApIUAAKWNAACmQbMA1RQAgKiNBwCplQcAqp0HAKuVBwBOFQCAyhUAgDYQAIA+FgCAsP0HALGFBwCyjQcAaBYAgLSZBwCBFgCAtpUHALeNBwC4tQcAub0HALq1BwC7jQcAvJUHAL2dBwC+lQcAv40HAIB1BgCBlaACgpmgAoOZoAKEhaAChb2gAoaxoAKHhaACiLmgAomRoAKKnaACi5mgAoyFoAKNjQEAjoEBAI9FBgCQOQYAkT0GAJIxBgCTMQYAlC0GAJXVBgCW2QYAl90GAJjhBgCZ4QYAmu0GAJvpBgCc9QYAnf0GAJ7xBgCf9QYAoAkGAKEJBgCiBQYAowEGAKQdBgClBQYApgkGAKcNBgCoMQYAqTEGAKo9BgCrNQYArCkGAK0pB
gCuJQYArx0GALBhBgCxYQYAsm0GALNpBgC0dQYAtX0GALZxBgC3dQYAuEkGALlJBgC6RQYAu0EGALxdBgC9RQYAvkkGAL9NBgCAsQUAgbEFAIK9BQCDuQUAhKUFAIWtBQCGoQUAh6UFAIiZBQCJmQUAipUFAIuRBQCMjQUAjcEFAI7NBQCPyQUAkLUFAJG9BQCSsQUAk7UFAJSpBQCVqQUAlqUFAJehBQCYnQUAmSkCAJolAgCbIQIAnD0CAJ3pAgCe5QIAn+ECAKAdAgChNQIAojkCAKM9AgCkIQIApSECAKYtAgCnKQIAqBUCAKkZAgCqFQIAqxECAKwNAgCteQIArnUCAK8V8ACwafAAsRECALIdAgCzGQIAtAUCALUhAAC2LQAAtyUAALgZAAC54QEAuu0BALvlAQC8+QEA2BQAgN0UAIC/9YYCp2kNAOIUAIDnFACAzwAAgNkAAICzAwCA4QcAgH0JAID7IgCAzNSFAszghQL/IgCAgSkAgDUkAIBuJACAjSQAgLyZBQC9mQUAvqkFAL+ZvAC4mQUAuZkFALqJBQC7iQUAtKEFALXVsQC23bEAt6kFALCxsgCxzQUAssUFALO9BQCfJACAxCQAgMMoAIDfKACA8SgAgIgmAICFKQCAaSkAgCkkAIAtJACA2WSgAoEJAIDZUKAChAkAgI0JAICKCQCAhwkAgOwhAIDvIgCA9CEAgJhlBQCZEbIA/CEAgNkwoAKUOZEClU0FAJZFBQCXXQUAkGkFAJFpBQCSWQUAk1kFAID9vACB1ZwCgmW8AIPFvACEkbwAhZ28AIalvACHjbwAiK2TAonlvACKKZACi7W8AIwRkAKNlbwAji2wAI/FnAKQ6bwAkcHIAJJBkAKT8Z0ClNW8AJXlvACW4bwAl02QAphlkAKZfZACmrm8AJupCgCcbQ8Anb0KAPMiAICfXQ8AoK0PAKElCgCibQoAo2UKAKQNCgClpQ8ApgXUAKepDwComQ8AqZkPAKopDwCrKQ8ArDkPAK05DwCuKQ8ArykPALBZDwCxndEAspXRALOF1gC0sdEAtbHRALbZ1AC32dQAuOnUALnp1AC6+dQAu/nUALzp1AC96dQAvrnUAL+51ACASdUAgUnVAIJZ1QCDWdUAhEnVAIV90ACGddAAh23QAIhV0ACJXdAAinXVAIut1QCMtdUAjb3VAI611QCPQdAAkMHQAJHB0ACSwdAAk8HQAJTB0ACVwdAAlsHQAJfB0ACYwdAAmc3QAJrF0ACb3dAAnOHVAJ3pDgCe2Q4An9kOAKDV2wChwdkAotnZAKPB2QCkxdkApc3ZAKbF2QCnGdkAqGHZAKlh2QCqydkAq8nZAKzZ2QCt2dkArs3ZAK/B2QCwCdkAsRXZALId2QCzrdoAtB3ZALWx2gC2wdwAt93dALjl3QC59d0Auv3dALut3QC8td0AvaXdAL6t3QDwIQCAgvHaAIPx2gD3IgCA5OgAgIYR2ACHEdgAhOHaAIXh2gCKKdgAiynYAK9AEwClKNoAjinYAI8p2ACMKdgAjSnYAJJh2ACTYdgA6egAgO7oAICWZdgAl23YAJR12ACVbdgAml3YAJst2ADz6ACA8FwCALEw3wCR8AIAnCnYALLQAwCiOQ0Ao1GeAqAlDQChOQ0AplUNAIS8AgCkJQ0ApV0NAKptDQCrAQQAqGENAKlRAwCuuQAAp3UAAKxhDQCtxQIA+OgAgIfMAwDwVAIAzFC6AJHYBACb9NsAkRgCAJk02wCddAQAvh0AAJ9gBQCejAUAjOwCAI2sBAD96ACAvfWKAqghvwCpLb8Aqi2/AKs9vwCsKb8ArVW/AK5RvwCvTb8AoBkIAKGlvQCiIb8AozGzAKQ9vwClJb8Apg2zAKclvwC46bMAuc3LALppswC7uQkAvH0IAL2tCQC+QQwAv50JALA5vwCxhb0Asgm/ALPtywC0Gb8AtQW/ALbtswC3Bb8AiDG9AIkxvQCKrQgAiyW9AIwJCQCNvQgAjiW+AI+JDAAC6QCAgQ0JAIKlDACDUQkAhIEIAIWBCACGmQgAh60MAJhhvQCZYb0Amm0JAJsVnQKcxQ8AnQ28AJ7BDwCfcQkAkBW+AJERnwKSNZ8Ckw2fApQJvgCVCb4AlnG9AJdxvQCCuAQAl6UHALnEAwDwWAIAkUwCAJLIAgCErAQAsD0AAAzpAIAH6QCAvQUAABHpAIDwTAIAuhEAAJEkAgCN5AQAkqwCAJasAgC4uAMAudADAJb4AgCvDQAAFukAgPB4AgCRXAIAlrACAK8FAAAb6QCAIOkAgCnpAIAy6QCAP+kAgIX4AwBM6QCAh4ADAIbAAgBZ6QCAZukAgHPpAICW6QCAuzkAAHzpAICf6QCAiekAgL8dAAC+HQAAvR0AALwhAACVwB0AlMQfAJfIGgCWABgAkSAAAJDUAQCT2B4AkgAcAJ3gEgCcABAAn+gRAJ7sEwCZ8BkAmPQbAJv4FwCaABQAnnEBAJ9xAQCABQAArOkAgM0KAICwDACAXg0AgGQNAIBqDQCAdg0AgHkNAIB8DQCAfw0AgIINAICRDQCAlw0AgJoNAICdDQCAICIAgMcNAIDWDQCA/A0AgP8NAIAODgCAEQ4AgB0OAIAYIgCAMg4AgDUOAIDXFgCAEBcAgNoWAIC4ACwAuYwvALqILgC6AwCAhpwXAMx4vACEmC0AhVwXALcDAIDKAwCAiAAoAIksFADtBACAjAUAgN8FAIAaBgCAQAYAgFcGAIB0BgCAiwYAgDgBAIA8AQCAQAEAgEQBAIBIAQCATAEAgKR9AQBQAQCAonUBAKNlAQCggQEAoYEBALxxugC9kbYAvnG6AL+ltgC48bgAuXW6ALqZzgC7dboAtGG6ALVtugC2eboAt3W6ALAZugCxEboAsgm6ALMFugCsUboArXG2AK5RugCvbboAqNG4AKldugCqRbYAq1G6AKRxlgKlYZYCpnGWAqe9ugCgzZsCofG6AKLJugCjxboAnHmaAp0tugCeDc4An4WWApgJugCZtZYCmjm6AJuJtgCUMboA+CEAgJZpugCXrZYCkHm6AJE1ugCSMboAkwG6AIxJzgCN5bYAjhmaAo+hugCIoboAiUG2AIqhugCLdbYAhAG4AIWFugCGac4Ah4W6AICxugCBvboAgqm6AIOlugCAgbkAgQ27AIIVtwCDAbsAhAG7AIUhtwCGAbsAhz27AIgJuwCJAbsAihm7AIsVuwCMcbsAjX27AI5puwCPZbsAkKG5AJEluwCSyc8AkyW7AJQhuwCVwbcAliG7AJf1twCY6c8AmUW3AJq5mwKbAbsAnLm7AJ31uwCe8bsAn8G7AKARuwChCZQCokm7AKONlwKkCbsApbWXAqY5uwCnibcAqFmbAqkNuwCqLc8Aq6WXAqwNmgKtMbsArgm7AK8FuwCw0ZcCscGXArLRlwKzHbsAtFG5ALXduwC2xbcAt9G7ALjxuwC50bcAuvG7ALvNuwC82bsAvdG7AL7JuwC/xbsAgJmkAIEliAKCqaQAgxmoAFsNAICFvaQAhp3QAIcViAKI
nYUCiaGkAIqZpACLlaQAjCGIAo0xiAKOIYgCj+2kAJDBpgCRTaQAklWoAJNBpACUQaQAlWGoAJZBpACXfaQAmEmkAJlBpACaWaQAm1WkAJwxpACdPaQAnimkAJ8lpACgYaYAoeWkAKIJ0ACj5aQApOGkAKUBqACm4aQApzWoAKgp0ACphagAqnmEAqvBpACseaQArTWkAK4xpACvAaQAsFGkALFJiwKyCaQAs82IArRJpAC19YgCtnmkALfJqAC4GYQCuU2kALpt0AC75YgCvE2FAr1xpAC+SaQAv0WkAIARiQKBAYkCghGJAoPdpQCEkacAhR2lAFQBAICHEaUAiDGlAIkRqQCKMaUAWAEAgFwBAICNEaUAjgmlAI8FpQCQAaUAkQ2lAJIZpQCTFaUAlLGnAGABAICW2dEAlzWlAJgRpQCZ8akAmhGlAJvFqQCc+dEAZAEAgJ6phQKfEaUAoEmlAKEFpQCiAaUAozGlAKQBpQClGYoCplmlAKediQKoOaUAqYWJAqoJpQCruakArEmFAq0dpQCuPdEAr7WJArB9hAKxQaUAsnmlALN1pQC0wYkCtdGJArbBiQK3DaUAuGGnALntpQBoAQCAu+GlALzhpQC9wakAvuGlAGwBAIC3baYAttWGArUpqgC0hdIAs7mqALJtpgCxjaoAsG2mAL8higK+5aYAvaWJAnABAIC7jaYAdAEAgLm5pgC49aYAeAEAgKZ1pgClbaYAfAEAgIABAICiTaYAhAEAgIgBAICvCaYAruXSAIwBAICsjaQAqymmAKolpgCpMaYAkAEAgJc5pgCWNaYAlQ2mAJQxhwKTmYoCkhHSAJExpgCQZYYCn62mAJ65qgCUAQCAnC2kAJthpgCarYoCmb2KApitigKHfaYAhk2mAIVJpgCEBaYAg72mAIIFhgKB+aoAgFXSAI/1qgCORaYAjcmKAox1pgCL8YoCijWmAIl1iQKIbaYAgCmnAIEhpwCCOacAgzWnAIRRpwCYAQCAhkmnAJwBAIDMSIkCzYiJAoqp0wCLRacAjEGnAI2hqwCOQacAj5WrAJDJ0wBFIwCAkpmHApMhpwCUmacAldWnAJbRpwCX4acAmPGnAJnpiAKaqacAm22LApzppwCdVYsCntmnAJ9pqwCgeYcCoS2nAKIN0wCjhYsCpC2GAqURpwCmKacApyWnAKixiwKpoYsCqrGLAqt9pwCsMaUArb2nAK6lqwCvsacAsNGnALHxqwCy0acAs+2nALT5pwC18acAtumnALflpwC4oacAua2nALq5pwC7tacAvBGlAL2VpwC+edMAv5WnAICRoACBiY8CgsmgAIMNjAKEiaAAhTWMAoa5oACHCawAiNmAAomNoACKrdQAiyWMAoyNgQKNsaAAjomgAI+FoACQUYwCkUGMApJRjAKTnaAAlNGiAJVdoACWRawAl1GgAJhxoACZUawAmnGgAJtNoACcWaAAnVGgAJ5JoACfRaAAoMGgAKHNoACi2aAAo9WgAKRxogCl9aAAphnUAKf1oACo0aAAqTGsAKrRoACrBawArDnUAK2VrACuaYACr9GgALAJoACxRaAAskGgALNxoAC0QaAAtVmPArYZoAC33YwCuHmgALnFjAK6SaAAu/msALwJgAK9XaAAvn3UAL/1jAKAvYACgYGhAIK5oQCDtaEAhAGNAoURjQKGAY0Ch82hAIihowCJLaEAijWtAIshoQCMIaEAjQGtAI4hoQCPHaEAkGmhAJFhoQCSeaEAk3WhAJQRoQCVHaEAlgmhAJcFoQCYgaMAmQWhAJrp1QCbBaEAnAGhAJ3hrQCeAaEAn9WtAKAJ1QChpa0AolmBAqPhoQCkWaEApRWhAKYRoQCnIaEAqDGhAKkpjgKqaaEAq62NAqwpoQCtlY0CrhmhAK+prQCwOYECsW2hALJN1QCzxY0CtG2AArVRoQC2aaEAt2WhALjxjQK54Y0CuvGNArs9oQC8caMAvf2hAL7lrQC/8aEAs2miALKF1gCxaaIAsO2gALe5rgC2baIAtY2uALRtogC7TaIAuvWCArkJrgC4pdYAv42iAL69ogC9uaIAvPWiAKNNogCiWa4AoUGiAKDNoACncaIApk2iAKVtrgCkTaIAq1miAKpVogCpTaIAqEWiAK8pogCuJaIArTGiAKw9ogCTla4AkiWiAJGpjgKQFaIAl5mOApYR1gCVMaIAlGWCApsZogCaFaIAmS2iAJgRgwKfYaIAnq2OAp29jgKcrY4Cg2muAIK9ogCBXa4AgL2iAIe9ogCGBYIChfmuAIRV1gCLXaIAim2iAIlpogCIJaIAj/GOAo41ogCNdY0CjG2iAIARowCBMa8AghGjAIMtowCEOaMAhTGjAIYpowCHJaMAiGGjAIltowCKeaMAi3WjAIzRoQCNVaMAjrnXAI9VowCQMaMAkdGvAJIxowCT5a8AlNnXAJV1rwCWiYMClzGjAJipowCZ5aMAmuGjAJvRowCc4aMAnfmMAp65owCffY8CoBmjAKGljwKiKaMAo5mvAKRpgwKlPaMAph3XAKeVjwKoHYICqSGjAKoZowCrFaMArKGPAq2xjwKuoY8Cr22jALBBoQCxzaMAstWvALPBowC0waMAteGvALbBowC3/aMAuMmjALnBowC62aMAu9WjALyxowC9vaMAvqmjAL+lowBnDQCA0QYAgG0NAIDIBwCAcw0AgA8HAICFDQCAlAcAgIsNAICaBwCAuA0AgH0HAIDKDQCAxQcAgAIOAIBPBwCAFA4AgFIHAIAgDgCAkB0AAOEGAIAPJACA4iUAgCguAICtLACAyS0AgKpVAACrKQAAMjcAgAErAIDGMACAsjIAgAEsAIBTLwCAmSsAgJ8wAIDtKwCAGjUAgI43AICtLQCA5SwAgGYyAIADMACALzAAgA44AIAjMACA+y8AgHI0AICAIa4AgaWsAIJJ2ACDpawAhKGsAIVBoACGoawAh3WgAIhp2ACJxaAAiv0AAIsxxgCM7QAAjdEAAI7VAACPyQAAgCmhAIFNFACCIQEAg+G4AoQ5qgCFOaoAhhG9AodRFACIEQEAidW4AorNrQCLLbsCjGEUAI3ZjQKObRQAj2UUAJB5AQCRubgCkkm9ApNFuwKUDRQAlTUUAJYZAQCXqbgCmF2qAJkBFACaIQEAmwUUAJx5vQKdhbgCnnm7Ap+JuAKggb0CoXm4AqKZCQCjlRQApFmuAKWJFACmmQEAp70UAKipAQCpvbsCqrkBAKuJFACsmRQArZkUAK6JFACviRQAsNkBALEJrgCy6QEAs9W7ArTNuwK17RQAtpW8ArfhFAC4oRQAuaEUALrBoQC7pRQAvNkBAL0ZuAK+0aoAv9GqAL9FFwC+RRcAvTUXALxBvwK7KRcAugm4ArkBuAK4PQIAt+2tALY9AgC1HRcAtB0XALMdFwCyHRcAsR0XALAtAgCvWbgCrk0CAK1pFwCsTQIAq00XAKqdrQCpQRcAqE0KAK40AIDRLACApX0XAKR9FwCjoa4Aom2CAqF9ggKgbYICnzmuAJ41rgCdDa4
AnDGPApuZggKaEdoAmTGuAJhljgKXtaIAlgWuAJWJggKUNa4Ak7GCApJ1rgCRNYECkC2uAI99rgCOTa4AjUmuAIwFrgCLva4AigWOAon5ogCIVdoAh0miAIadrgCFfaIAhJ2uAIOZrgCCddoAgZmuAIAdrADMqIQCzUyGAswguQLNTLkCzECOAkYyAIDMmIUCzTyEAswQgwLNUIMCzKCDAs2MgwLMMIACzSSAAswYgALNhIACmjMAgAUsAIAxLQCAiSMAgE0jAIBXIwCAayMAgJMjAIB1IwCAnSMAgGEjAIB/IwCAzPC5As2EuQLMULgCzay7AoDNAACB1QAAgt0AAIPVAACEzQAAhfUAAIb9AACH9QAAiM0AAFcvAIDBLACA1SoAgM0qAIDdKgCAuekAgCErAICQZQAAkW0AAKiIKgA1KwCAPSsAgEUrAIBJKwCATSsAgKIAMACjzDMAoOg9AKHsPACm8DYAp/QoAKQANACl/DUAgFERAIHpiAKCXREAg1URAIQpBACF6b0Chhm4AocVvgKIfREAiUURAIppBACL2b0CjA2vAI1REQCOcQQAj1URAJBJuAKRtb0Ckkm+ApO5vQKUUbgClam9ApZJDACXRREAmKmrAJl5EQCaaQQAm00RAJx5BACdbb4CnmkEAJ9ZEQCgqREAoakRAKK5EQCjuREApIkEAKVZqwCmuQQAp4W+Aqi9vgKpnREAquW5AquREQCs8REArfERAK6RpACv9REAsOkEALEpvQKy4a8As+GvALTZuAK1mREAtukEALctvQK4BagAueW+Arq5EQC7AYgCvKURAL2tEQC+wQQAvwG9AoABuQKBDb8CglUQAINtEACEUQUAheG8AoYlrgCHeRAAiGkFAIlNEACKIbkCi928AowxvwKNwbwCjjm5Ao/BvAKQUQ0AkV0QAJKBqgCTURAAlFEFAJV1EACWUQUAl0W/AphxBQCZQRAAmkEQAJtBEACcQRAAnUEQAJ5hBQCfsaoAoKEFAKGdvwKilb8Co7UQAKTduAKlqRAAptkQAKfZEACoiaUAqe0QAKqBBQCrQbwCrJmuAK2ZrgCusbkCr/EQALDxBQCxNbwCsi2pALPNvwK0gRAAtTmJAraNEAC3hRAAuNkFALkZvAK66bkCu+W/ArytEAC9lRAAvrkFAL8JvAK5La0AuC2tALtFEwC6BboCveG/ArwlBgC/GbwCvvmqALEdEwCwabsCs20TALJtEwC1eRMAtB2mALfVvwK2FQYAqXUTAKh1EwCrhakAqlUGAK1JvAKsdQYAr2ETAK5BvAKhQRMAoGUGAKNxvAKiZQYApVUTAKRlBgCnVRMAplUTAJl1vwKYhbwCm3W/ApqNugKdiRMAnIUOAJ+FEwCeVakAkVW/ApDlBgCTzRMAkpGtAJXZEwCU/QYAl0m/Apa1ugKJmRMAiJETAIs1vwKK9QYAjdm8AozVugKPuRMAjoETAIGtEwCA7boCgxm/AoLdBgCF8bwChBGqAIcVigKGrRMAgD2sAIFhEgCCQQcAg2USAIQZuwKF5b4Chhm9AofpvgKIIbsCidm+AopFEgCLXRIAjSkAgM3pAICOzaoAj8mLApCdiwKRpYsCkrGqAJOxqgCU2akAldmpAJb5qQCX+akAmJWqAJmRiwKatYsCm42LApyJqgCdiaoAnvGpAJ/xqQCgIakAoSGpAKJ9qgCjeYsCpE2LAqV1iwKmYaoAp2GqAKgpqQCpKakAqgmpAKsJqQCsRaoArUGLAq5liwKvXYsCsDmqALE5qgCyQakAs0GpALRxqQC1cakAti2qALcpiwK4PYsCuQWLAroRqgC7EaoAvHmpAL15qQC+WakAv1mpAIKJIwBtKwCAcSsAgI0rAIC+6QCAh5kjAJEpAIB5KwCAyOkAgIu5JACpKwCAifkkAI6VIwCPiSMAsSsAgI2JJACSvSMAESsAgLkrAICR4SMAo+sAgJfFIwCU8SMA4SsAgJkpAICbkSMA+SsAgJndIwD9KwCAnwktAAksAICdjdUAogkjAJ0pAIBBLACAofUjAEUsAICnGSMApCUkAG0sAICq7SQAeSwAgKgdIwCpeSQArhUjAK8JIwCsCSQArQkkALI9IwCJLACAsDEjALFhIwC2VSMAt0UjALRxIwC1XSMAulkjALsRIwCRLACAuV0jAL6JLQCVLACAvI0tANzpAICAuSUAgX0iAIKBIgCDmSIAhK0lAIXZJQCGuSIAh5EiAIiVIgCJ8SUAljIAgIuxJQCMgSUAjYElAI6dIgCPgSIAkLkiAJHpIgCStSIAk9EiAJT5IgCV1SIAlt0iAJfNIgCY+SIAmdUiAJrRIgCbmSIAqSwAgLEsAIDh6QCAvSwAgGUAAACh/SIAogEiAKMZIgDFLACApVklAKY5IgCnESIAqBUiAKlxJQDNLACAqzElAKwBJQCtASUArh0iAK8BIgCwOSIAsWkiALI1IgCzUSIAtHkiALVVIgC2XSIAt00iALh5IgC5VSIAulEiALsZIgD1LACA4SwAgO0sAIDxLACAgI0vAIGlLwCCrS8Ag70vAISlLwCFrS8AhqUvAIfdLwCI5S8Aie0vAIrlLwD5LACAAS0AgAUtAIANLQCAFS0AgJCRLwCRkS8AkpEvAJORLwCUsS8AlbEvAJa1LwCXRTMAmE0zAJlVMwCaPTMAmxkzAJyZMwCdiTMAnlUwAJ9JMACgwTAAockwAKLZMACj1TAApM0wAKX9MACm5TAApzUwAKi1MQCpuTEAqu0xAKuxmgCs0ZYArbE6AK61OgAZLQCAsEGUALHNlgCy1ZoAs8GWALTBlgC14ZoAtsGWALf9lgC4yZYAucGWALrZlgC71ZYAvLGWAL29lgC+qZYAv6WWAMUAAAChfSAAooEgACktAICkrScALS0AgDktAICnkSAAXS0AgKnxJwCqZScAq7EnAKyBJwCtgScArp0gAK+BIACwuSAAsekgALK1IABhLQCAtPkgALXVIAC23SAAt80gAEUtAIC51SAATS0AgLuZIACpLQCAcS0AgHUtAIB5LQCAgDknAIH9IACCASAAgxkgAG0tAICFWScAhjkgAIcRIACIFSAAiXEnAIrlJwCLMScAjAEnAI0BJwCOHSAAjwEgAJA5IACRaSAAkjUgAJNRIACUeSAAlVUgAJZdIACXTSAAmHkgAJlVIACaUSAAmxkgAJyFLgCdBdYAnoEuAJ+BLgCArT8AgbU/AIK9PwCDtT8AhK0/AIW5yACG1T8Ah80/AIj1PwCJ/T8AipnIAIvxPwCMATsAjQE7AI6NyACPOQQAkEkEAJFJBACSWQQAk1UEAJRNBACV3TwAlnkEAJd1BACYWQQAmSEEAJohBACbNdQAnCEEAJ3Z5gCeJQQAnx0EAKDpBACh9QQAos0/AKP1BACkFQQApfnUAKYhyACnIcgAqNHUAKktBACqOQQAq03CAKwtBACtdcgArh0EAK95BACwKQQAsTEEALI9BACzOQQAtC0EALX9BQC2qQUAt6kFALiZBQC5mQUAunkFALtFBQC8AQUAvQ
EFAL4BBQC/AQUAgC0HAIE1BwCCPQcAgzUHAIQtBwCFqQcAhqUHAIdl1QCILQYAiTEGAIoxBgCLDQYAjPnJAI15BgCOWQYAj1UGAJBpyQCRNQYAkj0GAJM1BgCULQYAlcUGAJZdAwCXVQMAmG0DAJl1AwCafQMAm3UDAJxtAwCdET0AnlkDAJ9ZAwCgqQMAoakDAKK5AwCjuQMApKkDAKWpAwCm2QMAp9kDAKjpAwCp6QMAqvkDAKv9AwCs5QMAre0DAK7lAwCvbcMAsKEDALGhAwCyoQMAs6EDALShAwC1zeYAtq0DALelAwC4yeYAuZkDALppAwC7aQMAvHkDAL15AwC+aQMAv2kDAIAAAACBLQCAfS0AgJUtAIDm6QCAsS0AgLUtAIC9LQCA0S0AgPQtAIDr6QCA8OkAgAAuAIAELgCACC4AgPwtAIAQLgCAoSkAgKUpAIAYLgCAIC4AgPXpAIA8LgCAQC4AgEwuAID66QCAVC4AgFguAIA3LwCAqSkAgGwuAICILgCAhC4AgATqAICQLgCACeoAgJwuAICYLgCAoC4AgLAuAIC0LgCArSkAgMQuAIDMLgCA0C4AgNQuAICxKQCADuoAgLUpAID3LgCA+y4AgP8uAIDV6wCAGOoAgNo1AIAvLwCAuSkAgDvqAIAN6wCAPy8AgEcvAIC9KQCAWy8AgGsvAICqIfQAq7U/AKilPwCpzecArkXwAK+hPwCsSfAArTH0AKJl4gCjvT8AoLk/AKG5PwCmlT8Ap50/AKSlPwClnT8Augk8AG8vAIC4CTwAuQk8AHcvAICHLwCAxSkAgMEpAICy3T8AswU9ALBN7wCx1T8Atn3wALe55AC0HT0AtWk8AB3qAICPLwCAoy8AgKcvAIC3LwCAyy8AgMMvAIDHLwCAgrX7AM8vAICA/T8AgfU/AOMvAIDnLwCA/y8AgAcwAICavT8Am/3NAJi9PwCZtT8Anlk/AJ9ZPwCcWT8AnVk/AJKBPwCTaekAkHnkAJGxPwCWgT8Al4H0AJQh5wCVmT8AFzAAgCswAIAs6gCAJzAAgBswAIAzMACAOzAAgE8wAIAx6gCAVzAAgEoAAABLMACAQzAAgMkpAIBfMACAZzAAgG8wAIBjMACAzSkAgIcwAIA26gCAszAAgPUwAIDRMACA2SkAgNUpAIDRKQCAnSsAgKErAID5MACA4TAAgK41AIA9KgCADTEAgCExAIAZMQCAT+oAgN0pAIA1MQCAKTEAgFIxAIBZ6gCAXjEAgD0xAIBmMQCAajEAgG4xAIByMQCAfjEAgF7qAICGMQCA5SkAgJIxAIBj6gCAljEAgOkpAICiMQCArjEAgL4xAIBo6gCA/+kAgG3qAIDeMQCAcuoAgLgJAQC5CQEAuhkBALsZAQC8CQEAvQkBAL45AQC/OQEAsM3FALE1zACymQ4As5kOALSJDgC1iQ4AtjkBALc5AQCo6dkAqckOAKrZDgCrqcUArMUOAK3NDgCuxQ4Ar/kOAKA1DgChPQ4AojUOAKOxxQCk8Q4ApfEOAKbxDgCn8Q4AmGkPAJlpDwCaeQ8Am3kPAJxpDwCdaQ8Ant0OAJ/NDgCQ+eoAkXEPAJJ9DwCTdQ8AlG0PAJVpDwCWWQ8Al1kPAIh5DwCJeQ8AigkPAIsJDwCMGQ8AjRkPAI4NzACPDQ8AgHkPAIF5DwCCSQ8Ag0kPAIRZDwCFWQ8AhkkPAIdJDwCKUQIAi1ECAIj5xgCJQQIAjnECAI/txgCMQQIAjUECAIIVAgCDHQIAgAUCAIEdAgCGdQIAh30CAIQFAgCFfQIAmsUCAJvNAgCYkc8AmYXaAJ7FAgCfzQIAnNUCAJ3NAgCSDQIAkxUCAJANAgCRBQIAlg0CAJf1AgCUDQIAlQUCAKo9AgCrRQIAqD0CAKk1AgCuXQIAr0UCAKxdAgCtVQIAol3GAKMBAgCgNQIAoQ0CAKYBAgCnxdgApBECAKURAgC6OQIAuzkCALg5AgC5OQIAvtkBAL/ZAQC82QEAvdkBALI9AgCzBQIAsD0CALE1AgC2GQIAtxkCALQdAgC16cIA6jEAgPIxAIDiMQCA/jEAgA4yAIAWMgCAIjIAgCYyAIB36gCACjIAgD4yAIBCMgCA7SkAgFIyAIB86gCANjIAgHIyAICB6gCAhuoAgHYyAICKMgCAgjIAgPEpAICOMgCAnjIAgJoyAICmMgCAw+kAgLYyAICL6gCAwjIAgJXqAIDWMgCA9jIAgJrqAIAKMwCADjMAgJ/qAICk6gCAKjMAgDozAID1KQCAPjMAgPkpAIBWMwCAWjMAgGYzAIByMwCA/SkAgIozAICp6gCApjMAgK7qAIAT6gCAwjMAgLPqAIC4AAAAuOoAgL3qAIABKgCABSoAgMfqAIDC6gCAzOoAgIAB3gCB8QcAgvEHAIPxBwCEFQIAhR0CAIYVAgCHEQIAiCXeAIld3gCKOQIAizkCAIwpAgCNKQIAjhkCAI99ygCQTd4AkWECAJJhAgCT7cEAlH0CAJVlAgCWIcAAl2kCAJhZAgCZMcIAmlUCAJstAgCcNQIAnT0CAJ4xAgCfMQIAoNECAKHRAgCi0QIAo9ECAKTxAgCl8QIApvECAKfxAgCo0QIAqdECAKrRAgCr0QIArDECAK0xAgCuMQIArzECALBRAgCxUQIAslECALNRAgC0cQIAtXECALZxAgC3cQIAuFECALlRAgC6+dwAu1UCALxNAgC9NQIAvj0CAL81AgC+7QYAv/UGALztBgC95QYAuskGALvJBgC4xcsAuckGALbtBgC39QYAtO0GALXlBgCyjQYAs/UGALDR3QCxhQYArvEGAK/xBgCs5QYAreEGAKr1BgCr/QYAqMUGAKn9BgCm9QYAp/0GAKTlBgCl/QYAovUGAKP9BgCg+QYAoZ3dAJ75BgCf+QYAnPkGAJ35BgCa+QYAm/kGAJj5BgCZ+QYAlvkGAJf5BgCUcd0AlfkGAJL9BgCT5QYAkP0GAJH1BgCO/QYAj4UGAIz9BgCN9QYAiuEGAIsB3QCI8QYAifEGAIbBBgCHwQYAhPEGAIXxBgCCkccAg+EGAIDpBgCBxcAAgAAAANHqAIACNACABjQAgBI0AIARKgCAFSoAgNvqAIAmNACAGSoAgODqAIDl6gCA6uoAgJY0AIAdKgCAojQAgKY0AIDv6gCA9OoAgL40AIAhKgCA+eoAgNI0AIDWNACAJSoAgP7qAIDyNACAKSoAgAI1AID6NACACjUAgAjrAIAiNQCALSoAgC41AIA2NQCARjUAgDEqAIAS6wCAF+sAgDUqAIAc6wCAXjUAgCHrAIBqNQCAdjUAgCbrAIAr6wCAkjUAgDDrAICaNQCAQOoAgDkqAICyNQCAtjUAgEEqAIC6NQCAFC4AgDXrAIA66wCAReoAgErqAIDeNQCA9jcAgIDNAQCB1QEAgt0BAIPVAQCEzQEAhfUBAIb9AQCH9QEAiM0BAInVAQCK3QEAi/UJAIzJAQCNyQEAjgEcAI89HwCQRR8AkU0fAJJFHwCTXR8AlEUfAJVNHwCWRR8Al30fA
[Base64-encoded binary file contents omitted: not human-readable; the decoded bytes appear to be a bundled Unicode character-mapping/normalization table embedded in the diff.]
A7KyjAOyspADsrKUA7KymAOyspwDsrKgA7KypAOysqgDsrKsA7KysAOysrQDsrK4A7KyvAOyssADsrLEA7KyyAOysswDsrLQA7Ky1AOystgDsrLcA7Ky4AOysuQDsrLoA7Ky7AOysvADsrL0A7Ky+AOysvwDsrYAA7K2BAOytggDsrYMA7K2EAOythQDsrYYA7K2HAOytiADsrYkA7K2KAOytiwDsrYwA7K2NAOytjgDsrY8A7K2QAOytkQDsrZIA7K2TAOytlADsrZUA7K2WAOytlwDsrZgA7K2ZAOytmgDsrZsA7K2cAOytnQDsrZ4A7K2fAOytoADsraEA7K2iAOytowDsraQA7K2lAOytpgDsracA7K2oAOytqQDsraoA7K2rAOytrADsra0A7K2uAOytrwDsrbAA7K2xAOytsgDsrbMA7K20AOyttQDsrbYA7K23AOytuADsrbkA7K26AOytuwDsrbwA7K29AOytvgDsrb8A7K6AAOyugQDsroIA7K6DAOyuhADsroUA7K6GAOyuhwDsrogA7K6JAOyuigDsrosA7K6MAOyujQDsro4A7K6PAOyukADsrpEA7K6SAOyukwDsrpQA7K6VAOyulgDsrpcA7K6YAOyumQDsrpoA7K6bAOyunADsrp0A7K6eAOyunwDsrqAA7K6hAOyuogDsrqMA7K6kAOyupQDsrqYA7K6nAOyuqADsrqkA7K6qAOyuqwDsrqwA7K6tAOyurgDsrq8A7K6wAOyusQDsrrIA7K6zAOyutADsrrUA7K62AOyutwDsrrgA7K65AOyuugDsrrsA7K68AOyuvQDsrr4A7K6/AOyvgADsr4EA7K+CAOyvgwDsr4QA7K+FAOyvhgDsr4cA7K+IAOyviQDsr4oA7K+LAOyvjADsr40A7K+OAOyvjwDsr5AA7K+RAOyvkgDsr5MA7K+UAOyvlQDsr5YA7K+XAOyvmADsr5kA7K+aAOyvmwDsr5wA7K+dAOyvngDsr58A7K+gAOyvoQDsr6IA7K+jAOyvpADsr6UA7K+mAOyvpwDsr6gA7K+pAOyvqgDsr6sA7K+sAOyvrQDsr64A7K+vAOyvsADsr7EA7K+yAOyvswDsr7QA7K+1AOyvtgDsr7cA7K+4AOyvuQDsr7oA7K+7AOyvvADsr70A7K++AOyvvwDssIAA7LCBAOywggDssIMA7LCEAOywhQDssIYA7LCHAOywiADssIkA7LCKAOywiwDssIwA7LCNAOywjgDssI8A7LCQAOywkQDssJIA7LCTAOywlADssJUA7LCWAOywlwDssJgA7LCZAOywmgDssJsA7LCcAOywnQDssJ4A7LCfAOywoADssKEA7LCiAOywowDssKQA7LClAOywpgDssKcA7LCoAOywqQDssKoA7LCrAOywrADssK0A7LCuAOywrwDssLAA7LCxAOywsgDssLMA7LC0AOywtQDssLYA7LC3AOywuADssLjqs6AA7LC5AOywugDssLsA7LC8AOywvQDssL4A7LC/AOyxgADssYEA7LGCAOyxgwDssYQA7LGFAOyxhgDssYcA7LGIAOyxiQDssYoA7LGLAOyxjADssY0A7LGOAOyxjwDssZAA7LGRAOyxkgDssZMA7LGUAOyxlQDssZYA7LGXAOyxmADssZkA7LGaAOyxmwDssZwA7LGdAOyxngDssZ8A7LGgAOyxoQDssaIA7LGjAOyxpADssaUA7LGmAOyxpwDssagA7LGpAOyxqgDssasA7LGsAOyxrQDssa4A7LGvAOyxsADssbEA7LGyAOyxswDssbQA7LG1AOyxtgDssbcA7LG4AOyxuQDssboA7LG7AOyxvADssb0A7LG+AOyxvwDssoAA7LKBAOyyggDssoMA7LKEAOyyhQDssoYA7LKHAOyyiADssokA7LKKAOyyiwDssowA7LKNAOyyjgDsso8A7LKQAOyykQDsspIA7LKTAOyylADsspUA7LKWAOyylwDsspgA7LKZAOyymgDsspsA7LKcAOyynQDssp4A7LKfAOyyoADssqEA7LKiAOyyowDssqQA7LKlAOyypgDssqcA7LKoAOyyqQDssqoA7LKrAOyyrADssq0A7LKuAOyyrwDssrAA7LKxAOyysgDssrMA7LK0AOyytQDssrYA7LK3AOyyuADssrkA7LK6AOyyuwDssrwA7LK9AOyyvgDssr8A7LOAAOyzgQDss4IA7LODAOyzhADss4UA7LOGAOyzhwDss4gA7LOJAOyzigDss4sA7LOMAOyzjQDss44A7LOPAOyzkADss5EA7LOSAOyzkwDss5QA7LOVAOyzlgDss5cA7LOYAOyzmQDss5oA7LObAOyznADss50A7LOeAOyznwDss6AA7LOhAOyzogDss6MA7LOkAOyzpQDss6YA7LOnAOyzqADss6kA7LOqAOyzqwDss6wA7LOtAOyzrgDss68A7LOwAOyzsQDss7IA7LOzAOyztADss7UA7LO2AOyztwDss7gA7LO5AOyzugDss7sA7LO8AOyzvQDss74A7LO/AOy0gADstIEA7LSCAOy0gwDstIQA7LSFAOy0hgDstIcA7LSIAOy0iQDstIoA7LSLAOy0jADstI0A7LSOAOy0jwDstJAA7LSRAOy0kgDstJMA7LSUAOy0lQDstJYA7LSXAOy0mADstJkA7LSaAOy0mwDstJwA7LSdAOy0ngDstJ8A7LSgAOy0oQDstKIA7LSjAOy0pADstKUA7LSmAOy0pwDstKgA7LSpAOy0qgDstKsA7LSsAOy0rQDstK4A7LSvAOy0sADstLEA7LSyAOy0swDstLQA7LS1AOy0tgDstLcA7LS4AOy0uQDstLoA7LS7AOy0vADstL0A7LS+AOy0vwDstYAA7LWBAOy1ggDstYMA7LWEAOy1hQDstYYA7LWHAOy1iADstYkA7LWKAOy1iwDstYwA7LWNAOy1jgDstY8A7LWQAOy1kQDstZIA7LWTAOy1lADstZUA7LWWAOy1lwDstZgA7LWZAOy1mgDstZsA7LWcAOy1nQDstZ4A7LWfAOy1oADstaEA7LWiAOy1owDstaQA7LWlAOy1pgDstacA7LWoAOy1qQDstaoA7LWrAOy1rADsta0A7LWuAOy1rwDstbAA7LWxAOy1sgDstbMA7LW0AOy1tQDstbYA7LW3AOy1uADstbkA7LW6AOy1uwDstbwA7LW9AOy1vgDstb8A7LaAAOy2gQDstoIA7LaDAOy2hADstoUA7LaGAOy2hwDstogA7LaJAOy2igDstosA7LaMAOy2jQDsto4A7LaPAOy2kADstpEA7LaSAOy2kwDstpQA7LaVAOy2lgDstpcA7LaYAOy2mQDstpoA7LabAOy2nADstp0A7LaeAOy2nwDstqAA7LahAOy2ogDstqMA7LakAOy2pQDstqYA7LanAOy2qADstqkA7LaqAOy2qwDstqwA7LatAOy2rgDstq8A7LawAOy2sQDstrIA7LazAOy2tADstrUA7La2AOy2twDstrgA7La5AOy2ugDstr
sA7La8AOy2vQDstr4A7La/AOy3gADst4EA7LeCAOy3gwDst4QA7LeFAOy3hgDst4cA7LeIAOy3iQDst4oA7LeLAOy3jADst40A7LeOAOy3jwDst5AA7LeRAOy3kgDst5MA7LeUAOy3lQDst5YA7LeXAOy3mADst5kA7LeaAOy3mwDst5wA7LedAOy3ngDst58A7LegAOy3oQDst6IA7LejAOy3pADst6UA7LemAOy3pwDst6gA7LepAOy3qgDst6sA7LesAOy3rQDst64A7LevAOy3sADst7EA7LeyAOy3swDst7QA7Le1AOy3tgDst7cA7Le4AOy3uQDst7oA7Le7AOy3vADst70A7Le+AOy3vwDsuIAA7LiBAOy4ggDsuIMA7LiEAOy4hQDsuIYA7LiHAOy4iADsuIkA7LiKAOy4iwDsuIwA7LiNAOy4jgDsuI8A7LiQAOy4kQDsuJIA7LiTAOy4lADsuJUA7LiWAOy4lwDsuJgA7LiZAOy4mgDsuJsA7LicAOy4nQDsuJ4A7LifAOy4oADsuKEA7LiiAOy4owDsuKQA7LilAOy4pgDsuKcA7LioAOy4qQDsuKoA7LirAOy4rADsuK0A7LiuAOy4rwDsuLAA7LixAOy4sgDsuLMA7Li0AOy4tQDsuLYA7Li3AOy4uADsuLkA7Li6AOy4uwDsuLwA7Li9AOy4vgDsuL8A7LmAAOy5gQDsuYIA7LmDAOy5hADsuYUA7LmGAOy5hwDsuYgA7LmJAOy5igDsuYsA7LmMAOy5jQDsuY4A7LmPAOy5kADsuZEA7LmSAOy5kwDsuZQA7LmVAOy5lgDsuZcA7LmYAOy5mQDsuZoA7LmbAOy5nADsuZ0A7LmeAOy5nwDsuaAA7LmhAOy5ogDsuaMA7LmkAOy5pQDsuaYA7LmnAOy5qADsuakA7LmqAOy5qwDsuawA7LmtAOy5rgDsua8A7LmwAOy5sQDsubIA7LmzAOy5tADsubUA7Lm2AOy5twDsubgA7Lm5AOy5ugDsubsA7Lm8AOy5vQDsub4A7Lm/AOy6gADsuoEA7LqCAOy6gwDsuoQA7LqFAOy6hgDsuocA7LqIAOy6iQDsuooA7LqLAOy6jADsuo0A7LqOAOy6jwDsupAA7LqRAOy6kgDsupMA7LqUAOy6lQDsupYA7LqXAOy6mADsupkA7LqaAOy6mwDsupwA7LqdAOy6ngDsup8A7LqgAOy6oQDsuqIA7LqjAOy6pADsuqUA7LqmAOy6pwDsuqgA7LqpAOy6qgDsuqsA7LqsAOy6rQDsuq4A7LqvAOy6sADsurEA7LqyAOy6swDsurQA7Lq1AOy6tgDsurcA7Lq4AOy6uQDsuroA7Lq7AOy6vADsur0A7Lq+AOy6vwDsu4AA7LuBAOy7ggDsu4MA7LuEAOy7hQDsu4YA7LuHAOy7iADsu4kA7LuKAOy7iwDsu4wA7LuNAOy7jgDsu48A7LuQAOy7kQDsu5IA7LuTAOy7lADsu5UA7LuWAOy7lwDsu5gA7LuZAOy7mgDsu5sA7LucAOy7nQDsu54A7LufAOy7oADsu6EA7LuiAOy7owDsu6QA7LulAOy7pgDsu6cA7LuoAOy7qQDsu6oA7LurAOy7rADsu60A7LuuAOy7rwDsu7AA7LuxAOy7sgDsu7MA7Lu0AOy7tQDsu7YA7Lu3AOy7uADsu7kA7Lu6AOy7uwDsu7wA7Lu9AOy7vgDsu78A7LyAAOy8gQDsvIIA7LyDAOy8hADsvIUA7LyGAOy8hwDsvIgA7LyJAOy8igDsvIsA7LyMAOy8jQDsvI4A7LyPAOy8kADsvJEA7LySAOy8kwDsvJQA7LyVAOy8lgDsvJcA7LyYAOy8mQDsvJoA7LybAOy8nADsvJ0A7LyeAOy8nwDsvKAA7LyhAOy8ogDsvKMA7LykAOy8pQDsvKYA7LynAOy8qADsvKkA7LyqAOy8qwDsvKwA7LytAOy8rgDsvK8A7LywAOy8sQDsvLIA7LyzAOy8tADsvLUA7Ly2AOy8twDsvLgA7Ly5AOy8ugDsvLsA7Ly8AOy8vQDsvL4A7Ly/AOy9gADsvYEA7L2CAOy9gwDsvYQA7L2FAOy9hgDsvYcA7L2IAOy9iQDsvYoA7L2LAOy9jADsvY0A7L2OAOy9jwDsvZAA7L2RAOy9kgDsvZMA7L2UAOy9lQDsvZYA7L2XAOy9mADsvZkA7L2aAOy9mwDsvZwA7L2dAOy9ngDsvZ8A7L2gAOy9oQDsvaIA7L2jAOy9pADsvaUA7L2mAOy9pwDsvagA7L2pAOy9qgDsvasA7L2sAOy9rQDsva4A7L2vAOy9sADsvbEA7L2yAOy9swDsvbQA7L21AOy9tgDsvbcA7L24AOy9uQDsvboA7L27AOy9vADsvb0A7L2+AOy9vwDsvoAA7L6BAOy+ggDsvoMA7L6EAOy+hQDsvoYA7L6HAOy+iADsvokA7L6KAOy+iwDsvowA7L6NAOy+jgDsvo8A7L6QAOy+kQDsvpIA7L6TAOy+lADsvpUA7L6WAOy+lwDsvpgA7L6ZAOy+mgDsvpsA7L6cAOy+nQDsvp4A7L6fAOy+oADsvqEA7L6iAOy+owDsvqQA7L6lAOy+pgDsvqcA7L6oAOy+qQDsvqoA7L6rAOy+rADsvq0A7L6uAOy+rwDsvrAA7L6xAOy+sgDsvrMA7L60AOy+tQDsvrYA7L63AOy+uADsvrkA7L66AOy+uwDsvrwA7L69AOy+vgDsvr8A7L+AAOy/gQDsv4IA7L+DAOy/hADsv4UA7L+GAOy/hwDsv4gA7L+JAOy/igDsv4sA7L+MAOy/jQDsv44A7L+PAOy/kADsv5EA7L+SAOy/kwDsv5QA7L+VAOy/lgDsv5cA7L+YAOy/mQDsv5oA7L+bAOy/nADsv50A7L+eAOy/nwDsv6AA7L+hAOy/ogDsv6MA7L+kAOy/pQDsv6YA7L+nAOy/qADsv6kA7L+qAOy/qwDsv6wA7L+tAOy/rgDsv68A7L+wAOy/sQDsv7IA7L+zAOy/tADsv7UA7L+2AOy/twDsv7gA7L+5AOy/ugDsv7sA7L+8AOy/vQDsv74A7L+/AO2AgADtgIEA7YCCAO2AgwDtgIQA7YCFAO2AhgDtgIcA7YCIAO2AiQDtgIoA7YCLAO2AjADtgI0A7YCOAO2AjwDtgJAA7YCRAO2AkgDtgJMA7YCUAO2AlQDtgJYA7YCXAO2AmADtgJkA7YCaAO2AmwDtgJwA7YCdAO2AngDtgJ8A7YCgAO2AoQDtgKIA7YCjAO2ApADtgKUA7YCmAO2ApwDtgKgA7YCpAO2AqgDtgKsA7YCsAO2ArQDtgK4A7YCvAO2AsADtgLEA7YCyAO2AswDtgLQA7YC1AO2AtgDtgLcA7YC4AO2AuQDtgLoA7YC7AO2AvADtgL0A7YC+AO2AvwDtgYAA7YGBAO2BggDtgYMA7YGEAO2BhQDtgYYA7YGHAO2BiADtgYkA7YGKAO2BiwDtgYwA7YGNAO2BjgDtgY8A7YGQAO2BkQDtgZIA7YGTAO2BlADtgZUA7
YGWAO2BlwDtgZgA7YGZAO2BmgDtgZsA7YGcAO2BnQDtgZ4A7YGfAO2BoADtgaEA7YGiAO2BowDtgaQA7YGlAO2BpgDtgacA7YGoAO2BqQDtgaoA7YGrAO2BrADtga0A7YGuAO2BrwDtgbAA7YGxAO2BsgDtgbMA7YG0AO2BtQDtgbYA7YG3AO2BuADtgbkA7YG6AO2BuwDtgbwA7YG9AO2BvgDtgb8A7YKAAO2CgQDtgoIA7YKDAO2ChADtgoUA7YKGAO2ChwDtgogA7YKJAO2CigDtgosA7YKMAO2CjQDtgo4A7YKPAO2CkADtgpEA7YKSAO2CkwDtgpQA7YKVAO2ClgDtgpcA7YKYAO2CmQDtgpoA7YKbAO2CnADtgp0A7YKeAO2CnwDtgqAA7YKhAO2CogDtgqMA7YKkAO2CpQDtgqYA7YKnAO2CqADtgqkA7YKqAO2CqwDtgqwA7YKtAO2CrgDtgq8A7YKwAO2CsQDtgrIA7YKzAO2CtADtgrUA7YK2AO2CtwDtgrgA7YK5AO2CugDtgrsA7YK8AO2CvQDtgr4A7YK/AO2DgADtg4EA7YOCAO2DgwDtg4QA7YOFAO2DhgDtg4cA7YOIAO2DiQDtg4oA7YOLAO2DjADtg40A7YOOAO2DjwDtg5AA7YORAO2DkgDtg5MA7YOUAO2DlQDtg5YA7YOXAO2DmADtg5kA7YOaAO2DmwDtg5wA7YOdAO2DngDtg58A7YOgAO2DoQDtg6IA7YOjAO2DpADtg6UA7YOmAO2DpwDtg6gA7YOpAO2DqgDtg6sA7YOsAO2DrQDtg64A7YOvAO2DsADtg7EA7YOyAO2DswDtg7QA7YO1AO2DtgDtg7cA7YO4AO2DuQDtg7oA7YO7AO2DvADtg70A7YO+AO2DvwDthIAA7YSBAO2EggDthIMA7YSEAO2EhQDthIYA7YSHAO2EiADthIkA7YSKAO2EiwDthIwA7YSNAO2EjgDthI8A7YSQAO2EkQDthJIA7YSTAO2ElADthJUA7YSWAO2ElwDthJgA7YSZAO2EmgDthJsA7YScAO2EnQDthJ4A7YSfAO2EoADthKEA7YSiAO2EowDthKQA7YSlAO2EpgDthKcA7YSoAO2EqQDthKoA7YSrAO2ErADthK0A7YSuAO2ErwDthLAA7YSxAO2EsgDthLMA7YS0AO2EtQDthLYA7YS3AO2EuADthLkA7YS6AO2EuwDthLwA7YS9AO2EvgDthL8A7YWAAO2FgQDthYIA7YWDAO2FhADthYUA7YWGAO2FhwDthYgA7YWJAO2FigDthYsA7YWMAO2FjQDthY4A7YWPAO2FkADthZEA7YWSAO2FkwDthZQA7YWVAO2FlgDthZcA7YWYAO2FmQDthZoA7YWbAO2FnADthZ0A7YWeAO2FnwDthaAA7YWhAO2FogDthaMA7YWkAO2FpQDthaYA7YWnAO2FqADthakA7YWqAO2FqwDthawA7YWtAO2FrgDtha8A7YWwAO2FsQDthbIA7YWzAO2FtADthbUA7YW2AO2FtwDthbgA7YW5AO2FugDthbsA7YW8AO2FvQDthb4A7YW/AO2GgADthoEA7YaCAO2GgwDthoQA7YaFAO2GhgDthocA7YaIAO2GiQDthooA7YaLAO2GjADtho0A7YaOAO2GjwDthpAA7YaRAO2GkgDthpMA7YaUAO2GlQDthpYA7YaXAO2GmADthpkA7YaaAO2GmwDthpwA7YadAO2GngDthp8A7YagAO2GoQDthqIA7YajAO2GpADthqUA7YamAO2GpwDthqgA7YapAO2GqgDthqsA7YasAO2GrQDthq4A7YavAO2GsADthrEA7YayAO2GswDthrQA7Ya1AO2GtgDthrcA7Ya4AO2GuQDthroA7Ya7AO2GvADthr0A7Ya+AO2GvwDth4AA7YeBAO2HggDth4MA7YeEAO2HhQDth4YA7YeHAO2HiADth4kA7YeKAO2HiwDth4wA7YeNAO2HjgDth48A7YeQAO2HkQDth5IA7YeTAO2HlADth5UA7YeWAO2HlwDth5gA7YeZAO2HmgDth5sA7YecAO2HnQDth54A7YefAO2HoADth6EA7YeiAO2HowDth6QA7YelAO2HpgDth6cA7YeoAO2HqQDth6oA7YerAO2HrADth60A7YeuAO2HrwDth7AA7YexAO2HsgDth7MA7Ye0AO2HtQDth7YA7Ye3AO2HuADth7kA7Ye6AO2HuwDth7wA7Ye9AO2HvgDth78A7YiAAO2IgQDtiIIA7YiDAO2IhADtiIUA7YiGAO2IhwDtiIgA7YiJAO2IigDtiIsA7YiMAO2IjQDtiI4A7YiPAO2IkADtiJEA7YiSAO2IkwDtiJQA7YiVAO2IlgDtiJcA7YiYAO2ImQDtiJoA7YibAO2InADtiJ0A7YieAO2InwDtiKAA7YihAO2IogDtiKMA7YikAO2IpQDtiKYA7YinAO2IqADtiKkA7YiqAO2IqwDtiKwA7YitAO2IrgDtiK8A7YiwAO2IsQDtiLIA7YizAO2ItADtiLUA7Yi2AO2ItwDtiLgA7Yi5AO2IugDtiLsA7Yi8AO2IvQDtiL4A7Yi/AO2JgADtiYEA7YmCAO2JgwDtiYQA7YmFAO2JhgDtiYcA7YmIAO2JiQDtiYoA7YmLAO2JjADtiY0A7YmOAO2JjwDtiZAA7YmRAO2JkgDtiZMA7YmUAO2JlQDtiZYA7YmXAO2JmADtiZkA7YmaAO2JmwDtiZwA7YmdAO2JngDtiZ8A7YmgAO2JoQDtiaIA7YmjAO2JpADtiaUA7YmmAO2JpwDtiagA7YmpAO2JqgDtiasA7YmsAO2JrQDtia4A7YmvAO2JsADtibEA7YmyAO2JswDtibQA7Ym1AO2JtgDtibcA7Ym4AO2JuQDtiboA7Ym7AO2JvADtib0A7Ym+AO2JvwDtioAA7YqBAO2KggDtioMA7YqEAO2KhQDtioYA7YqHAO2KiADtiokA7YqKAO2KiwDtiowA7YqNAO2KjgDtio8A7YqQAO2KkQDtipIA7YqTAO2KlADtipUA7YqWAO2KlwDtipgA7YqZAO2KmgDtipsA7YqcAO2KnQDtip4A7YqfAO2KoADtiqEA7YqiAO2KowDtiqQA7YqlAO2KpgDtiqcA7YqoAO2KqQDtiqoA7YqrAO2KrADtiq0A7YquAO2KrwDtirAA7YqxAO2KsgDtirMA7Yq0AO2KtQDtirYA7Yq3AO2KuADtirkA7Yq6AO2KuwDtirwA7Yq9AO2KvgDtir8A7YuAAO2LgQDti4IA7YuDAO2LhADti4UA7YuGAO2LhwDti4gA7YuJAO2LigDti4sA7YuMAO2LjQDti44A7YuPAO2LkADti5EA7YuSAO2LkwDti5QA7YuVAO2LlgDti5cA7YuYAO2LmQDti5oA7YubAO2LnADti50A7YueAO2LnwDti6AA7YuhAO2LogDti6MA7YukAO2LpQDti6YA7YunAO2LqADti6kA7YuqAO2LqwDti6wA7YutAO2LrgDti68A7Yuw
AO2LsQDti7IA7YuzAO2LtADti7UA7Yu2AO2LtwDti7gA7Yu5AO2LugDti7sA7Yu8AO2LvQDti74A7Yu/AO2MgADtjIEA7YyCAO2MgwDtjIQA7YyFAO2MhgDtjIcA7YyIAO2MiQDtjIoA7YyLAO2MjADtjI0A7YyOAO2MjwDtjJAA7YyRAO2MkgDtjJMA7YyUAO2MlQDtjJYA7YyXAO2MmADtjJkA7YyaAO2MmwDtjJwA7YydAO2MngDtjJ8A7YygAO2MoQDtjKIA7YyjAO2MpADtjKUA7YymAO2MpwDtjKgA7YypAO2MqgDtjKsA7YysAO2MrQDtjK4A7YyvAO2MsADtjLEA7YyyAO2MswDtjLQA7Yy1AO2MtgDtjLcA7Yy4AO2MuQDtjLoA7Yy7AO2MvADtjL0A7Yy+AO2MvwDtjYAA7Y2BAO2NggDtjYMA7Y2EAO2NhQDtjYYA7Y2HAO2NiADtjYkA7Y2KAO2NiwDtjYwA7Y2NAO2NjgDtjY8A7Y2QAO2NkQDtjZIA7Y2TAO2NlADtjZUA7Y2WAO2NlwDtjZgA7Y2ZAO2NmgDtjZsA7Y2cAO2NnQDtjZ4A7Y2fAO2NoADtjaEA7Y2iAO2NowDtjaQA7Y2lAO2NpgDtjacA7Y2oAO2NqQDtjaoA7Y2rAO2NrADtja0A7Y2uAO2NrwDtjbAA7Y2xAO2NsgDtjbMA7Y20AO2NtQDtjbYA7Y23AO2NuADtjbkA7Y26AO2NuwDtjbwA7Y29AO2NvgDtjb8A7Y6AAO2OgQDtjoIA7Y6DAO2OhADtjoUA7Y6GAO2OhwDtjogA7Y6JAO2OigDtjosA7Y6MAO2OjQDtjo4A7Y6PAO2OkADtjpEA7Y6SAO2OkwDtjpQA7Y6VAO2OlgDtjpcA7Y6YAO2OmQDtjpoA7Y6bAO2OnADtjp0A7Y6eAO2OnwDtjqAA7Y6hAO2OogDtjqMA7Y6kAO2OpQDtjqYA7Y6nAO2OqADtjqkA7Y6qAO2OqwDtjqwA7Y6tAO2OrgDtjq8A7Y6wAO2OsQDtjrIA7Y6zAO2OtADtjrUA7Y62AO2OtwDtjrgA7Y65AO2OugDtjrsA7Y68AO2OvQDtjr4A7Y6/AO2PgADtj4EA7Y+CAO2PgwDtj4QA7Y+FAO2PhgDtj4cA7Y+IAO2PiQDtj4oA7Y+LAO2PjADtj40A7Y+OAO2PjwDtj5AA7Y+RAO2PkgDtj5MA7Y+UAO2PlQDtj5YA7Y+XAO2PmADtj5kA7Y+aAO2PmwDtj5wA7Y+dAO2PngDtj58A7Y+gAO2PoQDtj6IA7Y+jAO2PpADtj6UA7Y+mAO2PpwDtj6gA7Y+pAO2PqgDtj6sA7Y+sAO2PrQDtj64A7Y+vAO2PsADtj7EA7Y+yAO2PswDtj7QA7Y+1AO2PtgDtj7cA7Y+4AO2PuQDtj7oA7Y+7AO2PvADtj70A7Y++AO2PvwDtkIAA7ZCBAO2QggDtkIMA7ZCEAO2QhQDtkIYA7ZCHAO2QiADtkIkA7ZCKAO2QiwDtkIwA7ZCNAO2QjgDtkI8A7ZCQAO2QkQDtkJIA7ZCTAO2QlADtkJUA7ZCWAO2QlwDtkJgA7ZCZAO2QmgDtkJsA7ZCcAO2QnQDtkJ4A7ZCfAO2QoADtkKEA7ZCiAO2QowDtkKQA7ZClAO2QpgDtkKcA7ZCoAO2QqQDtkKoA7ZCrAO2QrADtkK0A7ZCuAO2QrwDtkLAA7ZCxAO2QsgDtkLMA7ZC0AO2QtQDtkLYA7ZC3AO2QuADtkLkA7ZC6AO2QuwDtkLwA7ZC9AO2QvgDtkL8A7ZGAAO2RgQDtkYIA7ZGDAO2RhADtkYUA7ZGGAO2RhwDtkYgA7ZGJAO2RigDtkYsA7ZGMAO2RjQDtkY4A7ZGPAO2RkADtkZEA7ZGSAO2RkwDtkZQA7ZGVAO2RlgDtkZcA7ZGYAO2RmQDtkZoA7ZGbAO2RnADtkZ0A7ZGeAO2RnwDtkaAA7ZGhAO2RogDtkaMA7ZGkAO2RpQDtkaYA7ZGnAO2RqADtkakA7ZGqAO2RqwDtkawA7ZGtAO2RrgDtka8A7ZGwAO2RsQDtkbIA7ZGzAO2RtADtkbUA7ZG2AO2RtwDtkbgA7ZG5AO2RugDtkbsA7ZG8AO2RvQDtkb4A7ZG/AO2SgADtkoEA7ZKCAO2SgwDtkoQA7ZKFAO2ShgDtkocA7ZKIAO2SiQDtkooA7ZKLAO2SjADtko0A7ZKOAO2SjwDtkpAA7ZKRAO2SkgDtkpMA7ZKUAO2SlQDtkpYA7ZKXAO2SmADtkpkA7ZKaAO2SmwDtkpwA7ZKdAO2SngDtkp8A7ZKgAO2SoQDtkqIA7ZKjAO2SpADtkqUA7ZKmAO2SpwDtkqgA7ZKpAO2SqgDtkqsA7ZKsAO2SrQDtkq4A7ZKvAO2SsADtkrEA7ZKyAO2SswDtkrQA7ZK1AO2StgDtkrcA7ZK4AO2SuQDtkroA7ZK7AO2SvADtkr0A7ZK+AO2SvwDtk4AA7ZOBAO2TggDtk4MA7ZOEAO2ThQDtk4YA7ZOHAO2TiADtk4kA7ZOKAO2TiwDtk4wA7ZONAO2TjgDtk48A7ZOQAO2TkQDtk5IA7ZOTAO2TlADtk5UA7ZOWAO2TlwDtk5gA7ZOZAO2TmgDtk5sA7ZOcAO2TnQDtk54A7ZOfAO2ToADtk6EA7ZOiAO2TowDtk6QA7ZOlAO2TpgDtk6cA7ZOoAO2TqQDtk6oA7ZOrAO2TrADtk60A7ZOuAO2TrwDtk7AA7ZOxAO2TsgDtk7MA7ZO0AO2TtQDtk7YA7ZO3AO2TuADtk7kA7ZO6AO2TuwDtk7wA7ZO9AO2TvgDtk78A7ZSAAO2UgQDtlIIA7ZSDAO2UhADtlIUA7ZSGAO2UhwDtlIgA7ZSJAO2UigDtlIsA7ZSMAO2UjQDtlI4A7ZSPAO2UkADtlJEA7ZSSAO2UkwDtlJQA7ZSVAO2UlgDtlJcA7ZSYAO2UmQDtlJoA7ZSbAO2UnADtlJ0A7ZSeAO2UnwDtlKAA7ZShAO2UogDtlKMA7ZSkAO2UpQDtlKYA7ZSnAO2UqADtlKkA7ZSqAO2UqwDtlKwA7ZStAO2UrgDtlK8A7ZSwAO2UsQDtlLIA7ZSzAO2UtADtlLUA7ZS2AO2UtwDtlLgA7ZS5AO2UugDtlLsA7ZS8AO2UvQDtlL4A7ZS/AO2VgADtlYEA7ZWCAO2VgwDtlYQA7ZWFAO2VhgDtlYcA7ZWIAO2ViQDtlYoA7ZWLAO2VjADtlY0A7ZWOAO2VjwDtlZAA7ZWRAO2VkgDtlZMA7ZWUAO2VlQDtlZYA7ZWXAO2VmADtlZkA7ZWaAO2VmwDtlZwA7ZWdAO2VngDtlZ8A7ZWgAO2VoQDtlaIA7ZWjAO2VpADtlaUA7ZWmAO2VpwDtlagA7ZWpAO2VqgDtlasA7ZWsAO2VrQDtla4A7ZWvAO2VsADtlbEA7ZWyAO2VswDtlbQA7ZW1AO2VtgDtlbcA7ZW4AO2VuQDtlboA7ZW7AO2VvADtlb0A7ZW+AO2VvwDtloAA7ZaBAO2WggDtloMA7ZaEAO2WhQDtloYA7ZaHAO2WiADtlokA7ZaKAO2
WiwDtlowA7ZaNAO2WjgDtlo8A7ZaQAO2WkQDtlpIA7ZaTAO2WlADtlpUA7ZaWAO2WlwDtlpgA7ZaZAO2WmgDtlpsA7ZacAO2WnQDtlp4A7ZafAO2WoADtlqEA7ZaiAO2WowDtlqQA7ZalAO2WpgDtlqcA7ZaoAO2WqQDtlqoA7ZarAO2WrADtlq0A7ZauAO2WrwDtlrAA7ZaxAO2WsgDtlrMA7Za0AO2WtQDtlrYA7Za3AO2WuADtlrkA7Za6AO2WuwDtlrwA7Za9AO2WvgDtlr8A7ZeAAO2XgQDtl4IA7ZeDAO2XhADtl4UA7ZeGAO2XhwDtl4gA7ZeJAO2XigDtl4sA7ZeMAO2XjQDtl44A7ZePAO2XkADtl5EA7ZeSAO2XkwDtl5QA7ZeVAO2XlgDtl5cA7ZeYAO2XmQDtl5oA7ZebAO2XnADtl50A7ZeeAO2XnwDtl6AA7ZehAO2XogDtl6MA7ZekAO2XpQDtl6YA7ZenAO2XqADtl6kA7ZeqAO2XqwDtl6wA7ZetAO2XrgDtl68A7ZewAO2XsQDtl7IA7ZezAO2XtADtl7UA7Ze2AO2XtwDtl7gA7Ze5AO2XugDtl7sA7Ze8AO2XvQDtl74A7Ze/AO2YgADtmIEA7ZiCAO2YgwDtmIQA7ZiFAO2YhgDtmIcA7ZiIAO2YiQDtmIoA7ZiLAO2YjADtmI0A7ZiOAO2YjwDtmJAA7ZiRAO2YkgDtmJMA7ZiUAO2YlQDtmJYA7ZiXAO2YmADtmJkA7ZiaAO2YmwDtmJwA7ZidAO2YngDtmJ8A7ZigAO2YoQDtmKIA7ZijAO2YpADtmKUA7ZimAO2YpwDtmKgA7ZipAO2YqgDtmKsA7ZisAO2YrQDtmK4A7ZivAO2YsADtmLEA7ZiyAO2YswDtmLQA7Zi1AO2YtgDtmLcA7Zi4AO2YuQDtmLoA7Zi7AO2YvADtmL0A7Zi+AO2YvwDtmYAA7ZmBAO2ZggDtmYMA7ZmEAO2ZhQDtmYYA7ZmHAO2ZiADtmYkA7ZmKAO2ZiwDtmYwA7ZmNAO2ZjgDtmY8A7ZmQAO2ZkQDtmZIA7ZmTAO2ZlADtmZUA7ZmWAO2ZlwDtmZgA7ZmZAO2ZmgDtmZsA7ZmcAO2ZnQDtmZ4A7ZmfAO2ZoADtmaEA7ZmiAO2ZowDtmaQA7ZmlAO2ZpgDtmacA7ZmoAO2ZqQDtmaoA7ZmrAO2ZrADtma0A7ZmuAO2ZrwDtmbAA7ZmxAO2ZsgDtmbMA7Zm0AO2ZtQDtmbYA7Zm3AO2ZuADtmbkA7Zm6AO2ZuwDtmbwA7Zm9AO2ZvgDtmb8A7ZqAAO2agQDtmoIA7ZqDAO2ahADtmoUA7ZqGAO2ahwDtmogA7ZqJAO2aigDtmosA7ZqMAO2ajQDtmo4A7ZqPAO2akADtmpEA7ZqSAO2akwDtmpQA7ZqVAO2algDtmpcA7ZqYAO2amQDtmpoA7ZqbAO2anADtmp0A7ZqeAO2anwDtmqAA7ZqhAO2aogDtmqMA7ZqkAO2apQDtmqYA7ZqnAO2aqADtmqkA7ZqqAO2aqwDtmqwA7ZqtAO2argDtmq8A7ZqwAO2asQDtmrIA7ZqzAO2atADtmrUA7Zq2AO2atwDtmrgA7Zq5AO2augDtmrsA7Zq8AO2avQDtmr4A7Zq/AO2bgADtm4EA7ZuCAO2bgwDtm4QA7ZuFAO2bhgDtm4cA7ZuIAO2biQDtm4oA7ZuLAO2bjADtm40A7ZuOAO2bjwDtm5AA7ZuRAO2bkgDtm5MA7ZuUAO2blQDtm5YA7ZuXAO2bmADtm5kA7ZuaAO2bmwDtm5wA7ZudAO2bngDtm58A7ZugAO2boQDtm6IA7ZujAO2bpADtm6UA7ZumAO2bpwDtm6gA7ZupAO2bqgDtm6sA7ZusAO2brQDtm64A7ZuvAO2bsADtm7EA7ZuyAO2bswDtm7QA7Zu1AO2btgDtm7cA7Zu4AO2buQDtm7oA7Zu7AO2bvADtm70A7Zu+AO2bvwDtnIAA7ZyBAO2cggDtnIMA7ZyEAO2chQDtnIYA7ZyHAO2ciADtnIkA7ZyKAO2ciwDtnIwA7ZyNAO2cjgDtnI8A7ZyQAO2ckQDtnJIA7ZyTAO2clADtnJUA7ZyWAO2clwDtnJgA7ZyZAO2cmgDtnJsA7ZycAO2cnQDtnJ4A7ZyfAO2coADtnKEA7ZyiAO2cowDtnKQA7ZylAO2cpgDtnKcA7ZyoAO2cqQDtnKoA7ZyrAO2crADtnK0A7ZyuAO2crwDtnLAA7ZyxAO2csgDtnLMA7Zy0AO2ctQDtnLYA7Zy3AO2cuADtnLkA7Zy6AO2cuwDtnLwA7Zy9AO2cvgDtnL8A7Z2AAO2dgQDtnYIA7Z2DAO2dhADtnYUA7Z2GAO2dhwDtnYgA7Z2JAO2digDtnYsA7Z2MAO2djQDtnY4A7Z2PAO2dkADtnZEA7Z2SAO2dkwDtnZQA7Z2VAO2dlgDtnZcA7Z2YAO2dmQDtnZoA7Z2bAO2dnADtnZ0A7Z2eAO2dnwDtnaAA7Z2hAO2dogDtnaMA7Z2kAO2dpQDtnaYA7Z2nAO2dqADtnakA7Z2qAO2dqwDtnawA7Z2tAO2drgDtna8A7Z2wAO2dsQDtnbIA7Z2zAO2dtADtnbUA7Z22AO2dtwDtnbgA7Z25AO2dugDtnbsA7Z28AO2dvQDtnb4A7Z2/AO2egADtnoEA7Z6CAO2egwDtnoQA7Z6FAO2ehgDtnocA7Z6IAO2eiQDtnooA7Z6LAO2ejADtno0A7Z6OAO2ejwDtnpAA7Z6RAO2ekgDtnpMA7Z6UAO2elQDtnpYA7Z6XAO2emADtnpkA7Z6aAO2emwDtnpwA7Z6dAO2engDtnp8A7Z6gAO2eoQDtnqIA7Z6jAPCRgpoA8JGCnADwkYKrAPCRhK4A8JGErwDwkY2LAPCRjYwA8JGSuwDwkZK8APCRkr4A8JGWugDwkZa7APCdhZfwnYWlAPCdhZjwnYWlAPCdhZjwnYWl8J2FrgDwnYWY8J2FpfCdha8A8J2FmPCdhaXwnYWwAPCdhZjwnYWl8J2FsQDwnYWY8J2FpfCdhbIA8J2GufCdhaUA8J2GufCdhaXwnYWuAPCdhrnwnYWl8J2FrwDwnYa68J2FpQDwnYa68J2FpfCdha4A8J2GuvCdhaXwnYWvAPCghKIA8KCUnADwoJSlAPCglYsA8KCYugDwoKCEAPCgo54A8KCorADwoK2jAPChk6QA8KGaqADwoZuqAPChp4gA8KGsmADwobSLAPCht6QA8KG3pgDwooaDAPCihp8A8KKMsQDwopuUAPCioYQA8KKhigDwoqyMAPCir7EA8KOAigDwo4q4APCjjZ8A8KOOkwDwo46cAPCjj4MA8KOPlQDwo5GtAPCjmqMA8KOipwDwo6qNAPCjq7oA8KOyvADwo7SeAPCju5EA8KO9ngDwo76OAPCkiaMA8KSLrgDwpI6rAPCkmIgA8KSctQDwpKCUAPCksLYA8KSykgDwpL6hAPCkvrgA8KWBhADwpYOyAPClg7MA8KWEmQDwpYSzAPCliYkA8KWQnQDwpZimAPClmpoA8K
WbhQDwpaW8APClqqcA8KWuqwDwpbKAAPCls5AA8KW+hgDwpoeaAPCmiKgA8KaJhwDwpouZAPCmjL4A8KaTmgDwppSjAPCmlqgA8KaepwDwpp61APCmrLwA8KawtgDwprOVAPCmtasA8Ka8rADwpr6xAPCng5IA8KePigDwp5mnAPCnoq4A8KelpgDwp7KoAPCnu5MA8Ke8rwDwqJeSAPCol60A8KicrgDwqK+6APCotbcA8KmFhQDwqYefAPCpiJoA8KmQigDwqZKWAPCplrYA8KmssADwqoOOAPCqhIUA8KqIjgDwqoqRAPCqjpIA8KqYgAA=" + }, + { + "type": "Strip", + "strip_left": false, + "strip_right": true + }, + { + "type": "Replace", + "pattern": { + "Regex": " {2,}" + }, + "content": "▁" + } + ] + }, + "pre_tokenizer": { + "type": "Metaspace", + "replacement": "▁", + "add_prefix_space": true, + "prepend_scheme": "first" + }, + "post_processor": { + "type": "TemplateProcessing", + "single": [ + { + "Sequence": { + "id": "A", + "type_id": 0 + } + }, + { + "SpecialToken": { + "id": "</s>", + "type_id": 0 + } + } + ], + "pair": [ + { + "Sequence": { + "id": "A", + "type_id": 0 + } + }, + { + "SpecialToken": { + "id": "</s>", + "type_id": 0 + } + }, + { + "Sequence": { + "id": "B", + "type_id": 0 + } + }, + { + "SpecialToken": { + "id": "</s>", + "type_id": 0 + } + } + ], + "special_tokens": { + "</s>": { + "id": "</s>", + "ids": [ + 1 + ], + "tokens": [ + "</s>" + ] + } + } + }, + "decoder": { + "type": "Metaspace", + "replacement": "▁", + "add_prefix_space": true, + "prepend_scheme": "always" + }, + "model": { + "type": "Unigram", + "unk_id": 2, + "vocab": [ + [ + "<pad>", + 0.0 + ], + [ + "</s>", + 0.0 + ], + [ + "<unk>", + 0.0 + ], + [ + "▁", + -2.0122928619384766 + ], + [ + "X", + -2.486478805541992 + ], + [ + ".", + -3.5449328422546387 + ], + [ + ",", + -3.649247407913208 + ], + [ + "s", + -3.9033992290496826 + ], + [ + "▁the", + -3.9598512649536133 + ], + [ + "a", + -4.097104549407959 + ], + [ + ":", + -4.414328098297119 + ], + [ + "▁and", + -4.420670986175537 + ], + [ + "▁to", + -4.4523234367370605 + ], + [ + "▁of", + -4.572070121765137 + ], + [ + "▁fill", + -4.575019836425781 + ], + [ + "e", + -4.674920082092285 + ], + [ + "▁in", + -4.812063694000244 + ], + [ + "t", + -5.063905715942383 + ], + [ + "-", + -5.129043102264404 + ], + [ + "▁is", + -5.283425331115723 + ], + [ + "▁de", + -5.344141960144043 + ], + [ + "▁for", + -5.3930158615112305 + ], + [ + "’", + -5.4228339195251465 + ], + [ + "i", + -5.469857692718506 + ], + [ + "▁that", + -5.576240539550781 + ], + [ + "▁you", + -5.596375465393066 + ], + [ + "d", + -5.6047282218933105 + ], + [ + "▁I", + -5.6640448570251465 + ], + [ + "▁with", + -5.703730583190918 + ], + [ + "n", + -5.737886905670166 + ], + [ + "▁on", + -5.784142971038818 + ], + [ + "'", + -5.828996181488037 + ], + [ + "o", + -5.925558090209961 + ], + [ + "▁are", + -5.931313991546631 + ], + [ + "▁it", + -5.939518928527832 + ], + [ + "en", + -5.9465556144714355 + ], + [ + "▁be", + -5.9556708335876465 + ], + [ + "▁The", + -5.990020751953125 + ], + [ + "▁as", + -6.057407379150391 + ], + [ + "▁your", + -6.132311820983887 + ], + [ + "l", + -6.139498710632324 + ], + [ + "▁(", + -6.184796333312988 + ], + [ + "▁or", + -6.241950035095215 + ], + [ + "▁have", + -6.27459192276001 + ], + [ + "▁at", + -6.327472686767578 + ], + [ + "▁from", + -6.349645137786865 + ], + [ + "▁an", + -6.350090980529785 + ], + [ + "▁was", + -6.350385665893555 + ], + [ + "▁this", + -6.352563381195068 + ], + [ + "er", + -6.3604278564453125 + ], + [ + "▁la", + -6.3624043464660645 + ], + [ + "m", + -6.375206470489502 + ], + [ + "r", + -6.376530170440674 + ], + [ + "ing", + -6.3778581619262695 + ], + [ + "▁can", + -6.387146472930908 + ], + [ + "!", + -6.421379566192627 + ], + [ + "▁will", + -6.423982620239258 + ], + [ + "▁by", + -6.44155216217041 + ], + [ + 
"?", + -6.585887432098389 + ], + [ + "▁not", + -6.5959086418151855 + ], + [ + "re", + -6.620072364807129 + ], + [ + ")", + -6.63656759262085 + ], + [ + "▁we", + -6.643022060394287 + ], + [ + "y", + -6.654535293579102 + ], + [ + "▁und", + -6.741473197937012 + ], + [ + "▁has", + -6.7602033615112305 + ], + [ + "▁all", + -6.768176555633545 + ], + [ + "▁die", + -6.8641204833984375 + ], + [ + "▁but", + -6.906830310821533 + ], + [ + "▁our", + -6.909878730773926 + ], + [ + "▁their", + -6.91325044631958 + ], + [ + "▁A", + -6.915814399719238 + ], + [ + "▁more", + -6.918668746948242 + ], + [ + "▁un", + -6.924930572509766 + ], + [ + "▁der", + -6.925402641296387 + ], + [ + "c", + -6.925714015960693 + ], + [ + "u", + -6.932939052581787 + ], + [ + "in", + -6.934063911437988 + ], + [ + "▁so", + -6.947050094604492 + ], + [ + "▁they", + -6.989297866821289 + ], + [ + "▁one", + -7.012735843658447 + ], + [ + "▁about", + -7.071486473083496 + ], + [ + "▁my", + -7.072140693664551 + ], + [ + "ul", + -7.076492786407471 + ], + [ + "▁which", + -7.097039222717285 + ], + [ + "à", + -7.099997520446777 + ], + [ + "▁In", + -7.100254535675049 + ], + [ + "/", + -7.100865840911865 + ], + [ + "he", + -7.104752540588379 + ], + [ + "f", + -7.110044002532959 + ], + [ + "▁le", + -7.112937927246094 + ], + [ + "▁out", + -7.128556728363037 + ], + [ + "▁also", + -7.133583068847656 + ], + [ + "▁des", + -7.156766414642334 + ], + [ + "▁It", + -7.162121295928955 + ], + [ + "▁up", + -7.1723432540893555 + ], + [ + "▁\"", + -7.172809600830078 + ], + [ + "▁time", + -7.178046703338623 + ], + [ + "ă", + -7.183253765106201 + ], + [ + "if", + -7.185171127319336 + ], + [ + "▁This", + -7.191652297973633 + ], + [ + "▁We", + -7.223267078399658 + ], + [ + "p", + -7.224130153656006 + ], + [ + "▁do", + -7.228212356567383 + ], + [ + "–", + -7.235409736633301 + ], + [ + "▁“", + -7.238142013549805 + ], + [ + "on", + -7.240827560424805 + ], + [ + "h", + -7.2543206214904785 + ], + [ + "▁si", + -7.276725769042969 + ], + [ + "le", + -7.2994256019592285 + ], + [ + "▁les", + -7.312957286834717 + ], + [ + "▁în", + -7.314571857452393 + ], + [ + "▁his", + -7.324767112731934 + ], + [ + "▁who", + -7.35105562210083 + ], + [ + "▁like", + -7.371364116668701 + ], + [ + "b", + -7.375369071960449 + ], + [ + "▁when", + -7.380199432373047 + ], + [ + ";", + -7.380846977233887 + ], + [ + "▁been", + -7.38668966293335 + ], + [ + "▁other", + -7.388518333435059 + ], + [ + "ly", + -7.394660949707031 + ], + [ + "\"", + -7.407205104827881 + ], + [ + "g", + -7.407997131347656 + ], + [ + "▁cu", + -7.415276527404785 + ], + [ + "▁care", + -7.432408332824707 + ], + [ + "▁what", + -7.433043003082275 + ], + [ + "▁new", + -7.4370903968811035 + ], + [ + "or", + -7.445409774780273 + ], + [ + "▁some", + -7.461953639984131 + ], + [ + "▁get", + -7.479001998901367 + ], + [ + "▁were", + -7.491549491882324 + ], + [ + "▁just", + -7.492495536804199 + ], + [ + "▁there", + -7.493194103240967 + ], + [ + "▁would", + -7.494382381439209 + ], + [ + "S", + -7.4974141120910645 + ], + [ + "▁them", + -7.513596057891846 + ], + [ + "▁any", + -7.520544052124023 + ], + [ + ").", + -7.521052360534668 + ], + [ + "al", + -7.523056983947754 + ], + [ + "▁into", + -7.527902603149414 + ], + [ + "▁me", + -7.528337001800537 + ], + [ + "▁had", + -7.532425403594971 + ], + [ + "▁se", + -7.5451483726501465 + ], + [ + "▁make", + -7.5827131271362305 + ], + [ + "at", + -7.589433670043945 + ], + [ + "▁than", + -7.592360019683838 + ], + [ + "▁du", + -7.595852375030518 + ], + [ + "▁over", + -7.6078782081604 + ], + [ + "▁You", + 
-7.626111030578613 + ], + [ + "▁how", + -7.635554313659668 + ], + [ + "▁no", + -7.63729190826416 + ], + [ + "▁people", + -7.639947414398193 + ], + [ + "an", + -7.64084005355835 + ], + [ + "”", + -7.644528865814209 + ], + [ + "é", + -7.646921157836914 + ], + [ + "it", + -7.648641109466553 + ], + [ + "▁If", + -7.648687839508057 + ], + [ + "k", + -7.6605634689331055 + ], + [ + "▁pe", + -7.662139415740967 + ], + [ + "is", + -7.66726016998291 + ], + [ + "▁her", + -7.6733808517456055 + ], + [ + "▁work", + -7.680386543273926 + ], + [ + "ve", + -7.687412738800049 + ], + [ + "▁only", + -7.69785737991333 + ], + [ + "▁may", + -7.702393531799316 + ], + [ + "▁its", + -7.702449798583984 + ], + [ + "▁first", + -7.704373836517334 + ], + [ + "▁most", + -7.708309173583984 + ], + [ + "▁well", + -7.708758354187012 + ], + [ + "▁use", + -7.715085983276367 + ], + [ + "▁zu", + -7.718777656555176 + ], + [ + "▁pour", + -7.736708164215088 + ], + [ + "z", + -7.745654106140137 + ], + [ + "il", + -7.745913982391357 + ], + [ + "▁need", + -7.74778938293457 + ], + [ + "▁these", + -7.763317584991455 + ], + [ + "▁din", + -7.769891262054443 + ], + [ + "▁den", + -7.775663375854492 + ], + [ + "▁us", + -7.778133869171143 + ], + [ + "able", + -7.779712200164795 + ], + [ + "▁S", + -7.781893730163574 + ], + [ + "▁mit", + -7.792516231536865 + ], + [ + "▁very", + -7.79970645904541 + ], + [ + "▁am", + -7.814100742340088 + ], + [ + "&", + -7.829529285430908 + ], + [ + "▁au", + -7.83012056350708 + ], + [ + "▁many", + -7.83834171295166 + ], + [ + "▁mai", + -7.84363317489624 + ], + [ + "A", + -7.849830150604248 + ], + [ + "th", + -7.855541229248047 + ], + [ + "▁through", + -7.859585285186768 + ], + [ + "▁pentru", + -7.86391544342041 + ], + [ + "▁two", + -7.873607158660889 + ], + [ + "▁von", + -7.874959945678711 + ], + [ + "▁way", + -7.887117385864258 + ], + [ + "ll", + -7.887749195098877 + ], + [ + "I", + -7.891303539276123 + ], + [ + "▁ce", + -7.9015631675720215 + ], + [ + "▁și", + -7.904444694519043 + ], + [ + "▁help", + -7.907405853271484 + ], + [ + "▁best", + -7.907911777496338 + ], + [ + "),", + -7.908212184906006 + ], + [ + "un", + -7.925017833709717 + ], + [ + "▁years", + -7.925964832305908 + ], + [ + "▁2", + -7.9282684326171875 + ], + [ + "▁C", + -7.936962604522705 + ], + [ + "▁nu", + -7.939520835876465 + ], + [ + "▁good", + -7.943995952606201 + ], + [ + "v", + -7.94746732711792 + ], + [ + "▁1", + -7.94765567779541 + ], + [ + "w", + -7.947978496551514 + ], + [ + "▁das", + -7.960538864135742 + ], + [ + "▁ca", + -7.962430477142334 + ], + [ + "▁where", + -7.964908123016357 + ], + [ + "▁know", + -7.96622896194458 + ], + [ + "▁year", + -7.971063613891602 + ], + [ + "▁He", + -7.974609375 + ], + [ + "▁see", + -7.980011463165283 + ], + [ + "▁für", + -7.984004497528076 + ], + [ + "▁auf", + -7.984249114990234 + ], + [ + "▁3", + -7.984433650970459 + ], + [ + "de", + -7.985401153564453 + ], + [ + "est", + -8.002091407775879 + ], + [ + "▁back", + -8.007022857666016 + ], + [ + "▁such", + -8.008523941040039 + ], + [ + "▁should", + -8.011754989624023 + ], + [ + "x", + -8.015050888061523 + ], + [ + "▁after", + -8.01761245727539 + ], + [ + "▁could", + -8.019674301147461 + ], + [ + "▁ist", + -8.020784378051758 + ], + [ + "▁now", + -8.022845268249512 + ], + [ + "▁much", + -8.023111343383789 + ], + [ + "and", + -8.02390193939209 + ], + [ + "...", + -8.030110359191895 + ], + [ + "▁home", + -8.036273956298828 + ], + [ + "to", + -8.03821086883545 + ], + [ + "▁ein", + -8.04833984375 + ], + [ + "▁even", + -8.048656463623047 + ], + [ + "▁que", + 
-8.049829483032227 + ], + [ + "▁day", + -8.051553726196289 + ], + [ + "▁take", + -8.054189682006836 + ], + [ + "▁want", + -8.054435729980469 + ], + [ + "▁For", + -8.06217098236084 + ], + [ + "▁said", + -8.063249588012695 + ], + [ + "▁sur", + -8.073471069335938 + ], + [ + "▁une", + -8.077030181884766 + ], + [ + "▁să", + -8.082921028137207 + ], + [ + "▁dans", + -8.084549903869629 + ], + [ + "▁great", + -8.088057518005371 + ], + [ + "▁este", + -8.08947467803955 + ], + [ + "▁because", + -8.094311714172363 + ], + [ + "▁information", + -8.104085922241211 + ], + [ + "ului", + -8.105451583862305 + ], + [ + "▁find", + -8.112174987792969 + ], + [ + "C", + -8.119946479797363 + ], + [ + "▁she", + -8.125317573547363 + ], + [ + "▁im", + -8.126056671142578 + ], + [ + "ation", + -8.130115509033203 + ], + [ + "▁then", + -8.13021469116211 + ], + [ + "▁est", + -8.13099479675293 + ], + [ + "▁par", + -8.138585090637207 + ], + [ + "▁used", + -8.141871452331543 + ], + [ + "▁E", + -8.146790504455566 + ], + [ + "▁made", + -8.149978637695312 + ], + [ + "▁So", + -8.15785026550293 + ], + [ + "am", + -8.16288948059082 + ], + [ + "▁eine", + -8.165464401245117 + ], + [ + "▁şi", + -8.168368339538574 + ], + [ + "▁business", + -8.17335033416748 + ], + [ + "▁right", + -8.173593521118164 + ], + [ + "▁here", + -8.176125526428223 + ], + [ + "▁being", + -8.184967041015625 + ], + [ + "▁B", + -8.185355186462402 + ], + [ + "▁those", + -8.185736656188965 + ], + [ + "▁before", + -8.194721221923828 + ], + [ + "▁And", + -8.199501037597656 + ], + [ + "▁P", + -8.200712203979492 + ], + [ + "ers", + -8.200922012329102 + ], + [ + "▁don", + -8.204029083251953 + ], + [ + "B", + -8.20487117767334 + ], + [ + "▁life", + -8.206265449523926 + ], + [ + "▁go", + -8.209736824035645 + ], + [ + "▁As", + -8.210551261901855 + ], + [ + "▁M", + -8.221170425415039 + ], + [ + "▁each", + -8.22955322265625 + ], + [ + "▁qui", + -8.23323917388916 + ], + [ + "▁place", + -8.236248970031738 + ], + [ + "com", + -8.237479209899902 + ], + [ + "ant", + -8.252915382385254 + ], + [ + "▁sich", + -8.255932807922363 + ], + [ + "▁There", + -8.261948585510254 + ], + [ + "ar", + -8.264991760253906 + ], + [ + "▁Sie", + -8.273868560791016 + ], + [ + "▁own", + -8.277531623840332 + ], + [ + "▁part", + -8.279440879821777 + ], + [ + "ent", + -8.281047821044922 + ], + [ + "▁world", + -8.28173542022705 + ], + [ + "ment", + -8.282004356384277 + ], + [ + "▁while", + -8.294474601745605 + ], + [ + "▁But", + -8.295366287231445 + ], + [ + "▁around", + -8.300799369812012 + ], + [ + "▁L", + -8.301082611083984 + ], + [ + "us", + -8.304039001464844 + ], + [ + "▁plus", + -8.313054084777832 + ], + [ + "▁To", + -8.313691139221191 + ], + [ + "▁5", + -8.31412410736084 + ], + [ + "▁high", + -8.31862735748291 + ], + [ + "▁long", + -8.319378852844238 + ], + [ + "D", + -8.320075035095215 + ], + [ + "▁D", + -8.320279121398926 + ], + [ + "▁really", + -8.322924613952637 + ], + [ + "▁nicht", + -8.332040786743164 + ], + [ + "▁Le", + -8.335328102111816 + ], + [ + "▁service", + -8.3412504196167 + ], + [ + "▁4", + -8.342093467712402 + ], + [ + "▁different", + -8.342538833618164 + ], + [ + "▁Die", + -8.348092079162598 + ], + [ + "▁think", + -8.353771209716797 + ], + [ + "—", + -8.355998039245605 + ], + [ + "▁auch", + -8.357160568237305 + ], + [ + "▁look", + -8.362202644348145 + ], + [ + "▁both", + -8.366817474365234 + ], + [ + "lor", + -8.36687183380127 + ], + [ + "▁down", + -8.367999076843262 + ], + [ + "ten", + -8.368885040283203 + ], + [ + "▁La", + -8.378066062927246 + ], + [ + "▁off", + -8.380044937133789 + 
], + [ + "▁vous", + -8.380541801452637 + ], + [ + "▁They", + -8.381462097167969 + ], + [ + "M", + -8.383248329162598 + ], + [ + "▁pas", + -8.384513854980469 + ], + [ + "▁data", + -8.385709762573242 + ], + [ + "▁T", + -8.386754989624023 + ], + [ + "▁love", + -8.388101577758789 + ], + [ + "▁every", + -8.390009880065918 + ], + [ + "▁10", + -8.391179084777832 + ], + [ + "▁last", + -8.392083168029785 + ], + [ + "▁same", + -8.393481254577637 + ], + [ + "▁using", + -8.395487785339355 + ], + [ + "▁free", + -8.408831596374512 + ], + [ + "▁dem", + -8.40894889831543 + ], + [ + "▁still", + -8.409984588623047 + ], + [ + "ate", + -8.410931587219238 + ], + [ + "ist", + -8.415611267089844 + ], + [ + "▁between", + -8.420283317565918 + ], + [ + "P", + -8.420982360839844 + ], + [ + "be", + -8.428167343139648 + ], + [ + "▁available", + -8.429443359375 + ], + [ + "man", + -8.432978630065918 + ], + [ + "▁company", + -8.439678192138672 + ], + [ + "▁G", + -8.441640853881836 + ], + [ + "▁experience", + -8.444950103759766 + ], + [ + "▁going", + -8.449073791503906 + ], + [ + "▁site", + -8.453832626342773 + ], + [ + "j", + -8.455142974853516 + ], + [ + "are", + -8.456900596618652 + ], + [ + "▁set", + -8.470661163330078 + ], + [ + "2", + -8.473684310913086 + ], + [ + "▁system", + -8.474678039550781 + ], + [ + "▁important", + -8.476791381835938 + ], + [ + "▁few", + -8.482437133789062 + ], + [ + "▁fi", + -8.482551574707031 + ], + [ + "ich", + -8.483301162719727 + ], + [ + "▁What", + -8.488649368286133 + ], + [ + "▁services", + -8.502433776855469 + ], + [ + "▁under", + -8.502569198608398 + ], + [ + "▁When", + -8.50308895111084 + ], + [ + "▁online", + -8.50699520111084 + ], + [ + "▁New", + -8.51494312286377 + ], + [ + "▁come", + -8.524871826171875 + ], + [ + "▁provide", + -8.525650024414062 + ], + [ + "F", + -8.526449203491211 + ], + [ + "▁team", + -8.52782154083252 + ], + [ + "▁always", + -8.529409408569336 + ], + [ + "▁De", + -8.530412673950195 + ], + [ + "▁că", + -8.532517433166504 + ], + [ + "▁him", + -8.53586196899414 + ], + [ + "▁F", + -8.538305282592773 + ], + [ + "▁things", + -8.550079345703125 + ], + [ + "▁including", + -8.550943374633789 + ], + [ + "▁support", + -8.552608489990234 + ], + [ + "▁number", + -8.554113388061523 + ], + [ + "T", + -8.557183265686035 + ], + [ + "▁during", + -8.55886459350586 + ], + [ + "▁family", + -8.560463905334473 + ], + [ + "▁little", + -8.561317443847656 + ], + [ + "▁three", + -8.567726135253906 + ], + [ + "▁water", + -8.56810188293457 + ], + [ + "▁man", + -8.569759368896484 + ], + [ + "▁An", + -8.57192611694336 + ], + [ + "based", + -8.572155952453613 + ], + [ + "▁R", + -8.57442855834961 + ], + [ + "▁sau", + -8.574433326721191 + ], + [ + "▁avec", + -8.576035499572754 + ], + [ + "▁better", + -8.576830863952637 + ], + [ + "▁„", + -8.582253456115723 + ], + [ + "▁too", + -8.58635425567627 + ], + [ + "ge", + -8.586719512939453 + ], + [ + "▁must", + -8.589736938476562 + ], + [ + "▁per", + -8.589916229248047 + ], + [ + "ele", + -8.590399742126465 + ], + [ + "▁oder", + -8.59264850616455 + ], + [ + "au", + -8.59555435180664 + ], + [ + "▁aus", + -8.595727920532227 + ], + [ + "▁werden", + -8.598653793334961 + ], + [ + "▁does", + -8.599140167236328 + ], + [ + "▁without", + -8.599270820617676 + ], + [ + "▁ou", + -8.599929809570312 + ], + [ + "▁design", + -8.60101318359375 + ], + [ + "▁va", + -8.605440139770508 + ], + [ + "▁did", + -8.615679740905762 + ], + [ + "▁O", + -8.619062423706055 + ], + [ + "▁U", + -8.623565673828125 + ], + [ + "up", + -8.62901496887207 + ], + [ + "▁end", + 
-8.63367748260498 + ], + [ + "▁local", + -8.636231422424316 + ], + [ + "▁next", + -8.638967514038086 + ], + [ + "▁sure", + -8.64098072052002 + ], + [ + "▁lot", + -8.64644718170166 + ], + [ + "▁Re", + -8.647016525268555 + ], + [ + "▁top", + -8.647642135620117 + ], + [ + "▁Our", + -8.656886100769043 + ], + [ + "▁small", + -8.656978607177734 + ], + [ + "▁full", + -8.659418106079102 + ], + [ + "▁something", + -8.662886619567871 + ], + [ + "ung", + -8.666722297668457 + ], + [ + "▁vor", + -8.673250198364258 + ], + [ + "E", + -8.673337936401367 + ], + [ + "▁give", + -8.67603588104248 + ], + [ + "▁might", + -8.67660903930664 + ], + [ + "▁another", + -8.679330825805664 + ], + [ + "▁6", + -8.680779457092285 + ], + [ + "▁All", + -8.681318283081055 + ], + [ + "▁process", + -8.681672096252441 + ], + [ + "L", + -8.682575225830078 + ], + [ + "▁found", + -8.68941593170166 + ], + [ + "▁sind", + -8.690044403076172 + ], + [ + "▁since", + -8.69528865814209 + ], + [ + "▁With", + -8.695560455322266 + ], + [ + "K", + -8.696988105773926 + ], + [ + "um", + -8.701016426086426 + ], + [ + "▁within", + -8.701669692993164 + ], + [ + "▁post", + -8.706608772277832 + ], + [ + "▁car", + -8.709365844726562 + ], + [ + "une", + -8.714099884033203 + ], + [ + "▁N", + -8.715041160583496 + ], + [ + "▁J", + -8.715597152709961 + ], + [ + "ic", + -8.71823787689209 + ], + [ + "R", + -8.722309112548828 + ], + [ + "ter", + -8.727437019348145 + ], + [ + "ur", + -8.728265762329102 + ], + [ + "▁She", + -8.73131275177002 + ], + [ + "▁public", + -8.732009887695312 + ], + [ + "▁keep", + -8.735784530639648 + ], + [ + "▁H", + -8.736178398132324 + ], + [ + "▁order", + -8.740762710571289 + ], + [ + "▁start", + -8.742195129394531 + ], + [ + "ez", + -8.74746322631836 + ], + [ + "▁‘", + -8.749832153320312 + ], + [ + "uri", + -8.751104354858398 + ], + [ + "▁20", + -8.752482414245605 + ], + [ + "▁On", + -8.753515243530273 + ], + [ + "▁offer", + -8.763005256652832 + ], + [ + "▁quality", + -8.764988899230957 + ], + [ + "▁working", + -8.769987106323242 + ], + [ + "▁No", + -8.770307540893555 + ], + [ + "▁That", + -8.775156021118164 + ], + [ + "▁game", + -8.7863187789917 + ], + [ + "▁bei", + -8.786642074584961 + ], + [ + "▁today", + -8.788661003112793 + ], + [ + "▁never", + -8.794586181640625 + ], + [ + "▁week", + -8.79587173461914 + ], + [ + "▁St", + -8.797786712646484 + ], + [ + "▁feel", + -8.799317359924316 + ], + [ + "▁put", + -8.801899909973145 + ], + [ + "▁website", + -8.80322265625 + ], + [ + "Y", + -8.804483413696289 + ], + [ + "▁days", + -8.804709434509277 + ], + [ + "▁program", + -8.805448532104492 + ], + [ + "▁looking", + -8.810463905334473 + ], + [ + "▁K", + -8.810808181762695 + ], + [ + "▁students", + -8.811436653137207 + ], + [ + "▁create", + -8.811800956726074 + ], + [ + "▁change", + -8.812616348266602 + ], + [ + "▁book", + -8.812932014465332 + ], + [ + "ity", + -8.813761711120605 + ], + [ + "▁At", + -8.815207481384277 + ], + [ + "▁possible", + -8.815670013427734 + ], + [ + "▁sunt", + -8.81651496887207 + ], + [ + "▁7", + -8.818120002746582 + ], + [ + "▁real", + -8.823369026184082 + ], + [ + "▁al", + -8.824172019958496 + ], + [ + "▁making", + -8.825371742248535 + ], + [ + "▁Be", + -8.825761795043945 + ], + [ + "▁products", + -8.82592487335205 + ], + [ + "▁case", + -8.82653522491455 + ], + [ + "▁school", + -8.8272066116333 + ], + [ + "▁say", + -8.830352783203125 + ], + [ + "area", + -8.832084655761719 + ], + [ + "▁My", + -8.833836555480957 + ], + [ + "▁point", + -8.834731101989746 + ], + [ + "▁als", + -8.83560848236084 + ], + [ + "▁children", 
+ -8.836194038391113 + ], + [ + "▁course", + -8.844061851501465 + ], + [ + "▁show", + -8.847993850708008 + ], + [ + "▁8", + -8.849273681640625 + ], + [ + "▁These", + -8.849345207214355 + ], + [ + "▁18", + -8.851140975952148 + ], + [ + "▁large", + -8.851323127746582 + ], + [ + "co", + -8.854362487792969 + ], + [ + "▁über", + -8.854788780212402 + ], + [ + "▁second", + -8.856559753417969 + ], + [ + "▁market", + -8.859807014465332 + ], + [ + "▁fost", + -8.86048698425293 + ], + [ + "▁easy", + -8.863983154296875 + ], + [ + "▁plan", + -8.864302635192871 + ], + [ + "▁project", + -8.864927291870117 + ], + [ + "G", + -8.865178108215332 + ], + [ + "W", + -8.869574546813965 + ], + [ + "3", + -8.871939659118652 + ], + [ + "▁son", + -8.873332023620605 + ], + [ + "la", + -8.879053115844727 + ], + [ + "▁face", + -8.88137435913086 + ], + [ + "▁needs", + -8.88148021697998 + ], + [ + "ch", + -8.883138656616211 + ], + [ + "▁personal", + -8.88343620300293 + ], + [ + "me", + -8.886031150817871 + ], + [ + "▁sont", + -8.887377738952637 + ], + [ + "▁je", + -8.894930839538574 + ], + [ + "▁non", + -8.895471572875977 + ], + [ + "▁got", + -8.896591186523438 + ], + [ + "▁Do", + -8.897382736206055 + ], + [ + "the", + -8.89765453338623 + ], + [ + "▁health", + -8.89908504486084 + ], + [ + "▁special", + -8.90555477142334 + ], + [ + ".\"", + -8.907710075378418 + ], + [ + "1", + -8.907852172851562 + ], + [ + "den", + -8.908616065979004 + ], + [ + "▁state", + -8.909355163574219 + ], + [ + "▁open", + -8.91019058227539 + ], + [ + "▁money", + -8.91053581237793 + ], + [ + "▁again", + -8.913084983825684 + ], + [ + "▁food", + -8.913167953491211 + ], + [ + "▁page", + -8.914595603942871 + ], + [ + "▁together", + -8.91628360748291 + ], + [ + "age", + -8.919108390808105 + ], + [ + "▁qu", + -8.921928405761719 + ], + [ + "hat", + -8.922386169433594 + ], + [ + "▁ver", + -8.926993370056152 + ], + [ + "▁W", + -8.927785873413086 + ], + [ + "▁away", + -8.928759574890137 + ], + [ + "▁wird", + -8.931641578674316 + ], + [ + "▁until", + -8.934249877929688 + ], + [ + "V", + -8.934935569763184 + ], + [ + "▁pre", + -8.935851097106934 + ], + [ + "▁One", + -8.936429977416992 + ], + [ + "▁product", + -8.936561584472656 + ], + [ + "▁often", + -8.939326286315918 + ], + [ + "▁wir", + -8.944111824035645 + ], + [ + "▁nach", + -8.945127487182617 + ], + [ + "▁include", + -8.946555137634277 + ], + [ + "▁um", + -8.948204040527344 + ], + [ + "▁room", + -8.953709602355957 + ], + [ + "▁group", + -8.953767776489258 + ], + [ + "▁name", + -8.954949378967285 + ], + [ + "ce", + -8.955448150634766 + ], + [ + "H", + -8.956180572509766 + ], + [ + "N", + -8.958139419555664 + ], + [ + "▁person", + -8.958183288574219 + ], + [ + "▁social", + -8.958606719970703 + ], + [ + "▁list", + -8.963666915893555 + ], + [ + "▁How", + -8.964127540588379 + ], + [ + "▁why", + -8.96571159362793 + ], + [ + "▁community", + -8.965995788574219 + ], + [ + "▁contact", + -8.973031044006348 + ], + [ + "­", + -8.9755859375 + ], + [ + "▁co", + -8.979683876037598 + ], + [ + "▁play", + -8.983960151672363 + ], + [ + "▁having", + -8.984169960021973 + ], + [ + "▁power", + -8.986917495727539 + ], + [ + "▁call", + -8.991690635681152 + ], + [ + "▁against", + -8.991816520690918 + ], + [ + "▁become", + -8.997780799865723 + ], + [ + "▁cost", + -9.003793716430664 + ], + [ + "▁V", + -9.004593849182129 + ], + [ + "▁research", + -9.006913185119629 + ], + [ + "▁12", + -9.007307052612305 + ], + [ + "▁wie", + -9.008277893066406 + ], + [ + "der", + -9.008386611938477 + ], + [ + "▁thing", + -9.014028549194336 + ], + [ + 
"▁along", + -9.017301559448242 + ], + [ + "4", + -9.017330169677734 + ], + [ + "▁access", + -9.020391464233398 + ], + [ + "▁level", + -9.020505905151367 + ], + [ + "▁price", + -9.022817611694336 + ], + [ + "▁einen", + -9.023714065551758 + ], + [ + "▁side", + -9.026359558105469 + ], + [ + "▁Un", + -9.026851654052734 + ], + [ + "▁means", + -9.030416488647461 + ], + [ + "(", + -9.032341957092285 + ], + [ + "▁big", + -9.034374237060547 + ], + [ + "▁God", + -9.036499977111816 + ], + [ + "▁dass", + -9.037314414978027 + ], + [ + "im", + -9.037374496459961 + ], + [ + "▁30", + -9.037432670593262 + ], + [ + "▁event", + -9.041665077209473 + ], + [ + "▁development", + -9.042060852050781 + ], + [ + "▁form", + -9.04226303100586 + ], + [ + "▁read", + -9.042579650878906 + ], + [ + "▁hand", + -9.043194770812988 + ], + [ + "▁control", + -9.04446792602539 + ], + [ + "▁However", + -9.046320915222168 + ], + [ + "▁done", + -9.048060417175293 + ], + [ + "▁job", + -9.051692008972168 + ], + [ + "▁hard", + -9.056619644165039 + ], + [ + "▁war", + -9.057538032531738 + ], + [ + "▁area", + -9.0584135055542 + ], + [ + "▁add", + -9.0586576461792 + ], + [ + "▁votre", + -9.0593900680542 + ], + [ + "▁live", + -9.059494018554688 + ], + [ + "▁range", + -9.060099601745605 + ], + [ + "▁After", + -9.060164451599121 + ], + [ + "▁Les", + -9.060513496398926 + ], + [ + "▁far", + -9.064413070678711 + ], + [ + "ver", + -9.064727783203125 + ], + [ + "▁old", + -9.069576263427734 + ], + [ + "▁perfect", + -9.06976318359375 + ], + [ + "▁15", + -9.070429801940918 + ], + [ + "▁space", + -9.073654174804688 + ], + [ + "▁house", + -9.074068069458008 + ], + [ + "ine", + -9.07408618927002 + ], + [ + "▁enough", + -9.074334144592285 + ], + [ + "0", + -9.075824737548828 + ], + [ + "▁several", + -9.077119827270508 + ], + [ + "The", + -9.081155776977539 + ], + [ + "mm", + -9.085619926452637 + ], + [ + "▁University", + -9.08637523651123 + ], + [ + "▁diese", + -9.087566375732422 + ], + [ + "▁Co", + -9.088335990905762 + ], + [ + "▁comes", + -9.088497161865234 + ], + [ + "▁across", + -9.088857650756836 + ], + [ + "▁already", + -9.090097427368164 + ], + [ + ",”", + -9.090341567993164 + ], + [ + "▁body", + -9.09276294708252 + ], + [ + "▁Das", + -9.094594955444336 + ], + [ + "▁einer", + -9.095956802368164 + ], + [ + "▁left", + -9.09921646118164 + ], + [ + "▁future", + -9.105711936950684 + ], + [ + "▁times", + -9.106670379638672 + ], + [ + "▁dar", + -9.109651565551758 + ], + [ + "▁simple", + -9.110408782958984 + ], + [ + "ry", + -9.112407684326172 + ], + [ + "▁getting", + -9.113155364990234 + ], + [ + "▁try", + -9.115362167358398 + ], + [ + "ți", + -9.116897583007812 + ], + [ + "ness", + -9.120043754577637 + ], + [ + "▁makes", + -9.120377540588379 + ], + [ + "▁past", + -9.120619773864746 + ], + [ + "ca", + -9.12130069732666 + ], + [ + "▁light", + -9.122207641601562 + ], + [ + "▁Der", + -9.122997283935547 + ], + [ + "▁run", + -9.125843048095703 + ], + [ + "▁four", + -9.126943588256836 + ], + [ + "ance", + -9.130500793457031 + ], + [ + "▁ever", + -9.131503105163574 + ], + [ + "▁einem", + -9.131816864013672 + ], + [ + "▁below", + -9.133723258972168 + ], + [ + "O", + -9.134073257446289 + ], + [ + "▁9", + -9.137282371520996 + ], + [ + "▁learn", + -9.14004135131836 + ], + [ + "out", + -9.140358924865723 + ], + [ + "▁video", + -9.143178939819336 + ], + [ + "▁etc", + -9.146929740905762 + ], + [ + "▁«", + -9.148795127868652 + ], + [ + "▁zum", + -9.149712562561035 + ], + [ + "▁kann", + -9.1504487991333 + ], + [ + "▁minutes", + -9.151180267333984 + ], + [ + "▁example", 
+ -9.154194831848145 + ], + [ + "▁nous", + -9.154619216918945 + ], + [ + "▁Se", + -9.157441139221191 + ], + [ + "▁sie", + -9.159955024719238 + ], + [ + "▁industry", + -9.161614418029785 + ], + [ + "▁problem", + -9.162016868591309 + ], + [ + "J", + -9.162480354309082 + ], + [ + "▁country", + -9.163366317749023 + ], + [ + "▁fact", + -9.164189338684082 + ], + [ + "▁type", + -9.164190292358398 + ], + [ + "ner", + -9.164238929748535 + ], + [ + "▁companies", + -9.165864944458008 + ], + [ + "▁line", + -9.169849395751953 + ], + [ + "▁city", + -9.172713279724121 + ], + [ + "▁check", + -9.173710823059082 + ], + [ + "▁doing", + -9.174406051635742 + ], + [ + "elle", + -9.175037384033203 + ], + [ + "▁fun", + -9.176549911499023 + ], + [ + "▁En", + -9.177546501159668 + ], + [ + "▁Your", + -9.178601264953613 + ], + [ + "ling", + -9.181450843811035 + ], + [ + "▁share", + -9.18185806274414 + ], + [ + "ile", + -9.182005882263184 + ], + [ + "▁actually", + -9.187544822692871 + ], + [ + "▁value", + -9.187751770019531 + ], + [ + "zi", + -9.188661575317383 + ], + [ + "▁ab", + -9.1898832321167 + ], + [ + "▁offers", + -9.1905517578125 + ], + [ + "▁less", + -9.190573692321777 + ], + [ + "▁night", + -9.193560600280762 + ], + [ + "▁Dr", + -9.19518756866455 + ], + [ + "▁started", + -9.195454597473145 + ], + [ + "▁least", + -9.198020935058594 + ], + [ + "▁short", + -9.198562622070312 + ], + [ + "▁main", + -9.201143264770508 + ], + [ + "▁single", + -9.202939987182617 + ], + [ + "▁though", + -9.203780174255371 + ], + [ + "▁prin", + -9.203930854797363 + ], + [ + "time", + -9.20531177520752 + ], + [ + "▁hours", + -9.206608772277832 + ], + [ + "▁others", + -9.206849098205566 + ], + [ + "▁called", + -9.20730209350586 + ], + [ + "▁visit", + -9.208869934082031 + ], + [ + "▁bit", + -9.209009170532227 + ], + [ + "ée", + -9.210821151733398 + ], + [ + "▁customers", + -9.211383819580078 + ], + [ + "▁music", + -9.212000846862793 + ], + [ + "▁members", + -9.217191696166992 + ], + [ + "ies", + -9.21743392944336 + ], + [ + "▁pay", + -9.219176292419434 + ], + [ + "nd", + -9.219744682312012 + ], + [ + "▁once", + -9.221125602722168 + ], + [ + "gen", + -9.2217378616333 + ], + [ + "▁können", + -9.222976684570312 + ], + [ + "▁low", + -9.223771095275879 + ], + [ + "▁durch", + -9.227394104003906 + ], + [ + "▁story", + -9.228075981140137 + ], + [ + "▁understand", + -9.22953987121582 + ], + [ + "“", + -9.229856491088867 + ], + [ + "▁Am", + -9.231831550598145 + ], + [ + "▁didn", + -9.234603881835938 + ], + [ + "▁content", + -9.237217903137207 + ], + [ + "son", + -9.24180793762207 + ], + [ + "▁building", + -9.242242813110352 + ], + [ + "▁result", + -9.242605209350586 + ], + [ + "▁aux", + -9.243107795715332 + ], + [ + "▁complete", + -9.244999885559082 + ], + [ + "▁doesn", + -9.24510669708252 + ], + [ + "▁haben", + -9.246070861816406 + ], + [ + "▁questions", + -9.24661636352539 + ], + [ + "line", + -9.247077941894531 + ], + [ + "▁technology", + -9.247429847717285 + ], + [ + "▁Pro", + -9.247976303100586 + ], + [ + "▁current", + -9.248504638671875 + ], + [ + "▁won", + -9.248883247375488 + ], + [ + "▁let", + -9.250710487365723 + ], + [ + "▁features", + -9.251978874206543 + ], + [ + "▁please", + -9.258262634277344 + ], + [ + "5", + -9.258519172668457 + ], + [ + "▁above", + -9.259394645690918 + ], + [ + "ive", + -9.262128829956055 + ], + [ + "▁management", + -9.262394905090332 + ], + [ + "▁lui", + -9.262539863586426 + ], + [ + "her", + -9.263057708740234 + ], + [ + "▁training", + -9.265711784362793 + ], + [ + "▁everything", + -9.2665433883667 + ], + [ + 
"▁noch", + -9.266846656799316 + ], + [ + "▁came", + -9.267708778381348 + ], + [ + "▁web", + -9.272823333740234 + ], + [ + "▁ensure", + -9.272987365722656 + ], + [ + "▁months", + -9.273130416870117 + ], + [ + "▁art", + -9.27313232421875 + ], + [ + "▁sub", + -9.274359703063965 + ], + [ + "▁million", + -9.274559020996094 + ], + [ + "▁professional", + -9.275035858154297 + ], + [ + "▁results", + -9.278368949890137 + ], + [ + "▁kind", + -9.278395652770996 + ], + [ + "▁season", + -9.279285430908203 + ], + [ + "▁unique", + -9.281067848205566 + ], + [ + "ze", + -9.284360885620117 + ], + [ + "▁enjoy", + -9.28487777709961 + ], + [ + "▁early", + -9.287765502929688 + ], + [ + "▁major", + -9.288202285766602 + ], + [ + "▁yet", + -9.29152774810791 + ], + [ + "▁Ver", + -9.293331146240234 + ], + [ + "one", + -9.296777725219727 + ], + [ + "▁media", + -9.29719352722168 + ], + [ + "▁[", + -9.30095100402832 + ], + [ + "▁property", + -9.302969932556152 + ], + [ + "▁beautiful", + -9.304466247558594 + ], + [ + "▁given", + -9.305286407470703 + ], + [ + "▁due", + -9.306716918945312 + ], + [ + "▁government", + -9.307181358337402 + ], + [ + "▁nur", + -9.30881404876709 + ], + [ + "▁email", + -9.309103012084961 + ], + [ + "▁total", + -9.311080932617188 + ], + [ + "▁natural", + -9.311264038085938 + ], + [ + "▁test", + -9.311450004577637 + ], + [ + "▁provides", + -9.311640739440918 + ], + [ + "▁various", + -9.312631607055664 + ], + [ + "▁American", + -9.315605163574219 + ], + [ + "▁moment", + -9.318109512329102 + ], + [ + "▁air", + -9.318952560424805 + ], + [ + "▁idea", + -9.319236755371094 + ], + [ + "▁known", + -9.319981575012207 + ], + [ + "▁Il", + -9.320504188537598 + ], + [ + "▁friends", + -9.320576667785645 + ], + [ + "▁final", + -9.320919036865234 + ], + [ + "▁buy", + -9.32139778137207 + ], + [ + "▁specific", + -9.322234153747559 + ], + [ + "▁issues", + -9.32454776763916 + ], + [ + "▁took", + -9.325233459472656 + ], + [ + "▁mind", + -9.326258659362793 + ], + [ + "▁study", + -9.32675838470459 + ], + [ + "▁addition", + -9.328418731689453 + ], + [ + "▁size", + -9.332446098327637 + ], + [ + "▁pro", + -9.334047317504883 + ], + [ + "▁film", + -9.33545970916748 + ], + [ + "▁pot", + -9.335636138916016 + ], + [ + "▁thought", + -9.338120460510254 + ], + [ + "▁tell", + -9.33890438079834 + ], + [ + "▁While", + -9.339675903320312 + ], + [ + "▁head", + -9.339983940124512 + ], + [ + "▁clients", + -9.340429306030273 + ], + [ + "▁performance", + -9.346199989318848 + ], + [ + "▁question", + -9.346835136413574 + ], + [ + "▁whether", + -9.347925186157227 + ], + [ + "▁certain", + -9.34826946258545 + ], + [ + "▁model", + -9.348764419555664 + ], + [ + "▁following", + -9.350926399230957 + ], + [ + "▁energy", + -9.354207992553711 + ], + [ + "▁office", + -9.354207992553711 + ], + [ + "▁whole", + -9.356687545776367 + ], + [ + "▁bring", + -9.356956481933594 + ], + [ + "▁required", + -9.35726261138916 + ], + [ + "ţi", + -9.358223915100098 + ], + [ + "▁date", + -9.358695030212402 + ], + [ + "_", + -9.358983039855957 + ], + [ + "que", + -9.359789848327637 + ], + [ + "▁da", + -9.360264778137207 + ], + [ + "▁US", + -9.36120319366455 + ], + [ + "▁taking", + -9.36143684387207 + ], + [ + "go", + -9.362788200378418 + ], + [ + "▁living", + -9.36341667175293 + ], + [ + "▁someone", + -9.363489151000977 + ], + [ + "▁heart", + -9.365120887756348 + ], + [ + "▁key", + -9.365775108337402 + ], + [ + "▁areas", + -9.366238594055176 + ], + [ + "▁says", + -9.367013931274414 + ], + [ + "▁2018", + -9.369132041931152 + ], + [ + "▁month", + -9.37012767791748 + ], + [ 
+ "▁Er", + -9.371354103088379 + ], + [ + "ste", + -9.375077247619629 + ], + [ + "▁11", + -9.375179290771484 + ], + [ + "▁front", + -9.37528133392334 + ], + [ + "▁Now", + -9.37669563293457 + ], + [ + "▁class", + -9.376946449279785 + ], + [ + "▁choose", + -9.377082824707031 + ], + [ + "pe", + -9.37808609008789 + ], + [ + "▁further", + -9.379021644592285 + ], + [ + "▁believe", + -9.37936019897461 + ], + [ + "of", + -9.379590034484863 + ], + [ + "▁among", + -9.380990982055664 + ], + [ + "sch", + -9.381686210632324 + ], + [ + "▁child", + -9.382609367370605 + ], + [ + "▁aber", + -9.38376235961914 + ], + [ + "▁Please", + -9.386269569396973 + ], + [ + "rea", + -9.387248992919922 + ], + [ + "▁later", + -9.387272834777832 + ], + [ + "▁amount", + -9.388760566711426 + ], + [ + "ice", + -9.390128135681152 + ], + [ + "▁National", + -9.390177726745605 + ], + [ + "▁style", + -9.390748977661133 + ], + [ + "▁tout", + -9.391490936279297 + ], + [ + "▁staff", + -9.392939567565918 + ], + [ + "▁white", + -9.397933959960938 + ], + [ + "▁ge", + -9.399179458618164 + ], + [ + "▁five", + -9.400984764099121 + ], + [ + "▁blog", + -9.40109920501709 + ], + [ + "▁designed", + -9.40125846862793 + ], + [ + "▁went", + -9.402216911315918 + ], + [ + "▁Da", + -9.40268611907959 + ], + [ + "▁general", + -9.403801918029785 + ], + [ + "▁rest", + -9.403874397277832 + ], + [ + "▁zur", + -9.40579891204834 + ], + [ + "▁quite", + -9.405948638916016 + ], + [ + "per", + -9.40687084197998 + ], + [ + "▁customer", + -9.408379554748535 + ], + [ + "▁close", + -9.408747673034668 + ], + [ + "▁Some", + -9.41054630279541 + ], + [ + "▁women", + -9.41075611114502 + ], + [ + "▁move", + -9.410761833190918 + ], + [ + "▁software", + -9.411357879638672 + ], + [ + "▁Ein", + -9.413651466369629 + ], + [ + "▁Ab", + -9.413823127746582 + ], + [ + "▁history", + -9.413864135742188 + ], + [ + "▁either", + -9.41564655303955 + ], + [ + "▁seen", + -9.417396545410156 + ], + [ + "▁card", + -9.419726371765137 + ], + [ + "▁City", + -9.421541213989258 + ], + [ + "▁hope", + -9.421769142150879 + ], + [ + "▁16", + -9.422072410583496 + ], + [ + "és", + -9.422825813293457 + ], + [ + "va", + -9.423294067382812 + ], + [ + "▁Al", + -9.423827171325684 + ], + [ + "▁especially", + -9.424827575683594 + ], + [ + "▁view", + -9.426136016845703 + ], + [ + "men", + -9.427363395690918 + ], + [ + "▁account", + -9.427489280700684 + ], + [ + "▁needed", + -9.429777145385742 + ], + [ + "▁United", + -9.429789543151855 + ], + [ + "]", + -9.432387351989746 + ], + [ + "▁yourself", + -9.432788848876953 + ], + [ + "▁100", + -9.433059692382812 + ], + [ + "▁receive", + -9.433417320251465 + ], + [ + "▁ideas", + -9.43369197845459 + ], + [ + "▁writing", + -9.434585571289062 + ], + [ + "▁simply", + -9.434741973876953 + ], + [ + "▁present", + -9.435087203979492 + ], + [ + "▁continue", + -9.436107635498047 + ], + [ + "▁application", + -9.44115161895752 + ], + [ + "▁build", + -9.44187068939209 + ], + [ + "▁turn", + -9.44249439239502 + ], + [ + "ated", + -9.442923545837402 + ], + [ + "▁everyone", + -9.443060874938965 + ], + [ + "cette", + -9.443114280700684 + ], + [ + "▁bien", + -9.444964408874512 + ], + [ + "less", + -9.445222854614258 + ], + [ + "▁Si", + -9.445359230041504 + ], + [ + "▁original", + -9.446867942810059 + ], + [ + "8", + -9.44794750213623 + ], + [ + "▁individual", + -9.448895454406738 + ], + [ + "tre", + -9.449433326721191 + ], + [ + "▁works", + -9.45171070098877 + ], + [ + "▁options", + -9.451821327209473 + ], + [ + "▁May", + -9.454456329345703 + ], + [ + "▁Not", + -9.454940795898438 + ], + [ 
+ "▁report", + -9.455467224121094 + ], + [ + "mer", + -9.457239151000977 + ], + [ + "▁human", + -9.459118843078613 + ], + [ + "▁provided", + -9.459603309631348 + ], + [ + "▁By", + -9.460925102233887 + ], + [ + "▁series", + -9.462006568908691 + ], + [ + "7", + -9.46226692199707 + ], + [ + "▁modern", + -9.463875770568848 + ], + [ + "▁meet", + -9.463921546936035 + ], + [ + "▁50", + -9.464119911193848 + ], + [ + "▁25", + -9.46969985961914 + ], + [ + "▁color", + -9.470091819763184 + ], + [ + "▁download", + -9.470109939575195 + ], + [ + "▁Here", + -9.471144676208496 + ], + [ + "6", + -9.471323013305664 + ], + [ + "▁poate", + -9.471449851989746 + ], + [ + "▁În", + -9.472321510314941 + ], + [ + "▁phone", + -9.473695755004883 + ], + [ + "▁likely", + -9.474374771118164 + ], + [ + "▁table", + -9.476469993591309 + ], + [ + "▁ma", + -9.476551055908203 + ], + [ + "▁Or", + -9.479181289672852 + ], + [ + "Z", + -9.48026180267334 + ], + [ + "▁19", + -9.482215881347656 + ], + [ + "▁insurance", + -9.482544898986816 + ], + [ + "▁anything", + -9.483808517456055 + ], + [ + "▁search", + -9.485033988952637 + ], + [ + "▁Ge", + -9.48520565032959 + ], + [ + "▁issue", + -9.485564231872559 + ], + [ + "▁includes", + -9.485688209533691 + ], + [ + "▁clear", + -9.487342834472656 + ], + [ + "les", + -9.488021850585938 + ], + [ + "▁almost", + -9.488259315490723 + ], + [ + "ilor", + -9.48935317993164 + ], + [ + "▁14", + -9.490717887878418 + ], + [ + "by", + -9.494056701660156 + ], + [ + "▁Du", + -9.49624252319336 + ], + [ + "▁mais", + -9.497303009033203 + ], + [ + "ier", + -9.499163627624512 + ], + [ + "▁law", + -9.49924087524414 + ], + [ + "▁added", + -9.500134468078613 + ], + [ + "▁con", + -9.500962257385254 + ], + [ + ",\"", + -9.501530647277832 + ], + [ + "▁ago", + -9.502127647399902 + ], + [ + "▁His", + -9.504697799682617 + ], + [ + "▁points", + -9.504981994628906 + ], + [ + "▁mult", + -9.505581855773926 + ], + [ + "▁financial", + -9.506216049194336 + ], + [ + "▁problems", + -9.506428718566895 + ], + [ + "▁however", + -9.50648307800293 + ], + [ + "▁events", + -9.50675106048584 + ], + [ + "▁half", + -9.507889747619629 + ], + [ + "ard", + -9.511183738708496 + ], + [ + "▁ask", + -9.51156997680664 + ], + [ + "▁version", + -9.511631965637207 + ], + [ + "end", + -9.512478828430176 + ], + [ + "▁created", + -9.512639999389648 + ], + [ + "▁lead", + -9.512917518615723 + ], + [ + "▁focus", + -9.513853073120117 + ], + [ + "▁increase", + -9.515096664428711 + ], + [ + "ex", + -9.515118598937988 + ], + [ + "▁allow", + -9.515798568725586 + ], + [ + "▁extra", + -9.516464233398438 + ], + [ + "▁24", + -9.516692161560059 + ], + [ + "▁credit", + -9.516772270202637 + ], + [ + "▁production", + -9.516801834106445 + ], + [ + "zu", + -9.517256736755371 + ], + [ + "▁black", + -9.51754093170166 + ], + [ + "▁systems", + -9.518040657043457 + ], + [ + "▁17", + -9.518178939819336 + ], + [ + "▁opportunity", + -9.518531799316406 + ], + [ + "▁bis", + -9.519219398498535 + ], + [ + "▁fast", + -9.519807815551758 + ], + [ + "ring", + -9.521166801452637 + ], + [ + "▁Don", + -9.522114753723145 + ], + [ + "▁via", + -9.52242660522461 + ], + [ + "fer", + -9.5225248336792 + ], + [ + "▁comme", + -9.522799491882324 + ], + [ + "▁popular", + -9.523722648620605 + ], + [ + "▁South", + -9.524491310119629 + ], + [ + "ating", + -9.525003433227539 + ], + [ + "▁State", + -9.525198936462402 + ], + [ + "ator", + -9.525679588317871 + ], + [ + "▁common", + -9.525968551635742 + ], + [ + "con", + -9.526727676391602 + ], + [ + "▁throughout", + -9.527557373046875 + ], + [ + "▁risk", 
+ -9.52774715423584 + ], + [ + "▁young", + -9.528532028198242 + ], + [ + "▁Je", + -9.528688430786133 + ], + [ + "▁image", + -9.52928352355957 + ], + [ + "ha", + -9.529376983642578 + ], + [ + "▁third", + -9.529587745666504 + ], + [ + "▁taken", + -9.530049324035645 + ], + [ + "▁Z", + -9.5314302444458 + ], + [ + "▁dis", + -9.5316162109375 + ], + [ + "▁From", + -9.533575057983398 + ], + [ + "▁details", + -9.534862518310547 + ], + [ + "▁games", + -9.53516674041748 + ], + [ + "▁practice", + -9.536040306091309 + ], + [ + "che", + -9.536151885986328 + ], + [ + "▁security", + -9.537364959716797 + ], + [ + "▁medical", + -9.537653923034668 + ], + [ + "▁learning", + -9.537806510925293 + ], + [ + "▁material", + -9.538509368896484 + ], + [ + "▁international", + -9.540703773498535 + ], + [ + "▁forward", + -9.541245460510254 + ], + [ + "▁paper", + -9.541247367858887 + ], + [ + "▁action", + -9.541348457336426 + ], + [ + "▁file", + -9.542378425598145 + ], + [ + "▁oil", + -9.543096542358398 + ], + [ + "▁self", + -9.54377555847168 + ], + [ + "▁private", + -9.545247077941895 + ], + [ + "▁interest", + -9.545559883117676 + ], + [ + "bar", + -9.546065330505371 + ], + [ + "▁sale", + -9.547115325927734 + ], + [ + "▁stay", + -9.547348976135254 + ], + [ + "ke", + -9.548089981079102 + ], + [ + "▁San", + -9.549053192138672 + ], + [ + "▁matter", + -9.549870491027832 + ], + [ + "▁reason", + -9.550254821777344 + ], + [ + "ted", + -9.55147647857666 + ], + [ + "▁potential", + -9.551742553710938 + ], + [ + "▁brand", + -9.552441596984863 + ], + [ + "▁field", + -9.55315113067627 + ], + [ + "▁treatment", + -9.553420066833496 + ], + [ + "▁period", + -9.553516387939453 + ], + [ + "▁York", + -9.553890228271484 + ], + [ + "▁Park", + -9.554738998413086 + ], + [ + "▁acest", + -9.556009292602539 + ], + [ + "ou", + -9.556926727294922 + ], + [ + "▁Ce", + -9.557014465332031 + ], + [ + "▁ready", + -9.558111190795898 + ], + [ + "▁rather", + -9.55860424041748 + ], + [ + "▁outside", + -9.560086250305176 + ], + [ + "▁standard", + -9.560121536254883 + ], + [ + "▁located", + -9.560770034790039 + ], + [ + "▁marketing", + -9.562313079833984 + ], + [ + "cu", + -9.564041137695312 + ], + [ + "▁Can", + -9.564562797546387 + ], + [ + "▁education", + -9.566105842590332 + ], + [ + "use", + -9.566640853881836 + ], + [ + "▁role", + -9.566828727722168 + ], + [ + "▁men", + -9.571505546569824 + ], + [ + "▁probably", + -9.571550369262695 + ], + [ + "▁store", + -9.57221508026123 + ], + [ + "▁John", + -9.572355270385742 + ], + [ + "▁rate", + -9.573956489562988 + ], + [ + "▁code", + -9.573994636535645 + ], + [ + "▁kids", + -9.574408531188965 + ], + [ + "▁currently", + -9.57552719116211 + ], + [ + "▁near", + -9.576475143432617 + ], + [ + "▁sales", + -9.576716423034668 + ], + [ + "▁usually", + -9.577012062072754 + ], + [ + "▁activities", + -9.577242851257324 + ], + [ + "▁party", + -9.577371597290039 + ], + [ + "▁leur", + -9.577434539794922 + ], + [ + "▁particular", + -9.577627182006836 + ], + [ + "▁mehr", + -9.577707290649414 + ], + [ + "ill", + -9.578757286071777 + ], + [ + "▁percent", + -9.579113006591797 + ], + [ + "▁fait", + -9.579537391662598 + ], + [ + "▁happy", + -9.579904556274414 + ], + [ + "▁inside", + -9.58005428314209 + ], + [ + "▁save", + -9.580510139465332 + ], + [ + "▁skills", + -9.580765724182129 + ], + [ + "▁consider", + -9.581025123596191 + ], + [ + "▁recent", + -9.58161735534668 + ], + [ + "▁strong", + -9.581781387329102 + ], + [ + "▁position", + -9.582076072692871 + ], + [ + "▁knowledge", + -9.582303047180176 + ], + [ + "▁tax", + 
-9.583868980407715 + ], + [ + "▁users", + -9.584261894226074 + ], + [ + "und", + -9.585564613342285 + ], + [ + "▁coming", + -9.585904121398926 + ], + [ + "▁article", + -9.585923194885254 + ], + [ + "min", + -9.586345672607422 + ], + [ + "▁sein", + -9.586555480957031 + ], + [ + "▁travel", + -9.586871147155762 + ], + [ + "▁changes", + -9.58765983581543 + ], + [ + "▁impact", + -9.588181495666504 + ], + [ + "▁wanted", + -9.588460922241211 + ], + [ + "▁address", + -9.5885591506958 + ], + [ + "▁soon", + -9.58873462677002 + ], + [ + "▁North", + -9.588915824890137 + ], + [ + "ată", + -9.589237213134766 + ], + [ + "▁trying", + -9.58985424041748 + ], + [ + "▁app", + -9.590612411499023 + ], + [ + "▁School", + -9.592510223388672 + ], + [ + "▁Es", + -9.592548370361328 + ], + [ + "we", + -9.59261703491211 + ], + [ + "▁conditions", + -9.59292984008789 + ], + [ + "▁digital", + -9.593293190002441 + ], + [ + "▁similar", + -9.594805717468262 + ], + [ + "▁solution", + -9.59514331817627 + ], + [ + "▁location", + -9.595183372497559 + ], + [ + "▁Of", + -9.595418930053711 + ], + [ + "▁follow", + -9.595842361450195 + ], + [ + "▁red", + -9.597526550292969 + ], + [ + "▁review", + -9.599202156066895 + ], + [ + "▁skin", + -9.599575996398926 + ], + [ + "▁pretty", + -9.600369453430176 + ], + [ + "day", + -9.600558280944824 + ], + [ + "▁dé", + -9.602072715759277 + ], + [ + "▁cause", + -9.602169036865234 + ], + [ + "▁Sa", + -9.602463722229004 + ], + [ + "▁user", + -9.602520942687988 + ], + [ + "▁Man", + -9.603377342224121 + ], + [ + "”.", + -9.604146003723145 + ], + [ + "▁Just", + -9.604366302490234 + ], + [ + "▁faire", + -9.604475021362305 + ], + [ + "▁member", + -9.605619430541992 + ], + [ + "▁iar", + -9.606892585754395 + ], + [ + "▁higher", + -9.607715606689453 + ], + [ + "▁step", + -9.607887268066406 + ], + [ + "▁wide", + -9.608185768127441 + ], + [ + "▁uns", + -9.608920097351074 + ], + [ + "▁World", + -9.609135627746582 + ], + [ + "▁additional", + -9.61176586151123 + ], + [ + "ber", + -9.613197326660156 + ], + [ + "▁easily", + -9.613990783691406 + ], + [ + "▁deal", + -9.615070343017578 + ], + [ + "▁ways", + -9.615514755249023 + ], + [ + "▁mobile", + -9.616837501525879 + ], + [ + "▁national", + -9.616913795471191 + ], + [ + "▁couple", + -9.617389678955078 + ], + [ + "▁ihre", + -9.61939811706543 + ], + [ + "▁choice", + -9.619612693786621 + ], + [ + "for", + -9.619686126708984 + ], + [ + "ous", + -9.62070083618164 + ], + [ + "▁Google", + -9.620855331420898 + ], + [ + "▁environment", + -9.622426986694336 + ], + [ + "urile", + -9.623322486877441 + ], + [ + "▁Center", + -9.626680374145508 + ], + [ + "mp", + -9.628592491149902 + ], + [ + "▁»", + -9.629727363586426 + ], + [ + "qui", + -9.630680084228516 + ], + [ + "▁growth", + -9.631048202514648 + ], + [ + "ler", + -9.633174896240234 + ], + [ + "▁improve", + -9.63360595703125 + ], + [ + "▁items", + -9.6336669921875 + ], + [ + "▁Nu", + -9.63393783569336 + ], + [ + "▁leave", + -9.634074211120605 + ], + [ + "▁true", + -9.634805679321289 + ], + [ + "▁wurde", + -9.63487434387207 + ], + [ + "▁cannot", + -9.635004043579102 + ], + [ + "▁13", + -9.635096549987793 + ], + [ + "▁running", + -9.636015892028809 + ], + [ + "▁anti", + -9.636177062988281 + ], + [ + "▁option", + -9.636306762695312 + ], + [ + "▁reading", + -9.63657283782959 + ], + [ + "▁Car", + -9.636698722839355 + ], + [ + "▁Wir", + -9.638110160827637 + ], + [ + "▁April", + -9.63975715637207 + ], + [ + "▁behind", + -9.640642166137695 + ], + [ + "▁client", + -9.640750885009766 + ], + [ + "▁cover", + -9.641012191772461 + ], + [ 
+ "▁stop", + -9.641090393066406 + ], + [ + "ja", + -9.641277313232422 + ], + [ + "▁built", + -9.641307830810547 + ], + [ + "▁Con", + -9.641313552856445 + ], + [ + "ement", + -9.641366004943848 + ], + [ + "▁projects", + -9.641828536987305 + ], + [ + "▁variety", + -9.641840934753418 + ], + [ + "▁Ihre", + -9.642666816711426 + ], + [ + "ș", + -9.64302921295166 + ], + [ + "▁unter", + -9.64385986328125 + ], + [ + "▁longer", + -9.646577835083008 + ], + [ + "year", + -9.647161483764648 + ], + [ + "▁photo", + -9.648370742797852 + ], + [ + "▁Also", + -9.64933967590332 + ], + [ + "▁received", + -9.651098251342773 + ], + [ + "▁return", + -9.652676582336426 + ], + [ + "00", + -9.653081893920898 + ], + [ + "▁bar", + -9.653343200683594 + ], + [ + "ary", + -9.654427528381348 + ], + [ + "elor", + -9.655137062072754 + ], + [ + "▁Home", + -9.656189918518066 + ], + [ + "our", + -9.656298637390137 + ], + [ + "▁Me", + -9.65771198272705 + ], + [ + "▁held", + -9.659111022949219 + ], + [ + "▁click", + -9.66014289855957 + ], + [ + "▁ex", + -9.660178184509277 + ], + [ + "▁cum", + -9.661561965942383 + ], + [ + "▁takes", + -9.66395378112793 + ], + [ + "▁computer", + -9.665796279907227 + ], + [ + "▁told", + -9.668192863464355 + ], + [ + "+", + -9.670648574829102 + ], + [ + "▁patients", + -9.670809745788574 + ], + [ + "ting", + -9.672165870666504 + ], + [ + "▁direct", + -9.672248840332031 + ], + [ + "▁quickly", + -9.672410011291504 + ], + [ + "tic", + -9.672877311706543 + ], + [ + "▁vom", + -9.673723220825195 + ], + [ + "▁di", + -9.67381477355957 + ], + [ + "▁kitchen", + -9.674022674560547 + ], + [ + "▁network", + -9.675640106201172 + ], + [ + "▁2015", + -9.676688194274902 + ], + [ + "▁effective", + -9.677227020263672 + ], + [ + "▁collection", + -9.677703857421875 + ], + [ + "▁2017", + -9.677751541137695 + ], + [ + "▁words", + -9.678145408630371 + ], + [ + "▁cele", + -9.678857803344727 + ], + [ + "▁student", + -9.678862571716309 + ], + [ + "▁amazing", + -9.678932189941406 + ], + [ + "eur", + -9.680419921875 + ], + [ + ".”", + -9.68227481842041 + ], + [ + "▁ale", + -9.682716369628906 + ], + [ + "”,", + -9.68414306640625 + ], + [ + "▁purchase", + -9.684350967407227 + ], + [ + "▁mean", + -9.68477725982666 + ], + [ + "▁West", + -9.686846733093262 + ], + [ + "▁nice", + -9.6889066696167 + ], + [ + "▁age", + -9.689131736755371 + ], + [ + "▁base", + -9.68923568725586 + ], + [ + "▁summer", + -9.68928337097168 + ], + [ + "▁multi", + -9.689496994018555 + ], + [ + "▁allows", + -9.689573287963867 + ], + [ + "▁latest", + -9.689604759216309 + ], + [ + "▁global", + -9.68992805480957 + ], + [ + "▁chance", + -9.690792083740234 + ], + [ + "▁sense", + -9.690872192382812 + ], + [ + "ieren", + -9.692789077758789 + ], + [ + "▁difficult", + -9.693133354187012 + ], + [ + "ité", + -9.694750785827637 + ], + [ + "ka", + -9.694792747497559 + ], + [ + "du", + -9.69483757019043 + ], + [ + "▁providing", + -9.695744514465332 + ], + [ + "▁Art", + -9.696940422058105 + ], + [ + "▁drive", + -9.698554992675781 + ], + [ + "▁Go", + -9.698877334594727 + ], + [ + "▁très", + -9.699414253234863 + ], + [ + "U", + -9.699579238891602 + ], + [ + "▁Pre", + -9.699846267700195 + ], + [ + "▁shows", + -9.700040817260742 + ], + [ + "▁hair", + -9.701324462890625 + ], + [ + "▁success", + -9.701513290405273 + ], + [ + "▁UK", + -9.703169822692871 + ], + [ + "red", + -9.703241348266602 + ], + [ + "ü", + -9.703370094299316 + ], + [ + "ish", + -9.703631401062012 + ], + [ + "▁weeks", + -9.704839706420898 + ], + [ + "▁solutions", + -9.7055025100708 + ], + [ + "▁Pe", + 
-9.7057523727417 + ], + [ + "▁equipment", + -9.706141471862793 + ], + [ + "și", + -9.706482887268066 + ], + [ + "▁worked", + -9.707073211669922 + ], + [ + "\".", + -9.708627700805664 + ], + [ + "▁legal", + -9.708720207214355 + ], + [ + "▁bad", + -9.70892333984375 + ], + [ + "▁40", + -9.709561347961426 + ], + [ + "▁Internet", + -9.709798812866211 + ], + [ + "▁included", + -9.709976196289062 + ], + [ + "▁upon", + -9.710977554321289 + ], + [ + "▁excellent", + -9.71106243133545 + ], + [ + "▁goal", + -9.71130084991455 + ], + [ + "▁El", + -9.711408615112305 + ], + [ + "▁Mo", + -9.711703300476074 + ], + [ + "▁policy", + -9.71319580078125 + ], + [ + "▁aussi", + -9.713537216186523 + ], + [ + "▁weight", + -9.713687896728516 + ], + [ + "ici", + -9.715133666992188 + ], + [ + "▁approach", + -9.715584754943848 + ], + [ + "▁six", + -9.71579647064209 + ], + [ + "▁entire", + -9.715911865234375 + ], + [ + "9", + -9.71633529663086 + ], + [ + "▁send", + -9.716832160949707 + ], + [ + "▁1.", + -9.718971252441406 + ], + [ + "▁wenn", + -9.719056129455566 + ], + [ + "▁photos", + -9.71993637084961 + ], + [ + "://", + -9.721014022827148 + ], + [ + "ger", + -9.72281551361084 + ], + [ + "▁favorite", + -9.723104476928711 + ], + [ + "ley", + -9.723477363586426 + ], + [ + "▁else", + -9.72463321685791 + ], + [ + "▁types", + -9.72468376159668 + ], + [ + "▁link", + -9.725333213806152 + ], + [ + "▁recently", + -9.72584056854248 + ], + [ + "▁Mit", + -9.72631549835205 + ], + [ + "▁hot", + -9.726548194885254 + ], + [ + "tra", + -9.726597785949707 + ], + [ + "ş", + -9.727307319641113 + ], + [ + "▁according", + -9.728511810302734 + ], + [ + "▁necessary", + -9.728511810302734 + ], + [ + "▁multiple", + -9.729269027709961 + ], + [ + "▁Im", + -9.729510307312012 + ], + [ + "▁sehr", + -9.729660034179688 + ], + [ + "▁sign", + -9.732263565063477 + ], + [ + "▁anyone", + -9.73283576965332 + ], + [ + "▁land", + -9.733613014221191 + ], + [ + "▁States", + -9.734037399291992 + ], + [ + "▁unsere", + -9.734119415283203 + ], + [ + "ées", + -9.734639167785645 + ], + [ + "We", + -9.735671043395996 + ], + [ + "▁nothing", + -9.735845565795898 + ], + [ + "▁commercial", + -9.736858367919922 + ], + [ + "ful", + -9.737265586853027 + ], + [ + "▁seems", + -9.739325523376465 + ], + [ + "▁International", + -9.740097045898438 + ], + [ + "▁March", + -9.74163818359375 + ], + [ + "▁Thanks", + -9.743307113647461 + ], + [ + "▁County", + -9.74365234375 + ], + [ + "▁books", + -9.744638442993164 + ], + [ + "▁Ca", + -9.7451753616333 + ], + [ + "▁mi", + -9.746304512023926 + ], + [ + "▁meeting", + -9.746662139892578 + ], + [ + "▁tools", + -9.747593879699707 + ], + [ + "▁cut", + -9.747650146484375 + ], + [ + "▁related", + -9.74765682220459 + ], + [ + "▁lives", + -9.748003005981445 + ], + [ + "way", + -9.748501777648926 + ], + [ + "▁develop", + -9.748651504516602 + ], + [ + "▁sound", + -9.748723983764648 + ], + [ + "▁safe", + -9.748950958251953 + ], + [ + "▁Her", + -9.74937629699707 + ], + [ + "▁average", + -9.751277923583984 + ], + [ + "▁clean", + -9.75174331665039 + ], + [ + "▁talk", + -9.752362251281738 + ], + [ + "▁peut", + -9.75241756439209 + ], + [ + "▁dann", + -9.752546310424805 + ], + [ + "▁terms", + -9.753265380859375 + ], + [ + "▁foarte", + -9.753512382507324 + ], + [ + "▁super", + -9.754284858703613 + ], + [ + "▁programs", + -9.754853248596191 + ], + [ + "▁decision", + -9.75540828704834 + ], + [ + "▁costs", + -9.756058692932129 + ], + [ + "▁être", + -9.756291389465332 + ], + [ + "▁2019", + -9.757674217224121 + ], + [ + "led", + -9.759482383728027 + ], + [ + 
"▁parents", + -9.759617805480957 + ], + [ + "▁Mr", + -9.761702537536621 + ], + [ + "▁lower", + -9.762362480163574 + ], + [ + "▁door", + -9.762978553771973 + ], + [ + "▁été", + -9.763933181762695 + ], + [ + "▁box", + -9.764954566955566 + ], + [ + "▁record", + -9.765517234802246 + ], + [ + "▁win", + -9.765650749206543 + ], + [ + "ster", + -9.766402244567871 + ], + [ + "▁America", + -9.766748428344727 + ], + [ + "▁immer", + -9.768763542175293 + ], + [ + "▁road", + -9.76996898651123 + ], + [ + "▁leading", + -9.772759437561035 + ], + [ + "▁section", + -9.772838592529297 + ], + [ + "▁Facebook", + -9.772990226745605 + ], + [ + "▁Most", + -9.7738676071167 + ], + [ + "iert", + -9.77435302734375 + ], + [ + "▁morning", + -9.774497032165527 + ], + [ + "▁asked", + -9.775190353393555 + ], + [ + "▁involved", + -9.77551555633545 + ], + [ + "▁hier", + -9.777607917785645 + ], + [ + "▁images", + -9.77821159362793 + ], + [ + "▁House", + -9.778263092041016 + ], + [ + "▁highly", + -9.780763626098633 + ], + [ + "▁Bar", + -9.781620979309082 + ], + [ + "▁Service", + -9.782510757446289 + ], + [ + "▁attention", + -9.784318923950195 + ], + [ + "▁normal", + -9.784571647644043 + ], + [ + "▁plans", + -9.785883903503418 + ], + [ + "▁source", + -9.785930633544922 + ], + [ + "▁Aus", + -9.788092613220215 + ], + [ + "▁benefits", + -9.788655281066895 + ], + [ + "▁ses", + -9.789348602294922 + ], + [ + "des", + -9.789867401123047 + ], + [ + "▁internet", + -9.789949417114258 + ], + [ + "▁materials", + -9.790080070495605 + ], + [ + "▁même", + -9.791318893432617 + ], + [ + "▁fine", + -9.791522026062012 + ], + [ + "▁fit", + -9.792226791381836 + ], + [ + "▁21", + -9.792612075805664 + ], + [ + "▁itself", + -9.793739318847656 + ], + [ + "▁wieder", + -9.793972969055176 + ], + [ + "▁Many", + -9.795313835144043 + ], + [ + "▁nature", + -9.795402526855469 + ], + [ + "▁pain", + -9.795467376708984 + ], + [ + "▁device", + -9.796183586120605 + ], + [ + "art", + -9.796989440917969 + ], + [ + "pro", + -9.7971830368042 + ], + [ + "▁France", + -9.797271728515625 + ], + [ + "lich", + -9.797314643859863 + ], + [ + "▁2014", + -9.799542427062988 + ], + [ + "▁inter", + -9.799964904785156 + ], + [ + "▁Li", + -9.800453186035156 + ], + [ + "▁career", + -9.801136016845703 + ], + [ + "▁looks", + -9.80145263671875 + ], + [ + "▁ré", + -9.802245140075684 + ], + [ + "▁ability", + -9.802556991577148 + ], + [ + "▁situation", + -9.803154945373535 + ], + [ + "ville", + -9.803157806396484 + ], + [ + "▁2016", + -9.80319595336914 + ], + [ + "tes", + -9.803462982177734 + ], + [ + "▁remember", + -9.803879737854004 + ], + [ + "▁TV", + -9.803998947143555 + ], + [ + "▁levels", + -9.805853843688965 + ], + [ + "▁subject", + -9.807723999023438 + ], + [ + "ally", + -9.80844497680664 + ], + [ + "▁reduce", + -9.810232162475586 + ], + [ + "▁*", + -9.8108491897583 + ], + [ + "▁Day", + -9.810867309570312 + ], + [ + "▁write", + -9.812152862548828 + ], + [ + "▁pick", + -9.814252853393555 + ], + [ + "ence", + -9.815399169921875 + ], + [ + "▁fresh", + -9.816520690917969 + ], + [ + "▁traditional", + -9.816662788391113 + ], + [ + "chi", + -9.817692756652832 + ], + [ + "▁machine", + -9.818047523498535 + ], + [ + "▁resources", + -9.819125175476074 + ], + [ + "â", + -9.819502830505371 + ], + [ + "▁countries", + -9.820009231567383 + ], + [ + "▁Even", + -9.820342063903809 + ], + [ + "▁green", + -9.821283340454102 + ], + [ + "▁Free", + -9.821910858154297 + ], + [ + "▁daily", + -9.822112083435059 + ], + [ + "▁respect", + -9.823013305664062 + ], + [ + "▁instead", + -9.823714256286621 + ], + [ + 
"▁Once", + -9.82418155670166 + ], + [ + "▁word", + -9.824407577514648 + ], + [ + "▁construction", + -9.82489013671875 + ], + [ + "▁huge", + -9.825064659118652 + ], + [ + "▁feature", + -9.825220108032227 + ], + [ + "▁themselves", + -9.826369285583496 + ], + [ + "▁loss", + -9.82919692993164 + ], + [ + "%", + -9.830063819885254 + ], + [ + "▁safety", + -9.830256462097168 + ], + [ + "▁economic", + -9.831406593322754 + ], + [ + "▁require", + -9.831945419311523 + ], + [ + "30", + -9.83255386352539 + ], + [ + "▁planning", + -9.833393096923828 + ], + [ + "▁mal", + -9.834482192993164 + ], + [ + "▁directly", + -9.835214614868164 + ], + [ + "ure", + -9.835719108581543 + ], + [ + "▁track", + -9.835734367370605 + ], + [ + "▁tool", + -9.836135864257812 + ], + [ + "▁positive", + -9.836392402648926 + ], + [ + "▁piece", + -9.837076187133789 + ], + [ + "▁parts", + -9.837140083312988 + ], + [ + "ang", + -9.83740520477295 + ], + [ + "▁trip", + -9.837453842163086 + ], + [ + "▁organization", + -9.837935447692871 + ], + [ + "▁sites", + -9.838274002075195 + ], + [ + "▁fire", + -9.83831787109375 + ], + [ + "▁China", + -9.838876724243164 + ], + [ + "▁Pour", + -9.839289665222168 + ], + [ + "▁plant", + -9.84011459350586 + ], + [ + "▁board", + -9.840341567993164 + ], + [ + "▁interesting", + -9.841227531433105 + ], + [ + "gar", + -9.841713905334473 + ], + [ + "▁fie", + -9.841752052307129 + ], + [ + "▁late", + -9.842166900634766 + ], + [ + "▁wall", + -9.842294692993164 + ], + [ + "▁walk", + -9.842741966247559 + ], + [ + "ham", + -9.843868255615234 + ], + [ + "▁Ne", + -9.845427513122559 + ], + [ + "▁First", + -9.845462799072266 + ], + [ + "▁double", + -9.845701217651367 + ], + [ + "▁budget", + -9.847657203674316 + ], + [ + "▁cases", + -9.847670555114746 + ], + [ + "cal", + -9.849738121032715 + ], + [ + "old", + -9.849796295166016 + ], + [ + "▁Bo", + -9.849822998046875 + ], + [ + "▁spend", + -9.850439071655273 + ], + [ + "port", + -9.850828170776367 + ], + [ + "▁worth", + -9.850934028625488 + ], + [ + "ique", + -9.851308822631836 + ], + [ + "nes", + -9.85190486907959 + ], + [ + "cul", + -9.852272033691406 + ], + [ + "era", + -9.85296630859375 + ], + [ + "▁text", + -9.853032112121582 + ], + [ + "▁decided", + -9.854948997497559 + ], + [ + "▁floor", + -9.855036735534668 + ], + [ + "▁requirements", + -9.85529899597168 + ], + [ + "▁cel", + -9.855361938476562 + ], + [ + "▁effect", + -9.855412483215332 + ], + [ + "▁gibt", + -9.856159210205078 + ], + [ + "▁news", + -9.859238624572754 + ], + [ + "▁vos", + -9.859931945800781 + ], + [ + "▁players", + -9.86057186126709 + ], + [ + "▁saw", + -9.862728118896484 + ], + [ + "▁auto", + -9.863056182861328 + ], + [ + "▁town", + -9.863207817077637 + ], + [ + "▁myself", + -9.864106178283691 + ], + [ + "▁lost", + -9.864988327026367 + ], + [ + "▁$", + -9.865124702453613 + ], + [ + "▁June", + -9.86609172821045 + ], + [ + "▁significant", + -9.866196632385254 + ], + [ + "▁giving", + -9.866230010986328 + ], + [ + "▁stand", + -9.866744041442871 + ], + [ + "▁stock", + -9.867657661437988 + ], + [ + "▁hold", + -9.867766380310059 + ], + [ + "▁Are", + -9.869078636169434 + ], + [ + "▁shall", + -9.86923599243164 + ], + [ + "▁ideal", + -9.869279861450195 + ], + [ + "▁London", + -9.87080192565918 + ], + [ + "▁answer", + -9.870853424072266 + ], + [ + "▁Vor", + -9.87157917022705 + ], + [ + "▁gives", + -9.873115539550781 + ], + [ + "ative", + -9.87316608428955 + ], + [ + "▁timp", + -9.873167991638184 + ], + [ + "▁center", + -9.87362289428711 + ], + [ + "▁Group", + -9.874580383300781 + ], + [ + "▁sans", + 
-9.875143051147461 + ], + [ + "▁Ar", + -9.875466346740723 + ], + [ + "▁Ma", + -9.875568389892578 + ], + [ + "▁reach", + -9.876279830932617 + ], + [ + "ren", + -9.876652717590332 + ], + [ + "▁More", + -9.877446174621582 + ], + [ + "mit", + -9.878068923950195 + ], + [ + "▁guide", + -9.87833309173584 + ], + [ + "▁fully", + -9.878828048706055 + ], + [ + "▁Since", + -9.878952980041504 + ], + [ + "▁Inc", + -9.87923812866211 + ], + [ + "▁culture", + -9.879780769348145 + ], + [ + "eat", + -9.880531311035156 + ], + [ + "▁written", + -9.880722999572754 + ], + [ + "▁Ho", + -9.881338119506836 + ], + [ + "▁India", + -9.881625175476074 + ], + [ + "▁Well", + -9.881708145141602 + ], + [ + "back", + -9.881752967834473 + ], + [ + "▁goes", + -9.882170677185059 + ], + [ + "▁completely", + -9.88217544555664 + ], + [ + "▁tour", + -9.883081436157227 + ], + [ + "▁began", + -9.883196830749512 + ], + [ + "▁picture", + -9.883255958557129 + ], + [ + "▁mare", + -9.88353157043457 + ], + [ + "▁playing", + -9.884223937988281 + ], + [ + "▁trebuie", + -9.884926795959473 + ], + [ + "ils", + -9.884940147399902 + ], + [ + "chen", + -9.885220527648926 + ], + [ + "▁hit", + -9.885416984558105 + ], + [ + "▁complex", + -9.88591480255127 + ], + [ + "▁Thank", + -9.886140823364258 + ], + [ + "▁Let", + -9.886350631713867 + ], + [ + "▁applications", + -9.887116432189941 + ], + [ + "▁friend", + -9.888312339782715 + ], + [ + "▁English", + -9.889549255371094 + ], + [ + "▁charge", + -9.890040397644043 + ], + [ + "▁recommend", + -9.893453598022461 + ], + [ + "▁message", + -9.893672943115234 + ], + [ + "In", + -9.893722534179688 + ], + [ + "▁Mar", + -9.894762992858887 + ], + [ + "pp", + -9.895845413208008 + ], + [ + "▁method", + -9.89692497253418 + ], + [ + "▁successful", + -9.897004127502441 + ], + [ + "tion", + -9.898880958557129 + ], + [ + "▁release", + -9.899920463562012 + ], + [ + "▁creating", + -9.900403022766113 + ], + [ + "▁despre", + -9.90141773223877 + ], + [ + "esc", + -9.902434349060059 + ], + [ + "▁eye", + -9.902752876281738 + ], + [ + "▁apply", + -9.905945777893066 + ], + [ + "net", + -9.906000137329102 + ], + [ + "side", + -9.906539916992188 + ], + [ + "▁ar", + -9.906949996948242 + ], + [ + "▁platform", + -9.90713882446289 + ], + [ + "▁touch", + -9.907329559326172 + ], + [ + "▁towards", + -9.90785026550293 + ], + [ + "▁match", + -9.908224105834961 + ], + [ + "▁Black", + -9.909344673156738 + ], + [ + "▁fall", + -9.90961742401123 + ], + [ + "▁ground", + -9.910234451293945 + ], + [ + "▁High", + -9.910740852355957 + ], + [ + "▁Q", + -9.911155700683594 + ], + [ + "▁schon", + -9.911709785461426 + ], + [ + "▁hotel", + -9.911751747131348 + ], + [ + "▁prices", + -9.912031173706055 + ], + [ + "▁developed", + -9.913411140441895 + ], + [ + "uk", + -9.913476943969727 + ], + [ + "ide", + -9.91367244720459 + ], + [ + "▁September", + -9.91370964050293 + ], + [ + "ized", + -9.914202690124512 + ], + [ + "▁War", + -9.914704322814941 + ], + [ + "!!", + -9.916285514831543 + ], + [ + "▁grow", + -9.916997909545898 + ], + [ + "▁watch", + -9.917067527770996 + ], + [ + "▁storage", + -9.917412757873535 + ], + [ + "eau", + -9.917513847351074 + ], + [ + "can", + -9.918373107910156 + ], + [ + "▁Get", + -9.919524192810059 + ], + [ + "▁See", + -9.91953182220459 + ], + [ + "▁European", + -9.919703483581543 + ], + [ + "▁language", + -9.91982650756836 + ], + [ + "ează", + -9.920175552368164 + ], + [ + "▁court", + -9.920334815979004 + ], + [ + "▁Why", + -9.921106338500977 + ], + [ + "▁hear", + -9.921342849731445 + ], + [ + "▁doar", + -9.921804428100586 + ], + [ 
+ "lan", + -9.92330265045166 + ], + [ + "▁Christmas", + -9.923810958862305 + ], + [ + "▁Web", + -9.923871994018555 + ], + [ + "vo", + -9.92405891418457 + ], + [ + "▁sent", + -9.924983024597168 + ], + [ + "▁businesses", + -9.925868034362793 + ], + [ + "▁Red", + -9.926278114318848 + ], + [ + "tel", + -9.926375389099121 + ], + [ + "▁Ha", + -9.926508903503418 + ], + [ + "▁wonderful", + -9.926653861999512 + ], + [ + "ations", + -9.926738739013672 + ], + [ + "za", + -9.92748737335205 + ], + [ + "▁22", + -9.928659439086914 + ], + [ + "▁thinking", + -9.92941665649414 + ], + [ + "▁became", + -9.929733276367188 + ], + [ + "▁cool", + -9.929835319519043 + ], + [ + "▁speed", + -9.930370330810547 + ], + [ + "mar", + -9.930426597595215 + ], + [ + "▁--", + -9.931743621826172 + ], + [ + "▁groups", + -9.931920051574707 + ], + [ + "▁interested", + -9.93198299407959 + ], + [ + "ak", + -9.93218994140625 + ], + [ + "▁60", + -9.932672500610352 + ], + [ + "▁screen", + -9.93370246887207 + ], + [ + "▁Design", + -9.933789253234863 + ], + [ + "▁limited", + -9.935648918151855 + ], + [ + "▁expected", + -9.935959815979004 + ], + [ + "▁opportunities", + -9.936376571655273 + ], + [ + "▁regular", + -9.936870574951172 + ], + [ + "off", + -9.93702220916748 + ], + [ + "▁Best", + -9.937298774719238 + ], + [ + "Re", + -9.938436508178711 + ], + [ + "▁ihr", + -9.938719749450684 + ], + [ + "▁Great", + -9.938907623291016 + ], + [ + "▁employees", + -9.93924617767334 + ], + [ + "▁custom", + -9.939679145812988 + ], + [ + "▁multe", + -9.940123558044434 + ], + [ + "let", + -9.940876007080078 + ], + [ + "▁benefit", + -9.942487716674805 + ], + [ + "▁term", + -9.942623138427734 + ], + [ + "▁bine", + -9.942869186401367 + ], + [ + "▁deep", + -9.944526672363281 + ], + [ + "▁August", + -9.94526481628418 + ], + [ + "▁President", + -9.945381164550781 + ], + [ + "▁Auf", + -9.945854187011719 + ], + [ + "▁wish", + -9.946924209594727 + ], + [ + "▁sometimes", + -9.947274208068848 + ], + [ + "ari", + -9.947793960571289 + ], + [ + "▁pressure", + -9.948184967041016 + ], + [ + "▁ani", + -9.94859504699707 + ], + [ + "▁trade", + -9.949930191040039 + ], + [ + "▁firm", + -9.950027465820312 + ], + [ + "▁comment", + -9.95003604888916 + ], + [ + "▁November", + -9.950242042541504 + ], + [ + "▁expect", + -9.951102256774902 + ], + [ + "▁2012", + -9.952491760253906 + ], + [ + "▁Ich", + -9.95328140258789 + ], + [ + "▁relationship", + -9.95363998413086 + ], + [ + "▁active", + -9.954682350158691 + ], + [ + "org", + -9.954710960388184 + ], + [ + "▁heat", + -9.956732749938965 + ], + [ + "▁wood", + -9.95678997039795 + ], + [ + "▁notre", + -9.957921028137207 + ], + [ + "▁function", + -9.958330154418945 + ], + [ + "▁2.", + -9.95909309387207 + ], + [ + "▁wedding", + -9.960049629211426 + ], + [ + "▁starting", + -9.961235046386719 + ], + [ + "▁Health", + -9.961249351501465 + ], + [ + "\",", + -9.961713790893555 + ], + [ + "▁death", + -9.962173461914062 + ], + [ + "▁pages", + -9.962764739990234 + ], + [ + "▁vehicle", + -9.96293830871582 + ], + [ + "▁request", + -9.963874816894531 + ], + [ + "▁helps", + -9.963916778564453 + ], + [ + "▁blue", + -9.964017868041992 + ], + [ + "▁analysis", + -9.964414596557617 + ], + [ + "▁posted", + -9.964544296264648 + ], + [ + "▁healthy", + -9.964814186096191 + ], + [ + "▁contract", + -9.964988708496094 + ], + [ + "▁•", + -9.965263366699219 + ], + [ + "▁Each", + -9.965293884277344 + ], + [ + "▁Fa", + -9.966179847717285 + ], + [ + "▁dintre", + -9.966221809387207 + ], + [ + "▁Friday", + -9.967202186584473 + ], + [ + "▁considered", + 
-9.967992782592773 + ], + [ + "cher", + -9.96826457977295 + ], + [ + "▁quick", + -9.968731880187988 + ], + [ + "▁understanding", + -9.96916389465332 + ], + [ + "▁condition", + -9.969378471374512 + ], + [ + "ization", + -9.971049308776855 + ], + [ + "▁document", + -9.971664428710938 + ], + [ + "▁prevent", + -9.971890449523926 + ], + [ + "▁growing", + -9.9725341796875 + ], + [ + "▁protection", + -9.972620964050293 + ], + [ + "▁cat", + -9.974002838134766 + ], + [ + "▁#", + -9.975058555603027 + ], + [ + "10", + -9.975275039672852 + ], + [ + "▁join", + -9.9759521484375 + ], + [ + "▁serve", + -9.976580619812012 + ], + [ + "▁blood", + -9.977095603942871 + ], + [ + "▁July", + -9.977341651916504 + ], + [ + "▁region", + -9.977787971496582 + ], + [ + "car", + -9.97933578491211 + ], + [ + "▁entre", + -9.979788780212402 + ], + [ + "▁physical", + -9.981287002563477 + ], + [ + "▁cash", + -9.9813232421875 + ], + [ + "aux", + -9.981823921203613 + ], + [ + "ng", + -9.982654571533203 + ], + [ + "▁stage", + -9.98281478881836 + ], + [ + "▁seem", + -9.983034133911133 + ], + [ + "▁definitely", + -9.983795166015625 + ], + [ + "▁investment", + -9.983827590942383 + ], + [ + "▁purpose", + -9.985441207885742 + ], + [ + "▁begin", + -9.985486030578613 + ], + [ + "®", + -9.985495567321777 + ], + [ + "▁break", + -9.985701560974121 + ], + [ + "itate", + -9.987293243408203 + ], + [ + "▁moving", + -9.989288330078125 + ], + [ + "▁met", + -9.990678787231445 + ], + [ + "ize", + -9.990833282470703 + ], + [ + "▁select", + -9.991165161132812 + ], + [ + "▁tous", + -9.991310119628906 + ], + [ + "▁Europe", + -9.991639137268066 + ], + [ + "@", + -9.992724418640137 + ], + [ + "▁individuals", + -9.993392944335938 + ], + [ + "▁Zeit", + -9.993524551391602 + ], + [ + "gu", + -9.995670318603516 + ], + [ + "▁unit", + -9.995753288269043 + ], + [ + "▁noi", + -9.996089935302734 + ], + [ + "▁places", + -9.996171951293945 + ], + [ + "all", + -9.99632453918457 + ], + [ + "▁wait", + -9.996755599975586 + ], + [ + "▁difference", + -9.997234344482422 + ], + [ + "▁round", + -9.998015403747559 + ], + [ + "50", + -9.99953842163086 + ], + [ + "rie", + -9.999545097351074 + ], + [ + "▁Et", + -9.999933242797852 + ], + [ + "20", + -10.000725746154785 + ], + [ + "▁activity", + -10.000792503356934 + ], + [ + "е", + -10.000866889953613 + ], + [ + "▁Windows", + -10.001087188720703 + ], + [ + "▁produce", + -10.001385688781738 + ], + [ + "▁keine", + -10.00212574005127 + ], + [ + "▁Air", + -10.002567291259766 + ], + [ + "▁January", + -10.004890441894531 + ], + [ + "▁deux", + -10.005081176757812 + ], + [ + "▁entry", + -10.005208015441895 + ], + [ + "king", + -10.006500244140625 + ], + [ + "▁goals", + -10.006736755371094 + ], + [ + "▁previous", + -10.0077543258667 + ], + [ + "▁+", + -10.008035659790039 + ], + [ + "▁Business", + -10.008259773254395 + ], + [ + "ont", + -10.008552551269531 + ], + [ + "▁Sunday", + -10.008694648742676 + ], + [ + "▁offering", + -10.010359764099121 + ], + [ + "▁response", + -10.011018753051758 + ], + [ + "▁surface", + -10.011393547058105 + ], + [ + "▁Department", + -10.01212215423584 + ], + [ + "▁exactly", + -10.012190818786621 + ], + [ + "▁Online", + -10.012577056884766 + ], + [ + "dem", + -10.013803482055664 + ], + [ + "ischen", + -10.014006614685059 + ], + [ + "▁hands", + -10.015100479125977 + ], + [ + "▁hour", + -10.016197204589844 + ], + [ + "▁dog", + -10.016946792602539 + ], + [ + "▁damage", + -10.017006874084473 + ], + [ + "▁capital", + -10.018792152404785 + ], + [ + "▁toate", + -10.020488739013672 + ], + [ + "▁wrong", + 
-10.020674705505371 + ], + [ + "unui", + -10.022201538085938 + ], + [ + "tri", + -10.023979187011719 + ], + [ + "▁sell", + -10.023999214172363 + ], + [ + "▁published", + -10.024175643920898 + ], + [ + "▁families", + -10.024675369262695 + ], + [ + "▁avoid", + -10.025490760803223 + ], + [ + "▁Ko", + -10.025506019592285 + ], + [ + "▁mod", + -10.026697158813477 + ], + [ + "rat", + -10.027653694152832 + ], + [ + "▁Make", + -10.0299654006958 + ], + [ + "▁October", + -10.030153274536133 + ], + [ + "▁former", + -10.031285285949707 + ], + [ + "▁Services", + -10.03281021118164 + ], + [ + "▁felt", + -10.033045768737793 + ], + [ + "▁selection", + -10.033309936523438 + ], + [ + "eaza", + -10.034177780151367 + ], + [ + "gel", + -10.034422874450684 + ], + [ + "▁Good", + -10.035792350769043 + ], + [ + "▁actual", + -10.0364351272583 + ], + [ + "▁gut", + -10.036853790283203 + ], + [ + "▁gas", + -10.03708553314209 + ], + [ + "15", + -10.038182258605957 + ], + [ + "▁structure", + -10.038285255432129 + ], + [ + "▁act", + -10.0386381149292 + ], + [ + "▁Zu", + -10.038654327392578 + ], + [ + "▁creative", + -10.039134979248047 + ], + [ + "▁Vi", + -10.039159774780273 + ], + [ + "▁shop", + -10.04066276550293 + ], + [ + "▁Lo", + -10.040735244750977 + ], + [ + "şi", + -10.042192459106445 + ], + [ + "▁mis", + -10.042224884033203 + ], + [ + "ungen", + -10.042301177978516 + ], + [ + "▁fan", + -10.04240608215332 + ], + [ + "▁|", + -10.043391227722168 + ], + [ + "▁Bei", + -10.044037818908691 + ], + [ + "▁protect", + -10.04454517364502 + ], + [ + "▁Na", + -10.0447998046875 + ], + [ + "q", + -10.045693397521973 + ], + [ + "ok", + -10.04710578918457 + ], + [ + "▁California", + -10.047263145446777 + ], + [ + "▁political", + -10.047301292419434 + ], + [ + "25", + -10.047530174255371 + ], + [ + "▁feeling", + -10.047913551330566 + ], + [ + "▁ces", + -10.048321723937988 + ], + [ + "▁display", + -10.048857688903809 + ], + [ + "▁essential", + -10.04964542388916 + ], + [ + "ând", + -10.049971580505371 + ], + [ + "▁seine", + -10.050551414489746 + ], + [ + "▁soft", + -10.050915718078613 + ], + [ + "ach", + -10.05102252960205 + ], + [ + "▁happen", + -10.051118850708008 + ], + [ + "▁Paul", + -10.053346633911133 + ], + [ + "▁Cu", + -10.054024696350098 + ], + [ + "house", + -10.055376052856445 + ], + [ + "ante", + -10.05582046508789 + ], + [ + "▁easier", + -10.056551933288574 + ], + [ + "▁sort", + -10.0567045211792 + ], + [ + "▁Post", + -10.057138442993164 + ], + [ + "▁accept", + -10.05730152130127 + ], + [ + "field", + -10.057648658752441 + ], + [ + "zen", + -10.057741165161133 + ], + [ + "▁character", + -10.057848930358887 + ], + [ + "▁beginning", + -10.058433532714844 + ], + [ + "▁Jesus", + -10.058760643005371 + ], + [ + "▁weekend", + -10.059663772583008 + ], + [ + "▁certainly", + -10.06114387512207 + ], + [ + "▁THE", + -10.061254501342773 + ], + [ + "▁alle", + -10.06189250946045 + ], + [ + "▁transport", + -10.062220573425293 + ], + [ + "▁Saturday", + -10.063043594360352 + ], + [ + "▁basic", + -10.064136505126953 + ], + [ + "▁loved", + -10.06431770324707 + ], + [ + "ros", + -10.065333366394043 + ], + [ + "▁offered", + -10.065996170043945 + ], + [ + "▁camera", + -10.067024230957031 + ], + [ + "▁Green", + -10.06789779663086 + ], + [ + "ology", + -10.069480895996094 + ], + [ + "ä", + -10.069646835327148 + ], + [ + "▁manage", + -10.070416450500488 + ], + [ + "▁paid", + -10.070881843566895 + ], + [ + "▁advice", + -10.071617126464844 + ], + [ + "▁patient", + -10.072234153747559 + ], + [ + "▁spent", + -10.072272300720215 + ], + [ + "▁mir", + 
-10.072366714477539 + ], + [ + "▁baby", + -10.072400093078613 + ], + [ + "ö", + -10.073193550109863 + ], + [ + "▁basis", + -10.073338508605957 + ], + [ + "▁cancer", + -10.073765754699707 + ], + [ + "▁Although", + -10.07400894165039 + ], + [ + "▁gift", + -10.074336051940918 + ], + [ + "▁3.", + -10.074871063232422 + ], + [ + "dieser", + -10.075157165527344 + ], + [ + "▁overall", + -10.07520580291748 + ], + [ + "▁Sch", + -10.075265884399414 + ], + [ + "▁Ex", + -10.076258659362793 + ], + [ + "▁December", + -10.07689094543457 + ], + [ + "▁released", + -10.078214645385742 + ], + [ + "▁prior", + -10.07900333404541 + ], + [ + "▁sowie", + -10.081072807312012 + ], + [ + "▁club", + -10.081326484680176 + ], + [ + "▁Street", + -10.081535339355469 + ], + [ + "▁College", + -10.08254623413086 + ], + [ + "▁î", + -10.083059310913086 + ], + [ + "over", + -10.083159446716309 + ], + [ + "▁gave", + -10.08454704284668 + ], + [ + "▁truly", + -10.084784507751465 + ], + [ + "par", + -10.084806442260742 + ], + [ + "▁Canada", + -10.084888458251953 + ], + [ + "▁existing", + -10.085420608520508 + ], + [ + "lie", + -10.086335182189941 + ], + [ + "▁ganz", + -10.086658477783203 + ], + [ + "▁setting", + -10.087109565734863 + ], + [ + "▁supply", + -10.08739185333252 + ], + [ + "▁college", + -10.087540626525879 + ], + [ + "▁communication", + -10.088407516479492 + ], + [ + "▁23", + -10.088834762573242 + ], + [ + "▁pass", + -10.091546058654785 + ], + [ + "▁devices", + -10.091872215270996 + ], + [ + "▁glass", + -10.092083930969238 + ], + [ + "▁experienced", + -10.092395782470703 + ], + [ + "▁grand", + -10.093363761901855 + ], + [ + "▁Po", + -10.093396186828613 + ], + [ + "▁beyond", + -10.094029426574707 + ], + [ + "▁format", + -10.094165802001953 + ], + [ + "▁mon", + -10.09461498260498 + ], + [ + "▁perform", + -10.094635009765625 + ], + [ + "sten", + -10.095130920410156 + ], + [ + "▁1,", + -10.096270561218262 + ], + [ + "▁Per", + -10.096640586853027 + ], + [ + "▁sold", + -10.097247123718262 + ], + [ + "▁rates", + -10.0972900390625 + ], + [ + "▁regarding", + -10.097782135009766 + ], + [ + "▁Paris", + -10.098291397094727 + ], + [ + "▁Dar", + -10.099579811096191 + ], + [ + "▁challenge", + -10.099649429321289 + ], + [ + "▁feet", + -10.100564002990723 + ], + [ + "▁Su", + -10.102017402648926 + ], + [ + "je", + -10.102593421936035 + ], + [ + "▁Bank", + -10.102627754211426 + ], + [ + "ven", + -10.103126525878906 + ], + [ + "jo", + -10.103290557861328 + ], + [ + "▁band", + -10.10348892211914 + ], + [ + "▁delivery", + -10.104915618896484 + ], + [ + "Vous", + -10.104924201965332 + ], + [ + "tele", + -10.10495376586914 + ], + [ + "▁East", + -10.105379104614258 + ], + [ + "▁pictures", + -10.106067657470703 + ], + [ + "▁useful", + -10.106481552124023 + ], + [ + "*", + -10.107648849487305 + ], + [ + "▁increased", + -10.107746124267578 + ], + [ + "▁stories", + -10.108119010925293 + ], + [ + "sion", + -10.108280181884766 + ], + [ + "bra", + -10.108345985412598 + ], + [ + "▁brought", + -10.108466148376465 + ], + [ + "▁effort", + -10.109898567199707 + ], + [ + "▁payment", + -10.11058235168457 + ], + [ + "▁heard", + -10.110925674438477 + ], + [ + "▁played", + -10.111245155334473 + ], + [ + "▁White", + -10.111417770385742 + ], + [ + "▁metal", + -10.111721992492676 + ], + [ + "tal", + -10.111754417419434 + ], + [ + "▁engine", + -10.112006187438965 + ], + [ + "▁Club", + -10.11218547821045 + ], + [ + "ical", + -10.114581108093262 + ], + [ + "▁effects", + -10.115421295166016 + ], + [ + "▁degree", + -10.115763664245605 + ], + [ + "▁bed", + 
-10.1159086227417 + ], + [ + "ette", + -10.115991592407227 + ], + [ + "▁David", + -10.116386413574219 + ], + [ + "°", + -10.117666244506836 + ], + [ + "▁Au", + -10.117938041687012 + ], + [ + "▁Company", + -10.11845874786377 + ], + [ + "▁player", + -10.11938190460205 + ], + [ + "▁Today", + -10.120569229125977 + ], + [ + "▁maintain", + -10.12093448638916 + ], + [ + "▁minute", + -10.121193885803223 + ], + [ + "mail", + -10.122172355651855 + ], + [ + "▁race", + -10.122366905212402 + ], + [ + "▁comfortable", + -10.123887062072754 + ], + [ + "▁responsible", + -10.124085426330566 + ], + [ + "vor", + -10.124622344970703 + ], + [ + "▁associated", + -10.124695777893066 + ], + [ + "▁weather", + -10.124701499938965 + ], + [ + "▁$1", + -10.125639915466309 + ], + [ + "▁tried", + -10.126176834106445 + ], + [ + "▁Check", + -10.127649307250977 + ], + [ + "▁solid", + -10.127864837646484 + ], + [ + "▁movie", + -10.128364562988281 + ], + [ + "▁coffee", + -10.12874698638916 + ], + [ + "board", + -10.129073143005371 + ], + [ + "▁po", + -10.12946605682373 + ], + [ + "▁warm", + -10.129583358764648 + ], + [ + "▁connect", + -10.131733894348145 + ], + [ + "▁Ad", + -10.133807182312012 + ], + [ + "work", + -10.133859634399414 + ], + [ + "mal", + -10.13397216796875 + ], + [ + "▁Act", + -10.134634971618652 + ], + [ + "▁achieve", + -10.134769439697266 + ], + [ + "▁Nach", + -10.136604309082031 + ], + [ + "www", + -10.136669158935547 + ], + [ + "term", + -10.13672161102295 + ], + [ + "▁claim", + -10.137251853942871 + ], + [ + "▁particularly", + -10.138245582580566 + ], + [ + "▁cas", + -10.138396263122559 + ], + [ + "▁furniture", + -10.138461112976074 + ], + [ + "▁finish", + -10.13896369934082 + ], + [ + "▁temps", + -10.139026641845703 + ], + [ + "▁disease", + -10.139115333557129 + ], + [ + "▁lots", + -10.139196395874023 + ], + [ + "▁ball", + -10.139307975769043 + ], + [ + "▁sun", + -10.14010238647461 + ], + [ + "▁strategy", + -10.140498161315918 + ], + [ + "bre", + -10.140518188476562 + ], + [ + "▁mine", + -10.141541481018066 + ], + [ + "▁Click", + -10.141743659973145 + ], + [ + "ran", + -10.141983032226562 + ], + [ + "▁Will", + -10.142234802246094 + ], + [ + "▁garden", + -10.142974853515625 + ], + [ + "▁stuff", + -10.14359188079834 + ], + [ + "▁limit", + -10.144641876220703 + ], + [ + "▁bottom", + -10.14494800567627 + ], + [ + "▁shown", + -10.144962310791016 + ], + [ + "ship", + -10.145271301269531 + ], + [ + "▁habe", + -10.145858764648438 + ], + [ + "▁Super", + -10.146219253540039 + ], + [ + "▁completed", + -10.146971702575684 + ], + [ + "▁wine", + -10.146979331970215 + ], + [ + "ische", + -10.147262573242188 + ], + [ + "▁largest", + -10.147466659545898 + ], + [ + "▁appropriate", + -10.148261070251465 + ], + [ + "▁immediately", + -10.150248527526855 + ], + [ + "▁Hi", + -10.152358055114746 + ], + [ + "▁trust", + -10.152767181396484 + ], + [ + "ability", + -10.154254913330078 + ], + [ + "▁powerful", + -10.155101776123047 + ], + [ + "▁helping", + -10.155620574951172 + ], + [ + "▁schedule", + -10.155688285827637 + ], + [ + "▁correct", + -10.155707359313965 + ], + [ + "▁transfer", + -10.156496047973633 + ], + [ + "pre", + -10.15665340423584 + ], + [ + "▁journey", + -10.15688419342041 + ], + [ + "pm", + -10.157002449035645 + ], + [ + "don", + -10.158435821533203 + ], + [ + "▁highest", + -10.159249305725098 + ], + [ + "▁finally", + -10.15999698638916 + ], + [ + "form", + -10.160258293151855 + ], + [ + "▁extremely", + -10.160404205322266 + ], + [ + "▁window", + -10.160501480102539 + ], + [ + "▁Over", + -10.162222862243652 + ], + 
[ + "▁remove", + -10.162469863891602 + ], + [ + "wood", + -10.162479400634766 + ], + [ + "▁2013", + -10.163631439208984 + ], + [ + "▁mother", + -10.164072036743164 + ], + [ + "▁Auto", + -10.16436767578125 + ], + [ + "▁annual", + -10.164615631103516 + ], + [ + "▁Star", + -10.164834976196289 + ], + [ + "▁Di", + -10.166138648986816 + ], + [ + "о", + -10.16711139678955 + ], + [ + "▁gold", + -10.167129516601562 + ], + [ + "tar", + -10.167352676391602 + ], + [ + "ju", + -10.167750358581543 + ], + [ + "▁Use", + -10.169474601745605 + ], + [ + "▁thanks", + -10.16960334777832 + ], + [ + "▁centre", + -10.170127868652344 + ], + [ + "▁Australia", + -10.170358657836914 + ], + [ + "▁estate", + -10.170504570007324 + ], + [ + "▁eyes", + -10.1714448928833 + ], + [ + "▁force", + -10.171592712402344 + ], + [ + "▁income", + -10.17395305633545 + ], + [ + "▁science", + -10.174036026000977 + ], + [ + "ori", + -10.174230575561523 + ], + [ + "▁enter", + -10.174851417541504 + ], + [ + "▁28", + -10.175408363342285 + ], + [ + "ire", + -10.17568302154541 + ], + [ + "▁schools", + -10.175797462463379 + ], + [ + "▁restaurant", + -10.176088333129883 + ], + [ + "▁Council", + -10.177032470703125 + ], + [ + "aus", + -10.177885055541992 + ], + [ + "▁agree", + -10.17905330657959 + ], + [ + "▁campaign", + -10.179192543029785 + ], + [ + "▁Ta", + -10.179428100585938 + ], + [ + "▁letter", + -10.179814338684082 + ], + [ + "▁central", + -10.179931640625 + ], + [ + "▁Because", + -10.180054664611816 + ], + [ + "▁path", + -10.180349349975586 + ], + [ + "▁loc", + -10.180882453918457 + ], + [ + "▁files", + -10.182587623596191 + ], + [ + "▁population", + -10.182705879211426 + ], + [ + "▁explore", + -10.182723999023438 + ], + [ + "▁mid", + -10.182734489440918 + ], + [ + "▁concept", + -10.182748794555664 + ], + [ + "▁church", + -10.183015823364258 + ], + [ + "80", + -10.183026313781738 + ], + [ + "▁einfach", + -10.185834884643555 + ], + [ + "▁reasons", + -10.186690330505371 + ], + [ + "▁determine", + -10.186755180358887 + ], + [ + "▁February", + -10.187095642089844 + ], + [ + "▁evidence", + -10.18797779083252 + ], + [ + "▁sleep", + -10.188036918640137 + ], + [ + "▁Board", + -10.188652992248535 + ], + [ + "▁maybe", + -10.189635276794434 + ], + [ + "▁wasn", + -10.189701080322266 + ], + [ + "▁Monday", + -10.190101623535156 + ], + [ + "▁director", + -10.190481185913086 + ], + [ + "well", + -10.190974235534668 + ], + [ + "During", + -10.191001892089844 + ], + [ + "▁sweet", + -10.191061973571777 + ], + [ + "▁assist", + -10.19124984741211 + ], + [ + "▁police", + -10.191511154174805 + ], + [ + "▁repair", + -10.191729545593262 + ], + [ + "▁techniques", + -10.191733360290527 + ], + [ + "▁served", + -10.191808700561523 + ], + [ + "vi", + -10.192037582397461 + ], + [ + "▁sports", + -10.192331314086914 + ], + [ + "▁opening", + -10.192401885986328 + ], + [ + "▁ones", + -10.192731857299805 + ], + [ + "▁notice", + -10.193460464477539 + ], + [ + "▁PC", + -10.193547248840332 + ], + [ + "▁alte", + -10.194242477416992 + ], + [ + "▁Bi", + -10.194340705871582 + ], + [ + "▁cold", + -10.195606231689453 + ], + [ + "▁billion", + -10.195794105529785 + ], + [ + "▁balance", + -10.196361541748047 + ], + [ + "cer", + -10.196417808532715 + ], + [ + "▁nearly", + -10.196725845336914 + ], + [ + "▁wear", + -10.197259902954102 + ], + [ + "free", + -10.19760799407959 + ], + [ + "▁Have", + -10.197748184204102 + ], + [ + "▁comfort", + -10.199211120605469 + ], + [ + "▁studies", + -10.199225425720215 + ], + [ + "▁traffic", + -10.199540138244629 + ], + [ + "▁item", + 
-10.200214385986328 + ], + [ + "▁teaching", + -10.200467109680176 + ], + [ + "▁turned", + -10.201326370239258 + ], + [ + "isation", + -10.201354026794434 + ], + [ + "12", + -10.202038764953613 + ], + [ + "▁greater", + -10.202167510986328 + ], + [ + "▁knew", + -10.20233154296875 + ], + [ + "▁Association", + -10.203333854675293 + ], + [ + "▁Office", + -10.203802108764648 + ], + [ + "▁established", + -10.204085350036621 + ], + [ + "45", + -10.204170227050781 + ], + [ + "▁Love", + -10.204318046569824 + ], + [ + "▁changed", + -10.204882621765137 + ], + [ + "▁pan", + -10.205184936523438 + ], + [ + "van", + -10.20565414428711 + ], + [ + "▁Mi", + -10.205663681030273 + ], + [ + "▁tend", + -10.20637321472168 + ], + [ + "▁connection", + -10.206522941589355 + ], + [ + "▁lack", + -10.206954002380371 + ], + [ + "▁bank", + -10.208464622497559 + ], + [ + "cat", + -10.208720207214355 + ], + [ + "▁helped", + -10.209071159362793 + ], + [ + "▁spot", + -10.209417343139648 + ], + [ + "▁spring", + -10.20974063873291 + ], + [ + "▁Wi", + -10.210912704467773 + ], + [ + "▁Mac", + -10.211682319641113 + ], + [ + "▁Christ", + -10.212015151977539 + ], + [ + "▁saying", + -10.212835311889648 + ], + [ + "▁General", + -10.213062286376953 + ], + [ + "▁port", + -10.213099479675293 + ], + [ + "▁Mal", + -10.213156700134277 + ], + [ + "▁System", + -10.213486671447754 + ], + [ + "▁According", + -10.2152738571167 + ], + [ + "▁chiar", + -10.21568489074707 + ], + [ + "log", + -10.21576976776123 + ], + [ + "▁mix", + -10.215974807739258 + ], + [ + "▁Lake", + -10.216042518615723 + ], + [ + "▁intr", + -10.216590881347656 + ], + [ + "▁deliver", + -10.216793060302734 + ], + [ + "mon", + -10.216931343078613 + ], + [ + "▁Ro", + -10.217060089111328 + ], + [ + "▁Management", + -10.217504501342773 + ], + [ + "bri", + -10.218718528747559 + ], + [ + "▁pieces", + -10.218774795532227 + ], + [ + "▁announced", + -10.218926429748535 + ], + [ + "▁Yes", + -10.219268798828125 + ], + [ + "▁dark", + -10.220884323120117 + ], + [ + "val", + -10.221765518188477 + ], + [ + "▁rights", + -10.22309684753418 + ], + [ + "▁Diese", + -10.223100662231445 + ], + [ + "ki", + -10.223350524902344 + ], + [ + "vent", + -10.22375774383545 + ], + [ + "▁born", + -10.22380542755127 + ], + [ + "▁muss", + -10.224031448364258 + ], + [ + "compared", + -10.224660873413086 + ], + [ + "▁demand", + -10.224669456481934 + ], + [ + "▁handle", + -10.225493431091309 + ], + [ + "▁mode", + -10.226058006286621 + ], + [ + "lic", + -10.226137161254883 + ], + [ + "▁ahead", + -10.226436614990234 + ], + [ + "▁sharing", + -10.227599143981934 + ], + [ + "▁micro", + -10.227779388427734 + ], + [ + "▁Par", + -10.228626251220703 + ], + [ + "▁Every", + -10.22950553894043 + ], + [ + "▁bag", + -10.229736328125 + ], + [ + "▁daca", + -10.22974967956543 + ], + [ + "▁Apple", + -10.23022174835205 + ], + [ + "▁Mark", + -10.230239868164062 + ], + [ + "▁larger", + -10.231284141540527 + ], + [ + "eze", + -10.231978416442871 + ], + [ + "▁progress", + -10.232234001159668 + ], + [ + "▁stress", + -10.232929229736328 + ], + [ + "▁cards", + -10.233663558959961 + ], + [ + "▁driving", + -10.233738899230957 + ], + [ + "▁dry", + -10.233970642089844 + ], + [ + "▁relevant", + -10.234556198120117 + ], + [ + "▁Jo", + -10.234825134277344 + ], + [ + "▁tree", + -10.235036849975586 + ], + [ + "▁reported", + -10.235770225524902 + ], + [ + "ities", + -10.23577880859375 + ], + [ + "▁tea", + -10.235806465148926 + ], + [ + "▁although", + -10.236145973205566 + ], + [ + "▁Research", + -10.236261367797852 + ], + [ + "▁pool", + 
-10.23691463470459 + ], + [ + "▁fin", + -10.237163543701172 + ], + [ + "▁Und", + -10.238130569458008 + ], + [ + "▁decide", + -10.239217758178711 + ], + [ + "▁expert", + -10.239344596862793 + ], + [ + "rate", + -10.239428520202637 + ], + [ + "zeit", + -10.239971160888672 + ], + [ + "▁26", + -10.24040412902832 + ], + [ + "▁Ka", + -10.24056339263916 + ], + [ + "▁fix", + -10.240666389465332 + ], + [ + "igen", + -10.240713119506836 + ], + [ + "▁direction", + -10.241188049316406 + ], + [ + "▁star", + -10.241661071777344 + ], + [ + "▁middle", + -10.241889953613281 + ], + [ + "▁Ja", + -10.241962432861328 + ], + [ + "▁Land", + -10.24207878112793 + ], + [ + "ken", + -10.242605209350586 + ], + [ + "▁button", + -10.242630004882812 + ], + [ + "▁rules", + -10.242656707763672 + ], + [ + "▁également", + -10.242706298828125 + ], + [ + "▁viel", + -10.243158340454102 + ], + [ + "▁welcome", + -10.243682861328125 + ], + [ + "că", + -10.243932723999023 + ], + [ + "▁Top", + -10.245308876037598 + ], + [ + "▁allowed", + -10.245487213134766 + ], + [ + "▁tip", + -10.245584487915039 + ], + [ + "▁cei", + -10.245768547058105 + ], + [ + "▁Nous", + -10.246004104614258 + ], + [ + "té", + -10.246850967407227 + ], + [ + "▁unei", + -10.246903419494629 + ], + [ + "▁efforts", + -10.247260093688965 + ], + [ + "▁note", + -10.247719764709473 + ], + [ + "▁title", + -10.247977256774902 + ], + [ + "ric", + -10.248047828674316 + ], + [ + "berg", + -10.248252868652344 + ], + [ + "▁ainsi", + -10.248576164245605 + ], + [ + "▁led", + -10.248713493347168 + ], + [ + "▁alone", + -10.248786926269531 + ], + [ + "ward", + -10.249215126037598 + ], + [ + "▁vie", + -10.249323844909668 + ], + [ + "▁brain", + -10.249427795410156 + ], + [ + "light", + -10.250100135803223 + ], + [ + "▁Court", + -10.250598907470703 + ], + [ + "set", + -10.250869750976562 + ], + [ + "▁steps", + -10.251251220703125 + ], + [ + "pri", + -10.251391410827637 + ], + [ + "Q", + -10.251654624938965 + ], + [ + "sti", + -10.251938819885254 + ], + [ + "▁voice", + -10.252121925354004 + ], + [ + "▁models", + -10.252705574035645 + ], + [ + "▁parties", + -10.25442886352539 + ], + [ + "▁radio", + -10.255270957946777 + ], + [ + "▁mission", + -10.25545883178711 + ], + [ + "▁methods", + -10.255658149719238 + ], + [ + "▁Te", + -10.256019592285156 + ], + [ + "air", + -10.256489753723145 + ], + [ + "▁essay", + -10.256719589233398 + ], + [ + "my", + -10.256826400756836 + ], + [ + "▁competition", + -10.257049560546875 + ], + [ + "ses", + -10.257447242736816 + ], + [ + "▁serious", + -10.258724212646484 + ], + [ + "▁Ti", + -10.258733749389648 + ], + [ + "▁Hand", + -10.259561538696289 + ], + [ + "not", + -10.25958251953125 + ], + [ + "▁winter", + -10.261277198791504 + ], + [ + "24", + -10.261724472045898 + ], + [ + "▁vision", + -10.26174545288086 + ], + [ + "▁technical", + -10.262110710144043 + ], + [ + "▁cross", + -10.262799263000488 + ], + [ + "▁update", + -10.262947082519531 + ], + [ + "▁Team", + -10.263564109802246 + ], + [ + "▁evening", + -10.264286041259766 + ], + [ + "▁experts", + -10.26435661315918 + ], + [ + "part", + -10.264640808105469 + ], + [ + "▁wo", + -10.265190124511719 + ], + [ + "▁App", + -10.265729904174805 + ], + [ + "▁peu", + -10.266267776489258 + ], + [ + "▁mich", + -10.26630687713623 + ], + [ + "▁reports", + -10.267001152038574 + ], + [ + "▁km", + -10.267594337463379 + ], + [ + "▁print", + -10.2678804397583 + ], + [ + "▁Hotel", + -10.268101692199707 + ], + [ + "▁earlier", + -10.268235206604004 + ], + [ + "▁uses", + -10.26826286315918 + ], + [ + "▁menu", + 
+      -10.268416404724121
+    ],
+    [
+      "▁miles",
+      -10.26845645904541
+    ],
+    [
+      "▁classes",
+      -10.268463134765625
+    ],
+    [
+      "▁mo",
+      -10.268525123596191
+    ],
+    [
+      "▁loan",
+      -10.2691011428833
+    ],
[... roughly a thousand further ["piece", log-probability] vocabulary entries elided; scores descend monotonically from about -10.269 to -10.769 ...]
+    [
+      "▁apart",
+      -10.768943786621094
+    ],
+    [
+      "▁phase",
+      -10.76894760131836
+ ], + [ + "▁seeking", + -10.769091606140137 + ], + [ + "▁mark", + -10.769148826599121 + ], + [ + "▁pet", + -10.769233703613281 + ], + [ + "▁PDF", + -10.769296646118164 + ], + [ + "▁efficiency", + -10.769577980041504 + ], + [ + "▁buildings", + -10.769611358642578 + ], + [ + "69", + -10.769723892211914 + ], + [ + "▁sens", + -10.769858360290527 + ], + [ + "▁Video", + -10.770115852355957 + ], + [ + "▁destination", + -10.770181655883789 + ], + [ + "▁female", + -10.770319938659668 + ], + [ + "▁supporting", + -10.770674705505371 + ], + [ + "▁signs", + -10.77077865600586 + ], + [ + "▁appeal", + -10.770784378051758 + ], + [ + "76", + -10.77110481262207 + ], + [ + "▁favourite", + -10.771612167358398 + ], + [ + "ock", + -10.771702766418457 + ], + [ + "▁readers", + -10.771757125854492 + ], + [ + "▁Did", + -10.771868705749512 + ], + [ + "rou", + -10.772045135498047 + ], + [ + "PA", + -10.77222728729248 + ], + [ + "▁Jean", + -10.772480964660645 + ], + [ + "▁Em", + -10.772586822509766 + ], + [ + "pass", + -10.77280330657959 + ], + [ + "▁Zi", + -10.773090362548828 + ], + [ + "▁între", + -10.773261070251465 + ], + [ + "▁fly", + -10.773427963256836 + ], + [ + "mos", + -10.773666381835938 + ], + [ + "▁emotional", + -10.773860931396484 + ], + [ + "asse", + -10.774768829345703 + ], + [ + "▁sessions", + -10.775086402893066 + ], + [ + "▁symptoms", + -10.77564811706543 + ], + [ + "▁died", + -10.776217460632324 + ], + [ + "▁seconds", + -10.776628494262695 + ], + [ + "▁procedure", + -10.777206420898438 + ], + [ + "▁express", + -10.777420997619629 + ], + [ + "▁două", + -10.777885437011719 + ], + [ + "▁valid", + -10.778393745422363 + ], + [ + "▁euro", + -10.7788667678833 + ], + [ + "▁interests", + -10.779032707214355 + ], + [ + "Having", + -10.779237747192383 + ], + [ + "▁hundreds", + -10.779669761657715 + ], + [ + "grad", + -10.780023574829102 + ], + [ + "▁neuen", + -10.780084609985352 + ], + [ + "▁cook", + -10.780552864074707 + ], + [ + "▁pur", + -10.780834197998047 + ], + [ + "▁charges", + -10.781024932861328 + ], + [ + "sche", + -10.78118896484375 + ], + [ + "▁smile", + -10.781468391418457 + ], + [ + "▁festival", + -10.781611442565918 + ], + [ + "cho", + -10.781672477722168 + ], + [ + "▁£", + -10.781937599182129 + ], + [ + "cht", + -10.78201675415039 + ], + [ + "▁macht", + -10.782021522521973 + ], + [ + "▁Wasser", + -10.782028198242188 + ], + [ + "▁Cap", + -10.78226375579834 + ], + [ + "▁Learn", + -10.78274154663086 + ], + [ + "▁load", + -10.783162117004395 + ], + [ + "▁aici", + -10.783225059509277 + ], + [ + "▁Ch", + -10.784143447875977 + ], + [ + "▁cycle", + -10.784223556518555 + ], + [ + "▁carried", + -10.784337997436523 + ], + [ + "▁jusqu", + -10.784517288208008 + ], + [ + "stein", + -10.78505802154541 + ], + [ + "ski", + -10.78513240814209 + ], + [ + "cap", + -10.78579330444336 + ], + [ + "▁Bal", + -10.785852432250977 + ], + [ + "▁minor", + -10.786053657531738 + ], + [ + "77", + -10.786175727844238 + ], + [ + "▁considering", + -10.78632640838623 + ], + [ + "innen", + -10.78644847869873 + ], + [ + "▁greatest", + -10.787055015563965 + ], + [ + "▁Training", + -10.787137031555176 + ], + [ + "08", + -10.787307739257812 + ], + [ + "▁significantly", + -10.787607192993164 + ], + [ + "gé", + -10.787728309631348 + ], + [ + "▁dumpster", + -10.788351058959961 + ], + [ + "▁allem", + -10.788930892944336 + ], + [ + "▁bonus", + -10.7889404296875 + ], + [ + "▁guy", + -10.789036750793457 + ], + [ + "fel", + -10.78904914855957 + ], + [ + "▁lifestyle", + -10.789241790771484 + ], + [ + "▁Bro", + -10.78961181640625 + ], + [ + 
"▁implement", + -10.789687156677246 + ], + [ + "lock", + -10.790046691894531 + ], + [ + "▁Earth", + -10.790142059326172 + ], + [ + "kar", + -10.790733337402344 + ], + [ + "▁invest", + -10.790833473205566 + ], + [ + "▁river", + -10.790933609008789 + ], + [ + "▁accurate", + -10.791494369506836 + ], + [ + "▁mu", + -10.791579246520996 + ], + [ + "▁celebrate", + -10.792119979858398 + ], + [ + "▁ran", + -10.79256820678711 + ], + [ + "▁bigger", + -10.792988777160645 + ], + [ + "▁Mer", + -10.793476104736328 + ], + [ + "▁millions", + -10.793486595153809 + ], + [ + "▁partie", + -10.793563842773438 + ], + [ + "▁dazu", + -10.793951988220215 + ], + [ + "▁Full", + -10.794130325317383 + ], + [ + "gie", + -10.794207572937012 + ], + [ + "bot", + -10.794373512268066 + ], + [ + "roll", + -10.79472827911377 + ], + [ + "▁Women", + -10.795303344726562 + ], + [ + "▁compare", + -10.796135902404785 + ], + [ + "▁van", + -10.796503067016602 + ], + [ + "▁apps", + -10.796521186828613 + ], + [ + "PC", + -10.797050476074219 + ], + [ + "▁drei", + -10.79736042022705 + ], + [ + "▁maison", + -10.797588348388672 + ], + [ + "▁knows", + -10.797712326049805 + ], + [ + "rid", + -10.797972679138184 + ], + [ + "62", + -10.798396110534668 + ], + [ + "class", + -10.798508644104004 + ], + [ + "▁chez", + -10.798669815063477 + ], + [ + "char", + -10.798828125 + ], + [ + "88", + -10.798989295959473 + ], + [ + "▁cast", + -10.79948902130127 + ], + [ + "▁examples", + -10.79973030090332 + ], + [ + "▁Therefore", + -10.799823760986328 + ], + [ + "▁topics", + -10.799941062927246 + ], + [ + "with", + -10.80013656616211 + ], + [ + "▁Anti", + -10.800555229187012 + ], + [ + "how", + -10.800620079040527 + ], + [ + "▁whom", + -10.80094051361084 + ], + [ + "▁Deutschland", + -10.801124572753906 + ], + [ + "tine", + -10.80113697052002 + ], + [ + "▁CEO", + -10.801224708557129 + ], + [ + "▁truck", + -10.801350593566895 + ], + [ + "▁Which", + -10.8015718460083 + ], + [ + "erie", + -10.802017211914062 + ], + [ + "fect", + -10.802069664001465 + ], + [ + "bou", + -10.8026762008667 + ], + [ + "▁(1", + -10.802818298339844 + ], + [ + "sum", + -10.802980422973633 + ], + [ + "▁bonne", + -10.803068161010742 + ], + [ + "▁remaining", + -10.80321216583252 + ], + [ + "▁equal", + -10.803543090820312 + ], + [ + "▁engage", + -10.803561210632324 + ], + [ + "▁RE", + -10.803849220275879 + ], + [ + "style", + -10.804182052612305 + ], + [ + "▁urma", + -10.804337501525879 + ], + [ + "▁Grund", + -10.80496883392334 + ], + [ + "ür", + -10.8051176071167 + ], + [ + "▁font", + -10.805353164672852 + ], + [ + "▁assets", + -10.805916786193848 + ], + [ + "AL", + -10.806102752685547 + ], + [ + "▁rear", + -10.80635929107666 + ], + [ + "▁contemporary", + -10.80646800994873 + ], + [ + "▁occur", + -10.8067045211792 + ], + [ + "rated", + -10.806941986083984 + ], + [ + "▁tight", + -10.807088851928711 + ], + [ + "▁machines", + -10.807921409606934 + ], + [ + "▁0.", + -10.808456420898438 + ], + [ + "▁Aber", + -10.808470726013184 + ], + [ + "sol", + -10.808517456054688 + ], + [ + "rü", + -10.80858039855957 + ], + [ + "▁2007", + -10.809479713439941 + ], + [ + "gg", + -10.809488296508789 + ], + [ + "▁unul", + -10.809691429138184 + ], + [ + "▁était", + -10.809908866882324 + ], + [ + "▁capture", + -10.809980392456055 + ], + [ + "▁command", + -10.810037612915039 + ], + [ + "▁wire", + -10.810425758361816 + ], + [ + "▁shift", + -10.810762405395508 + ], + [ + "▁bread", + -10.81084156036377 + ], + [ + "▁causes", + -10.810937881469727 + ], + [ + "PI", + -10.810938835144043 + ], + [ + "SC", + 
-10.811086654663086 + ], + [ + "▁lights", + -10.811190605163574 + ], + [ + "▁lived", + -10.811293601989746 + ], + [ + "mul", + -10.811446189880371 + ], + [ + "▁Cur", + -10.811917304992676 + ], + [ + "▁Richard", + -10.811973571777344 + ], + [ + "37", + -10.812638282775879 + ], + [ + "▁cup", + -10.812737464904785 + ], + [ + "▁fields", + -10.812983512878418 + ], + [ + "▁crusher", + -10.813389778137207 + ], + [ + "65", + -10.813774108886719 + ], + [ + "avons", + -10.813822746276855 + ], + [ + "▁gear", + -10.813835144042969 + ], + [ + "▁standing", + -10.813844680786133 + ], + [ + "▁thick", + -10.81445026397705 + ], + [ + "aff", + -10.815132141113281 + ], + [ + "ments", + -10.815434455871582 + ], + [ + "▁conflict", + -10.815728187561035 + ], + [ + "ität", + -10.815825462341309 + ], + [ + "▁worse", + -10.816295623779297 + ], + [ + "SE", + -10.816332817077637 + ], + [ + "imi", + -10.816459655761719 + ], + [ + "▁dating", + -10.817033767700195 + ], + [ + "Do", + -10.817073822021484 + ], + [ + "▁flexible", + -10.817093849182129 + ], + [ + "ologie", + -10.817131996154785 + ], + [ + "SU", + -10.817200660705566 + ], + [ + "▁contribute", + -10.817306518554688 + ], + [ + "▁denn", + -10.817428588867188 + ], + [ + "▁appointment", + -10.81746768951416 + ], + [ + "▁ticket", + -10.817523002624512 + ], + [ + "bed", + -10.817892074584961 + ], + [ + "▁2019.", + -10.817936897277832 + ], + [ + "▁tasks", + -10.81871223449707 + ], + [ + "▁carbon", + -10.818734169006348 + ], + [ + "▁situations", + -10.819400787353516 + ], + [ + "MA", + -10.819402694702148 + ], + [ + "▁portion", + -10.819498062133789 + ], + [ + "▁urban", + -10.819585800170898 + ], + [ + "▁Canadian", + -10.819805145263672 + ], + [ + "▁Bur", + -10.819937705993652 + ], + [ + "▁pack", + -10.81995964050293 + ], + [ + "▁effet", + -10.819992065429688 + ], + [ + "▁Ball", + -10.82008171081543 + ], + [ + "▁timpul", + -10.82014274597168 + ], + [ + "▁owned", + -10.820211410522461 + ], + [ + "▁surprise", + -10.820413589477539 + ], + [ + "▁Mu", + -10.820582389831543 + ], + [ + "▁decades", + -10.821001052856445 + ], + [ + "▁affected", + -10.821728706359863 + ], + [ + "▁proven", + -10.821732521057129 + ], + [ + "▁Fe", + -10.821990966796875 + ], + [ + "zy", + -10.822042465209961 + ], + [ + "42", + -10.822175979614258 + ], + [ + "▁trend", + -10.8223876953125 + ], + [ + "▁autres", + -10.82262897491455 + ], + [ + "No", + -10.823028564453125 + ], + [ + "▁nine", + -10.823565483093262 + ], + [ + "ON", + -10.82376480102539 + ], + [ + "NE", + -10.823953628540039 + ], + [ + "oli", + -10.824359893798828 + ], + [ + "▁Daniel", + -10.824434280395508 + ], + [ + "▁spa", + -10.824939727783203 + ], + [ + "▁messages", + -10.825084686279297 + ], + [ + "PS", + -10.825183868408203 + ], + [ + "47", + -10.825703620910645 + ], + [ + "▁doch", + -10.826032638549805 + ], + [ + "▁improvement", + -10.826187133789062 + ], + [ + "▁mountain", + -10.826350212097168 + ], + [ + "▁Room", + -10.826451301574707 + ], + [ + "▁edition", + -10.826546669006348 + ], + [ + "▁musical", + -10.826712608337402 + ], + [ + "CP", + -10.827024459838867 + ], + [ + "▁Mill", + -10.827027320861816 + ], + [ + "▁steht", + -10.827740669250488 + ], + [ + "▁determined", + -10.828083038330078 + ], + [ + "you", + -10.828392028808594 + ], + [ + "weg", + -10.828554153442383 + ], + [ + "▁Digital", + -10.828624725341797 + ], + [ + "▁filter", + -10.828903198242188 + ], + [ + "▁youth", + -10.829047203063965 + ], + [ + "▁assessment", + -10.829301834106445 + ], + [ + "▁butter", + -10.829370498657227 + ], + [ + "▁Watch", + 
-10.829427719116211 + ], + [ + "▁zusammen", + -10.829471588134766 + ], + [ + "▁View", + -10.829606056213379 + ], + [ + "09", + -10.829649925231934 + ], + [ + "▁sole", + -10.829816818237305 + ], + [ + ".00", + -10.830018997192383 + ], + [ + "33", + -10.83015251159668 + ], + [ + "▁export", + -10.830229759216309 + ], + [ + "ery", + -10.830373764038086 + ], + [ + "▁zurück", + -10.830426216125488 + ], + [ + "▁walls", + -10.83048152923584 + ], + [ + "▁recognize", + -10.8306884765625 + ], + [ + "law", + -10.830801963806152 + ], + [ + "▁parent", + -10.830863952636719 + ], + [ + "ST", + -10.831357955932617 + ], + [ + "▁description", + -10.831669807434082 + ], + [ + "MS", + -10.831887245178223 + ], + [ + "SM", + -10.83189582824707 + ], + [ + "▁Finally", + -10.831940650939941 + ], + [ + "▁hardware", + -10.831965446472168 + ], + [ + "ident", + -10.832464218139648 + ], + [ + "▁brown", + -10.832566261291504 + ], + [ + "▁kinds", + -10.832950592041016 + ], + [ + "▁Arts", + -10.83297061920166 + ], + [ + "▁concert", + -10.83341121673584 + ], + [ + "▁sec", + -10.83342456817627 + ], + [ + "▁represent", + -10.833512306213379 + ], + [ + "▁institutions", + -10.833597183227539 + ], + [ + "▁fur", + -10.833998680114746 + ], + [ + "▁Support", + -10.83403205871582 + ], + [ + "87", + -10.834076881408691 + ], + [ + "▁ease", + -10.834178924560547 + ], + [ + "▁feels", + -10.834218978881836 + ], + [ + "▁sheet", + -10.834342002868652 + ], + [ + "▁Though", + -10.83437442779541 + ], + [ + "▁propose", + -10.834381103515625 + ], + [ + "▁personnel", + -10.834409713745117 + ], + [ + "bie", + -10.834794044494629 + ], + [ + "▁contest", + -10.834836959838867 + ], + [ + "▁successfully", + -10.835152626037598 + ], + [ + "▁direkt", + -10.835397720336914 + ], + [ + "bietet", + -10.835597038269043 + ], + [ + "▁submit", + -10.835888862609863 + ], + [ + "▁sicher", + -10.835919380187988 + ], + [ + "▁Personal", + -10.83607006072998 + ], + [ + "94", + -10.836341857910156 + ], + [ + "61", + -10.836400985717773 + ], + [ + "▁Very", + -10.836540222167969 + ], + [ + "bol", + -10.836603164672852 + ], + [ + "▁ha", + -10.837089538574219 + ], + [ + "▁channel", + -10.8372220993042 + ], + [ + "mut", + -10.837289810180664 + ], + [ + "▁mouth", + -10.837342262268066 + ], + [ + "▁vast", + -10.837395668029785 + ], + [ + "▁Ob", + -10.837569236755371 + ], + [ + "lit", + -10.83763313293457 + ], + [ + "▁poly", + -10.837878227233887 + ], + [ + "▁trained", + -10.838102340698242 + ], + [ + "▁specialist", + -10.838122367858887 + ], + [ + "UL", + -10.83822250366211 + ], + [ + "▁seiner", + -10.838336944580078 + ], + [ + "SS", + -10.838627815246582 + ], + [ + "▁vacation", + -10.838672637939453 + ], + [ + "▁resume", + -10.839157104492188 + ], + [ + "▁constantly", + -10.839717864990234 + ], + [ + "▁treated", + -10.83986759185791 + ], + [ + "▁150", + -10.840936660766602 + ], + [ + "▁native", + -10.841246604919434 + ], + [ + "▁Russian", + -10.841329574584961 + ], + [ + "▁patterns", + -10.841371536254883 + ], + [ + "▁knowing", + -10.841670989990234 + ], + [ + "▁Pan", + -10.841682434082031 + ], + [ + "peri", + -10.841848373413086 + ], + [ + "aci", + -10.841864585876465 + ], + [ + "▁answers", + -10.842114448547363 + ], + [ + "▁heute", + -10.842985153198242 + ], + [ + "93", + -10.843056678771973 + ], + [ + "▁Winter", + -10.844083786010742 + ], + [ + "▁yes", + -10.844173431396484 + ], + [ + "SP", + -10.844185829162598 + ], + [ + "].", + -10.844388008117676 + ], + [ + "▁kein", + -10.844862937927246 + ], + [ + "▁introduce", + -10.8450927734375 + ], + [ + "-4", + 
-10.84555435180664 + ], + [ + "▁shoot", + -10.845762252807617 + ], + [ + "AR", + -10.84576416015625 + ], + [ + "▁receiving", + -10.845864295959473 + ], + [ + "▁intre", + -10.84702205657959 + ], + [ + "▁appeared", + -10.84708023071289 + ], + [ + "▁brother", + -10.847321510314941 + ], + [ + "▁extend", + -10.847765922546387 + ], + [ + "▁fara", + -10.848737716674805 + ], + [ + "▁kommt", + -10.848876953125 + ], + [ + "ali", + -10.848913192749023 + ], + [ + "▁numai", + -10.849047660827637 + ], + [ + "▁scientific", + -10.84913158416748 + ], + [ + "▁virtual", + -10.849145889282227 + ], + [ + "▁Ac", + -10.849513053894043 + ], + [ + "▁procedures", + -10.849631309509277 + ], + [ + "▁silver", + -10.849821090698242 + ], + [ + "▁leather", + -10.849979400634766 + ], + [ + "DA", + -10.85014820098877 + ], + [ + "▁executive", + -10.850263595581055 + ], + [ + "▁officials", + -10.850496292114258 + ], + [ + "▁agencies", + -10.850503921508789 + ], + [ + "▁Software", + -10.850540161132812 + ], + [ + "▁cor", + -10.850690841674805 + ], + [ + "Con", + -10.850741386413574 + ], + [ + "▁log", + -10.851066589355469 + ], + [ + "ț", + -10.851147651672363 + ], + [ + "02", + -10.851195335388184 + ], + [ + "▁7.", + -10.85245132446289 + ], + [ + "▁accepted", + -10.852483749389648 + ], + [ + "▁Berlin", + -10.852538108825684 + ], + [ + "ID", + -10.852582931518555 + ], + [ + "cot", + -10.852788925170898 + ], + [ + "▁employment", + -10.852799415588379 + ], + [ + "run", + -10.853020668029785 + ], + [ + "▁identified", + -10.853178977966309 + ], + [ + "96", + -10.853887557983398 + ], + [ + "▁déjà", + -10.853944778442383 + ], + [ + "▁cuisine", + -10.853952407836914 + ], + [ + "turi", + -10.854070663452148 + ], + [ + "▁Japanese", + -10.854316711425781 + ], + [ + "▁golf", + -10.854514122009277 + ], + [ + "▁Ki", + -10.854787826538086 + ], + [ + "▁carefully", + -10.854863166809082 + ], + [ + "▁remote", + -10.854973793029785 + ], + [ + "▁2018,", + -10.855148315429688 + ], + [ + "▁sus", + -10.855154991149902 + ], + [ + "tique", + -10.855293273925781 + ], + [ + "▁residential", + -10.855695724487305 + ], + [ + "97", + -10.855809211730957 + ], + [ + "▁Spring", + -10.855908393859863 + ], + [ + "▁Marketing", + -10.856186866760254 + ], + [ + "▁Control", + -10.85630989074707 + ], + [ + "var", + -10.856344223022461 + ], + [ + "▁historical", + -10.8563814163208 + ], + [ + "▁freedom", + -10.856423377990723 + ], + [ + "sure", + -10.856426239013672 + ], + [ + "▁broken", + -10.856796264648438 + ], + [ + "▁criminal", + -10.856949806213379 + ], + [ + "▁innovation", + -10.857075691223145 + ], + [ + "▁Italian", + -10.857192039489746 + ], + [ + "sper", + -10.857282638549805 + ], + [ + "▁cake", + -10.857653617858887 + ], + [ + "▁candidates", + -10.857894897460938 + ], + [ + "▁sizes", + -10.858267784118652 + ], + [ + "pel", + -10.858366966247559 + ], + [ + "▁frequently", + -10.85889720916748 + ], + [ + "▁planet", + -10.859138488769531 + ], + [ + "▁writer", + -10.859519958496094 + ], + [ + "1,", + -10.859569549560547 + ], + [ + "uvent", + -10.85959529876709 + ], + [ + "▁awareness", + -10.859807968139648 + ], + [ + "name", + -10.859954833984375 + ], + [ + "▁Children", + -10.859980583190918 + ], + [ + "▁relatively", + -10.860311508178711 + ], + [ + "▁pu", + -10.860321998596191 + ], + [ + "▁quiet", + -10.86038875579834 + ], + [ + "▁planned", + -10.860716819763184 + ], + [ + "▁election", + -10.861419677734375 + ], + [ + "▁6.", + -10.861761093139648 + ], + [ + "▁broad", + -10.861772537231445 + ], + [ + "▁skill", + -10.861835479736328 + ], + [ + "▁reasonable", + 
-10.862037658691406 + ], + [ + "▁Fort", + -10.862283706665039 + ], + [ + "▁aceea", + -10.862407684326172 + ], + [ + "▁arrived", + -10.86263370513916 + ], + [ + "▁payments", + -10.862680435180664 + ], + [ + "ack", + -10.862700462341309 + ], + [ + "▁Ort", + -10.863354682922363 + ], + [ + "▁investors", + -10.863364219665527 + ], + [ + "▁operate", + -10.86351203918457 + ], + [ + "ME", + -10.863556861877441 + ], + [ + "dic", + -10.863683700561523 + ], + [ + "▁foods", + -10.863731384277344 + ], + [ + "▁stick", + -10.863831520080566 + ], + [ + "▁agents", + -10.86412525177002 + ], + [ + "▁crowd", + -10.864175796508789 + ], + [ + "▁Students", + -10.864480972290039 + ], + [ + "▁concerned", + -10.864609718322754 + ], + [ + "test", + -10.864740371704102 + ], + [ + "▁designer", + -10.865334510803223 + ], + [ + "▁Conference", + -10.865593910217285 + ], + [ + "▁saving", + -10.866105079650879 + ], + [ + "▁recorded", + -10.866422653198242 + ], + [ + "▁proposed", + -10.866564750671387 + ], + [ + "▁ship", + -10.86657428741455 + ], + [ + "▁cred", + -10.867274284362793 + ], + [ + "▁Ci", + -10.867440223693848 + ], + [ + "RE", + -10.867619514465332 + ], + [ + "▁tradition", + -10.867753982543945 + ], + [ + "▁worldwide", + -10.867779731750488 + ], + [ + "64", + -10.867944717407227 + ], + [ + "▁television", + -10.867989540100098 + ], + [ + "▁projet", + -10.868102073669434 + ], + [ + "ency", + -10.868487358093262 + ], + [ + "▁struggle", + -10.868514060974121 + ], + [ + "▁twice", + -10.868955612182617 + ], + [ + "▁Off", + -10.869234085083008 + ], + [ + "▁begins", + -10.869577407836914 + ], + [ + "key", + -10.869794845581055 + ], + [ + "▁Table", + -10.869963645935059 + ], + [ + "▁demande", + -10.870177268981934 + ], + [ + "▁liquid", + -10.870441436767578 + ], + [ + "meter", + -10.870684623718262 + ], + [ + "▁2001", + -10.871190071105957 + ], + [ + "▁willing", + -10.871660232543945 + ], + [ + "▁medicine", + -10.871707916259766 + ], + [ + "▁expand", + -10.871747970581055 + ], + [ + "▁2004", + -10.871804237365723 + ], + [ + "▁2002", + -10.872016906738281 + ], + [ + "▁accord", + -10.872292518615723 + ], + [ + "▁Chris", + -10.872446060180664 + ], + [ + "▁prove", + -10.872543334960938 + ], + [ + "ston", + -10.872740745544434 + ], + [ + "mettre", + -10.872800827026367 + ], + [ + "▁moments", + -10.873537063598633 + ], + [ + "tik", + -10.87368392944336 + ], + [ + "such", + -10.874055862426758 + ], + [ + "2.", + -10.874431610107422 + ], + [ + "▁UN", + -10.874561309814453 + ], + [ + "▁jump", + -10.874737739562988 + ], + [ + "▁dish", + -10.87539291381836 + ], + [ + "▁Key", + -10.875663757324219 + ], + [ + "▁challenging", + -10.875975608825684 + ], + [ + "▁domestic", + -10.876410484313965 + ], + [ + "▁impressive", + -10.876752853393555 + ], + [ + "iger", + -10.877022743225098 + ], + [ + "▁Ram", + -10.877157211303711 + ], + [ + "▁doit", + -10.877263069152832 + ], + [ + "▁concrete", + -10.87734317779541 + ], + [ + "▁Unternehmen", + -10.877397537231445 + ], + [ + "▁LED", + -10.877429008483887 + ], + [ + "▁trouver", + -10.877533912658691 + ], + [ + "▁fundamental", + -10.877875328063965 + ], + [ + "▁implementation", + -10.878121376037598 + ], + [ + "85", + -10.878247261047363 + ], + [ + "▁hosting", + -10.87856388092041 + ], + [ + "▁Game", + -10.878691673278809 + ], + [ + "▁taught", + -10.878981590270996 + ], + [ + "tung", + -10.879016876220703 + ], + [ + "ront", + -10.87940502166748 + ], + [ + "▁shoes", + -10.879639625549316 + ], + [ + "79", + -10.8797607421875 + ], + [ + "▁stunning", + -10.879778861999512 + ], + [ + "▁Congress", + 
-10.880142211914062 + ], + [ + "▁Ent", + -10.880278587341309 + ], + [ + "▁Wer", + -10.880607604980469 + ], + [ + "▁alt", + -10.880608558654785 + ], + [ + "ör", + -10.880699157714844 + ], + [ + "▁calm", + -10.8808012008667 + ], + [ + "46", + -10.881132125854492 + ], + [ + "▁Daca", + -10.881404876708984 + ], + [ + "71", + -10.881938934326172 + ], + [ + "▁Dec", + -10.882392883300781 + ], + [ + "▁Fo", + -10.882437705993652 + ], + [ + "▁defense", + -10.88313102722168 + ], + [ + "▁expectations", + -10.883166313171387 + ], + [ + "▁Alle", + -10.88318920135498 + ], + [ + "▁brief", + -10.883691787719727 + ], + [ + "▁Hospital", + -10.883975982666016 + ], + [ + "▁sides", + -10.884121894836426 + ], + [ + "▁yellow", + -10.884140014648438 + ], + [ + "lei", + -10.88451862335205 + ], + [ + "▁speaking", + -10.884589195251465 + ], + [ + "▁crucial", + -10.885198593139648 + ], + [ + "▁Town", + -10.8854341506958 + ], + [ + "▁married", + -10.885574340820312 + ], + [ + "▁acesta", + -10.885583877563477 + ], + [ + "▁noted", + -10.885611534118652 + ], + [ + "▁Word", + -10.885659217834473 + ], + [ + "▁conducted", + -10.885963439941406 + ], + [ + "▁decor", + -10.886249542236328 + ], + [ + "kon", + -10.886565208435059 + ], + [ + "▁supplies", + -10.8866605758667 + ], + [ + "▁adventure", + -10.886691093444824 + ], + [ + "▁exhibition", + -10.887163162231445 + ], + [ + "heit", + -10.887300491333008 + ], + [ + "▁36", + -10.88744831085205 + ], + [ + "eria", + -10.887505531311035 + ], + [ + "ines", + -10.887551307678223 + ], + [ + "ological", + -10.887582778930664 + ], + [ + "quel", + -10.88806438446045 + ], + [ + "▁Van", + -10.88825511932373 + ], + [ + "-19", + -10.88853645324707 + ], + [ + "2,", + -10.888566970825195 + ], + [ + "▁Band", + -10.888989448547363 + ], + [ + "▁soil", + -10.889184951782227 + ], + [ + "▁Tim", + -10.889599800109863 + ], + [ + "▁NOT", + -10.88968563079834 + ], + [ + "▁pilot", + -10.889753341674805 + ], + [ + "▁Sh", + -10.889774322509766 + ], + [ + "Ho", + -10.890361785888672 + ], + [ + "CA", + -10.890509605407715 + ], + [ + "▁Eu", + -10.890745162963867 + ], + [ + "▁committee", + -10.890829086303711 + ], + [ + "▁Store", + -10.891075134277344 + ], + [ + "▁joint", + -10.89111614227295 + ], + [ + "▁Op", + -10.891315460205078 + ], + [ + "▁Jack", + -10.891985893249512 + ], + [ + "quality", + -10.89216423034668 + ], + [ + "▁Has", + -10.892489433288574 + ], + [ + "▁wenig", + -10.892507553100586 + ], + [ + "hood", + -10.892545700073242 + ], + [ + "▁Class", + -10.892582893371582 + ], + [ + "rus", + -10.892773628234863 + ], + [ + "▁grown", + -10.89294719696045 + ], + [ + "▁About", + -10.893518447875977 + ], + [ + "▁sum", + -10.893942832946777 + ], + [ + "▁Fair", + -10.893946647644043 + ], + [ + "SA", + -10.894149780273438 + ], + [ + "92", + -10.894185066223145 + ], + [ + "▁fourth", + -10.894354820251465 + ], + [ + "▁featured", + -10.894384384155273 + ], + [ + "▁Pen", + -10.89444637298584 + ], + [ + "▁natürlich", + -10.894885063171387 + ], + [ + "ched", + -10.894901275634766 + ], + [ + "▁ban", + -10.895112991333008 + ], + [ + "anne", + -10.89522647857666 + ], + [ + "▁theory", + -10.895413398742676 + ], + [ + "bin", + -10.895438194274902 + ], + [ + "iers", + -10.895819664001465 + ], + [ + "▁strategic", + -10.895903587341309 + ], + [ + "▁jours", + -10.895956039428711 + ], + [ + "▁communicate", + -10.896124839782715 + ], + [ + "▁pin", + -10.896320343017578 + ], + [ + "▁Bon", + -10.89721393585205 + ], + [ + "kom", + -10.897290229797363 + ], + [ + "-5", + -10.898177146911621 + ], + [ + "▁degrees", + 
-10.898643493652344 + ], + [ + "▁entertainment", + -10.899014472961426 + ], + [ + "ară", + -10.899248123168945 + ], + [ + "ales", + -10.899425506591797 + ], + [ + "▁pendant", + -10.89954662322998 + ], + [ + "▁Series", + -10.899575233459473 + ], + [ + "▁holds", + -10.899592399597168 + ], + [ + "▁Mini", + -10.899828910827637 + ], + [ + "▁Obama", + -10.899898529052734 + ], + [ + "▁conform", + -10.900163650512695 + ], + [ + "-10", + -10.900216102600098 + ], + [ + "▁preparation", + -10.9009370803833 + ], + [ + "▁autre", + -10.90105152130127 + ], + [ + "▁mortgage", + -10.901155471801758 + ], + [ + "▁Kan", + -10.901508331298828 + ], + [ + "▁typical", + -10.901538848876953 + ], + [ + "01", + -10.901711463928223 + ], + [ + "▁Review", + -10.901862144470215 + ], + [ + "▁laptop", + -10.902127265930176 + ], + [ + "CR", + -10.902610778808594 + ], + [ + "▁thread", + -10.90265941619873 + ], + [ + "BS", + -10.902661323547363 + ], + [ + "▁upper", + -10.902700424194336 + ], + [ + "▁searching", + -10.902932167053223 + ], + [ + "▁pen", + -10.903214454650879 + ], + [ + "▁Middle", + -10.90333080291748 + ], + [ + "73", + -10.903359413146973 + ], + [ + "▁leg", + -10.903650283813477 + ], + [ + "onic", + -10.904272079467773 + ], + [ + "IS", + -10.904356956481934 + ], + [ + "▁Kar", + -10.904623985290527 + ], + [ + "anz", + -10.9046630859375 + ], + [ + "▁circuit", + -10.904901504516602 + ], + [ + "▁Casino", + -10.905384063720703 + ], + [ + "07", + -10.90584659576416 + ], + [ + "▁petit", + -10.905906677246094 + ], + [ + "TV", + -10.905978202819824 + ], + [ + "level", + -10.906311988830566 + ], + [ + "▁Point", + -10.906312942504883 + ], + [ + "rau", + -10.906474113464355 + ], + [ + "▁cabinet", + -10.906991958618164 + ], + [ + "▁failed", + -10.907042503356934 + ], + [ + "▁stated", + -10.907126426696777 + ], + [ + "LA", + -10.907461166381836 + ], + [ + "▁privacy", + -10.907596588134766 + ], + [ + "vol", + -10.907901763916016 + ], + [ + "ativ", + -10.908151626586914 + ], + [ + "▁matters", + -10.908210754394531 + ], + [ + "▁Mor", + -10.908555030822754 + ], + [ + "▁Ur", + -10.90860652923584 + ], + [ + "view", + -10.908968925476074 + ], + [ + "▁consultation", + -10.90921688079834 + ], + [ + "TS", + -10.909296989440918 + ], + [ + "▁apartment", + -10.909412384033203 + ], + [ + "▁integrated", + -10.909425735473633 + ], + [ + "74", + -10.909669876098633 + ], + [ + "▁Through", + -10.909710884094238 + ], + [ + "▁kick", + -10.909798622131348 + ], + [ + "▁perioada", + -10.90993881225586 + ], + [ + "▁entirely", + -10.909953117370605 + ], + [ + "▁impossible", + -10.91015911102295 + ], + [ + "▁consideration", + -10.910268783569336 + ], + [ + "▁Alt", + -10.91054916381836 + ], + [ + "▁Come", + -10.911089897155762 + ], + [ + "▁outstanding", + -10.911276817321777 + ], + [ + "83", + -10.911727905273438 + ], + [ + "▁prezent", + -10.911859512329102 + ], + [ + "▁Local", + -10.911993980407715 + ], + [ + "▁Camp", + -10.912056922912598 + ], + [ + "▁bear", + -10.912067413330078 + ], + [ + "enden", + -10.912262916564941 + ], + [ + "life", + -10.91236686706543 + ], + [ + "▁Haus", + -10.912516593933105 + ], + [ + "▁William", + -10.912644386291504 + ], + [ + "“,", + -10.912665367126465 + ], + [ + "▁Instagram", + -10.91285514831543 + ], + [ + "▁solve", + -10.913195610046387 + ], + [ + "▁Ze", + -10.913431167602539 + ], + [ + "▁everyday", + -10.91357135772705 + ], + [ + "bla", + -10.913615226745605 + ], + [ + "eng", + -10.913662910461426 + ], + [ + "ough", + -10.914246559143066 + ], + [ + "84", + -10.914483070373535 + ], + [ + "?\"", + -10.914599418640137 
+ ], + [ + "rely", + -10.91476821899414 + ], + [ + "TH", + -10.914841651916504 + ], + [ + "lang", + -10.91511058807373 + ], + [ + "82", + -10.915817260742188 + ], + [ + "▁removal", + -10.91589641571045 + ], + [ + "ală", + -10.915956497192383 + ], + [ + "▁circumstances", + -10.916097640991211 + ], + [ + "ente", + -10.91622257232666 + ], + [ + "▁lieu", + -10.91645336151123 + ], + [ + "▁2016.", + -10.91710376739502 + ], + [ + "▁ales", + -10.917342185974121 + ], + [ + "▁pure", + -10.917482376098633 + ], + [ + "▁choosing", + -10.917590141296387 + ], + [ + "▁Russia", + -10.917698860168457 + ], + [ + "amp", + -10.917703628540039 + ], + [ + "▁Santa", + -10.91788387298584 + ], + [ + "▁happening", + -10.918203353881836 + ], + [ + "▁crew", + -10.91822338104248 + ], + [ + "▁lei", + -10.91855239868164 + ], + [ + "IP", + -10.91858196258545 + ], + [ + "RO", + -10.919425964355469 + ], + [ + "▁resort", + -10.919514656066895 + ], + [ + "ened", + -10.919689178466797 + ], + [ + "MB", + -10.920031547546387 + ], + [ + "▁styles", + -10.920052528381348 + ], + [ + "▁dernier", + -10.920533180236816 + ], + [ + "uck", + -10.920699119567871 + ], + [ + "▁Guide", + -10.920710563659668 + ], + [ + "fic", + -10.92096996307373 + ], + [ + "▁fitness", + -10.921977996826172 + ], + [ + "▁healthcare", + -10.92223072052002 + ], + [ + "mol", + -10.92237663269043 + ], + [ + "▁vis", + -10.922721862792969 + ], + [ + "▁atmosphere", + -10.922972679138184 + ], + [ + "▁motion", + -10.922989845275879 + ], + [ + "▁closer", + -10.923114776611328 + ], + [ + "▁SA", + -10.92335319519043 + ], + [ + "▁default", + -10.923371315002441 + ], + [ + "▁architecture", + -10.923471450805664 + ], + [ + "iile", + -10.923528671264648 + ], + [ + "zel", + -10.923675537109375 + ], + [ + "cla", + -10.92387866973877 + ], + [ + "OP", + -10.924382209777832 + ], + [ + "▁west", + -10.924965858459473 + ], + [ + "▁Energy", + -10.925613403320312 + ], + [ + "▁positions", + -10.925777435302734 + ], + [ + "▁contrast", + -10.925885200500488 + ], + [ + "▁serves", + -10.92605972290039 + ], + [ + "cup", + -10.926340103149414 + ], + [ + "▁rose", + -10.926485061645508 + ], + [ + "pers", + -10.92664623260498 + ], + [ + "▁noise", + -10.926846504211426 + ], + [ + "mont", + -10.92690658569336 + ], + [ + "#", + -10.927061080932617 + ], + [ + "lies", + -10.927326202392578 + ], + [ + "pat", + -10.927718162536621 + ], + [ + "IC", + -10.927956581115723 + ], + [ + "arc", + -10.927989959716797 + ], + [ + "▁winner", + -10.928524017333984 + ], + [ + "tent", + -10.928732872009277 + ], + [ + "▁Preis", + -10.929106712341309 + ], + [ + "▁vin", + -10.929254531860352 + ], + [ + "blo", + -10.92929458618164 + ], + [ + "ție", + -10.929520606994629 + ], + [ + "▁OR", + -10.930315017700195 + ], + [ + "▁Buch", + -10.930798530578613 + ], + [ + "▁nearby", + -10.931190490722656 + ], + [ + "▁meetings", + -10.931290626525879 + ], + [ + "▁48", + -10.931465148925781 + ], + [ + "▁quand", + -10.93152904510498 + ], + [ + "▁usual", + -10.931936264038086 + ], + [ + "▁weitere", + -10.932539939880371 + ], + [ + "▁caught", + -10.932571411132812 + ], + [ + "▁issued", + -10.932626724243164 + ], + [ + "ști", + -10.932896614074707 + ], + [ + "upcoming", + -10.933232307434082 + ], + [ + "▁agreed", + -10.933233261108398 + ], + [ + "place", + -10.933353424072266 + ], + [ + "▁Brand", + -10.93344497680664 + ], + [ + "▁relation", + -10.933969497680664 + ], + [ + "▁atât", + -10.934090614318848 + ], + [ + "▁Tre", + -10.934176445007324 + ], + [ + "▁lors", + -10.934438705444336 + ], + [ + "▁adopt", + -10.934452056884766 + ], + [ + 
"▁celui", + -10.93458366394043 + ], + [ + "cken", + -10.93505859375 + ], + [ + "▁partnership", + -10.935284614562988 + ], + [ + "?”", + -10.935376167297363 + ], + [ + "▁ba", + -10.935746192932129 + ], + [ + "▁ID", + -10.935832023620605 + ], + [ + "▁consistent", + -10.935835838317871 + ], + [ + "▁Ya", + -10.935941696166992 + ], + [ + "▁Academy", + -10.936182022094727 + ], + [ + "cial", + -10.936230659484863 + ], + [ + "1%", + -10.936366081237793 + ], + [ + "▁mise", + -10.936684608459473 + ], + [ + "▁gute", + -10.936728477478027 + ], + [ + "gli", + -10.936939239501953 + ], + [ + "▁Bu", + -10.937679290771484 + ], + [ + "▁reduction", + -10.937917709350586 + ], + [ + "acy", + -10.938126564025879 + ], + [ + "aga", + -10.938161849975586 + ], + [ + "▁Sc", + -10.938273429870605 + ], + [ + "▁Informationen", + -10.938308715820312 + ], + [ + "▁kommen", + -10.938352584838867 + ], + [ + "press", + -10.93837833404541 + ], + [ + "▁bridge", + -10.938379287719727 + ], + [ + "▁qualified", + -10.938671112060547 + ], + [ + "position", + -10.938821792602539 + ], + [ + "▁combat", + -10.938933372497559 + ], + [ + "!\"", + -10.938993453979492 + ], + [ + "eva", + -10.939217567443848 + ], + [ + "oase", + -10.939380645751953 + ], + [ + "▁inner", + -10.939410209655762 + ], + [ + "▁loans", + -10.939720153808594 + ], + [ + "made", + -10.939786911010742 + ], + [ + "▁Mexico", + -10.93993091583252 + ], + [ + "▁formal", + -10.940092086791992 + ], + [ + "▁fell", + -10.94021987915039 + ], + [ + "91", + -10.940524101257324 + ], + [ + "▁campus", + -10.9407320022583 + ], + [ + "ienne", + -10.940869331359863 + ], + [ + "▁framework", + -10.94105339050293 + ], + [ + "ncing", + -10.941157341003418 + ], + [ + "▁Para", + -10.941222190856934 + ], + [ + "▁password", + -10.941298484802246 + ], + [ + "▁sei", + -10.941422462463379 + ], + [ + "▁Cross", + -10.941532135009766 + ], + [ + "▁Ten", + -10.941873550415039 + ], + [ + "bank", + -10.941887855529785 + ], + [ + "▁gun", + -10.942000389099121 + ], + [ + "ient", + -10.942021369934082 + ], + [ + "▁usage", + -10.942176818847656 + ], + [ + "▁(2", + -10.942278861999512 + ], + [ + "Gra", + -10.942320823669434 + ], + [ + "▁prea", + -10.94253158569336 + ], + [ + "▁Als", + -10.942619323730469 + ], + [ + "▁finance", + -10.942638397216797 + ], + [ + "tate", + -10.942665100097656 + ], + [ + "ition", + -10.942703247070312 + ], + [ + "▁regulations", + -10.942741394042969 + ], + [ + "▁Professional", + -10.943001747131348 + ], + [ + "▁pl", + -10.94336986541748 + ], + [ + "▁SEO", + -10.943472862243652 + ], + [ + "▁trecut", + -10.943487167358398 + ], + [ + "▁aller", + -10.943509101867676 + ], + [ + "▁violence", + -10.943986892700195 + ], + [ + "▁membership", + -10.944117546081543 + ], + [ + "▁picked", + -10.944162368774414 + ], + [ + "▁collected", + -10.9443359375 + ], + [ + "▁extended", + -10.944449424743652 + ], + [ + "▁religious", + -10.944661140441895 + ], + [ + "▁salle", + -10.944767951965332 + ], + [ + "RA", + -10.944781303405762 + ], + [ + "▁blend", + -10.945232391357422 + ], + [ + "▁Min", + -10.94532299041748 + ], + [ + "kal", + -10.945887565612793 + ], + [ + "▁featuring", + -10.945902824401855 + ], + [ + "▁researchers", + -10.946263313293457 + ], + [ + "▁Search", + -10.946558952331543 + ], + [ + "CE", + -10.946675300598145 + ], + [ + "▁recognized", + -10.94682502746582 + ], + [ + "▁semi", + -10.94692611694336 + ], + [ + "▁exposure", + -10.94718074798584 + ], + [ + "grew", + -10.947466850280762 + ], + [ + "▁candidate", + -10.948250770568848 + ], + [ + "▁shares", + -10.948908805847168 + ], + [ + 
"▁edit", + -10.949745178222656 + ], + [ + "CS", + -10.949905395507812 + ], + [ + "▁Cl", + -10.950240135192871 + ], + [ + "▁Enjoy", + -10.951438903808594 + ], + [ + "▁hurt", + -10.951482772827148 + ], + [ + "▁bottle", + -10.951593399047852 + ], + [ + "▁Buy", + -10.95159912109375 + ], + [ + "▁superior", + -10.952286720275879 + ], + [ + "▁missed", + -10.952424049377441 + ], + [ + "▁workshop", + -10.952433586120605 + ], + [ + "action", + -10.952437400817871 + ], + [ + "ple", + -10.952699661254883 + ], + [ + "▁Schul", + -10.952814102172852 + ], + [ + "▁houses", + -10.953080177307129 + ], + [ + "▁2017,", + -10.953569412231445 + ], + [ + "▁killed", + -10.953750610351562 + ], + [ + "▁calendar", + -10.954306602478027 + ], + [ + "▁Mike", + -10.954597473144531 + ], + [ + "FA", + -10.954627990722656 + ], + [ + "nut", + -10.95487117767334 + ], + [ + "▁establish", + -10.955140113830566 + ], + [ + "▁alcohol", + -10.95514965057373 + ], + [ + "▁closely", + -10.955170631408691 + ], + [ + "▁MA", + -10.955381393432617 + ], + [ + "pul", + -10.955389022827148 + ], + [ + "▁defined", + -10.955666542053223 + ], + [ + "aires", + -10.955692291259766 + ], + [ + "▁Shi", + -10.955703735351562 + ], + [ + "▁plays", + -10.956303596496582 + ], + [ + "▁sister", + -10.95690631866455 + ], + [ + "▁cable", + -10.957179069519043 + ], + [ + "▁desk", + -10.957215309143066 + ], + [ + "▁apoi", + -10.957738876342773 + ], + [ + "▁identity", + -10.95785140991211 + ], + [ + "▁stars", + -10.957931518554688 + ], + [ + "▁fata", + -10.958008766174316 + ], + [ + "▁obvious", + -10.958330154418945 + ], + [ + "▁dental", + -10.95843505859375 + ], + [ + "AM", + -10.958802223205566 + ], + [ + "▁sharp", + -10.95881175994873 + ], + [ + "duc", + -10.959053993225098 + ], + [ + "▁manufacturer", + -10.95914077758789 + ], + [ + "!)", + -10.959270477294922 + ], + [ + "▁objects", + -10.959720611572266 + ], + [ + "▁Ag", + -10.959989547729492 + ], + [ + "referred", + -10.960195541381836 + ], + [ + "▁Ak", + -10.960308074951172 + ], + [ + "burg", + -10.960360527038574 + ], + [ + "▁nouveau", + -10.960854530334473 + ], + [ + "▁Pal", + -10.960994720458984 + ], + [ + "▁Arbeits", + -10.961280822753906 + ], + [ + "▁personally", + -10.961288452148438 + ], + [ + "▁Dé", + -10.961292266845703 + ], + [ + "▁import", + -10.961688041687012 + ], + [ + "▁justice", + -10.961913108825684 + ], + [ + "▁photography", + -10.962705612182617 + ], + [ + "▁portfolio", + -10.962841987609863 + ], + [ + "56", + -10.96314525604248 + ], + [ + "▁nouvelle", + -10.963293075561523 + ], + [ + "▁oven", + -10.964197158813477 + ], + [ + "▁400", + -10.964272499084473 + ], + [ + "▁mixed", + -10.964395523071289 + ], + [ + "▁relax", + -10.964427947998047 + ], + [ + "▁imp", + -10.964703559875488 + ], + [ + "▁».", + -10.964734077453613 + ], + [ + "▁mail", + -10.964777946472168 + ], + [ + "rage", + -10.964861869812012 + ], + [ + "nos", + -10.964974403381348 + ], + [ + "▁drugs", + -10.965195655822754 + ], + [ + "▁jede", + -10.965211868286133 + ], + [ + "▁einige", + -10.965232849121094 + ], + [ + "▁8.", + -10.965325355529785 + ], + [ + "ters", + -10.965412139892578 + ], + [ + "▁electrical", + -10.965432167053223 + ], + [ + "▁puis", + -10.965836524963379 + ], + [ + "▁films", + -10.965903282165527 + ], + [ + "41", + -10.966036796569824 + ], + [ + "▁moral", + -10.966398239135742 + ], + [ + "lage", + -10.966402053833008 + ], + [ + "▁spaces", + -10.966415405273438 + ], + [ + "▁Ed", + -10.966462135314941 + ], + [ + "▁classroom", + -10.966588020324707 + ], + [ + "▁große", + -10.966588973999023 + ], + [ + "▁baza", 
+ -10.966887474060059 + ], + [ + "face", + -10.967308044433594 + ], + [ + "▁informed", + -10.967333793640137 + ], + [ + "▁improving", + -10.967477798461914 + ], + [ + "▁guidance", + -10.967880249023438 + ], + [ + "▁gallery", + -10.96800708770752 + ], + [ + "cular", + -10.968046188354492 + ], + [ + "53", + -10.968094825744629 + ], + [ + "Despite", + -10.968238830566406 + ], + [ + "▁forme", + -10.968304634094238 + ], + [ + "▁système", + -10.968415260314941 + ], + [ + "▁Win", + -10.968494415283203 + ], + [ + "▁Small", + -10.968537330627441 + ], + [ + "▁Mobile", + -10.968564987182617 + ], + [ + "▁tape", + -10.968606948852539 + ], + [ + "▁erhalten", + -10.968914985656738 + ], + [ + "▁movies", + -10.968928337097168 + ], + [ + "▁Unfortunately", + -10.968963623046875 + ], + [ + "▁Looking", + -10.96945858001709 + ], + [ + "▁guard", + -10.969584465026855 + ], + [ + "▁pr", + -10.969820976257324 + ], + [ + "▁confident", + -10.96988582611084 + ], + [ + "BA", + -10.970229148864746 + ], + [ + "bas", + -10.970272064208984 + ], + [ + "hum", + -10.97050666809082 + ], + [ + "ular", + -10.9705171585083 + ], + [ + "▁Still", + -10.970593452453613 + ], + [ + "▁flavor", + -10.970656394958496 + ], + [ + "▁boost", + -10.970773696899414 + ], + [ + "▁division", + -10.970842361450195 + ], + [ + "ising", + -10.971006393432617 + ], + [ + "▁monitoring", + -10.971044540405273 + ], + [ + "▁Sen", + -10.97105884552002 + ], + [ + "▁https", + -10.971527099609375 + ], + [ + "mainly", + -10.971735000610352 + ], + [ + "play", + -10.972251892089844 + ], + [ + "▁dynamic", + -10.972357749938965 + ], + [ + "▁coup", + -10.972370147705078 + ], + [ + "▁carpet", + -10.972561836242676 + ], + [ + "iner", + -10.972846984863281 + ], + [ + "ral", + -10.97325611114502 + ], + [ + "iser", + -10.973320007324219 + ], + [ + "RC", + -10.9739990234375 + ], + [ + "▁definition", + -10.97475814819336 + ], + [ + "▁Za", + -10.974767684936523 + ], + [ + "friendly", + -10.974883079528809 + ], + [ + "43", + -10.975123405456543 + ], + [ + "link", + -10.975180625915527 + ], + [ + "▁Multi", + -10.97519302368164 + ], + [ + "▁einmal", + -10.975272178649902 + ], + [ + "▁stopped", + -10.975394248962402 + ], + [ + "vel", + -10.975456237792969 + ], + [ + "▁ongoing", + -10.975565910339355 + ], + [ + "▁ancient", + -10.976259231567383 + ], + [ + "take", + -10.976301193237305 + ], + [ + "cia", + -10.976432800292969 + ], + [ + "▁USB", + -10.976545333862305 + ], + [ + "▁attorney", + -10.976866722106934 + ], + [ + "▁slot", + -10.976866722106934 + ], + [ + "▁Line", + -10.97693157196045 + ], + [ + "rice", + -10.977087020874023 + ], + [ + "ify", + -10.977520942687988 + ], + [ + "ó", + -10.978260040283203 + ], + [ + "▁flash", + -10.978483200073242 + ], + [ + "▁extension", + -10.978555679321289 + ], + [ + "▁Ende", + -10.979022979736328 + ], + [ + "▁powder", + -10.979114532470703 + ], + [ + "ească", + -10.979143142700195 + ], + [ + "03", + -10.979327201843262 + ], + [ + "▁normally", + -10.979416847229004 + ], + [ + "▁pun", + -10.980108261108398 + ], + [ + "viewed", + -10.980138778686523 + ], + [ + "ssen", + -10.980896949768066 + ], + [ + "ache", + -10.981121063232422 + ], + [ + "ește", + -10.98122787475586 + ], + [ + "▁PA", + -10.981266021728516 + ], + [ + "FI", + -10.981945991516113 + ], + [ + "▁Frank", + -10.98198127746582 + ], + [ + "▁apa", + -10.98242473602295 + ], + [ + "▁coast", + -10.982614517211914 + ], + [ + "▁boy", + -10.982665061950684 + ], + [ + "lim", + -10.982902526855469 + ], + [ + "▁putin", + -10.983194351196289 + ], + [ + "▁script", + -10.983332633972168 + ], + [ 
+ "▁noticed", + -10.9837007522583 + ], + [ + "▁dealing", + -10.983922004699707 + ], + [ + "▁Trans", + -10.984100341796875 + ], + [ + "▁border", + -10.984447479248047 + ], + [ + "▁reputation", + -10.984657287597656 + ], + [ + "-2", + -10.984662055969238 + ], + [ + "HS", + -10.984707832336426 + ], + [ + "▁supports", + -10.984724998474121 + ], + [ + "▁horse", + -10.985146522521973 + ], + [ + "nik", + -10.98520565032959 + ], + [ + "▁clothes", + -10.985234260559082 + ], + [ + "▁Card", + -10.985612869262695 + ], + [ + "▁relief", + -10.98595905303955 + ], + [ + "▁Visit", + -10.986259460449219 + ], + [ + "▁luni", + -10.986593246459961 + ], + [ + "81", + -10.986693382263184 + ], + [ + "qua", + -10.986945152282715 + ], + [ + "▁Comp", + -10.98697280883789 + ], + [ + "▁investigation", + -10.987137794494629 + ], + [ + "▁depth", + -10.987598419189453 + ], + [ + "▁earned", + -10.987709045410156 + ], + [ + "▁Ren", + -10.988090515136719 + ], + [ + "▁Dumnezeu", + -10.988107681274414 + ], + [ + "▁Joe", + -10.988210678100586 + ], + [ + "▁goods", + -10.988288879394531 + ], + [ + "▁Vol", + -10.988686561584473 + ], + [ + "▁certified", + -10.989118576049805 + ], + [ + "▁favor", + -10.989326477050781 + ], + [ + "▁Scott", + -10.989599227905273 + ], + [ + "▁protest", + -10.989802360534668 + ], + [ + "▁pace", + -10.989803314208984 + ], + [ + "▁Angeles", + -10.990368843078613 + ], + [ + "inch", + -10.99050521850586 + ], + [ + "▁charged", + -10.99052619934082 + ], + [ + "code", + -10.990968704223633 + ], + [ + "▁convenient", + -10.99138355255127 + ], + [ + "▁Nord", + -10.991556167602539 + ], + [ + "▁yesterday", + -10.991691589355469 + ], + [ + "Dacă", + -10.99169635772705 + ], + [ + "▁Travel", + -10.991786003112793 + ], + [ + "▁kid", + -10.991941452026367 + ], + [ + "ction", + -10.991986274719238 + ], + [ + "▁groupe", + -10.992770195007324 + ], + [ + "pu", + -10.993056297302246 + ], + [ + "bzw", + -10.993196487426758 + ], + [ + "▁mixture", + -10.993513107299805 + ], + [ + "▁Farm", + -10.993715286254883 + ], + [ + "▁acces", + -10.993939399719238 + ], + [ + "matic", + -10.993950843811035 + ], + [ + "▁comparison", + -10.994006156921387 + ], + [ + "reich", + -10.994095802307129 + ], + [ + "pet", + -10.994502067565918 + ], + [ + "▁lit", + -10.994685173034668 + ], + [ + "▁organized", + -10.99476432800293 + ], + [ + "just", + -10.995564460754395 + ], + [ + "▁fellow", + -10.996004104614258 + ], + [ + "Ver", + -10.996209144592285 + ], + [ + "▁trends", + -10.99622631072998 + ], + [ + "▁evaluation", + -10.99626636505127 + ], + [ + "feld", + -10.99639892578125 + ], + [ + "▁Pu", + -10.99671459197998 + ], + [ + "▁equipped", + -10.99727725982666 + ], + [ + "▁catre", + -10.997278213500977 + ], + [ + "eck", + -10.997369766235352 + ], + [ + "▁facing", + -10.997998237609863 + ], + [ + "▁instrument", + -10.998361587524414 + ], + [ + "▁pleased", + -10.998507499694824 + ], + [ + "▁tap", + -10.998818397521973 + ], + [ + "dom", + -10.998826026916504 + ], + [ + "▁pump", + -10.999384880065918 + ], + [ + "▁functional", + -10.999429702758789 + ], + [ + "▁authority", + -10.999455451965332 + ], + [ + "▁experiment", + -10.999478340148926 + ], + [ + "LO", + -10.999529838562012 + ], + [ + "▁scheduled", + -10.999552726745605 + ], + [ + "halt", + -10.999604225158691 + ], + [ + "▁ceiling", + -10.999761581420898 + ], + [ + "▁Step", + -11.000310897827148 + ], + [ + "▁orders", + -11.00032901763916 + ], + [ + "▁speech", + -11.001046180725098 + ], + [ + "▁stands", + -11.001119613647461 + ], + [ + "▁disc", + -11.001920700073242 + ], + [ + "▁rec", + 
+ [... several thousand more ["token", log-probability] vocabulary entries in the same format (SentencePiece-style subword pieces for English, German, French, and Romanian text, with scores in roughly the -11.00 to -11.29 range) ...]
+ "arm", + -11.29346752166748 + ], + [ + "cas", + -11.293691635131836 + ], + [ + "arrow", + -11.29379940032959 + ], + [ + "▁carrying", + -11.293927192687988 + ], + [ + "▁wave", + -11.294661521911621 + ], + [ + "setzt", + -11.294907569885254 + ], + [ + "▁construct", + -11.29514217376709 + ], + [ + "▁acts", + -11.295269966125488 + ], + [ + "▁Action", + -11.295342445373535 + ], + [ + "▁Kim", + -11.295354843139648 + ], + [ + "oxid", + -11.295459747314453 + ], + [ + "fish", + -11.295519828796387 + ], + [ + "▁damaged", + -11.295660018920898 + ], + [ + "▁Greek", + -11.295747756958008 + ], + [ + "▁belt", + -11.295772552490234 + ], + [ + "▁Prior", + -11.295778274536133 + ], + [ + "▁marks", + -11.295936584472656 + ], + [ + "▁lumea", + -11.296183586120605 + ], + [ + "▁twenty", + -11.296196937561035 + ], + [ + "▁locul", + -11.296360969543457 + ], + [ + "▁Army", + -11.296524047851562 + ], + [ + "apt", + -11.296602249145508 + ], + [ + "▁limits", + -11.296733856201172 + ], + [ + "▁cruise", + -11.296966552734375 + ], + [ + "▁List", + -11.296998023986816 + ], + [ + "utilisation", + -11.29753589630127 + ], + [ + "▁personality", + -11.297622680664062 + ], + [ + "▁sections", + -11.297759056091309 + ], + [ + "▁drawn", + -11.29797649383545 + ], + [ + "▁mold", + -11.298277854919434 + ], + [ + "▁Think", + -11.298333168029785 + ], + [ + "▁holidays", + -11.298355102539062 + ], + [ + "▁critic", + -11.298545837402344 + ], + [ + "grade", + -11.298660278320312 + ], + [ + "▁sick", + -11.299074172973633 + ], + [ + "▁characteristics", + -11.299237251281738 + ], + [ + "▁echipa", + -11.299272537231445 + ], + [ + "▁Fast", + -11.29929256439209 + ], + [ + "▁Br", + -11.299600601196289 + ], + [ + "▁Reise", + -11.299734115600586 + ], + [ + "teen", + -11.299749374389648 + ], + [ + "uci", + -11.299949645996094 + ], + [ + "!”", + -11.300180435180664 + ], + [ + "ppe", + -11.300532341003418 + ], + [ + "▁talked", + -11.301164627075195 + ], + [ + "▁gap", + -11.301473617553711 + ], + [ + "homme", + -11.301778793334961 + ], + [ + "▁interact", + -11.301934242248535 + ], + [ + "▁dollar", + -11.302276611328125 + ], + [ + "▁bone", + -11.302309036254883 + ], + [ + "▁Einsatz", + -11.302343368530273 + ], + [ + "▁sad", + -11.302434921264648 + ], + [ + "any", + -11.302445411682129 + ], + [ + "tation", + -11.302666664123535 + ], + [ + "▁Haupt", + -11.302748680114746 + ], + [ + "iva", + -11.302781105041504 + ], + [ + "▁Schu", + -11.302916526794434 + ], + [ + "▁evaluate", + -11.3036470413208 + ], + [ + "▁variant", + -11.303807258605957 + ], + [ + "▁IS", + -11.303879737854004 + ], + [ + "▁PRO", + -11.303947448730469 + ], + [ + "▁vine", + -11.303959846496582 + ], + [ + "rut", + -11.304062843322754 + ], + [ + "▁existence", + -11.30443286895752 + ], + [ + "-7", + -11.304525375366211 + ], + [ + "ancy", + -11.304702758789062 + ], + [ + "▁Want", + -11.305023193359375 + ], + [ + "alism", + -11.305127143859863 + ], + [ + "ranging", + -11.30550765991211 + ], + [ + "preis", + -11.305551528930664 + ], + [ + "All", + -11.305620193481445 + ], + [ + "▁reception", + -11.30565071105957 + ], + [ + "mai", + -11.305730819702148 + ], + [ + "▁lease", + -11.30577278137207 + ], + [ + "▁finest", + -11.30578899383545 + ], + [ + "▁evident", + -11.305874824523926 + ], + [ + "▁Easy", + -11.306075096130371 + ], + [ + "▁gilt", + -11.306085586547852 + ], + [ + "▁trips", + -11.306344985961914 + ], + [ + "▁skilled", + -11.306368827819824 + ], + [ + "consists", + -11.306456565856934 + ], + [ + "front", + -11.306635856628418 + ], + [ + "rati", + -11.306652069091797 + ], + [ + 
"▁Following", + -11.30678653717041 + ], + [ + "▁Medicine", + -11.307161331176758 + ], + [ + "▁pune", + -11.30729866027832 + ], + [ + "▁errors", + -11.307354927062988 + ], + [ + "arian", + -11.307613372802734 + ], + [ + "lib", + -11.30811882019043 + ], + [ + "SR", + -11.308351516723633 + ], + [ + "ML", + -11.308568000793457 + ], + [ + "▁Safety", + -11.308823585510254 + ], + [ + "▁clar", + -11.309355735778809 + ], + [ + "New", + -11.309764862060547 + ], + [ + "▁37", + -11.309773445129395 + ], + [ + "▁Administration", + -11.309823036193848 + ], + [ + "▁2.0", + -11.310120582580566 + ], + [ + "▁obviously", + -11.310196876525879 + ], + [ + "▁Mitarbeiter", + -11.310254096984863 + ], + [ + "▁improvements", + -11.31043529510498 + ], + [ + "▁Cut", + -11.310630798339844 + ], + [ + "▁Natural", + -11.310672760009766 + ], + [ + "▁arrival", + -11.311182975769043 + ], + [ + "▁pizza", + -11.311339378356934 + ], + [ + "eşti", + -11.311570167541504 + ], + [ + "cept", + -11.311654090881348 + ], + [ + "▁livre", + -11.311686515808105 + ], + [ + "▁nombreux", + -11.312195777893066 + ], + [ + "▁authentic", + -11.312231063842773 + ], + [ + "▁gemacht", + -11.312472343444824 + ], + [ + "▁broadcast", + -11.312478065490723 + ], + [ + "▁stronger", + -11.312545776367188 + ], + [ + "UP", + -11.31257152557373 + ], + [ + "▁centers", + -11.312614440917969 + ], + [ + "▁petite", + -11.312617301940918 + ], + [ + "▁spots", + -11.312626838684082 + ], + [ + "▁crystal", + -11.312756538391113 + ], + [ + "▁salon", + -11.313044548034668 + ], + [ + "▁gained", + -11.313098907470703 + ], + [ + "▁Mus", + -11.313215255737305 + ], + [ + "▁lens", + -11.313223838806152 + ], + [ + "▁ihm", + -11.313231468200684 + ], + [ + "minute", + -11.313573837280273 + ], + [ + "▁greatly", + -11.313587188720703 + ], + [ + "LP", + -11.31361198425293 + ], + [ + "rait", + -11.314027786254883 + ], + [ + "▁bid", + -11.314154624938965 + ], + [ + "▁cit", + -11.314203262329102 + ], + [ + "entreprise", + -11.31435775756836 + ], + [ + "▁55", + -11.314533233642578 + ], + [ + "▁respectively", + -11.314536094665527 + ], + [ + "▁lo", + -11.314638137817383 + ], + [ + "▁cons", + -11.314743995666504 + ], + [ + "▁Energie", + -11.315169334411621 + ], + [ + "▁OK", + -11.31521224975586 + ], + [ + "▁grill", + -11.315338134765625 + ], + [ + "▁heading", + -11.31549072265625 + ], + [ + "▁sollten", + -11.315491676330566 + ], + [ + "▁Fragen", + -11.315528869628906 + ], + [ + "▁Poli", + -11.315556526184082 + ], + [ + "▁studying", + -11.315723419189453 + ], + [ + "▁développement", + -11.315882682800293 + ], + [ + "▁foam", + -11.316035270690918 + ], + [ + "▁1996", + -11.316511154174805 + ], + [ + "▁disaster", + -11.31662654876709 + ], + [ + "▁cafe", + -11.317262649536133 + ], + [ + "▁moves", + -11.317267417907715 + ], + [ + "focuses", + -11.317712783813477 + ], + [ + "▁Avenue", + -11.317834854125977 + ], + [ + "▁humans", + -11.31784439086914 + ], + [ + "▁(3", + -11.318021774291992 + ], + [ + "▁région", + -11.318347930908203 + ], + [ + "▁DJ", + -11.318608283996582 + ], + [ + "shop", + -11.318819046020508 + ], + [ + "▁acting", + -11.318843841552734 + ], + [ + "▁Justice", + -11.318967819213867 + ], + [ + "▁trouve", + -11.319010734558105 + ], + [ + "▁Estate", + -11.319040298461914 + ], + [ + "▁strict", + -11.319231986999512 + ], + [ + "▁talks", + -11.319283485412598 + ], + [ + "▁mat", + -11.319290161132812 + ], + [ + "▁completion", + -11.319327354431152 + ], + [ + "delivering", + -11.31943416595459 + ], + [ + "CD", + -11.31973934173584 + ], + [ + "0%", + -11.319960594177246 + ], + [ + 
"▁creativity", + -11.320253372192383 + ], + [ + "BR", + -11.320272445678711 + ], + [ + "▁occurred", + -11.320357322692871 + ], + [ + "Car", + -11.320590019226074 + ], + [ + "▁rising", + -11.320761680603027 + ], + [ + "gger", + -11.32086181640625 + ], + [ + "▁Gene", + -11.320901870727539 + ], + [ + "▁workplace", + -11.320914268493652 + ], + [ + "phy", + -11.321065902709961 + ], + [ + "▁Bla", + -11.32107162475586 + ], + [ + "▁trailer", + -11.32120418548584 + ], + [ + "▁Forest", + -11.321205139160156 + ], + [ + "▁profession", + -11.321246147155762 + ], + [ + "▁Father", + -11.32137680053711 + ], + [ + "flu", + -11.321487426757812 + ], + [ + "tone", + -11.321489334106445 + ], + [ + "▁sexual", + -11.321736335754395 + ], + [ + "▁Map", + -11.321805953979492 + ], + [ + "OT", + -11.3218412399292 + ], + [ + "▁Us", + -11.321878433227539 + ], + [ + "tôt", + -11.321892738342285 + ], + [ + "▁Wert", + -11.321901321411133 + ], + [ + "preparing", + -11.322121620178223 + ], + [ + "isé", + -11.322243690490723 + ], + [ + "▁lake", + -11.322461128234863 + ], + [ + "eed", + -11.32270336151123 + ], + [ + "jun", + -11.322888374328613 + ], + [ + "▁implemented", + -11.323014259338379 + ], + [ + "vid", + -11.323116302490234 + ], + [ + "igne", + -11.323201179504395 + ], + [ + "▁follows", + -11.323214530944824 + ], + [ + "▁Eric", + -11.323430061340332 + ], + [ + "body", + -11.323530197143555 + ], + [ + "▁contained", + -11.323585510253906 + ], + [ + "▁massage", + -11.323715209960938 + ], + [ + "AV", + -11.323725700378418 + ], + [ + "▁insa", + -11.323850631713867 + ], + [ + "▁observed", + -11.323892593383789 + ], + [ + "▁marque", + -11.324137687683105 + ], + [ + "lines", + -11.324451446533203 + ], + [ + "▁Frage", + -11.324482917785645 + ], + [ + "largely", + -11.324647903442383 + ], + [ + "gegeben", + -11.32473087310791 + ], + [ + "▁colleagues", + -11.324762344360352 + ], + [ + "pha", + -11.32494068145752 + ], + [ + "▁representative", + -11.325217247009277 + ], + [ + "▁shut", + -11.325650215148926 + ], + [ + "▁secondary", + -11.325779914855957 + ], + [ + "▁exhibit", + -11.325927734375 + ], + [ + "1)", + -11.325932502746582 + ], + [ + "mid", + -11.326109886169434 + ], + [ + "▁Due", + -11.326229095458984 + ], + [ + "▁initiatives", + -11.326457023620605 + ], + [ + "▁occurs", + -11.326458930969238 + ], + [ + "lent", + -11.326478958129883 + ], + [ + "▁façon", + -11.326778411865234 + ], + [ + "▁iOS", + -11.326803207397461 + ], + [ + "▁exploring", + -11.327000617980957 + ], + [ + "▁stations", + -11.327103614807129 + ], + [ + "nton", + -11.327234268188477 + ], + [ + "▁Country", + -11.32729721069336 + ], + [ + "▁shouldn", + -11.327406883239746 + ], + [ + "▁casual", + -11.327611923217773 + ], + [ + "-18", + -11.32769775390625 + ], + [ + "▁maintained", + -11.32772445678711 + ], + [ + "▁cart", + -11.327790260314941 + ], + [ + "▁propre", + -11.327836036682129 + ], + [ + "▁asset", + -11.327948570251465 + ], + [ + "firm", + -11.32803726196289 + ], + [ + "gla", + -11.328231811523438 + ], + [ + "viv", + -11.3282470703125 + ], + [ + "▁scientists", + -11.328873634338379 + ], + [ + "▁Nor", + -11.328936576843262 + ], + [ + "ites", + -11.329320907592773 + ], + [ + "▁engaging", + -11.329933166503906 + ], + [ + "My", + -11.330178260803223 + ], + [ + "▁workshops", + -11.330282211303711 + ], + [ + "ffer", + -11.3303804397583 + ], + [ + "activité", + -11.33047103881836 + ], + [ + "▁tension", + -11.330567359924316 + ], + [ + "▁dual", + -11.330668449401855 + ], + [ + "uer", + -11.33084774017334 + ], + [ + "900", + -11.330941200256348 + ], + [ + "SF", 
+ -11.33108139038086 + ], + [ + "▁kannst", + -11.331146240234375 + ], + [ + "▁bur", + -11.33115291595459 + ], + [ + "▁visitor", + -11.331156730651855 + ], + [ + "▁granted", + -11.331178665161133 + ], + [ + "▁union", + -11.331355094909668 + ], + [ + "▁tablet", + -11.331461906433105 + ], + [ + "▁Choose", + -11.33146858215332 + ], + [ + "ibil", + -11.331551551818848 + ], + [ + "▁settlement", + -11.331830978393555 + ], + [ + "genommen", + -11.331892967224121 + ], + [ + "▁marked", + -11.332956314086914 + ], + [ + "▁diagnostic", + -11.333370208740234 + ], + [ + "▁prayer", + -11.333529472351074 + ], + [ + "▁Toronto", + -11.334035873413086 + ], + [ + "trans", + -11.334146499633789 + ], + [ + "▁respectiv", + -11.334160804748535 + ], + [ + "▁2012.", + -11.334207534790039 + ], + [ + "icul", + -11.334394454956055 + ], + [ + "▁satisfied", + -11.334527969360352 + ], + [ + "▁Fla", + -11.334596633911133 + ], + [ + "▁estimate", + -11.334638595581055 + ], + [ + "▁Agency", + -11.33466911315918 + ], + [ + "OD", + -11.334708213806152 + ], + [ + "▁McC", + -11.334746360778809 + ], + [ + "bert", + -11.334748268127441 + ], + [ + "▁seal", + -11.334771156311035 + ], + [ + "aine", + -11.334839820861816 + ], + [ + "▁cauza", + -11.334848403930664 + ], + [ + "▁wallpaper", + -11.335081100463867 + ], + [ + "▁alb", + -11.33536434173584 + ], + [ + "▁Sound", + -11.335681915283203 + ], + [ + "worth", + -11.33572769165039 + ], + [ + "chten", + -11.335858345031738 + ], + [ + "programm", + -11.335896492004395 + ], + [ + "▁pounds", + -11.336215019226074 + ], + [ + "▁coaching", + -11.336278915405273 + ], + [ + "▁Furthermore", + -11.336454391479492 + ], + [ + "▁Korea", + -11.336471557617188 + ], + [ + "▁flour", + -11.336530685424805 + ], + [ + "▁sommes", + -11.33657169342041 + ], + [ + "▁Repair", + -11.33661937713623 + ], + [ + "”)", + -11.336642265319824 + ], + [ + "itch", + -11.336675643920898 + ], + [ + "blu", + -11.336786270141602 + ], + [ + "zar", + -11.336882591247559 + ], + [ + "▁diferite", + -11.33745002746582 + ], + [ + "▁Golf", + -11.337685585021973 + ], + [ + "arch", + -11.33772087097168 + ], + [ + "▁panels", + -11.337799072265625 + ], + [ + "jan", + -11.337956428527832 + ], + [ + "“.", + -11.338240623474121 + ], + [ + "izarea", + -11.338324546813965 + ], + [ + "▁golden", + -11.33854866027832 + ], + [ + "▁flying", + -11.338550567626953 + ], + [ + "▁museum", + -11.338700294494629 + ], + [ + "▁equivalent", + -11.338759422302246 + ], + [ + "▁Lang", + -11.339032173156738 + ], + [ + "schi", + -11.339539527893066 + ], + [ + "MI", + -11.339595794677734 + ], + [ + "▁faci", + -11.339838027954102 + ], + [ + "▁Rahmen", + -11.339988708496094 + ], + [ + "▁attending", + -11.340130805969238 + ], + [ + "′′", + -11.340483665466309 + ], + [ + "▁Tro", + -11.341070175170898 + ], + [ + "▁gaming", + -11.341447830200195 + ], + [ + "▁aujourd", + -11.341479301452637 + ], + [ + "▁Wochen", + -11.341526985168457 + ], + [ + "▁entering", + -11.341535568237305 + ], + [ + "its", + -11.34155559539795 + ], + [ + "▁Private", + -11.341866493225098 + ], + [ + "▁Ocean", + -11.34188175201416 + ], + [ + "▁01", + -11.342098236083984 + ], + [ + "▁coloring", + -11.342188835144043 + ], + [ + "ător", + -11.34253215789795 + ], + [ + "▁flooring", + -11.342548370361328 + ], + [ + "▁downtown", + -11.34276294708252 + ], + [ + "rab", + -11.342998504638672 + ], + [ + "HI", + -11.343221664428711 + ], + [ + "▁illness", + -11.343234062194824 + ], + [ + "▁whil", + -11.343307495117188 + ], + [ + "▁diamond", + -11.34333324432373 + ], + [ + "Mail", + -11.343419075012207 + ], + [ 
+ "▁Dream", + -11.34344482421875 + ], + [ + "▁Golden", + -11.344099044799805 + ], + [ + "▁rein", + -11.344220161437988 + ], + [ + "▁hi", + -11.344283103942871 + ], + [ + "▁expressed", + -11.344489097595215 + ], + [ + "▁luat", + -11.344511985778809 + ], + [ + "▁Share", + -11.34453010559082 + ], + [ + "▁Programm", + -11.344706535339355 + ], + [ + "▁Sales", + -11.344707489013672 + ], + [ + "▁prof", + -11.344890594482422 + ], + [ + "▁MO", + -11.34505844116211 + ], + [ + "▁Short", + -11.345088958740234 + ], + [ + "▁charm", + -11.345290184020996 + ], + [ + "▁Cer", + -11.345373153686523 + ], + [ + "▁Run", + -11.34553337097168 + ], + [ + "▁tutorial", + -11.345589637756348 + ], + [ + "oul", + -11.34561824798584 + ], + [ + "▁Fest", + -11.345794677734375 + ], + [ + "▁uniform", + -11.345929145812988 + ], + [ + "aß", + -11.346014976501465 + ], + [ + "▁pipe", + -11.346076965332031 + ], + [ + "▁Square", + -11.346283912658691 + ], + [ + "▁Kosten", + -11.346365928649902 + ], + [ + "▁checked", + -11.346590042114258 + ], + [ + "▁65", + -11.346626281738281 + ], + [ + "▁Adam", + -11.346686363220215 + ], + [ + "cel", + -11.346700668334961 + ], + [ + "ello", + -11.346965789794922 + ], + [ + "▁Res", + -11.347023963928223 + ], + [ + "▁drain", + -11.34708309173584 + ], + [ + "ză", + -11.347129821777344 + ], + [ + "▁Tech", + -11.34739875793457 + ], + [ + "▁strive", + -11.34749698638916 + ], + [ + "cycl", + -11.347506523132324 + ], + [ + "▁stark", + -11.347541809082031 + ], + [ + "load", + -11.34754753112793 + ], + [ + "▁Stat", + -11.347589492797852 + ], + [ + "▁Rec", + -11.347622871398926 + ], + [ + "ians", + -11.347716331481934 + ], + [ + "▁Tin", + -11.347738265991211 + ], + [ + "▁Agreement", + -11.347840309143066 + ], + [ + "▁pret", + -11.348027229309082 + ], + [ + "-9", + -11.348326683044434 + ], + [ + "▁sentence", + -11.348380088806152 + ], + [ + "▁Direct", + -11.348426818847656 + ], + [ + "▁Rep", + -11.348465919494629 + ], + [ + "▁Prozent", + -11.348799705505371 + ], + [ + "▁invitation", + -11.34882640838623 + ], + [ + "▁refund", + -11.349113464355469 + ], + [ + "▁Kids", + -11.349287986755371 + ], + [ + "stock", + -11.349383354187012 + ], + [ + "TP", + -11.349400520324707 + ], + [ + "▁tau", + -11.34941291809082 + ], + [ + "from", + -11.349421501159668 + ], + [ + "▁Ash", + -11.349451065063477 + ], + [ + "store", + -11.349535942077637 + ], + [ + "▁Common", + -11.34958553314209 + ], + [ + "▁Qualität", + -11.34968376159668 + ], + [ + "▁strongly", + -11.349727630615234 + ], + [ + "▁importante", + -11.34979248046875 + ], + [ + "ome", + -11.349912643432617 + ], + [ + "▁surtout", + -11.349946022033691 + ], + [ + "enables", + -11.35020637512207 + ], + [ + "▁decent", + -11.350221633911133 + ], + [ + "▁neutral", + -11.350237846374512 + ], + [ + "▁produs", + -11.350356101989746 + ], + [ + "bury", + -11.350451469421387 + ], + [ + "▁Level", + -11.350618362426758 + ], + [ + "▁interes", + -11.350699424743652 + ], + [ + "mov", + -11.350797653198242 + ], + [ + "▁backup", + -11.350939750671387 + ], + [ + "même", + -11.351094245910645 + ], + [ + "doc", + -11.351119041442871 + ], + [ + "▁#1", + -11.35130786895752 + ], + [ + "▁specified", + -11.351495742797852 + ], + [ + "▁founder", + -11.351655960083008 + ], + [ + "And", + -11.352090835571289 + ], + [ + "isten", + -11.352149963378906 + ], + [ + "▁lecture", + -11.352729797363281 + ], + [ + "▁wake", + -11.352895736694336 + ], + [ + "▁vraiment", + -11.352980613708496 + ], + [ + "▁swing", + -11.353188514709473 + ], + [ + "▁addresses", + -11.353275299072266 + ], + [ + "▁Verfügung", + 
-11.353504180908203 + ], + [ + "▁deadline", + -11.353761672973633 + ], + [ + "н", + -11.353791236877441 + ], + [ + "▁Content", + -11.353970527648926 + ], + [ + "▁Gre", + -11.354111671447754 + ], + [ + "▁Experience", + -11.354378700256348 + ], + [ + "tura", + -11.354458808898926 + ], + [ + "▁exit", + -11.354642868041992 + ], + [ + "▁Britain", + -11.354652404785156 + ], + [ + "▁Sunt", + -11.354684829711914 + ], + [ + "▁documentation", + -11.354690551757812 + ], + [ + "▁showcase", + -11.3547945022583 + ], + [ + "▁photographs", + -11.354822158813477 + ], + [ + "qué", + -11.35483169555664 + ], + [ + "zin", + -11.354909896850586 + ], + [ + "pres", + -11.354933738708496 + ], + [ + "▁decline", + -11.354955673217773 + ], + [ + "▁Large", + -11.355030059814453 + ], + [ + "▁bills", + -11.355141639709473 + ], + [ + "▁entitled", + -11.355222702026367 + ], + [ + "▁passionate", + -11.355393409729004 + ], + [ + "▁workout", + -11.355413436889648 + ], + [ + "▁Again", + -11.35560417175293 + ], + [ + "▁Haut", + -11.35582160949707 + ], + [ + "▁guaranteed", + -11.35599136352539 + ], + [ + "▁vue", + -11.35600471496582 + ], + [ + "▁farmers", + -11.356224060058594 + ], + [ + "▁admission", + -11.356500625610352 + ], + [ + "▁manière", + -11.357080459594727 + ], + [ + "▁reverse", + -11.357121467590332 + ], + [ + "▁FL", + -11.357142448425293 + ], + [ + "▁terminal", + -11.357206344604492 + ], + [ + "GI", + -11.35731029510498 + ], + [ + "▁speakers", + -11.35739803314209 + ], + [ + "▁responses", + -11.357398986816406 + ], + [ + "▁Doch", + -11.357457160949707 + ], + [ + "▁2013,", + -11.357717514038086 + ], + [ + "▁phones", + -11.357789993286133 + ], + [ + "ential", + -11.357851028442383 + ], + [ + "▁operator", + -11.357916831970215 + ], + [ + "▁steam", + -11.358036994934082 + ], + [ + "burn", + -11.358091354370117 + ], + [ + "▁seul", + -11.35815715789795 + ], + [ + "▁unusual", + -11.358322143554688 + ], + [ + "▁educate", + -11.358403205871582 + ], + [ + "▁Que", + -11.358680725097656 + ], + [ + "▁believes", + -11.359137535095215 + ], + [ + "▁succeed", + -11.359344482421875 + ], + [ + "▁delay", + -11.359533309936523 + ], + [ + "▁deeper", + -11.359633445739746 + ], + [ + "▁reaching", + -11.359890937805176 + ], + [ + "▁objectives", + -11.360086441040039 + ], + [ + "▁temporary", + -11.36028003692627 + ], + [ + "▁artistic", + -11.360421180725098 + ], + [ + "▁sou", + -11.360471725463867 + ], + [ + "▁transparent", + -11.36062240600586 + ], + [ + "There", + -11.360798835754395 + ], + [ + "ception", + -11.360836029052734 + ], + [ + "▁excess", + -11.360939979553223 + ], + [ + "▁gathering", + -11.361008644104004 + ], + [ + "▁Save", + -11.361095428466797 + ], + [ + "ază", + -11.361166000366211 + ], + [ + "▁français", + -11.361197471618652 + ], + [ + "▁laid", + -11.361210823059082 + ], + [ + "▁modul", + -11.361394882202148 + ], + [ + "avoir", + -11.361465454101562 + ], + [ + "under", + -11.362113952636719 + ], + [ + "dding", + -11.362226486206055 + ], + [ + "▁falls", + -11.362232208251953 + ], + [ + "▁Möglichkeit", + -11.362369537353516 + ], + [ + "▁ceremony", + -11.362370491027832 + ], + [ + "rai", + -11.36237621307373 + ], + [ + "▁Bor", + -11.362709045410156 + ], + [ + "▁Below", + -11.362750053405762 + ], + [ + "4)", + -11.362759590148926 + ], + [ + "▁Field", + -11.362833023071289 + ], + [ + "wear", + -11.362935066223145 + ], + [ + "motion", + -11.362948417663574 + ], + [ + "print", + -11.363311767578125 + ], + [ + "game", + -11.363360404968262 + ], + [ + "▁Irish", + -11.363458633422852 + ], + [ + "▁Las", + -11.363458633422852 + ], + 
[ + "Among", + -11.363570213317871 + ], + [ + "atori", + -11.363580703735352 + ], + [ + "▁ajuns", + -11.363837242126465 + ], + [ + "▁alive", + -11.363860130310059 + ], + [ + "▁retour", + -11.363900184631348 + ], + [ + "▁smoke", + -11.3640775680542 + ], + [ + "▁math", + -11.364285469055176 + ], + [ + "▁Ye", + -11.364337921142578 + ], + [ + "▁Denn", + -11.36436653137207 + ], + [ + "▁1995", + -11.364412307739258 + ], + [ + "▁bani", + -11.364644050598145 + ], + [ + "raz", + -11.364998817443848 + ], + [ + "world", + -11.365026473999023 + ], + [ + "▁engines", + -11.365140914916992 + ], + [ + "nehmen", + -11.365192413330078 + ], + [ + "stor", + -11.365328788757324 + ], + [ + "▁interpret", + -11.365403175354004 + ], + [ + "▁Ven", + -11.365489959716797 + ], + [ + "▁cotton", + -11.365622520446777 + ], + [ + "▁represented", + -11.366004943847656 + ], + [ + "▁fabulous", + -11.366166114807129 + ], + [ + "▁gender", + -11.366301536560059 + ], + [ + "Mar", + -11.366668701171875 + ], + [ + "vic", + -11.366991996765137 + ], + [ + "▁newsletter", + -11.367432594299316 + ], + [ + "sburg", + -11.367574691772461 + ], + [ + "pond", + -11.36838436126709 + ], + [ + "▁Carl", + -11.368454933166504 + ], + [ + "▁bunch", + -11.368714332580566 + ], + [ + "▁tower", + -11.368847846984863 + ], + [ + "▁trigger", + -11.368976593017578 + ], + [ + "▁explanation", + -11.369091033935547 + ], + [ + "Man", + -11.369114875793457 + ], + [ + "iunea", + -11.369168281555176 + ], + [ + "▁announcement", + -11.369492530822754 + ], + [ + "▁seeds", + -11.36952018737793 + ], + [ + "▁shell", + -11.369865417480469 + ], + [ + "▁Working", + -11.36989688873291 + ], + [ + "viz", + -11.370267868041992 + ], + [ + "▁Simply", + -11.370329856872559 + ], + [ + "sub", + -11.37037181854248 + ], + [ + "▁Village", + -11.37060832977295 + ], + [ + "▁falling", + -11.370742797851562 + ], + [ + "▁fits", + -11.37084674835205 + ], + [ + "▁wichtig", + -11.37088394165039 + ], + [ + "▁Down", + -11.37108039855957 + ], + [ + "bble", + -11.371573448181152 + ], + [ + "▁Orange", + -11.37165641784668 + ], + [ + "promoting", + -11.371932029724121 + ], + [ + "▁rapidly", + -11.37217903137207 + ], + [ + "▁translation", + -11.372330665588379 + ], + [ + "nig", + -11.3723726272583 + ], + [ + "fusion", + -11.37240982055664 + ], + [ + "kosten", + -11.372611045837402 + ], + [ + "2)", + -11.372783660888672 + ], + [ + "▁Express", + -11.372958183288574 + ], + [ + "▁Sw", + -11.373003959655762 + ], + [ + "▁frequency", + -11.373086929321289 + ], + [ + "▁diversity", + -11.373348236083984 + ], + [ + "MT", + -11.373452186584473 + ], + [ + "▁bekannt", + -11.373530387878418 + ], + [ + "lion", + -11.373871803283691 + ], + [ + "▁cop", + -11.37393856048584 + ], + [ + "▁Customer", + -11.374072074890137 + ], + [ + "▁demands", + -11.374427795410156 + ], + [ + "▁corn", + -11.374516487121582 + ], + [ + "▁Hamburg", + -11.374551773071289 + ], + [ + "SD", + -11.374628067016602 + ], + [ + "▁Rome", + -11.374677658081055 + ], + [ + "▁Pur", + -11.374750137329102 + ], + [ + "▁stamp", + -11.374885559082031 + ], + [ + "▁grateful", + -11.374967575073242 + ], + [ + "RM", + -11.37511157989502 + ], + [ + "▁Pl", + -11.37511920928955 + ], + [ + "▁Tele", + -11.375154495239258 + ], + [ + "▁plugin", + -11.375492095947266 + ], + [ + "▁maxim", + -11.375675201416016 + ], + [ + "▁Hoch", + -11.37574577331543 + ], + [ + "igung", + -11.375823020935059 + ], + [ + "▁Entwicklung", + -11.375858306884766 + ], + [ + "▁File", + -11.375931739807129 + ], + [ + "▁Eastern", + -11.376070022583008 + ], + [ + "▁scrap", + -11.376331329345703 
+ ], + [ + "▁acquired", + -11.376338958740234 + ], + [ + "sau", + -11.376364707946777 + ], + [ + "▁Klein", + -11.376452445983887 + ], + [ + "▁milioane", + -11.376492500305176 + ], + [ + "▁Stand", + -11.376693725585938 + ], + [ + "▁childhood", + -11.37671184539795 + ], + [ + "▁artificial", + -11.376752853393555 + ], + [ + "▁substantial", + -11.376851081848145 + ], + [ + "druck", + -11.377315521240234 + ], + [ + "▁Kra", + -11.377562522888184 + ], + [ + "▁performances", + -11.377645492553711 + ], + [ + "▁row", + -11.377824783325195 + ], + [ + "NT", + -11.377899169921875 + ], + [ + "mod", + -11.377904891967773 + ], + [ + "remained", + -11.378399848937988 + ], + [ + "▁nimic", + -11.378462791442871 + ], + [ + "▁Limited", + -11.378555297851562 + ], + [ + "▁cookie", + -11.378718376159668 + ], + [ + "▁retain", + -11.378816604614258 + ], + [ + "▁600", + -11.379144668579102 + ], + [ + "▁eigene", + -11.379158020019531 + ], + [ + "▁tune", + -11.379209518432617 + ], + [ + "NS", + -11.379256248474121 + ], + [ + "▁dad", + -11.379284858703613 + ], + [ + "Moreover", + -11.379415512084961 + ], + [ + "ès", + -11.379434585571289 + ], + [ + "▁worship", + -11.379439353942871 + ], + [ + "▁Material", + -11.3794584274292 + ], + [ + "▁verb", + -11.379528045654297 + ], + [ + "ziehen", + -11.37957763671875 + ], + [ + "lton", + -11.379645347595215 + ], + [ + "▁boot", + -11.379982948303223 + ], + [ + "plo", + -11.380118370056152 + ], + [ + "CF", + -11.380212783813477 + ], + [ + "GM", + -11.380215644836426 + ], + [ + "▁Mix", + -11.38046932220459 + ], + [ + "▁Front", + -11.380474090576172 + ], + [ + "▁repairs", + -11.380655288696289 + ], + [ + "▁proportion", + -11.381068229675293 + ], + [ + "▁habit", + -11.381132125854492 + ], + [ + "▁hide", + -11.38156509399414 + ], + [ + "focusing", + -11.381707191467285 + ], + [ + "▁Annual", + -11.381717681884766 + ], + [ + "▁twin", + -11.3817777633667 + ], + [ + "▁acord", + -11.381780624389648 + ], + [ + "ehr", + -11.381814956665039 + ], + [ + "month", + -11.382303237915039 + ], + [ + "venir", + -11.382535934448242 + ], + [ + "Or", + -11.38254165649414 + ], + [ + "awa", + -11.382600784301758 + ], + [ + "lass", + -11.382735252380371 + ], + [ + "ffe", + -11.383048057556152 + ], + [ + "iți", + -11.383074760437012 + ], + [ + "NO", + -11.3831148147583 + ], + [ + "▁scope", + -11.383295059204102 + ], + [ + "▁lowest", + -11.383527755737305 + ], + [ + "▁afraid", + -11.383572578430176 + ], + [ + "▁subjects", + -11.383578300476074 + ], + [ + "▁templates", + -11.383586883544922 + ], + [ + "▁jos", + -11.383604049682617 + ], + [ + "DM", + -11.383687973022461 + ], + [ + "ensemble", + -11.383792877197266 + ], + [ + "▁Ski", + -11.383941650390625 + ], + [ + "DP", + -11.384099960327148 + ], + [ + "▁grip", + -11.384171485900879 + ], + [ + "2-", + -11.38436222076416 + ], + [ + "▁sécurité", + -11.384743690490723 + ], + [ + "▁mono", + -11.384749412536621 + ], + [ + "▁controls", + -11.384854316711426 + ], + [ + "SV", + -11.384879112243652 + ], + [ + "install", + -11.384970664978027 + ], + [ + "berry", + -11.385042190551758 + ], + [ + "nial", + -11.385120391845703 + ], + [ + "shed", + -11.385462760925293 + ], + [ + "▁celle", + -11.385830879211426 + ], + [ + "FR", + -11.385936737060547 + ], + [ + "äng", + -11.385950088500977 + ], + [ + "▁gaz", + -11.385984420776367 + ], + [ + "êt", + -11.386184692382812 + ], + [ + "▁viewing", + -11.386412620544434 + ], + [ + "▁asigura", + -11.386524200439453 + ], + [ + "bling", + -11.3865327835083 + ], + [ + "master", + -11.386919975280762 + ], + [ + "▁Fin", + 
-11.387160301208496 + ], + [ + "VC", + -11.387365341186523 + ], + [ + "▁patent", + -11.387715339660645 + ], + [ + "▁Clean", + -11.38773250579834 + ], + [ + "▁1970", + -11.387789726257324 + ], + [ + "▁Char", + -11.387971878051758 + ], + [ + "thi", + -11.388010025024414 + ], + [ + "bli", + -11.388141632080078 + ], + [ + "▁haut", + -11.388307571411133 + ], + [ + "tica", + -11.38836669921875 + ], + [ + "▁venit", + -11.388578414916992 + ], + [ + "▁compatible", + -11.388678550720215 + ], + [ + "▁hanging", + -11.388690948486328 + ], + [ + "UN", + -11.388842582702637 + ], + [ + "▁forth", + -11.388911247253418 + ], + [ + "▁painted", + -11.388912200927734 + ], + [ + "lip", + -11.389031410217285 + ], + [ + "▁deeply", + -11.389089584350586 + ], + [ + "▁participating", + -11.389242172241211 + ], + [ + "▁Iran", + -11.38968276977539 + ], + [ + "▁conventional", + -11.389769554138184 + ], + [ + "ARE", + -11.38985824584961 + ], + [ + "▁accuracy", + -11.389896392822266 + ], + [ + "▁Familie", + -11.389955520629883 + ], + [ + "▁Dir", + -11.39001178741455 + ], + [ + "▁gehen", + -11.390127182006836 + ], + [ + "▁moderne", + -11.39022159576416 + ], + [ + "▁Iraq", + -11.39050579071045 + ], + [ + "▁vente", + -11.390582084655762 + ], + [ + "▁Donald", + -11.390998840332031 + ], + [ + "▁passer", + -11.391051292419434 + ], + [ + "▁mehrere", + -11.391267776489258 + ], + [ + "▁Everything", + -11.391291618347168 + ], + [ + "▁studied", + -11.391307830810547 + ], + [ + "▁acquire", + -11.391312599182129 + ], + [ + "für", + -11.391477584838867 + ], + [ + "▁gal", + -11.391502380371094 + ], + [ + "▁headed", + -11.391809463500977 + ], + [ + "▁screening", + -11.391865730285645 + ], + [ + "▁findings", + -11.392303466796875 + ], + [ + "▁nutrition", + -11.392305374145508 + ], + [ + "▁Secretary", + -11.392308235168457 + ], + [ + "duct", + -11.392431259155273 + ], + [ + "born", + -11.392436027526855 + ], + [ + "«", + -11.39261531829834 + ], + [ + "▁statistics", + -11.392616271972656 + ], + [ + "▁Sydney", + -11.392800331115723 + ], + [ + "▁Prof", + -11.392829895019531 + ], + [ + "▁dialogue", + -11.39327621459961 + ], + [ + "▁gather", + -11.393425941467285 + ], + [ + "valu", + -11.393746376037598 + ], + [ + "▁currency", + -11.394073486328125 + ], + [ + "▁Kat", + -11.394092559814453 + ], + [ + "gotten", + -11.394189834594727 + ], + [ + "main", + -11.39432144165039 + ], + [ + "▁coin", + -11.394340515136719 + ], + [ + "▁Nick", + -11.394380569458008 + ], + [ + "vă", + -11.394658088684082 + ], + [ + "▁Victoria", + -11.394832611083984 + ], + [ + "▁conclusion", + -11.3949613571167 + ], + [ + "▁lemon", + -11.394998550415039 + ], + [ + "▁Article", + -11.39516830444336 + ], + [ + "▁necesar", + -11.39516830444336 + ], + [ + "mag", + -11.395180702209473 + ], + [ + "▁riding", + -11.39537239074707 + ], + [ + "▁Eli", + -11.395599365234375 + ], + [ + "▁cord", + -11.395635604858398 + ], + [ + "wä", + -11.39572811126709 + ], + [ + "ußerdem", + -11.395737648010254 + ], + [ + "▁Bed", + -11.395759582519531 + ], + [ + "▁layers", + -11.395833015441895 + ], + [ + "▁harder", + -11.395975112915039 + ], + [ + "▁processor", + -11.396040916442871 + ], + [ + "▁Ils", + -11.39613151550293 + ], + [ + "▁Edition", + -11.39615535736084 + ], + [ + "▁Link", + -11.396393775939941 + ], + [ + "éré", + -11.396461486816406 + ], + [ + "▁nume", + -11.396576881408691 + ], + [ + "▁Boy", + -11.39659595489502 + ], + [ + "▁equally", + -11.396646499633789 + ], + [ + "▁Regel", + -11.397119522094727 + ], + [ + "▁hopes", + -11.397185325622559 + ], + [ + "odor", + -11.397311210632324 + ], + 
[ + "▁initially", + -11.397430419921875 + ], + [ + "▁$4", + -11.3974609375 + ], + [ + "▁exemplu", + -11.397537231445312 + ], + [ + "▁vari", + -11.397565841674805 + ], + [ + "schl", + -11.397698402404785 + ], + [ + "▁southern", + -11.39809799194336 + ], + [ + "▁mein", + -11.39818000793457 + ], + [ + "▁1994", + -11.398300170898438 + ], + [ + "▁importantly", + -11.398401260375977 + ], + [ + "▁succes", + -11.398526191711426 + ], + [ + "▁developer", + -11.398598670959473 + ], + [ + "▁lips", + -11.39889144897461 + ], + [ + "▁attitude", + -11.39900016784668 + ], + [ + "▁Age", + -11.399541854858398 + ], + [ + "▁corps", + -11.399713516235352 + ], + [ + "▁clicking", + -11.39976978302002 + ], + [ + "▁putem", + -11.399832725524902 + ], + [ + "▁journée", + -11.40003776550293 + ], + [ + "boy", + -11.4002103805542 + ], + [ + "▁injured", + -11.40028190612793 + ], + [ + "▁watched", + -11.400433540344238 + ], + [ + "▁flights", + -11.40079116821289 + ], + [ + "turn", + -11.400980949401855 + ], + [ + "▁stainless", + -11.401562690734863 + ], + [ + "▁besondere", + -11.40156364440918 + ], + [ + "▁Tur", + -11.401596069335938 + ], + [ + "▁hiring", + -11.401650428771973 + ], + [ + "▁roads", + -11.401727676391602 + ], + [ + "ificat", + -11.401785850524902 + ], + [ + "▁Flor", + -11.402045249938965 + ], + [ + "▁puternic", + -11.402215003967285 + ], + [ + "▁unexpected", + -11.40223503112793 + ], + [ + "▁Est", + -11.40238094329834 + ], + [ + "▁adopted", + -11.40253734588623 + ], + [ + "▁Fox", + -11.402647972106934 + ], + [ + "▁contributions", + -11.402870178222656 + ], + [ + "sec", + -11.402968406677246 + ], + [ + "IO", + -11.403059959411621 + ], + [ + "▁santé", + -11.403432846069336 + ], + [ + "▁Tree", + -11.403763771057129 + ], + [ + "▁scurt", + -11.40381908416748 + ], + [ + "▁Products", + -11.403848648071289 + ], + [ + "▁forecast", + -11.403998374938965 + ], + [ + "▁actor", + -11.404143333435059 + ], + [ + "▁Gallery", + -11.404149055480957 + ], + [ + "▁continuous", + -11.404163360595703 + ], + [ + "▁Hat", + -11.404291152954102 + ], + [ + "▁slip", + -11.404501914978027 + ], + [ + "9%", + -11.404960632324219 + ], + [ + "▁depression", + -11.405043601989746 + ], + [ + "UI", + -11.405229568481445 + ], + [ + "abile", + -11.405648231506348 + ], + [ + "▁merit", + -11.405671119689941 + ], + [ + "▁Fer", + -11.405805587768555 + ], + [ + "▁robot", + -11.405888557434082 + ], + [ + "▁gel", + -11.40589427947998 + ], + [ + "▁gentle", + -11.406017303466797 + ], + [ + "▁wanting", + -11.406071662902832 + ], + [ + "▁understood", + -11.406157493591309 + ], + [ + "▁terrain", + -11.406161308288574 + ], + [ + "▁associate", + -11.406176567077637 + ], + [ + "▁discussions", + -11.40632152557373 + ], + [ + "▁Job", + -11.406365394592285 + ], + [ + "spec", + -11.406440734863281 + ], + [ + "Dabei", + -11.406475067138672 + ], + [ + "etic", + -11.406517028808594 + ], + [ + "gol", + -11.40654468536377 + ], + [ + "▁20%", + -11.406584739685059 + ], + [ + "▁grup", + -11.406606674194336 + ], + [ + "▁Doctor", + -11.406813621520996 + ], + [ + "verse", + -11.407246589660645 + ], + [ + "▁victim", + -11.407258033752441 + ], + [ + "ță", + -11.407302856445312 + ], + [ + "▁scores", + -11.407544136047363 + ], + [ + "▁Policy", + -11.407634735107422 + ], + [ + "▁Anna", + -11.407736778259277 + ], + [ + "IV", + -11.407804489135742 + ], + [ + "▁mineral", + -11.408202171325684 + ], + [ + "live", + -11.40821647644043 + ], + [ + "▁grey", + -11.408368110656738 + ], + [ + "struct", + -11.40852165222168 + ], + [ + "▁emails", + -11.408738136291504 + ], + [ + "▁anymore", + 
-11.409114837646484 + ], + [ + "▁productivity", + -11.409387588500977 + ], + [ + "▁Dark", + -11.409463882446289 + ], + [ + "▁neither", + -11.409481048583984 + ], + [ + "▁quotes", + -11.409611701965332 + ], + [ + "LS", + -11.410368919372559 + ], + [ + "▁Arizona", + -11.41040325164795 + ], + [ + "night", + -11.410497665405273 + ], + [ + "élé", + -11.411019325256348 + ], + [ + "▁assigned", + -11.411153793334961 + ], + [ + "▁satellite", + -11.411328315734863 + ], + [ + "▁stability", + -11.411665916442871 + ], + [ + "▁networking", + -11.41172981262207 + ], + [ + "▁Transport", + -11.411847114562988 + ], + [ + "▁persons", + -11.411856651306152 + ], + [ + "fund", + -11.412043571472168 + ], + [ + "▁pratique", + -11.41213321685791 + ], + [ + "▁inca", + -11.412134170532227 + ], + [ + "iller", + -11.412349700927734 + ], + [ + "▁packed", + -11.41239070892334 + ], + [ + "▁Vegas", + -11.412484169006348 + ], + [ + "▁offre", + -11.412493705749512 + ], + [ + "▁Bin", + -11.412518501281738 + ], + [ + "stop", + -11.412609100341797 + ], + [ + "mini", + -11.412860870361328 + ], + [ + "▁jam", + -11.412877082824707 + ], + [ + "cord", + -11.41289234161377 + ], + [ + "▁Beautiful", + -11.412996292114258 + ], + [ + "▁trash", + -11.413012504577637 + ], + [ + "▁wise", + -11.413092613220215 + ], + [ + "▁accounting", + -11.413178443908691 + ], + [ + "▁différents", + -11.413182258605957 + ], + [ + "▁stil", + -11.413214683532715 + ], + [ + "suit", + -11.413951873779297 + ], + [ + "▁vier", + -11.414209365844727 + ], + [ + "▁permis", + -11.414224624633789 + ], + [ + "flow", + -11.414238929748535 + ], + [ + "▁col", + -11.414749145507812 + ], + [ + "ected", + -11.414960861206055 + ], + [ + "▁singer", + -11.414999008178711 + ], + [ + "▁GmbH", + -11.415038108825684 + ], + [ + "tics", + -11.415094375610352 + ], + [ + "▁ser", + -11.415159225463867 + ], + [ + "On", + -11.415315628051758 + ], + [ + "▁insights", + -11.415605545043945 + ], + [ + "BB", + -11.415946960449219 + ], + [ + "▁differ", + -11.415959358215332 + ], + [ + "▁Glass", + -11.416131973266602 + ], + [ + "▁Six", + -11.416482925415039 + ], + [ + "▁subscription", + -11.416584968566895 + ], + [ + "BC", + -11.416606903076172 + ], + [ + "▁returning", + -11.416664123535156 + ], + [ + "kleinen", + -11.416693687438965 + ], + [ + "▁advantages", + -11.416747093200684 + ], + [ + "omme", + -11.416852951049805 + ], + [ + "lus", + -11.417071342468262 + ], + [ + "now", + -11.417141914367676 + ], + [ + "▁Pack", + -11.417253494262695 + ], + [ + "▁leak", + -11.417333602905273 + ], + [ + "▁muscles", + -11.41748332977295 + ], + [ + "▁davon", + -11.417492866516113 + ], + [ + "mph", + -11.417858123779297 + ], + [ + "▁temple", + -11.417868614196777 + ], + [ + "▁Après", + -11.417901039123535 + ], + [ + "▁Illinois", + -11.41801643371582 + ], + [ + "▁variable", + -11.418065071105957 + ], + [ + "▁judgment", + -11.418389320373535 + ], + [ + "gran", + -11.41861629486084 + ], + [ + "▁pose", + -11.418621063232422 + ], + [ + "das", + -11.418647766113281 + ], + [ + "ures", + -11.418673515319824 + ], + [ + "▁Championship", + -11.418689727783203 + ], + [ + "ebenfalls", + -11.41872501373291 + ], + [ + "▁hydro", + -11.418753623962402 + ], + [ + "▁angle", + -11.419268608093262 + ], + [ + "▁5-", + -11.41940975189209 + ], + [ + "▁gest", + -11.419547080993652 + ], + [ + "▁Frau", + -11.420233726501465 + ], + [ + "▁knock", + -11.420275688171387 + ], + [ + "FS", + -11.420442581176758 + ], + [ + "spi", + -11.420577049255371 + ], + [ + "▁Regional", + -11.420717239379883 + ], + [ + "lets", + -11.421098709106445 + ], 
+ [ + "▁Date", + -11.42115592956543 + ], + [ + "▁Finance", + -11.421211242675781 + ], + [ + "▁Dann", + -11.421320915222168 + ], + [ + "Star", + -11.421380043029785 + ], + [ + "▁Creek", + -11.421393394470215 + ], + [ + "▁fu", + -11.421648979187012 + ], + [ + "wohn", + -11.422141075134277 + ], + [ + "▁anniversary", + -11.422219276428223 + ], + [ + "▁investments", + -11.422292709350586 + ], + [ + "▁universal", + -11.422601699829102 + ], + [ + "▁pit", + -11.422745704650879 + ], + [ + "ște", + -11.422784805297852 + ], + [ + "▁lab", + -11.422822952270508 + ], + [ + "dienst", + -11.422884941101074 + ], + [ + "▁pal", + -11.422889709472656 + ], + [ + "▁graphic", + -11.42289924621582 + ], + [ + "▁bearing", + -11.422900199890137 + ], + [ + "▁stylish", + -11.423087120056152 + ], + [ + "▁mé", + -11.42319393157959 + ], + [ + "▁există", + -11.42326545715332 + ], + [ + "▁découvrir", + -11.423477172851562 + ], + [ + "comp", + -11.423606872558594 + ], + [ + "ridge", + -11.423667907714844 + ], + [ + "▁heads", + -11.423765182495117 + ], + [ + "▁consequences", + -11.423835754394531 + ], + [ + "self", + -11.423842430114746 + ], + [ + "fried", + -11.423870086669922 + ], + [ + "▁inventory", + -11.424199104309082 + ], + [ + "▁strip", + -11.42422866821289 + ], + [ + "▁Civil", + -11.42424488067627 + ], + [ + "bell", + -11.424307823181152 + ], + [ + "▁neben", + -11.424444198608398 + ], + [ + "▁Perfect", + -11.424470901489258 + ], + [ + "▁Notre", + -11.424478530883789 + ], + [ + "▁fraud", + -11.424630165100098 + ], + [ + "▁employers", + -11.424656867980957 + ], + [ + "▁Jackson", + -11.42470645904541 + ], + [ + "▁probleme", + -11.424915313720703 + ], + [ + "▁richtig", + -11.424957275390625 + ], + [ + "▁Method", + -11.425009727478027 + ], + [ + "▁tired", + -11.425010681152344 + ], + [ + "dies", + -11.425031661987305 + ], + [ + "▁Number", + -11.425315856933594 + ], + [ + "rland", + -11.425652503967285 + ], + [ + "▁latter", + -11.426031112670898 + ], + [ + "rendre", + -11.426064491271973 + ], + [ + "▁cameras", + -11.426095962524414 + ], + [ + "▁euch", + -11.426630020141602 + ], + [ + "▁Description", + -11.427038192749023 + ], + [ + "Spec", + -11.427061080932617 + ], + [ + "▁mile", + -11.427437782287598 + ], + [ + "▁Challenge", + -11.427474021911621 + ], + [ + "▁Solutions", + -11.427504539489746 + ], + [ + "▁trusted", + -11.427509307861328 + ], + [ + "▁einge", + -11.427515029907227 + ], + [ + "rück", + -11.427528381347656 + ], + [ + "▁Ober", + -11.427635192871094 + ], + [ + "kes", + -11.42764949798584 + ], + [ + "▁Log", + -11.427684783935547 + ], + [ + "▁dessert", + -11.427776336669922 + ], + [ + "▁murder", + -11.428033828735352 + ], + [ + "▁1/2", + -11.428311347961426 + ], + [ + "▁Provide", + -11.42872142791748 + ], + [ + "nivelul", + -11.428800582885742 + ], + [ + "nici", + -11.428818702697754 + ], + [ + "▁observe", + -11.42889404296875 + ], + [ + "▁prescription", + -11.429162979125977 + ], + [ + "▁Sau", + -11.429170608520508 + ], + [ + "▁genuine", + -11.42919635772705 + ], + [ + "▁operated", + -11.429231643676758 + ], + [ + "▁generous", + -11.429267883300781 + ], + [ + "▁weapons", + -11.429458618164062 + ], + [ + "▁belief", + -11.4295015335083 + ], + [ + "▁consum", + -11.429584503173828 + ], + [ + "▁unknown", + -11.430116653442383 + ], + [ + "deoarece", + -11.430135726928711 + ], + [ + "Art", + -11.430147171020508 + ], + [ + "▁kurz", + -11.430183410644531 + ], + [ + "▁Gut", + -11.430258750915527 + ], + [ + "▁medication", + -11.430522918701172 + ], + [ + "▁Mau", + -11.43058967590332 + ], + [ + "▁divorce", + 
-11.430678367614746 + ], + [ + "▁claimed", + -11.430811882019043 + ], + [ + "halten", + -11.430848121643066 + ], + [ + "▁Cons", + -11.43089485168457 + ], + [ + "▁operational", + -11.430975914001465 + ], + [ + "▁Hong", + -11.431081771850586 + ], + [ + "VI", + -11.431143760681152 + ], + [ + "▁Blick", + -11.431485176086426 + ], + [ + "▁lamp", + -11.431706428527832 + ], + [ + "pati", + -11.431853294372559 + ], + [ + "▁4-", + -11.43192195892334 + ], + [ + "▁interven", + -11.431964874267578 + ], + [ + "ques", + -11.43201732635498 + ], + [ + "▁Talk", + -11.432096481323242 + ], + [ + "▁zeigt", + -11.432318687438965 + ], + [ + "▁targeted", + -11.432390213012695 + ], + [ + "round", + -11.432640075683594 + ], + [ + "enfant", + -11.432748794555664 + ], + [ + "▁Reg", + -11.432836532592773 + ], + [ + "▁instruments", + -11.432872772216797 + ], + [ + "▁calcul", + -11.433363914489746 + ], + [ + "▁Henry", + -11.4335298538208 + ], + [ + "▁Cla", + -11.433616638183594 + ], + [ + "▁rack", + -11.433661460876465 + ], + [ + "sehen", + -11.43375301361084 + ], + [ + "▁ending", + -11.433754920959473 + ], + [ + "▁resolve", + -11.434130668640137 + ], + [ + "▁advise", + -11.434178352355957 + ], + [ + "▁sociale", + -11.434386253356934 + ], + [ + "▁cabin", + -11.434536933898926 + ], + [ + "▁involve", + -11.43480396270752 + ], + [ + "gă", + -11.434889793395996 + ], + [ + "▁automat", + -11.435132026672363 + ], + [ + "▁consultant", + -11.435258865356445 + ], + [ + "Bu", + -11.435370445251465 + ], + [ + "▁safely", + -11.435466766357422 + ], + [ + "état", + -11.435478210449219 + ], + [ + "▁pros", + -11.435657501220703 + ], + [ + "▁lies", + -11.435659408569336 + ], + [ + "▁Brian", + -11.435914993286133 + ], + [ + "▁talented", + -11.435954093933105 + ], + [ + "pus", + -11.43599796295166 + ], + [ + "▁hub", + -11.436060905456543 + ], + [ + "▁Ji", + -11.436066627502441 + ], + [ + "▁sought", + -11.436102867126465 + ], + [ + "▁energie", + -11.436210632324219 + ], + [ + "▁möchten", + -11.43634033203125 + ], + [ + "▁11.", + -11.436558723449707 + ], + [ + "▁Kong", + -11.436662673950195 + ], + [ + "▁grave", + -11.43666934967041 + ], + [ + "▁lists", + -11.436800956726074 + ], + [ + "tati", + -11.436809539794922 + ], + [ + "verschiedenen", + -11.43692398071289 + ], + [ + "dam", + -11.437061309814453 + ], + [ + "▁charity", + -11.437249183654785 + ], + [ + "▁breaking", + -11.43735122680664 + ], + [ + "kins", + -11.43747329711914 + ], + [ + "▁könnte", + -11.437517166137695 + ], + [ + "▁appointed", + -11.437532424926758 + ], + [ + "roc", + -11.4376859664917 + ], + [ + "▁Senate", + -11.437979698181152 + ], + [ + "wit", + -11.438002586364746 + ], + [ + "▁emerging", + -11.438162803649902 + ], + [ + "▁année", + -11.438288688659668 + ], + [ + "▁Cool", + -11.438365936279297 + ], + [ + "▁sensor", + -11.43842887878418 + ], + [ + "How", + -11.438488960266113 + ], + [ + "▁Ryan", + -11.438626289367676 + ], + [ + "▁computers", + -11.43871784210205 + ], + [ + "▁fault", + -11.4388427734375 + ], + [ + "▁présent", + -11.438843727111816 + ], + [ + "ulation", + -11.439149856567383 + ], + [ + "▁stir", + -11.439348220825195 + ], + [ + "lauf", + -11.439703941345215 + ], + [ + "▁AI", + -11.440389633178711 + ], + [ + "▁Bri", + -11.440438270568848 + ], + [ + "▁bain", + -11.441011428833008 + ], + [ + "▁5,", + -11.441287994384766 + ], + [ + "schein", + -11.44157886505127 + ], + [ + "▁weiß", + -11.441596031188965 + ], + [ + "▁possibilities", + -11.44235610961914 + ], + [ + "gur", + -11.442413330078125 + ], + [ + "▁hinter", + -11.442647933959961 + ], + [ + "Innen", + 
+      -11.442755699157715
+    ],
+    [
+      "▁vorba",
+      -11.442992210388184
+    ],
+    [
+      "fahren",
+      -11.443008422851562
+    ],
+    [
+      "▁Cell",
+      -11.443072319030762
+    ],
[… ≈1,700 further [token, log-probability] vocabulary entries elided: subword pieces in English, German, French, and Romanian, with scores descending from −11.443 to −11.644 across this span …]
+    [
+ "▁moisture", + -11.644058227539062 + ], + [ + "limb", + -11.6441011428833 + ], + [ + "entre", + -11.644328117370605 + ], + [ + "▁SD", + -11.644330978393555 + ], + [ + "▁BC", + -11.644539833068848 + ], + [ + "▁selecting", + -11.6445951461792 + ], + [ + "achieving", + -11.644673347473145 + ], + [ + "info", + -11.644735336303711 + ], + [ + "▁membres", + -11.644983291625977 + ], + [ + "▁shoe", + -11.645014762878418 + ], + [ + "▁locate", + -11.645065307617188 + ], + [ + "▁assignment", + -11.645085334777832 + ], + [ + "lern", + -11.645283699035645 + ], + [ + "▁defeat", + -11.645406723022461 + ], + [ + "▁endless", + -11.645458221435547 + ], + [ + "▁Stunden", + -11.645523071289062 + ], + [ + "то", + -11.645561218261719 + ], + [ + "▁mur", + -11.645586013793945 + ], + [ + "▁wissen", + -11.645844459533691 + ], + [ + "aime", + -11.645915031433105 + ], + [ + "1-2", + -11.646056175231934 + ], + [ + "▁femme", + -11.646212577819824 + ], + [ + "robe", + -11.646468162536621 + ], + [ + "▁embrace", + -11.64647102355957 + ], + [ + "▁baseball", + -11.646614074707031 + ], + [ + "▁hunting", + -11.64663314819336 + ], + [ + "betrieb", + -11.646790504455566 + ], + [ + "▁gardens", + -11.647045135498047 + ], + [ + "▁risc", + -11.647096633911133 + ], + [ + "▁Cri", + -11.647263526916504 + ], + [ + "best", + -11.647506713867188 + ], + [ + "▁Audio", + -11.647621154785156 + ], + [ + "▁intens", + -11.647659301757812 + ], + [ + "▁Round", + -11.647744178771973 + ], + [ + "▁fireplace", + -11.6478271484375 + ], + [ + "▁dozen", + -11.647912979125977 + ], + [ + "▁hospitals", + -11.64802360534668 + ], + [ + "▁profits", + -11.648076057434082 + ], + [ + "▁Mail", + -11.64811897277832 + ], + [ + "obtenir", + -11.648191452026367 + ], + [ + "▁Ross", + -11.648241996765137 + ], + [ + "bun", + -11.648573875427246 + ], + [ + "polar", + -11.648688316345215 + ], + [ + "▁reflection", + -11.648873329162598 + ], + [ + "▁fut", + -11.648992538452148 + ], + [ + "phon", + -11.649017333984375 + ], + [ + "deck", + -11.649094581604004 + ], + [ + "renowned", + -11.649188041687012 + ], + [ + "▁cate", + -11.649308204650879 + ], + [ + "▁decorative", + -11.6494722366333 + ], + [ + "ieri", + -11.64957332611084 + ], + [ + "▁Tap", + -11.64958381652832 + ], + [ + "▁Dallas", + -11.649600982666016 + ], + [ + "rik", + -11.649665832519531 + ], + [ + "▁pied", + -11.649727821350098 + ], + [ + "rés", + -11.649821281433105 + ], + [ + "ppy", + -11.650137901306152 + ], + [ + "▁bitte", + -11.650188446044922 + ], + [ + "▁cave", + -11.650257110595703 + ], + [ + "▁rescue", + -11.650559425354004 + ], + [ + "▁Hilfe", + -11.650714874267578 + ], + [ + "▁Jason", + -11.650786399841309 + ], + [ + "▁Nations", + -11.650838851928711 + ], + [ + "▁profil", + -11.650938987731934 + ], + [ + "▁Atlantic", + -11.651105880737305 + ], + [ + "▁rub", + -11.651126861572266 + ], + [ + "▁collaborative", + -11.65113353729248 + ], + [ + "étude", + -11.651150703430176 + ], + [ + "▁Workshop", + -11.651389122009277 + ], + [ + "nez", + -11.651628494262695 + ], + [ + "▁chacun", + -11.651714324951172 + ], + [ + "▁Too", + -11.65211296081543 + ], + [ + "App", + -11.652313232421875 + ], + [ + "▁conseil", + -11.652399063110352 + ], + [ + "▁signals", + -11.652474403381348 + ], + [ + "▁Dead", + -11.652497291564941 + ], + [ + "▁Austria", + -11.652522087097168 + ], + [ + "▁slots", + -11.652579307556152 + ], + [ + "▁Dies", + -11.652623176574707 + ], + [ + "raj", + -11.652629852294922 + ], + [ + "stick", + -11.652833938598633 + ], + [ + "▁jaw", + -11.653030395507812 + ], + [ + "▁lounge", + -11.653059005737305 + ], 
+ [ + "curi", + -11.653359413146973 + ], + [ + "nem", + -11.653456687927246 + ], + [ + "▁Cluj", + -11.653512954711914 + ], + [ + "▁rapide", + -11.653584480285645 + ], + [ + "▁companion", + -11.653716087341309 + ], + [ + "▁WE", + -11.653879165649414 + ], + [ + "▁bord", + -11.65389347076416 + ], + [ + "ody", + -11.654045104980469 + ], + [ + "gru", + -11.654057502746582 + ], + [ + "▁46", + -11.654410362243652 + ], + [ + "kra", + -11.654717445373535 + ], + [ + "eller", + -11.65477180480957 + ], + [ + "naire", + -11.65511703491211 + ], + [ + "hose", + -11.655253410339355 + ], + [ + "▁Atlanta", + -11.655254364013672 + ], + [ + "▁violent", + -11.65530776977539 + ], + [ + "▁imagination", + -11.655352592468262 + ], + [ + "▁reward", + -11.655389785766602 + ], + [ + "▁Korean", + -11.655441284179688 + ], + [ + "▁branches", + -11.655501365661621 + ], + [ + "▁GPS", + -11.655625343322754 + ], + [ + "glo", + -11.655633926391602 + ], + [ + "▁condo", + -11.655705451965332 + ], + [ + "▁Investment", + -11.655765533447266 + ], + [ + "▁involvement", + -11.655813217163086 + ], + [ + "▁trap", + -11.655829429626465 + ], + [ + "▁schön", + -11.655872344970703 + ], + [ + "▁ofera", + -11.655933380126953 + ], + [ + "▁unterschiedlich", + -11.65596866607666 + ], + [ + "Net", + -11.655987739562988 + ], + [ + "▁predict", + -11.656113624572754 + ], + [ + "identifying", + -11.656309127807617 + ], + [ + "▁noir", + -11.6566162109375 + ], + [ + "kos", + -11.656816482543945 + ], + [ + "poz", + -11.656816482543945 + ], + [ + "▁11,", + -11.65698528289795 + ], + [ + "▁fitted", + -11.657384872436523 + ], + [ + "MU", + -11.657469749450684 + ], + [ + "TT", + -11.657645225524902 + ], + [ + "▁vrea", + -11.657846450805664 + ], + [ + "▁wound", + -11.657864570617676 + ], + [ + "lac", + -11.657971382141113 + ], + [ + "▁purchases", + -11.658409118652344 + ], + [ + "▁Cape", + -11.65843677520752 + ], + [ + "▁Foto", + -11.658537864685059 + ], + [ + "▁acres", + -11.65865707397461 + ], + [ + "▁nec", + -11.658677101135254 + ], + [ + "▁burning", + -11.659050941467285 + ], + [ + "conf", + -11.659457206726074 + ], + [ + "▁browse", + -11.659486770629883 + ], + [ + "ural", + -11.659762382507324 + ], + [ + "▁Ah", + -11.659841537475586 + ], + [ + "▁stellt", + -11.65992259979248 + ], + [ + "▁ratings", + -11.660012245178223 + ], + [ + "▁Bowl", + -11.660027503967285 + ], + [ + "▁grav", + -11.660289764404297 + ], + [ + "titi", + -11.66048526763916 + ], + [ + "▁prêt", + -11.66075325012207 + ], + [ + "▁fallen", + -11.660818099975586 + ], + [ + "▁nombreuses", + -11.660940170288086 + ], + [ + "train", + -11.660953521728516 + ], + [ + "ène", + -11.661009788513184 + ], + [ + "Aceasta", + -11.661091804504395 + ], + [ + "▁drill", + -11.661421775817871 + ], + [ + "▁Exam", + -11.661477088928223 + ], + [ + "▁Furniture", + -11.661651611328125 + ], + [ + "eanu", + -11.661919593811035 + ], + [ + "étant", + -11.66230297088623 + ], + [ + "sville", + -11.662391662597656 + ], + [ + "▁swim", + -11.662796020507812 + ], + [ + "▁routes", + -11.662826538085938 + ], + [ + "INE", + -11.662860870361328 + ], + [ + "▁Por", + -11.662976264953613 + ], + [ + "ither", + -11.663168907165527 + ], + [ + "▁optim", + -11.663180351257324 + ], + [ + "▁lua", + -11.66331958770752 + ], + [ + "▁myth", + -11.663491249084473 + ], + [ + "▁Bett", + -11.6635103225708 + ], + [ + "chim", + -11.66355037689209 + ], + [ + "▁cyber", + -11.663553237915039 + ], + [ + "▁engineer", + -11.663825035095215 + ], + [ + "▁exploration", + -11.663918495178223 + ], + [ + "arranged", + -11.663973808288574 + ], + [ + "▁aged", 
+ -11.663993835449219 + ], + [ + "▁beau", + -11.664024353027344 + ], + [ + "OUT", + -11.66402530670166 + ], + [ + "▁Minnesota", + -11.664031982421875 + ], + [ + "tress", + -11.664407730102539 + ], + [ + "▁Commercial", + -11.664509773254395 + ], + [ + "▁inspiring", + -11.66462516784668 + ], + [ + "▁Mare", + -11.664725303649902 + ], + [ + "apa", + -11.665140151977539 + ], + [ + "▁ignore", + -11.6651611328125 + ], + [ + "▁gros", + -11.665186882019043 + ], + [ + "▁measurement", + -11.66531753540039 + ], + [ + "ager", + -11.665395736694336 + ], + [ + "intele", + -11.665966987609863 + ], + [ + "▁suspension", + -11.666180610656738 + ], + [ + "▁cultures", + -11.666211128234863 + ], + [ + "▁Wow", + -11.666231155395508 + ], + [ + "▁pushing", + -11.666363716125488 + ], + [ + "▁bands", + -11.666438102722168 + ], + [ + "nage", + -11.666450500488281 + ], + [ + "▁Math", + -11.666515350341797 + ], + [ + "comb", + -11.66658878326416 + ], + [ + "▁créer", + -11.66658878326416 + ], + [ + "▁Lewis", + -11.666685104370117 + ], + [ + "▁VI", + -11.66678524017334 + ], + [ + "emploi", + -11.666791915893555 + ], + [ + "▁elections", + -11.666890144348145 + ], + [ + "▁logic", + -11.666982650756836 + ], + [ + "▁unlike", + -11.667122840881348 + ], + [ + "▁Matthew", + -11.66743278503418 + ], + [ + "▁pă", + -11.667486190795898 + ], + [ + "oxy", + -11.667620658874512 + ], + [ + "équipe", + -11.667717933654785 + ], + [ + "▁worden", + -11.668088912963867 + ], + [ + "dev", + -11.668258666992188 + ], + [ + "▁Massachusetts", + -11.668691635131836 + ], + [ + "▁Return", + -11.668695449829102 + ], + [ + "▁Friends", + -11.66891098022461 + ], + [ + "▁movements", + -11.66894245147705 + ], + [ + "chie", + -11.668964385986328 + ], + [ + "rak", + -11.669017791748047 + ], + [ + "▁Fit", + -11.66904354095459 + ], + [ + "▁copil", + -11.669113159179688 + ], + [ + "iunii", + -11.669188499450684 + ], + [ + "▁intensive", + -11.669234275817871 + ], + [ + "▁rug", + -11.669452667236328 + ], + [ + "lichkeit", + -11.669686317443848 + ], + [ + "kov", + -11.669724464416504 + ], + [ + "▁pense", + -11.66978645324707 + ], + [ + "pop", + -11.66978931427002 + ], + [ + "▁closet", + -11.669865608215332 + ], + [ + "▁prevention", + -11.669920921325684 + ], + [ + "▁Deb", + -11.670256614685059 + ], + [ + "▁devant", + -11.670430183410645 + ], + [ + "▁construit", + -11.670440673828125 + ], + [ + "▁breaks", + -11.67082405090332 + ], + [ + "otic", + -11.670886993408203 + ], + [ + "▁dig", + -11.67088794708252 + ], + [ + "▁près", + -11.670930862426758 + ], + [ + "chte", + -11.671029090881348 + ], + [ + "▁Chat", + -11.671029090881348 + ], + [ + "wel", + -11.671219825744629 + ], + [ + "▁edges", + -11.671272277832031 + ], + [ + "▁keen", + -11.671419143676758 + ], + [ + "▁infant", + -11.671716690063477 + ], + [ + "▁Hills", + -11.6719388961792 + ], + [ + "▁grounds", + -11.671969413757324 + ], + [ + "▁hab", + -11.672039031982422 + ], + [ + "▁Mun", + -11.67215347290039 + ], + [ + "▁references", + -11.672215461730957 + ], + [ + "▁hearts", + -11.672446250915527 + ], + [ + "exprim", + -11.672487258911133 + ], + [ + "▁tratament", + -11.672553062438965 + ], + [ + "LD", + -11.67258358001709 + ], + [ + "ssel", + -11.67275333404541 + ], + [ + "cover", + -11.672782897949219 + ], + [ + "bridge", + -11.672837257385254 + ], + [ + "▁Wein", + -11.672924995422363 + ], + [ + "▁voiture", + -11.673035621643066 + ], + [ + "▁Gemeinde", + -11.67313289642334 + ], + [ + "AI", + -11.673169136047363 + ], + [ + "▁renovation", + -11.673264503479004 + ], + [ + "bid", + -11.673285484313965 + ], + [ + 
"▁Reading", + -11.673481941223145 + ], + [ + "▁Gor", + -11.673490524291992 + ], + [ + "fur", + -11.673527717590332 + ], + [ + "▁Yoga", + -11.673544883728027 + ], + [ + "▁exclusively", + -11.673630714416504 + ], + [ + "▁emissions", + -11.67385482788086 + ], + [ + "ète", + -11.673905372619629 + ], + [ + "▁glasses", + -11.674055099487305 + ], + [ + "▁organizat", + -11.674135208129883 + ], + [ + "▁washing", + -11.67415714263916 + ], + [ + "▁Audi", + -11.674173355102539 + ], + [ + "▁Labor", + -11.674331665039062 + ], + [ + "▁legacy", + -11.674381256103516 + ], + [ + "▁abstract", + -11.674519538879395 + ], + [ + "▁knowledgeable", + -11.674601554870605 + ], + [ + "▁Glo", + -11.674795150756836 + ], + [ + "▁pregnant", + -11.67481803894043 + ], + [ + "liter", + -11.674851417541504 + ], + [ + "▁paintings", + -11.67522144317627 + ], + [ + "▁tête", + -11.675244331359863 + ], + [ + "voy", + -11.675626754760742 + ], + [ + "▁Jacob", + -11.675667762756348 + ], + [ + "▁dressing", + -11.675679206848145 + ], + [ + "▁provisions", + -11.675768852233887 + ], + [ + "bahn", + -11.675870895385742 + ], + [ + "▁depict", + -11.675875663757324 + ], + [ + "AW", + -11.676068305969238 + ], + [ + "▁bleibt", + -11.676163673400879 + ], + [ + "AND", + -11.676292419433594 + ], + [ + "▁fünf", + -11.676386833190918 + ], + [ + "▁hosts", + -11.676426887512207 + ], + [ + "vas", + -11.676708221435547 + ], + [ + "DO", + -11.67674732208252 + ], + [ + "▁max", + -11.676753997802734 + ], + [ + "▁contributed", + -11.676774978637695 + ], + [ + "roz", + -11.676796913146973 + ], + [ + "▁deschis", + -11.676800727844238 + ], + [ + "itaire", + -11.676809310913086 + ], + [ + "tube", + -11.676959991455078 + ], + [ + "▁Beck", + -11.676959991455078 + ], + [ + "▁curious", + -11.677130699157715 + ], + [ + "▁waves", + -11.677178382873535 + ], + [ + "▁regret", + -11.677248001098633 + ], + [ + "FO", + -11.677326202392578 + ], + [ + "droit", + -11.67734146118164 + ], + [ + "rö", + -11.677565574645996 + ], + [ + "▁Panel", + -11.677624702453613 + ], + [ + "▁pile", + -11.677660942077637 + ], + [ + "▁installing", + -11.677674293518066 + ], + [ + "▁Intr", + -11.677797317504883 + ], + [ + "nung", + -11.677823066711426 + ], + [ + "▁Outdoor", + -11.677855491638184 + ], + [ + "▁generator", + -11.67786693572998 + ], + [ + "▁zahlreiche", + -11.677868843078613 + ], + [ + "▁Third", + -11.67813491821289 + ], + [ + "frac", + -11.678180694580078 + ], + [ + "ovi", + -11.678236961364746 + ], + [ + "▁Casa", + -11.678374290466309 + ], + [ + "▁stomach", + -11.678393363952637 + ], + [ + "▁Lincoln", + -11.67844009399414 + ], + [ + "▁Electronic", + -11.678584098815918 + ], + [ + "coding", + -11.67895221710205 + ], + [ + "2017", + -11.67900276184082 + ], + [ + "▁friendship", + -11.679238319396973 + ], + [ + "ried", + -11.679250717163086 + ], + [ + "но", + -11.679265022277832 + ], + [ + "▁tail", + -11.679267883300781 + ], + [ + "▁petits", + -11.679308891296387 + ], + [ + "▁réseau", + -11.679696083068848 + ], + [ + "▁churches", + -11.679999351501465 + ], + [ + "▁marketplace", + -11.680062294006348 + ], + [ + "▁Pool", + -11.680318832397461 + ], + [ + "▁popularity", + -11.680455207824707 + ], + [ + "▁sprijin", + -11.680496215820312 + ], + [ + "▁Od", + -11.680527687072754 + ], + [ + "▁Transfer", + -11.680562973022461 + ], + [ + "▁fake", + -11.680791854858398 + ], + [ + "▁9,", + -11.681007385253906 + ], + [ + "▁weit", + -11.681264877319336 + ], + [ + "▁relaxed", + -11.681415557861328 + ], + [ + "pig", + -11.68161678314209 + ], + [ + "▁Lauren", + -11.68166732788086 + ], + [ + 
"gesetzt", + -11.681669235229492 + ], + [ + "▁Clar", + -11.681694984436035 + ], + [ + "▁unlikely", + -11.681731224060059 + ], + [ + "color", + -11.681832313537598 + ], + [ + "▁spouse", + -11.681843757629395 + ], + [ + "▁facile", + -11.681859970092773 + ], + [ + "▁Speed", + -11.681872367858887 + ], + [ + "KE", + -11.682230949401855 + ], + [ + "▁PO", + -11.68231201171875 + ], + [ + "▁Channel", + -11.682321548461914 + ], + [ + "argent", + -11.682356834411621 + ], + [ + "▁Making", + -11.682430267333984 + ], + [ + "▁Coll", + -11.682585716247559 + ], + [ + "cci", + -11.682721138000488 + ], + [ + "corresponding", + -11.68300724029541 + ], + [ + "▁heaven", + -11.683160781860352 + ], + [ + "ţă", + -11.68319320678711 + ], + [ + "▁darüber", + -11.683236122131348 + ], + [ + "acted", + -11.683420181274414 + ], + [ + "only", + -11.683460235595703 + ], + [ + "▁slight", + -11.683465003967285 + ], + [ + "lian", + -11.68348503112793 + ], + [ + "flă", + -11.683510780334473 + ], + [ + "▁vulnerable", + -11.683530807495117 + ], + [ + "▁creator", + -11.68356704711914 + ], + [ + "▁protecting", + -11.68360424041748 + ], + [ + "writing", + -11.68360710144043 + ], + [ + "▁Ter", + -11.68387222290039 + ], + [ + "▁barb", + -11.683987617492676 + ], + [ + "▁dată", + -11.683995246887207 + ], + [ + "▁Screen", + -11.684052467346191 + ], + [ + "▁BBC", + -11.684082984924316 + ], + [ + "Col", + -11.684206008911133 + ], + [ + "fung", + -11.684453964233398 + ], + [ + "▁dreptul", + -11.684494972229004 + ], + [ + "derived", + -11.684538841247559 + ], + [ + "▁designated", + -11.684553146362305 + ], + [ + "▁interactions", + -11.684617042541504 + ], + [ + "SG", + -11.684621810913086 + ], + [ + "▁häufig", + -11.684625625610352 + ], + [ + "▁Mega", + -11.684638023376465 + ], + [ + "▁jazz", + -11.684660911560059 + ], + [ + "lbs", + -11.684797286987305 + ], + [ + "▁Manual", + -11.68484115600586 + ], + [ + "pushed", + -11.685017585754395 + ], + [ + "▁analytics", + -11.685234069824219 + ], + [ + "▁lawsuit", + -11.68533706665039 + ], + [ + "▁gray", + -11.685364723205566 + ], + [ + "shirts", + -11.685401916503906 + ], + [ + "▁hill", + -11.685508728027344 + ], + [ + "▁1991", + -11.68550968170166 + ], + [ + "▁obligations", + -11.685568809509277 + ], + [ + "▁Dubai", + -11.68580436706543 + ], + [ + "()", + -11.685808181762695 + ], + [ + "▁acceptable", + -11.685810089111328 + ], + [ + "therapist", + -11.685877799987793 + ], + [ + "inger", + -11.6860990524292 + ], + [ + "▁territory", + -11.686208724975586 + ], + [ + "▁sang", + -11.6862211227417 + ], + [ + "ät", + -11.686224937438965 + ], + [ + "▁Zukunft", + -11.686238288879395 + ], + [ + "TU", + -11.68657398223877 + ], + [ + "▁horizontal", + -11.68665599822998 + ], + [ + "▁entrepreneurs", + -11.686710357666016 + ], + [ + "▁Eltern", + -11.687017440795898 + ], + [ + "▁presentations", + -11.687129974365234 + ], + [ + "▁confirmation", + -11.687173843383789 + ], + [ + "▁technological", + -11.687432289123535 + ], + [ + "▁1989", + -11.687530517578125 + ], + [ + "EF", + -11.687640190124512 + ], + [ + "ponent", + -11.687663078308105 + ], + [ + "NET", + -11.687699317932129 + ], + [ + "750", + -11.687772750854492 + ], + [ + "▁desert", + -11.687891960144043 + ], + [ + "▁contribu", + -11.687932968139648 + ], + [ + "▁Gun", + -11.687944412231445 + ], + [ + "▁Juli", + -11.688091278076172 + ], + [ + "ERS", + -11.688261985778809 + ], + [ + "▁inceput", + -11.688261985778809 + ], + [ + "▁answered", + -11.688369750976562 + ], + [ + "▁basement", + -11.688410758972168 + ], + [ + "film", + -11.688434600830078 + ], + [ + 
"▁taille", + -11.688593864440918 + ], + [ + "▁survival", + -11.688655853271484 + ], + [ + "ihnen", + -11.68869400024414 + ], + [ + "▁Bird", + -11.688840866088867 + ], + [ + "speed", + -11.689336776733398 + ], + [ + "▁journalist", + -11.68941879272461 + ], + [ + "▁Indonesia", + -11.689626693725586 + ], + [ + "▁15.", + -11.689973831176758 + ], + [ + "▁19.", + -11.690025329589844 + ], + [ + "étaient", + -11.690114974975586 + ], + [ + "▁tennis", + -11.69024658203125 + ], + [ + "▁aproximativ", + -11.69039249420166 + ], + [ + "▁Hans", + -11.690650939941406 + ], + [ + "▁Remove", + -11.69067096710205 + ], + [ + "▁cats", + -11.691022872924805 + ], + [ + "▁calories", + -11.691052436828613 + ], + [ + "▁limitations", + -11.69119644165039 + ], + [ + "▁subscribe", + -11.691198348999023 + ], + [ + "▁Dem", + -11.691339492797852 + ], + [ + "lust", + -11.691370010375977 + ], + [ + "▁adresa", + -11.691394805908203 + ], + [ + "▁sais", + -11.69140911102295 + ], + [ + "...\"", + -11.691473960876465 + ], + [ + "▁Luft", + -11.691485404968262 + ], + [ + "DL", + -11.691597938537598 + ], + [ + "▁estimates", + -11.691600799560547 + ], + [ + "▁protocol", + -11.691603660583496 + ], + [ + "▁Namen", + -11.691776275634766 + ], + [ + "▁grands", + -11.691901206970215 + ], + [ + "▁voter", + -11.691970825195312 + ], + [ + "▁vacuum", + -11.692075729370117 + ], + [ + "▁versch", + -11.692103385925293 + ], + [ + "▁Democratic", + -11.692107200622559 + ], + [ + "▁Books", + -11.692170143127441 + ], + [ + "▁frames", + -11.692727088928223 + ], + [ + "▁Bee", + -11.692864418029785 + ], + [ + "▁helfen", + -11.692934036254883 + ], + [ + "▁dive", + -11.692963600158691 + ], + [ + "▁physician", + -11.693037033081055 + ], + [ + "▁powered", + -11.693131446838379 + ], + [ + "▁zones", + -11.693337440490723 + ], + [ + "▁regime", + -11.69345474243164 + ], + [ + "check", + -11.693578720092773 + ], + [ + "11.", + -11.693793296813965 + ], + [ + "▁plaisir", + -11.693793296813965 + ], + [ + "▁physically", + -11.693811416625977 + ], + [ + "▁Pul", + -11.694245338439941 + ], + [ + "▁jardin", + -11.694294929504395 + ], + [ + "▁Nur", + -11.694417953491211 + ], + [ + "WC", + -11.694425582885742 + ], + [ + "▁Lock", + -11.694506645202637 + ], + [ + "▁économique", + -11.694530487060547 + ], + [ + "user", + -11.694536209106445 + ], + [ + "▁commit", + -11.694731712341309 + ], + [ + "▁oldest", + -11.694764137268066 + ], + [ + "▁fulfill", + -11.694780349731445 + ], + [ + "▁nervous", + -11.69482135772705 + ], + [ + "▁SH", + -11.695014953613281 + ], + [ + "SK", + -11.695150375366211 + ], + [ + "▁plein", + -11.695291519165039 + ], + [ + "show", + -11.695354461669922 + ], + [ + "▁disability", + -11.695356369018555 + ], + [ + "papier", + -11.69544506072998 + ], + [ + "▁Corp", + -11.695611000061035 + ], + [ + "ători", + -11.695676803588867 + ], + [ + "nţă", + -11.695813179016113 + ], + [ + "▁overseas", + -11.696009635925293 + ], + [ + "▁struck", + -11.69603157043457 + ], + [ + "astic", + -11.69607162475586 + ], + [ + "▁advised", + -11.696088790893555 + ], + [ + "BE", + -11.696161270141602 + ], + [ + "▁UV", + -11.696218490600586 + ], + [ + "patient", + -11.69626235961914 + ], + [ + "▁texte", + -11.696344375610352 + ], + [ + "▁timely", + -11.696444511413574 + ], + [ + "used", + -11.696471214294434 + ], + [ + "▁occasionally", + -11.696524620056152 + ], + [ + "▁entries", + -11.696550369262695 + ], + [ + "underlying", + -11.6967191696167 + ], + [ + "01.", + -11.696748733520508 + ], + [ + "▁automated", + -11.696791648864746 + ], + [ + "yes", + -11.696828842163086 + ], + [ + 
"▁Staff", + -11.697057723999023 + ], + [ + "▁Einzel", + -11.697546005249023 + ], + [ + "quit", + -11.697687149047852 + ], + [ + "▁Cela", + -11.697951316833496 + ], + [ + "▁snap", + -11.698298454284668 + ], + [ + "▁followers", + -11.698330879211426 + ], + [ + "CN", + -11.698709487915039 + ], + [ + "▁Cooper", + -11.698892593383789 + ], + [ + "ô", + -11.698921203613281 + ], + [ + "▁memorable", + -11.698965072631836 + ], + [ + "▁jur", + -11.698996543884277 + ], + [ + "▁ajutorul", + -11.69905948638916 + ], + [ + "▁Enter", + -11.6991548538208 + ], + [ + "Often", + -11.699294090270996 + ], + [ + "▁dintr", + -11.699341773986816 + ], + [ + "-30", + -11.699419975280762 + ], + [ + "ESS", + -11.699454307556152 + ], + [ + "▁weird", + -11.699462890625 + ], + [ + "▁Animal", + -11.699706077575684 + ], + [ + "▁complement", + -11.699719429016113 + ], + [ + "▁Bot", + -11.699756622314453 + ], + [ + "▁darf", + -11.699764251708984 + ], + [ + "yed", + -11.699808120727539 + ], + [ + "▁Mul", + -11.699872016906738 + ], + [ + "lick", + -11.700080871582031 + ], + [ + "▁Cambridge", + -11.700216293334961 + ], + [ + "adore", + -11.700407981872559 + ], + [ + "▁Dutch", + -11.700420379638672 + ], + [ + "▁Castle", + -11.700431823730469 + ], + [ + "igi", + -11.700563430786133 + ], + [ + "▁enemy", + -11.70071029663086 + ], + [ + "accompanied", + -11.700725555419922 + ], + [ + "▁teren", + -11.701102256774902 + ], + [ + "▁ET", + -11.701498985290527 + ], + [ + "ffle", + -11.701557159423828 + ], + [ + "-15", + -11.701651573181152 + ], + [ + "▁Geo", + -11.701680183410645 + ], + [ + "▁attractions", + -11.701730728149414 + ], + [ + "iker", + -11.70185661315918 + ], + [ + "▁bă", + -11.701990127563477 + ], + [ + "▁heal", + -11.701995849609375 + ], + [ + "weisen", + -11.702144622802734 + ], + [ + "▁spectrum", + -11.702186584472656 + ], + [ + "meld", + -11.702394485473633 + ], + [ + "▁eveniment", + -11.70247745513916 + ], + [ + "arra", + -11.702478408813477 + ], + [ + "rete", + -11.70250129699707 + ], + [ + "▁Had", + -11.70250415802002 + ], + [ + "looking", + -11.702692031860352 + ], + [ + "isierung", + -11.702805519104004 + ], + [ + "▁moyen", + -11.703129768371582 + ], + [ + "▁gesamte", + -11.703202247619629 + ], + [ + "▁destroy", + -11.703407287597656 + ], + [ + "125", + -11.703518867492676 + ], + [ + "▁suivant", + -11.703913688659668 + ], + [ + "▁declared", + -11.703925132751465 + ], + [ + "▁Urban", + -11.704131126403809 + ], + [ + "▁16.", + -11.704168319702148 + ], + [ + "▁Beg", + -11.704168319702148 + ], + [ + "▁canal", + -11.704225540161133 + ], + [ + "▁Pres", + -11.70431137084961 + ], + [ + "▁geeignet", + -11.704339981079102 + ], + [ + "▁strat", + -11.704365730285645 + ], + [ + "UB", + -11.704395294189453 + ], + [ + "▁Alexander", + -11.704424858093262 + ], + [ + "cycle", + -11.704666137695312 + ], + [ + "▁Var", + -11.704802513122559 + ], + [ + "▁domin", + -11.704805374145508 + ], + [ + "▁lasting", + -11.704939842224121 + ], + [ + "terio", + -11.705262184143066 + ], + [ + "▁Battle", + -11.705339431762695 + ], + [ + "▁publications", + -11.705647468566895 + ], + [ + "▁implica", + -11.705886840820312 + ], + [ + "▁NA", + -11.705963134765625 + ], + [ + "▁stocks", + -11.706036567687988 + ], + [ + "Plat", + -11.70611572265625 + ], + [ + "▁excitement", + -11.706149101257324 + ], + [ + "▁Muslim", + -11.706524848937988 + ], + [ + "▁Mari", + -11.706530570983887 + ], + [ + "▁Ul", + -11.706647872924805 + ], + [ + "nächst", + -11.706757545471191 + ], + [ + "▁trait", + -11.706833839416504 + ], + [ + "▁(3)", + -11.706852912902832 + ], + [ + 
"▁Attorney", + -11.706894874572754 + ], + [ + "▁Malaysia", + -11.70689582824707 + ], + [ + "▁slab", + -11.706960678100586 + ], + [ + "▁dam", + -11.707113265991211 + ], + [ + "▁Bir", + -11.707226753234863 + ], + [ + "▁sing", + -11.70738410949707 + ], + [ + "▁Culture", + -11.7073974609375 + ], + [ + "UD", + -11.707417488098145 + ], + [ + "▁Mes", + -11.707443237304688 + ], + [ + "ități", + -11.707615852355957 + ], + [ + "▁possess", + -11.708173751831055 + ], + [ + "enabling", + -11.70820426940918 + ], + [ + "▁settled", + -11.708335876464844 + ], + [ + "▁sagen", + -11.708492279052734 + ], + [ + "▁erfolgt", + -11.708564758300781 + ], + [ + "dog", + -11.708600997924805 + ], + [ + "ndu", + -11.708732604980469 + ], + [ + "ității", + -11.708745002746582 + ], + [ + "▁Islam", + -11.708930015563965 + ], + [ + "▁catalog", + -11.708931922912598 + ], + [ + "▁simt", + -11.709102630615234 + ], + [ + "tische", + -11.709150314331055 + ], + [ + "▁Mach", + -11.709334373474121 + ], + [ + "▁EP", + -11.709359169006348 + ], + [ + "▁Certified", + -11.709386825561523 + ], + [ + "▁Resources", + -11.70945930480957 + ], + [ + "▁Past", + -11.709607124328613 + ], + [ + "▁Termin", + -11.709755897521973 + ], + [ + "▁lightweight", + -11.709755897521973 + ], + [ + "▁championship", + -11.70994758605957 + ], + [ + "gebiet", + -11.710122108459473 + ], + [ + "▁jurisdiction", + -11.710135459899902 + ], + [ + "▁euros", + -11.710169792175293 + ], + [ + "▁Familien", + -11.710554122924805 + ], + [ + "▁GT", + -11.710677146911621 + ], + [ + "▁dvs", + -11.71081256866455 + ], + [ + "▁nouveaux", + -11.710838317871094 + ], + [ + "▁chill", + -11.710916519165039 + ], + [ + "▁ridicat", + -11.710920333862305 + ], + [ + "his", + -11.711079597473145 + ], + [ + "▁Indi", + -11.711159706115723 + ], + [ + "▁arrested", + -11.71116828918457 + ], + [ + "ităţii", + -11.711170196533203 + ], + [ + "onul", + -11.711274147033691 + ], + [ + "appar", + -11.711296081542969 + ], + [ + "▁Bachelor", + -11.711297988891602 + ], + [ + "▁erfolgreich", + -11.711426734924316 + ], + [ + "▁versatile", + -11.71163558959961 + ], + [ + "▁nécessaire", + -11.711761474609375 + ], + [ + "▁facial", + -11.712160110473633 + ], + [ + "▁Bull", + -11.712226867675781 + ], + [ + "Comm", + -11.712237358093262 + ], + [ + "atte", + -11.712307929992676 + ], + [ + "hom", + -11.7123384475708 + ], + [ + "start", + -11.712576866149902 + ], + [ + "▁roughly", + -11.712936401367188 + ], + [ + "▁bay", + -11.712984085083008 + ], + [ + "▁american", + -11.712986946105957 + ], + [ + "▁Wisconsin", + -11.713135719299316 + ], + [ + "▁Clinton", + -11.713142395019531 + ], + [ + "appareil", + -11.713153839111328 + ], + [ + "▁liberal", + -11.713455200195312 + ], + [ + "▁dau", + -11.713519096374512 + ], + [ + "ech", + -11.713521957397461 + ], + [ + "2014", + -11.713624000549316 + ], + [ + "▁lip", + -11.713645935058594 + ], + [ + "▁maintenant", + -11.713762283325195 + ], + [ + "▁Sil", + -11.713805198669434 + ], + [ + "rben", + -11.713891983032227 + ], + [ + "▁contents", + -11.713980674743652 + ], + [ + "▁magnetic", + -11.714111328125 + ], + [ + "▁terre", + -11.714151382446289 + ], + [ + "▁Rights", + -11.714475631713867 + ], + [ + "lose", + -11.714570045471191 + ], + [ + "▁crown", + -11.71468448638916 + ], + [ + "▁oils", + -11.7147216796875 + ], + [ + "▁entertaining", + -11.714841842651367 + ], + [ + "▁Option", + -11.714848518371582 + ], + [ + "▁Previous", + -11.714916229248047 + ], + [ + "▁vrai", + -11.714930534362793 + ], + [ + "▁Auswahl", + -11.715056419372559 + ], + [ + "▁horses", + -11.715106010437012 + ], 
+ [ + "▁Author", + -11.71533489227295 + ], + [ + "▁Writing", + -11.715461730957031 + ], + [ + "▁travelling", + -11.715522766113281 + ], + [ + "▁350", + -11.715567588806152 + ], + [ + "daten", + -11.71560287475586 + ], + [ + "zan", + -11.715765953063965 + ], + [ + "▁sweat", + -11.715924263000488 + ], + [ + "▁Junior", + -11.715970993041992 + ], + [ + "markt", + -11.71609878540039 + ], + [ + "after", + -11.716105461120605 + ], + [ + "▁admitted", + -11.716262817382812 + ], + [ + "▁1950", + -11.716347694396973 + ], + [ + "▁Sche", + -11.71648120880127 + ], + [ + "▁dorit", + -11.716818809509277 + ], + [ + "▁transferred", + -11.716958045959473 + ], + [ + "utilise", + -11.717194557189941 + ], + [ + "sitz", + -11.717301368713379 + ], + [ + "gio", + -11.717320442199707 + ], + [ + "▁bisher", + -11.717473983764648 + ], + [ + "RD", + -11.717491149902344 + ], + [ + "▁Wales", + -11.717747688293457 + ], + [ + "▁smoking", + -11.717904090881348 + ], + [ + "dire", + -11.717939376831055 + ], + [ + "▁seating", + -11.717979431152344 + ], + [ + "▁constat", + -11.718056678771973 + ], + [ + "▁Hub", + -11.718324661254883 + ], + [ + "▁sieht", + -11.718345642089844 + ], + [ + "▁prospect", + -11.718378067016602 + ], + [ + "▁RO", + -11.718413352966309 + ], + [ + "▁Wars", + -11.718423843383789 + ], + [ + "eek", + -11.718496322631836 + ], + [ + "▁Bring", + -11.718646049499512 + ], + [ + "▁bleiben", + -11.718696594238281 + ], + [ + "arri", + -11.718826293945312 + ], + [ + "inal", + -11.718904495239258 + ], + [ + "▁Maryland", + -11.718932151794434 + ], + [ + "▁Process", + -11.719145774841309 + ], + [ + "They", + -11.719154357910156 + ], + [ + "▁Oxford", + -11.719176292419434 + ], + [ + "▁neat", + -11.719330787658691 + ], + [ + "▁cinema", + -11.719597816467285 + ], + [ + "▁Ist", + -11.719620704650879 + ], + [ + "▁vegan", + -11.719682693481445 + ], + [ + "wall", + -11.719708442687988 + ], + [ + "▁motive", + -11.72010612487793 + ], + [ + "▁mature", + -11.720544815063477 + ], + [ + "▁Dragon", + -11.720653533935547 + ], + [ + "▁google", + -11.720677375793457 + ], + [ + "blick", + -11.72110652923584 + ], + [ + "▁Cod", + -11.721220970153809 + ], + [ + "▁suffi", + -11.721319198608398 + ], + [ + "▁terrorist", + -11.721478462219238 + ], + [ + "Posted", + -11.721484184265137 + ], + [ + "▁Schi", + -11.72157096862793 + ], + [ + "▁Marc", + -11.721597671508789 + ], + [ + "▁operates", + -11.721661567687988 + ], + [ + "gress", + -11.721805572509766 + ], + [ + "has", + -11.721899032592773 + ], + [ + "sole", + -11.722108840942383 + ], + [ + "▁Buck", + -11.722122192382812 + ], + [ + "impl", + -11.722160339355469 + ], + [ + "▁Ron", + -11.722172737121582 + ], + [ + "▁handled", + -11.722346305847168 + ], + [ + "▁Apr", + -11.722347259521484 + ], + [ + "▁Storage", + -11.722467422485352 + ], + [ + "▁temp", + -11.722512245178223 + ], + [ + "▁differently", + -11.722614288330078 + ], + [ + "▁wherever", + -11.722670555114746 + ], + [ + "matched", + -11.722695350646973 + ], + [ + "rios", + -11.72276496887207 + ], + [ + "▁surprising", + -11.722846031188965 + ], + [ + "teilen", + -11.722867965698242 + ], + [ + "▁difficulties", + -11.72294807434082 + ], + [ + "tab", + -11.723064422607422 + ], + [ + "▁Leader", + -11.723128318786621 + ], + [ + "implementing", + -11.723372459411621 + ], + [ + "▁workforce", + -11.723384857177734 + ], + [ + "▁bereit", + -11.723503112792969 + ], + [ + "vig", + -11.72352123260498 + ], + [ + "▁LOVE", + -11.723580360412598 + ], + [ + "▁instances", + -11.723954200744629 + ], + [ + "▁frumos", + -11.723960876464844 + ], + [ + "▁Java", 
+ -11.723974227905273 + ], + [ + "▁arrest", + -11.723977088928223 + ], + [ + "▁apparent", + -11.724152565002441 + ], + [ + "▁hence", + -11.724200248718262 + ], + [ + "▁entwickelt", + -11.72437572479248 + ], + [ + "▁Fra", + -11.724471092224121 + ], + [ + "▁prend", + -11.724486351013184 + ], + [ + "ließ", + -11.724522590637207 + ], + [ + "▁drawer", + -11.724671363830566 + ], + [ + "ARD", + -11.724926948547363 + ], + [ + "▁caring", + -11.72499942779541 + ], + [ + "▁wollte", + -11.725024223327637 + ], + [ + "▁vielleicht", + -11.72511100769043 + ], + [ + "▁iconic", + -11.725324630737305 + ], + [ + "äch", + -11.72552490234375 + ], + [ + "abel", + -11.725639343261719 + ], + [ + "▁génér", + -11.72570514678955 + ], + [ + "ault", + -11.725727081298828 + ], + [ + "▁alternatives", + -11.725909233093262 + ], + [ + "think", + -11.726025581359863 + ], + [ + "ро", + -11.726055145263672 + ], + [ + "whereas", + -11.726058006286621 + ], + [ + "erei", + -11.726366996765137 + ], + [ + "▁Eagle", + -11.726766586303711 + ], + [ + "situé", + -11.72704792022705 + ], + [ + "▁laboratory", + -11.727157592773438 + ], + [ + "▁Nutzung", + -11.727256774902344 + ], + [ + "▁Bathroom", + -11.72728157043457 + ], + [ + "▁loaded", + -11.727293968200684 + ], + [ + "niste", + -11.727408409118652 + ], + [ + "som", + -11.727429389953613 + ], + [ + "▁aucun", + -11.727666854858398 + ], + [ + "gebracht", + -11.727676391601562 + ], + [ + "▁tomb", + -11.727771759033203 + ], + [ + "▁Ty", + -11.727785110473633 + ], + [ + "▁afaceri", + -11.727971076965332 + ], + [ + "tex", + -11.72803783416748 + ], + [ + "ality", + -11.728147506713867 + ], + [ + "▁identification", + -11.728150367736816 + ], + [ + "▁cultiv", + -11.728255271911621 + ], + [ + "Not", + -11.728326797485352 + ], + [ + "▁acestor", + -11.72846508026123 + ], + [ + "▁PhD", + -11.728466033935547 + ], + [ + "nell", + -11.728470802307129 + ], + [ + "▁dial", + -11.728594779968262 + ], + [ + "chro", + -11.728673934936523 + ], + [ + "▁specifications", + -11.728682518005371 + ], + [ + "anii", + -11.72877025604248 + ], + [ + "▁cloth", + -11.728836059570312 + ], + [ + "▁highway", + -11.728914260864258 + ], + [ + "▁Vitamin", + -11.729118347167969 + ], + [ + "▁indication", + -11.729349136352539 + ], + [ + "80%", + -11.72959041595459 + ], + [ + "▁Lion", + -11.729681015014648 + ], + [ + "▁10,", + -11.729693412780762 + ], + [ + "▁Werk", + -11.72974967956543 + ], + [ + "▁combin", + -11.729803085327148 + ], + [ + "▁releases", + -11.7298583984375 + ], + [ + "LL", + -11.730006217956543 + ], + [ + "ktor", + -11.730186462402344 + ], + [ + "ufgrund", + -11.73018741607666 + ], + [ + "calc", + -11.73034381866455 + ], + [ + "▁accomplished", + -11.730606079101562 + ], + [ + "▁los", + -11.730619430541992 + ], + [ + "▁distant", + -11.730688095092773 + ], + [ + "▁secteur", + -11.73068904876709 + ], + [ + "logue", + -11.730781555175781 + ], + [ + "▁betting", + -11.730792999267578 + ], + [ + "elf", + -11.731180191040039 + ], + [ + "puteti", + -11.73123550415039 + ], + [ + "▁Moment", + -11.731236457824707 + ], + [ + "▁scoring", + -11.731548309326172 + ], + [ + "▁freuen", + -11.731572151184082 + ], + [ + "▁fastest", + -11.731873512268066 + ], + [ + "▁directors", + -11.732080459594727 + ], + [ + "▁fame", + -11.732234954833984 + ], + [ + "▁complaint", + -11.732239723205566 + ], + [ + "▁Ep", + -11.732314109802246 + ], + [ + "▁delicate", + -11.732329368591309 + ], + [ + "annonce", + -11.73240852355957 + ], + [ + "ext", + -11.732454299926758 + ], + [ + "▁quit", + -11.732473373413086 + ], + [ + "▁Cop", + 
-11.73253345489502 + ], + [ + "prop", + -11.732565879821777 + ], + [ + "365", + -11.732742309570312 + ], + [ + "▁Say", + -11.732879638671875 + ], + [ + "▁internationale", + -11.733064651489258 + ], + [ + "cott", + -11.733213424682617 + ], + [ + "▁Whatever", + -11.733261108398438 + ], + [ + "▁admir", + -11.733261108398438 + ], + [ + "▁bucur", + -11.733549118041992 + ], + [ + "▁entity", + -11.733779907226562 + ], + [ + "▁dancing", + -11.733837127685547 + ], + [ + "▁printre", + -11.733892440795898 + ], + [ + "▁meditation", + -11.734396934509277 + ], + [ + "▁avis", + -11.734416961669922 + ], + [ + "▁1988", + -11.73447036743164 + ], + [ + "10.", + -11.734506607055664 + ], + [ + "▁worker", + -11.734638214111328 + ], + [ + "▁$100", + -11.734784126281738 + ], + [ + "▁contrôle", + -11.7349853515625 + ], + [ + "▁insist", + -11.734997749328613 + ], + [ + "ements", + -11.73505973815918 + ], + [ + "izate", + -11.735163688659668 + ], + [ + "▁tied", + -11.735332489013672 + ], + [ + "▁correspond", + -11.735396385192871 + ], + [ + "▁apartments", + -11.735547065734863 + ], + [ + "▁2009.", + -11.735599517822266 + ], + [ + "▁tiles", + -11.735624313354492 + ], + [ + "▁boots", + -11.735639572143555 + ], + [ + "▁laundry", + -11.735673904418945 + ], + [ + "▁Coffee", + -11.735674858093262 + ], + [ + "▁CV", + -11.735727310180664 + ], + [ + "▁composed", + -11.736035346984863 + ], + [ + "atom", + -11.73622989654541 + ], + [ + "▁shore", + -11.736270904541016 + ], + [ + "▁marijuana", + -11.736312866210938 + ], + [ + "plic", + -11.73648452758789 + ], + [ + "▁Zahl", + -11.736649513244629 + ], + [ + "depth", + -11.73682689666748 + ], + [ + "▁Egypt", + -11.736854553222656 + ], + [ + "▁NFL", + -11.736906051635742 + ], + [ + "▁12,", + -11.736922264099121 + ], + [ + "▁pollution", + -11.736964225769043 + ], + [ + "▁Vergleich", + -11.73704719543457 + ], + [ + "û", + -11.737109184265137 + ], + [ + "▁nurse", + -11.737153053283691 + ], + [ + "▁Susan", + -11.737173080444336 + ], + [ + "▁verify", + -11.737393379211426 + ], + [ + "▁kon", + -11.737504959106445 + ], + [ + "▁ulei", + -11.7376127243042 + ], + [ + "▁Sept", + -11.737699508666992 + ], + [ + "▁Location", + -11.737908363342285 + ], + [ + "▁frozen", + -11.737991333007812 + ], + [ + "good", + -11.73802661895752 + ], + [ + "▁cine", + -11.738066673278809 + ], + [ + "forming", + -11.738181114196777 + ], + [ + "▁Near", + -11.738391876220703 + ], + [ + "▁Tab", + -11.738545417785645 + ], + [ + "▁Alexandr", + -11.738600730895996 + ], + [ + "ст", + -11.73863697052002 + ], + [ + "CK", + -11.738656044006348 + ], + [ + "▁loads", + -11.738948822021484 + ], + [ + "▁disorders", + -11.738957405090332 + ], + [ + "hip", + -11.739596366882324 + ], + [ + "▁blessing", + -11.73987102508545 + ], + [ + "▁vechi", + -11.73997688293457 + ], + [ + "▁Bookmark", + -11.740296363830566 + ], + [ + "SON", + -11.74036979675293 + ], + [ + "books", + -11.740428924560547 + ], + [ + "▁tropical", + -11.740438461303711 + ], + [ + "▁Garten", + -11.740447044372559 + ], + [ + "ôt", + -11.740760803222656 + ], + [ + "tures", + -11.740827560424805 + ], + [ + "▁obligation", + -11.741010665893555 + ], + [ + "▁admin", + -11.741011619567871 + ], + [ + "▁sélection", + -11.741106986999512 + ], + [ + "disp", + -11.741172790527344 + ], + [ + "▁Anyone", + -11.741225242614746 + ], + [ + "keeper", + -11.74138355255127 + ], + [ + "▁konnten", + -11.741521835327148 + ], + [ + "▁existe", + -11.741615295410156 + ], + [ + "▁Rund", + -11.741798400878906 + ], + [ + "▁retailers", + -11.74184799194336 + ], + [ + "folg", + -11.741948127746582 + 
], + [ + "▁urmare", + -11.742019653320312 + ], + [ + "▁Liebe", + -11.742321014404297 + ], + [ + "▁actors", + -11.742422103881836 + ], + [ + "▁Druck", + -11.742618560791016 + ], + [ + "lien", + -11.742752075195312 + ], + [ + "sian", + -11.742847442626953 + ], + [ + "▁partid", + -11.74304485321045 + ], + [ + "▁loin", + -11.743114471435547 + ], + [ + "AZ", + -11.743119239807129 + ], + [ + "oasă", + -11.743501663208008 + ], + [ + "▁inclusiv", + -11.743656158447266 + ], + [ + "TD", + -11.743680953979492 + ], + [ + "▁anului", + -11.743766784667969 + ], + [ + "poc", + -11.743844985961914 + ], + [ + "▁musique", + -11.743972778320312 + ], + [ + "▁Hart", + -11.743997573852539 + ], + [ + "Sh", + -11.744283676147461 + ], + [ + "html", + -11.744290351867676 + ], + [ + "▁serial", + -11.744318008422852 + ], + [ + "țele", + -11.744369506835938 + ], + [ + "inning", + -11.744544982910156 + ], + [ + "▁Bureau", + -11.744555473327637 + ], + [ + "▁rush", + -11.744626998901367 + ], + [ + "▁deosebit", + -11.744637489318848 + ], + [ + "▁Wort", + -11.744648933410645 + ], + [ + "▁Thailand", + -11.744688987731934 + ], + [ + "▁Language", + -11.745193481445312 + ], + [ + "▁Governor", + -11.745213508605957 + ], + [ + "▁Later", + -11.74525260925293 + ], + [ + "rilor", + -11.745282173156738 + ], + [ + "▁activités", + -11.745372772216797 + ], + [ + "schaffen", + -11.745598793029785 + ], + [ + "▁harvest", + -11.74567985534668 + ], + [ + "▁municipal", + -11.745783805847168 + ], + [ + "einander", + -11.74600601196289 + ], + [ + "▁fingers", + -11.746383666992188 + ], + [ + "▁sculpture", + -11.74638843536377 + ], + [ + "▁Bien", + -11.746390342712402 + ], + [ + "▁departments", + -11.746562957763672 + ], + [ + "▁période", + -11.746746063232422 + ], + [ + "▁jeune", + -11.746960639953613 + ], + [ + "▁governments", + -11.74710750579834 + ], + [ + "uter", + -11.747179985046387 + ], + [ + "Aceste", + -11.747220039367676 + ], + [ + "▁Deal", + -11.747243881225586 + ], + [ + "▁Equipment", + -11.74726390838623 + ], + [ + "nous", + -11.747300148010254 + ], + [ + "▁gate", + -11.747315406799316 + ], + [ + "▁meta", + -11.747447967529297 + ], + [ + "▁stiu", + -11.747474670410156 + ], + [ + "fold", + -11.747486114501953 + ], + [ + "▁seule", + -11.747523307800293 + ], + [ + "▁varied", + -11.747541427612305 + ], + [ + "hit", + -11.747635841369629 + ], + [ + "▁DIY", + -11.74768352508545 + ], + [ + "▁lemn", + -11.747685432434082 + ], + [ + "OB", + -11.747865676879883 + ], + [ + "▁colorful", + -11.748095512390137 + ], + [ + "▁câ", + -11.74826431274414 + ], + [ + "▁semester", + -11.74830150604248 + ], + [ + "▁dealer", + -11.748575210571289 + ], + [ + "nett", + -11.748788833618164 + ], + [ + "▁shortly", + -11.748932838439941 + ], + [ + "▁Driver", + -11.748983383178711 + ], + [ + "culture", + -11.749052047729492 + ], + [ + "▁permitted", + -11.749072074890137 + ], + [ + "▁sorts", + -11.749432563781738 + ], + [ + "▁crop", + -11.74999713897705 + ], + [ + "▁valoare", + -11.75046157836914 + ], + [ + "▁analog", + -11.750576972961426 + ], + [ + "▁excuse", + -11.750588417053223 + ], + [ + "▁modèle", + -11.750657081604004 + ], + [ + "When", + -11.75068473815918 + ], + [ + "▁march", + -11.750744819641113 + ], + [ + "haz", + -11.750978469848633 + ], + [ + "▁minimize", + -11.750992774963379 + ], + [ + "traction", + -11.751028060913086 + ], + [ + "▁caracter", + -11.752382278442383 + ], + [ + "▁modules", + -11.7523832321167 + ], + [ + "clu", + -11.75244426727295 + ], + [ + "ţional", + -11.752482414245605 + ], + [ + "▁breach", + -11.752562522888184 + ], + [ + 
"▁priced", + -11.752614974975586 + ], + [ + "▁attorneys", + -11.752644538879395 + ], + [ + "▁implant", + -11.752645492553711 + ], + [ + "▁ANY", + -11.752655029296875 + ], + [ + "dition", + -11.752707481384277 + ], + [ + "▁trials", + -11.752838134765625 + ], + [ + "▁Nas", + -11.75293254852295 + ], + [ + "Pre", + -11.752970695495605 + ], + [ + "lorsque", + -11.752979278564453 + ], + [ + "plin", + -11.753050804138184 + ], + [ + "Er", + -11.753056526184082 + ], + [ + "▁Dom", + -11.753067970275879 + ], + [ + "▁tire", + -11.753190040588379 + ], + [ + "sili", + -11.753233909606934 + ], + [ + "▁coins", + -11.753350257873535 + ], + [ + "▁rend", + -11.753470420837402 + ], + [ + "▁reliability", + -11.753503799438477 + ], + [ + "▁Analysis", + -11.753508567810059 + ], + [ + "▁trails", + -11.753692626953125 + ], + [ + "trägt", + -11.753762245178223 + ], + [ + "▁Kansas", + -11.753908157348633 + ], + [ + "▁responsive", + -11.75390911102295 + ], + [ + "▁disappear", + -11.753988265991211 + ], + [ + "▁stakeholders", + -11.754022598266602 + ], + [ + "▁aplica", + -11.754164695739746 + ], + [ + "▁imi", + -11.754180908203125 + ], + [ + "▁Laura", + -11.754369735717773 + ], + [ + "▁Terms", + -11.75440788269043 + ], + [ + "450", + -11.754460334777832 + ], + [ + "▁voltage", + -11.754483222961426 + ], + [ + "▁Gel", + -11.754544258117676 + ], + [ + "▁qualities", + -11.754549026489258 + ], + [ + "▁qualifi", + -11.754603385925293 + ], + [ + "▁Mé", + -11.754735946655273 + ], + [ + "bereit", + -11.754829406738281 + ], + [ + "gleich", + -11.754875183105469 + ], + [ + "▁voting", + -11.754961013793945 + ], + [ + "▁trademark", + -11.755128860473633 + ], + [ + "▁2.5", + -11.75515079498291 + ], + [ + "ND", + -11.755438804626465 + ], + [ + "▁Kelly", + -11.755470275878906 + ], + [ + "▁weiteren", + -11.755559921264648 + ], + [ + "▁filters", + -11.75562572479248 + ], + [ + "▁coût", + -11.75562858581543 + ], + [ + "jur", + -11.755765914916992 + ], + [ + "acre", + -11.755804061889648 + ], + [ + "▁retired", + -11.756022453308105 + ], + [ + "▁Engine", + -11.756205558776855 + ], + [ + "▁président", + -11.756264686584473 + ], + [ + "ajul", + -11.756307601928711 + ], + [ + "▁GA", + -11.756425857543945 + ], + [ + "rät", + -11.75666332244873 + ], + [ + "▁instructor", + -11.756669998168945 + ], + [ + "▁Allen", + -11.75668716430664 + ], + [ + "▁Delhi", + -11.756771087646484 + ], + [ + "▁cure", + -11.756844520568848 + ], + [ + "seite", + -11.756898880004883 + ], + [ + "coming", + -11.756914138793945 + ], + [ + "▁mixing", + -11.756963729858398 + ], + [ + "▁Kno", + -11.757041931152344 + ], + [ + "▁Sure", + -11.757079124450684 + ], + [ + "▁hired", + -11.757102012634277 + ], + [ + "▁participated", + -11.757196426391602 + ], + [ + "Count", + -11.757320404052734 + ], + [ + "treffen", + -11.757355690002441 + ], + [ + "▁54", + -11.75735855102539 + ], + [ + "▁rings", + -11.75735855102539 + ], + [ + "▁Thor", + -11.757359504699707 + ], + [ + "éro", + -11.75744915008545 + ], + [ + "▁buttons", + -11.757488250732422 + ], + [ + "▁47", + -11.757539749145508 + ], + [ + "▁Tel", + -11.757694244384766 + ], + [ + "▁suport", + -11.757776260375977 + ], + [ + "▁rhythm", + -11.75782585144043 + ], + [ + "▁Theater", + -11.758113861083984 + ], + [ + "▁informatii", + -11.758121490478516 + ], + [ + "hält", + -11.758201599121094 + ], + [ + "▁ouvert", + -11.758238792419434 + ], + [ + "fewer", + -11.75828742980957 + ], + [ + "▁alumni", + -11.758466720581055 + ], + [ + "▁valley", + -11.758508682250977 + ], + [ + "tial", + -11.75860595703125 + ], + [ + "***", + 
-11.758782386779785 + ], + [ + "kri", + -11.75905704498291 + ], + [ + "▁accidents", + -11.759113311767578 + ], + [ + "▁barrel", + -11.759170532226562 + ], + [ + "mobil", + -11.759310722351074 + ], + [ + "etti", + -11.759437561035156 + ], + [ + "▁immigration", + -11.759515762329102 + ], + [ + "▁poveste", + -11.759528160095215 + ], + [ + "hren", + -11.759669303894043 + ], + [ + "hydr", + -11.759719848632812 + ], + [ + "▁tweet", + -11.759744644165039 + ], + [ + "▁zip", + -11.759872436523438 + ], + [ + "▁Bonus", + -11.760189056396484 + ], + [ + "ordnung", + -11.760287284851074 + ], + [ + "liber", + -11.76046085357666 + ], + [ + "▁Navy", + -11.760591506958008 + ], + [ + "▁agreements", + -11.760612487792969 + ], + [ + "▁detection", + -11.7607421875 + ], + [ + "DF", + -11.760762214660645 + ], + [ + "hur", + -11.760774612426758 + ], + [ + "0.00", + -11.760798454284668 + ], + [ + "▁07", + -11.760866165161133 + ], + [ + "etta", + -11.760884284973145 + ], + [ + "▁13,", + -11.760887145996094 + ], + [ + "rolled", + -11.760970115661621 + ], + [ + "▁injection", + -11.761002540588379 + ], + [ + "mig", + -11.761017799377441 + ], + [ + "wach", + -11.761107444763184 + ], + [ + "▁choisir", + -11.761515617370605 + ], + [ + "▁professionnels", + -11.76159954071045 + ], + [ + "▁Tower", + -11.76169490814209 + ], + [ + "▁neighbor", + -11.76170539855957 + ], + [ + "deutschen", + -11.76187801361084 + ], + [ + "▁luxurious", + -11.76201057434082 + ], + [ + "▁walks", + -11.762033462524414 + ], + [ + "reti", + -11.762046813964844 + ], + [ + "▁Pad", + -11.762085914611816 + ], + [ + "wise", + -11.762297630310059 + ], + [ + "▁exhaust", + -11.762307167053223 + ], + [ + "▁demonstration", + -11.762582778930664 + ], + [ + "▁agricultural", + -11.762667655944824 + ], + [ + "Upon", + -11.762885093688965 + ], + [ + "▁Blu", + -11.76292610168457 + ], + [ + "atorul", + -11.762967109680176 + ], + [ + "amour", + -11.762984275817871 + ], + [ + "issant", + -11.763004302978516 + ], + [ + "▁delighted", + -11.763031959533691 + ], + [ + "rita", + -11.763113021850586 + ], + [ + "requiring", + -11.763195037841797 + ], + [ + "ivity", + -11.763216972351074 + ], + [ + "▁Unser", + -11.763306617736816 + ], + [ + "FP", + -11.763379096984863 + ], + [ + "fait", + -11.763533592224121 + ], + [ + "dite", + -11.763562202453613 + ], + [ + "kul", + -11.763716697692871 + ], + [ + "arth", + -11.76376724243164 + ], + [ + "▁Ker", + -11.763815879821777 + ], + [ + "torilor", + -11.763816833496094 + ], + [ + "stage", + -11.763866424560547 + ], + [ + "▁HTML", + -11.76398754119873 + ], + [ + "▁Wheel", + -11.764005661010742 + ], + [ + "▁quelque", + -11.76414680480957 + ], + [ + "▁Ou", + -11.764196395874023 + ], + [ + "▁considerable", + -11.764277458190918 + ], + [ + "▁Sco", + -11.76458740234375 + ], + [ + "▁donations", + -11.76481819152832 + ], + [ + "dessen", + -11.765002250671387 + ], + [ + "▁pourquoi", + -11.765039443969727 + ], + [ + "▁Bow", + -11.765189170837402 + ], + [ + "▁Dupa", + -11.76522445678711 + ], + [ + "ska", + -11.765707015991211 + ], + [ + "hot", + -11.765732765197754 + ], + [ + "▁drove", + -11.765849113464355 + ], + [ + "▁oppos", + -11.766018867492676 + ], + [ + "▁hiking", + -11.766035079956055 + ], + [ + "▁Boot", + -11.766081809997559 + ], + [ + "One", + -11.766087532043457 + ], + [ + "▁guvern", + -11.766094207763672 + ], + [ + "▁15,", + -11.766400337219238 + ], + [ + "scheid", + -11.766437530517578 + ], + [ + "▁Miet", + -11.766458511352539 + ], + [ + "▁Technical", + -11.766767501831055 + ], + [ + "▁Dal", + -11.7669038772583 + ], + [ + "▁Metro", + 
-11.766966819763184 + ], + [ + "▁Baker", + -11.767215728759766 + ], + [ + "▁trece", + -11.767252922058105 + ], + [ + "tained", + -11.767302513122559 + ], + [ + "block", + -11.76738452911377 + ], + [ + "▁wander", + -11.767401695251465 + ], + [ + "▁penalty", + -11.76742172241211 + ], + [ + "▁shipped", + -11.767509460449219 + ], + [ + "▁30%", + -11.767518043518066 + ], + [ + "group", + -11.767541885375977 + ], + [ + "▁brothers", + -11.767701148986816 + ], + [ + "▁comanda", + -11.767777442932129 + ], + [ + "▁retreat", + -11.767789840698242 + ], + [ + "▁Movie", + -11.767802238464355 + ], + [ + "PU", + -11.76787281036377 + ], + [ + "▁Jun", + -11.767885208129883 + ], + [ + "▁$6", + -11.767969131469727 + ], + [ + "▁Fal", + -11.768054962158203 + ], + [ + "▁Palestinian", + -11.768075942993164 + ], + [ + "▁soccer", + -11.768217086791992 + ], + [ + "▁Autor", + -11.768254280090332 + ], + [ + "▁chamber", + -11.768266677856445 + ], + [ + "nement", + -11.768463134765625 + ], + [ + "▁offense", + -11.768610954284668 + ], + [ + "▁gig", + -11.768631935119629 + ], + [ + "▁abandon", + -11.768691062927246 + ], + [ + "▁Kraft", + -11.768783569335938 + ], + [ + "▁Medicare", + -11.768784523010254 + ], + [ + "▁soap", + -11.768835067749023 + ], + [ + "▁Fur", + -11.768990516662598 + ], + [ + "▁conditioning", + -11.769103050231934 + ], + [ + "rained", + -11.769132614135742 + ], + [ + "▁puts", + -11.769134521484375 + ], + [ + "▁cod", + -11.76930046081543 + ], + [ + "lassen", + -11.76941967010498 + ], + [ + "FL", + -11.769600868225098 + ], + [ + "▁komplett", + -11.769664764404297 + ], + [ + "▁entscheiden", + -11.769665718078613 + ], + [ + "▁Hour", + -11.769691467285156 + ], + [ + "?!", + -11.770040512084961 + ], + [ + "Stream", + -11.770145416259766 + ], + [ + "▁Grad", + -11.770209312438965 + ], + [ + "▁gently", + -11.770231246948242 + ], + [ + "▁poetry", + -11.770429611206055 + ], + [ + "▁secured", + -11.770438194274902 + ], + [ + "oph", + -11.770466804504395 + ], + [ + "hop", + -11.770561218261719 + ], + [ + "handel", + -11.770634651184082 + ], + [ + "▁besoins", + -11.770658493041992 + ], + [ + "got", + -11.770824432373047 + ], + [ + "▁Chrome", + -11.77088737487793 + ], + [ + "ILL", + -11.770930290222168 + ], + [ + "▁Schritt", + -11.771014213562012 + ], + [ + "▁spell", + -11.771063804626465 + ], + [ + "▁grinding", + -11.771334648132324 + ], + [ + "▁ramp", + -11.77144718170166 + ], + [ + "▁mama", + -11.7716064453125 + ], + [ + "▁bottles", + -11.77180290222168 + ], + [ + "▁canvas", + -11.771906852722168 + ], + [ + "▁ecosystem", + -11.77194595336914 + ], + [ + "aţii", + -11.771967887878418 + ], + [ + "cellular", + -11.772085189819336 + ], + [ + "▁Spin", + -11.772164344787598 + ], + [ + "▁Discover", + -11.772217750549316 + ], + [ + "-17", + -11.772322654724121 + ], + [ + "▁feeding", + -11.77246379852295 + ], + [ + "▁stops", + -11.7725191116333 + ], + [ + "▁haute", + -11.772552490234375 + ], + [ + "▁Entscheidung", + -11.7725830078125 + ], + [ + "▁semble", + -11.772590637207031 + ], + [ + "▁acele", + -11.772857666015625 + ], + [ + "▁Walk", + -11.773154258728027 + ], + [ + "▁joke", + -11.773180961608887 + ], + [ + "▁Fed", + -11.773294448852539 + ], + [ + "climat", + -11.773306846618652 + ], + [ + "▁Lot", + -11.773460388183594 + ], + [ + "runner", + -11.773551940917969 + ], + [ + "▁flip", + -11.773786544799805 + ], + [ + "▁werde", + -11.773818016052246 + ], + [ + "▁Deck", + -11.77417278289795 + ], + [ + "bala", + -11.774296760559082 + ], + [ + "▁sacrifice", + -11.774375915527344 + ], + [ + "cid", + -11.774388313293457 + ], + [ 
+ "him", + -11.774569511413574 + ], + [ + "zahlen", + -11.774587631225586 + ], + [ + "▁heater", + -11.774596214294434 + ], + [ + "formed", + -11.774619102478027 + ], + [ + "plus", + -11.774711608886719 + ], + [ + "▁util", + -11.774742126464844 + ], + [ + "rama", + -11.775019645690918 + ], + [ + "(4)", + -11.7750244140625 + ], + [ + "▁knife", + -11.775111198425293 + ], + [ + "▁traditions", + -11.77520751953125 + ], + [ + "▁dip", + -11.775357246398926 + ], + [ + "kill", + -11.775405883789062 + ], + [ + "▁Rich", + -11.775418281555176 + ], + [ + "▁DI", + -11.775555610656738 + ], + [ + "▁containers", + -11.775677680969238 + ], + [ + "▁locuri", + -11.775728225708008 + ], + [ + "▁continent", + -11.775797843933105 + ], + [ + "teilung", + -11.776005744934082 + ], + [ + "▁vreme", + -11.776028633117676 + ], + [ + "organisation", + -11.776126861572266 + ], + [ + "serie", + -11.776135444641113 + ], + [ + "▁Diamond", + -11.776204109191895 + ], + [ + "magazin", + -11.77627944946289 + ], + [ + "▁poster", + -11.776455879211426 + ], + [ + "▁passenger", + -11.7765474319458 + ], + [ + "▁soldiers", + -11.776552200317383 + ], + [ + "▁urgent", + -11.776616096496582 + ], + [ + "▁Lip", + -11.77680778503418 + ], + [ + "▁aşa", + -11.776972770690918 + ], + [ + "▁BO", + -11.777024269104004 + ], + [ + "▁somebody", + -11.777076721191406 + ], + [ + "▁silence", + -11.777132034301758 + ], + [ + "cop", + -11.777359962463379 + ], + [ + "▁Burn", + -11.77749252319336 + ], + [ + "▁stopping", + -11.777544021606445 + ], + [ + "▁essence", + -11.777568817138672 + ], + [ + "▁hitting", + -11.777762413024902 + ], + [ + "▁producers", + -11.777801513671875 + ], + [ + "▁fibre", + -11.777894020080566 + ], + [ + "▁seasonal", + -11.777960777282715 + ], + [ + "▁tara", + -11.778096199035645 + ], + [ + "▁Jose", + -11.778099060058594 + ], + [ + "▁Better", + -11.77825927734375 + ], + [ + "▁steep", + -11.778295516967773 + ], + [ + "Alors", + -11.778353691101074 + ], + [ + "▁collecting", + -11.778507232666016 + ], + [ + "vre", + -11.778635025024414 + ], + [ + "▁disabled", + -11.77863883972168 + ], + [ + "▁voters", + -11.778679847717285 + ], + [ + "consuming", + -11.779092788696289 + ], + [ + "deemed", + -11.779115676879883 + ], + [ + "éra", + -11.779227256774902 + ], + [ + "opération", + -11.779273986816406 + ], + [ + "▁roller", + -11.779305458068848 + ], + [ + "Rather", + -11.779321670532227 + ], + [ + "▁leider", + -11.779370307922363 + ], + [ + "▁IV", + -11.779434204101562 + ], + [ + "▁erreichen", + -11.779473304748535 + ], + [ + "▁charging", + -11.779657363891602 + ], + [ + "tions", + -11.77973747253418 + ], + [ + "tiques", + -11.779861450195312 + ], + [ + "▁formats", + -11.779876708984375 + ], + [ + "▁painful", + -11.78000545501709 + ], + [ + "▁eager", + -11.780061721801758 + ], + [ + "generation", + -11.780137062072754 + ], + [ + "anna", + -11.780235290527344 + ], + [ + "▁races", + -11.780323028564453 + ], + [ + "force", + -11.780357360839844 + ], + [ + "▁ferm", + -11.780522346496582 + ], + [ + "▁breathing", + -11.780618667602539 + ], + [ + "▁offen", + -11.780648231506348 + ], + [ + "▁minds", + -11.780805587768555 + ], + [ + "▁musste", + -11.780832290649414 + ], + [ + "▁Vision", + -11.780888557434082 + ], + [ + "▁Installation", + -11.780988693237305 + ], + [ + "▁hesitate", + -11.781002044677734 + ], + [ + "▁somit", + -11.781023979187012 + ], + [ + "hôtel", + -11.781044006347656 + ], + [ + "cab", + -11.781235694885254 + ], + [ + "-16", + -11.781312942504883 + ], + [ + "▁Visual", + -11.781418800354004 + ], + [ + "intérêt", + -11.781524658203125 
+ ], + [ + "▁apel", + -11.781831741333008 + ], + [ + "therapy", + -11.782089233398438 + ], + [ + "volt", + -11.78225040435791 + ], + [ + "▁Rou", + -11.782439231872559 + ], + [ + "▁efficace", + -11.782464027404785 + ], + [ + "▁architectural", + -11.782605171203613 + ], + [ + "▁privilege", + -11.782670974731445 + ], + [ + "▁treating", + -11.782711029052734 + ], + [ + "▁Tam", + -11.782722473144531 + ], + [ + "tsch", + -11.782744407653809 + ], + [ + "building", + -11.782750129699707 + ], + [ + "▁associations", + -11.782929420471191 + ], + [ + "▁Consumer", + -11.783424377441406 + ], + [ + "▁Lim", + -11.783496856689453 + ], + [ + "newest", + -11.7835054397583 + ], + [ + "▁față", + -11.783675193786621 + ], + [ + "▁ships", + -11.783732414245605 + ], + [ + "lev", + -11.78373908996582 + ], + [ + "raft", + -11.783817291259766 + ], + [ + "▁variations", + -11.783845901489258 + ], + [ + "▁noua", + -11.78386402130127 + ], + [ + "▁Cab", + -11.784063339233398 + ], + [ + "1.2", + -11.78409481048584 + ], + [ + "▁ocazi", + -11.784347534179688 + ], + [ + "▁recommendation", + -11.784449577331543 + ], + [ + "titled", + -11.78445053100586 + ], + [ + "▁invoice", + -11.78459644317627 + ], + [ + "▁noastra", + -11.784647941589355 + ], + [ + "kur", + -11.784700393676758 + ], + [ + "issent", + -11.784758567810059 + ], + [ + "base", + -11.784778594970703 + ], + [ + "hä", + -11.7848482131958 + ], + [ + "888", + -11.784914016723633 + ], + [ + "▁declar", + -11.784941673278809 + ], + [ + "▁Football", + -11.7850341796875 + ], + [ + "▁Indeed", + -11.785293579101562 + ], + [ + "▁weapon", + -11.785333633422852 + ], + [ + "▁destroyed", + -11.785457611083984 + ], + [ + "▁enormous", + -11.785594940185547 + ], + [ + "▁blanket", + -11.7857084274292 + ], + [ + "▁aktiv", + -11.785759925842285 + ], + [ + "raw", + -11.785791397094727 + ], + [ + "▁computing", + -11.785823822021484 + ], + [ + "6)", + -11.785955429077148 + ], + [ + "▁Dam", + -11.786152839660645 + ], + [ + "▁confort", + -11.786174774169922 + ], + [ + "▁Gla", + -11.786198616027832 + ], + [ + "hardly", + -11.786242485046387 + ], + [ + "▁annually", + -11.786269187927246 + ], + [ + "▁destinations", + -11.786401748657227 + ], + [ + "▁guilty", + -11.786404609680176 + ], + [ + "▁scholarship", + -11.786439895629883 + ], + [ + "▁harmful", + -11.786453247070312 + ], + [ + "▁2-3", + -11.786616325378418 + ], + [ + "▁Race", + -11.786638259887695 + ], + [ + "▁hypo", + -11.78671646118164 + ], + [ + "▁shorter", + -11.786733627319336 + ], + [ + "quest", + -11.78675651550293 + ], + [ + "uze", + -11.786812782287598 + ], + [ + "izi", + -11.787005424499512 + ], + [ + "OO", + -11.787095069885254 + ], + [ + "▁Schutz", + -11.787097930908203 + ], + [ + "▁Teilnehmer", + -11.787185668945312 + ], + [ + "▁profiles", + -11.787199020385742 + ], + [ + "▁sustainability", + -11.78747272491455 + ], + [ + "▁emb", + -11.787489891052246 + ], + [ + "▁Augen", + -11.787516593933105 + ], + [ + "▁outdoors", + -11.787542343139648 + ], + [ + "▁Individual", + -11.787548065185547 + ], + [ + "▁pou", + -11.78757095336914 + ], + [ + "▁Together", + -11.787575721740723 + ], + [ + "HT", + -11.787674903869629 + ], + [ + "suited", + -11.787755012512207 + ], + [ + "▁tro", + -11.787782669067383 + ], + [ + "▁Strom", + -11.787805557250977 + ], + [ + "▁achievement", + -11.78799819946289 + ], + [ + "▁Range", + -11.78815746307373 + ], + [ + "tory", + -11.78817081451416 + ], + [ + "▁distribute", + -11.788250923156738 + ], + [ + "▁letzte", + -11.788276672363281 + ], + [ + "incorporated", + -11.788287162780762 + ], + [ + "▁Kir", + 
-11.788325309753418 + ], + [ + "ruf", + -11.78839111328125 + ], + [ + "▁disappointed", + -11.788543701171875 + ], + [ + "▁referral", + -11.788602828979492 + ], + [ + "flam", + -11.788687705993652 + ], + [ + "▁excessive", + -11.7886962890625 + ], + [ + "▁rapidement", + -11.788743019104004 + ], + [ + "▁Rio", + -11.78875732421875 + ], + [ + "aţia", + -11.788951873779297 + ], + [ + "▁meuble", + -11.78912353515625 + ], + [ + "▁2008.", + -11.789135932922363 + ], + [ + "▁Gall", + -11.78915023803711 + ], + [ + "▁française", + -11.789369583129883 + ], + [ + "▁ladies", + -11.789695739746094 + ], + [ + "ailed", + -11.789746284484863 + ], + [ + "El", + -11.789834976196289 + ], + [ + "▁wines", + -11.789868354797363 + ], + [ + "▁beispielsweise", + -11.789876937866211 + ], + [ + "▁gamme", + -11.790193557739258 + ], + [ + "▁guided", + -11.79028034210205 + ], + [ + "▁plin", + -11.790339469909668 + ], + [ + "Î", + -11.790390968322754 + ], + [ + "▁True", + -11.790498733520508 + ], + [ + "▁Temple", + -11.790507316589355 + ], + [ + "▁Pic", + -11.790520668029785 + ], + [ + "permalink", + -11.790547370910645 + ], + [ + "▁vedea", + -11.790656089782715 + ], + [ + "▁rank", + -11.790922164916992 + ], + [ + "▁Grill", + -11.791025161743164 + ], + [ + "clin", + -11.791070938110352 + ], + [ + "▁Hab", + -11.791089057922363 + ], + [ + "▁odds", + -11.791125297546387 + ], + [ + "▁anytime", + -11.791146278381348 + ], + [ + "▁Thanksgiving", + -11.791265487670898 + ], + [ + "guard", + -11.791300773620605 + ], + [ + "▁essays", + -11.791389465332031 + ], + [ + "▁PE", + -11.79139518737793 + ], + [ + "▁Rechts", + -11.791494369506836 + ], + [ + "mals", + -11.791751861572266 + ], + [ + "achi", + -11.791762351989746 + ], + [ + "▁Anthony", + -11.791765213012695 + ], + [ + "▁réponse", + -11.792036056518555 + ], + [ + "standing", + -11.79227352142334 + ], + [ + "▁Mol", + -11.792427062988281 + ], + [ + "▁Canon", + -11.792474746704102 + ], + [ + "▁silk", + -11.792515754699707 + ], + [ + "▁pourrait", + -11.79278564453125 + ], + [ + "▁raport", + -11.79280948638916 + ], + [ + "▁Woche", + -11.792889595031738 + ], + [ + "fallen", + -11.79293155670166 + ], + [ + "sting", + -11.79310131072998 + ], + [ + "▁circulation", + -11.793102264404297 + ], + [ + "▁skirt", + -11.7931547164917 + ], + [ + "▁Title", + -11.793187141418457 + ], + [ + "▁17.", + -11.79331111907959 + ], + [ + "▁Touch", + -11.793486595153809 + ], + [ + "▁utilizat", + -11.79352855682373 + ], + [ + "▁Organisation", + -11.793569564819336 + ], + [ + "▁mereu", + -11.793848991394043 + ], + [ + "▁oxygen", + -11.793953895568848 + ], + [ + "lique", + -11.793985366821289 + ], + [ + "▁consume", + -11.794100761413574 + ], + [ + "▁Barb", + -11.794102668762207 + ], + [ + "1.1", + -11.794105529785156 + ], + [ + "▁nicely", + -11.79419231414795 + ], + [ + "▁psychological", + -11.794227600097656 + ], + [ + "▁refrigerator", + -11.794478416442871 + ], + [ + "▁fantasy", + -11.79481029510498 + ], + [ + "▁dispute", + -11.79494571685791 + ], + [ + "▁IBM", + -11.794954299926758 + ], + [ + "▁Nation", + -11.794971466064453 + ], + [ + "▁mobil", + -11.795063972473145 + ], + [ + "▁density", + -11.795201301574707 + ], + [ + "ske", + -11.795230865478516 + ], + [ + "▁intimate", + -11.795313835144043 + ], + [ + "▁tailored", + -11.795319557189941 + ], + [ + "▁outline", + -11.795472145080566 + ], + [ + "TN", + -11.79554557800293 + ], + [ + "mur", + -11.795634269714355 + ], + [ + "GC", + -11.795662879943848 + ], + [ + "they", + -11.795992851257324 + ], + [ + "pag", + -11.796161651611328 + ], + [ + "▁Kultur", + 
-11.796246528625488 + ], + [ + "grün", + -11.796281814575195 + ], + [ + "voted", + -11.796529769897461 + ], + [ + "▁donné", + -11.796546936035156 + ], + [ + "▁Să", + -11.796629905700684 + ], + [ + "enberg", + -11.796648979187012 + ], + [ + "▁wi", + -11.79686450958252 + ], + [ + "▁Francis", + -11.797057151794434 + ], + [ + "▁Rick", + -11.797157287597656 + ], + [ + "accord", + -11.797403335571289 + ], + [ + "▁Zusammen", + -11.797415733337402 + ], + [ + "▁nonprofit", + -11.797456741333008 + ], + [ + "▁listings", + -11.797615051269531 + ], + [ + "6,", + -11.797908782958984 + ], + [ + "▁maximize", + -11.798253059387207 + ], + [ + "bud", + -11.798345565795898 + ], + [ + "▁promotional", + -11.798486709594727 + ], + [ + "cina", + -11.798646926879883 + ], + [ + "▁potatoes", + -11.79869556427002 + ], + [ + "▁mot", + -11.798871040344238 + ], + [ + "carries", + -11.799384117126465 + ], + [ + "▁stabilit", + -11.799458503723145 + ], + [ + "▁Door", + -11.799574851989746 + ], + [ + "▁downloaded", + -11.799574851989746 + ], + [ + "▁experimental", + -11.799724578857422 + ], + [ + "HD", + -11.7997407913208 + ], + [ + "▁parfois", + -11.79980182647705 + ], + [ + "▁zeigen", + -11.800092697143555 + ], + [ + "▁proposé", + -11.80030632019043 + ], + [ + "▁Verein", + -11.800636291503906 + ], + [ + "▁amestec", + -11.800676345825195 + ], + [ + "▁entreprise", + -11.800718307495117 + ], + [ + "▁PSD", + -11.800841331481934 + ], + [ + "▁bake", + -11.800897598266602 + ], + [ + "▁Rh", + -11.800904273986816 + ], + [ + "▁Mehr", + -11.800922393798828 + ], + [ + "▁purple", + -11.801074028015137 + ], + [ + "▁recipient", + -11.80109691619873 + ], + [ + "rare", + -11.801166534423828 + ], + [ + "egi", + -11.80117130279541 + ], + [ + "ancien", + -11.801176071166992 + ], + [ + "▁risque", + -11.80118465423584 + ], + [ + "▁mystery", + -11.80157470703125 + ], + [ + "mac", + -11.801697731018066 + ], + [ + "ibility", + -11.80182933807373 + ], + [ + "▁Moore", + -11.801881790161133 + ], + [ + "▁flavors", + -11.801911354064941 + ], + [ + "▁trauma", + -11.801966667175293 + ], + [ + "▁automotive", + -11.802112579345703 + ], + [ + "▁Anyway", + -11.802197456359863 + ], + [ + "▁simulation", + -11.802253723144531 + ], + [ + "▁crafts", + -11.802525520324707 + ], + [ + "▁measurements", + -11.80257511138916 + ], + [ + "▁cour", + -11.80257797241211 + ], + [ + "▁tard", + -11.802600860595703 + ], + [ + "nnie", + -11.802881240844727 + ], + [ + "▁Production", + -11.803388595581055 + ], + [ + "▁Cleaning", + -11.803567886352539 + ], + [ + "5,", + -11.803644180297852 + ], + [ + "▁Islamic", + -11.803766250610352 + ], + [ + "▁Gate", + -11.80378532409668 + ], + [ + "bay", + -11.803814888000488 + ], + [ + "HR", + -11.803990364074707 + ], + [ + "▁Offer", + -11.80399227142334 + ], + [ + "▁acceptance", + -11.804107666015625 + ], + [ + "▁Erfahrung", + -11.80412769317627 + ], + [ + "▁environ", + -11.804193496704102 + ], + [ + "▁fancy", + -11.804218292236328 + ], + [ + "▁bullet", + -11.80437183380127 + ], + [ + "organ", + -11.804466247558594 + ], + [ + "▁Peace", + -11.804520606994629 + ], + [ + "▁detalii", + -11.80461597442627 + ], + [ + "▁promised", + -11.804715156555176 + ], + [ + "▁wellness", + -11.804746627807617 + ], + [ + "▁satisfy", + -11.80481243133545 + ], + [ + "▁grants", + -11.805212020874023 + ], + [ + "accueil", + -11.80522346496582 + ], + [ + "▁oben", + -11.805412292480469 + ], + [ + "▁prospects", + -11.80543327331543 + ], + [ + "▁Events", + -11.805513381958008 + ], + [ + "2013", + -11.805569648742676 + ], + [ + "gesehen", + -11.805685997009277 + ], + [ 
+ "▁£1", + -11.805727005004883 + ], + [ + "▁handelt", + -11.805798530578613 + ], + [ + "▁Spieler", + -11.805876731872559 + ], + [ + "▁Virtual", + -11.806145668029785 + ], + [ + "▁bubble", + -11.806239128112793 + ], + [ + "▁Trend", + -11.806254386901855 + ], + [ + "▁sistemul", + -11.806315422058105 + ], + [ + "▁Morgan", + -11.806320190429688 + ], + [ + "▁pole", + -11.806503295898438 + ], + [ + "▁spielen", + -11.806533813476562 + ], + [ + "tür", + -11.806571006774902 + ], + [ + "SCO", + -11.806572914123535 + ], + [ + "▁informative", + -11.806678771972656 + ], + [ + "▁affirm", + -11.806755065917969 + ], + [ + "▁Aqua", + -11.806818008422852 + ], + [ + "▁AR", + -11.806888580322266 + ], + [ + "richten", + -11.807071685791016 + ], + [ + "▁rewards", + -11.807122230529785 + ], + [ + "lub", + -11.807235717773438 + ], + [ + "shot", + -11.807236671447754 + ], + [ + "LM", + -11.807540893554688 + ], + [ + "Up", + -11.807586669921875 + ], + [ + "▁absolut", + -11.807737350463867 + ], + [ + "▁Mart", + -11.807806968688965 + ], + [ + "erweise", + -11.807812690734863 + ], + [ + "BP", + -11.807977676391602 + ], + [ + "▁difficile", + -11.808152198791504 + ], + [ + "▁Document", + -11.808159828186035 + ], + [ + "▁Sweet", + -11.8082914352417 + ], + [ + "▁indicator", + -11.808338165283203 + ], + [ + "▁Boden", + -11.808389663696289 + ], + [ + "mates", + -11.808477401733398 + ], + [ + "▁supporters", + -11.808504104614258 + ], + [ + "▁begun", + -11.808600425720215 + ], + [ + "▁blogging", + -11.808611869812012 + ], + [ + "▁CL", + -11.808663368225098 + ], + [ + "gres", + -11.808692932128906 + ], + [ + "▁preferences", + -11.808738708496094 + ], + [ + "▁screw", + -11.808756828308105 + ], + [ + "▁tutor", + -11.808858871459961 + ], + [ + "▁Additional", + -11.80891227722168 + ], + [ + "▁Bitte", + -11.808976173400879 + ], + [ + "utilizing", + -11.808998107910156 + ], + [ + "▁expérience", + -11.809073448181152 + ], + [ + "▁dur", + -11.809146881103516 + ], + [ + "▁precisely", + -11.809178352355957 + ], + [ + "▁janvier", + -11.809394836425781 + ], + [ + "AGE", + -11.80987548828125 + ], + [ + "moto", + -11.810007095336914 + ], + [ + "▁counsel", + -11.810195922851562 + ], + [ + "▁110", + -11.810226440429688 + ], + [ + "nick", + -11.810245513916016 + ], + [ + "licit", + -11.810540199279785 + ], + [ + "technik", + -11.810659408569336 + ], + [ + "▁collaborate", + -11.810736656188965 + ], + [ + "▁neighbors", + -11.810794830322266 + ], + [ + "tered", + -11.810922622680664 + ], + [ + "▁excel", + -11.811025619506836 + ], + [ + "▁Route", + -11.811059951782227 + ], + [ + "steuer", + -11.81109619140625 + ], + [ + "▁pioneer", + -11.811607360839844 + ], + [ + "nuit", + -11.81169319152832 + ], + [ + "▁skip", + -11.811963081359863 + ], + [ + "▁destruction", + -11.811997413635254 + ], + [ + "▁thesis", + -11.812249183654785 + ], + [ + "▁libre", + -11.812317848205566 + ], + [ + "▁petition", + -11.81234073638916 + ], + [ + "▁steady", + -11.812456130981445 + ], + [ + "▁medications", + -11.812458992004395 + ], + [ + "▁audiences", + -11.812623023986816 + ], + [ + "▁coaches", + -11.812689781188965 + ], + [ + "aller", + -11.812704086303711 + ], + [ + "3,000", + -11.812705993652344 + ], + [ + "▁anger", + -11.812785148620605 + ], + [ + "▁striking", + -11.812844276428223 + ], + [ + "▁shades", + -11.81291675567627 + ], + [ + "▁Sitz", + -11.812994956970215 + ], + [ + "▁gluten", + -11.813162803649902 + ], + [ + "▁egal", + -11.813222885131836 + ], + [ + "ania", + -11.813223838806152 + ], + [ + "▁defend", + -11.813241004943848 + ], + [ + "gut", + 
-11.81382942199707 + ], + [ + "▁reserves", + -11.813895225524902 + ], + [ + "▁advocate", + -11.814053535461426 + ], + [ + "▁Cit", + -11.814082145690918 + ], + [ + "▁technicians", + -11.814105033874512 + ], + [ + "▁cater", + -11.814138412475586 + ], + [ + "leitung", + -11.814190864562988 + ], + [ + "▁towns", + -11.814335823059082 + ], + [ + "▁Costa", + -11.814364433288574 + ], + [ + "▁confront", + -11.814567565917969 + ], + [ + "mount", + -11.814652442932129 + ], + [ + "▁nationale", + -11.814706802368164 + ], + [ + "▁adverse", + -11.814932823181152 + ], + [ + "▁couleur", + -11.815112113952637 + ], + [ + "▁delight", + -11.815169334411621 + ], + [ + "▁promises", + -11.815224647521973 + ], + [ + "▁silent", + -11.81550121307373 + ], + [ + "richtet", + -11.815556526184082 + ], + [ + "▁Companies", + -11.815614700317383 + ], + [ + "▁Charlotte", + -11.815620422363281 + ], + [ + "▁labels", + -11.815652847290039 + ], + [ + "▁Süd", + -11.815656661987305 + ], + [ + "▁Honor", + -11.81567096710205 + ], + [ + "▁complaints", + -11.815710067749023 + ], + [ + "▁siècle", + -11.815752029418945 + ], + [ + "▁suits", + -11.815792083740234 + ], + [ + "▁Bath", + -11.815827369689941 + ], + [ + "mise", + -11.815926551818848 + ], + [ + "▁acela", + -11.8159818649292 + ], + [ + "▁candidat", + -11.816011428833008 + ], + [ + "Flo", + -11.816207885742188 + ], + [ + "▁conservative", + -11.816215515136719 + ], + [ + "DD", + -11.816314697265625 + ], + [ + "▁changement", + -11.816414833068848 + ], + [ + "▁login", + -11.816492080688477 + ], + [ + "▁Fashion", + -11.816585540771484 + ], + [ + "reichen", + -11.816672325134277 + ], + [ + "through", + -11.816751480102539 + ], + [ + "aki", + -11.817240715026855 + ], + [ + "gna", + -11.817547798156738 + ], + [ + "▁verse", + -11.817551612854004 + ], + [ + "▁threats", + -11.817622184753418 + ], + [ + "▁Song", + -11.817770004272461 + ], + [ + "▁funded", + -11.81792163848877 + ], + [ + "langen", + -11.818023681640625 + ], + [ + "▁distribu", + -11.818195343017578 + ], + [ + "édition", + -11.818316459655762 + ], + [ + "▁royal", + -11.818562507629395 + ], + [ + "▁bevor", + -11.818829536437988 + ], + [ + "▁02", + -11.818854331970215 + ], + [ + "straße", + -11.818938255310059 + ], + [ + "edit", + -11.81904125213623 + ], + [ + "▁energetic", + -11.81922721862793 + ], + [ + "▁Carr", + -11.819757461547852 + ], + [ + "viol", + -11.819937705993652 + ], + [ + "▁niche", + -11.820054054260254 + ], + [ + "avais", + -11.820099830627441 + ], + [ + "▁backyard", + -11.82010269165039 + ], + [ + "▁Saudi", + -11.820158958435059 + ], + [ + "▁Zwei", + -11.820207595825195 + ], + [ + "▁Legal", + -11.82027530670166 + ], + [ + "accessed", + -11.820277214050293 + ], + [ + "▁choisi", + -11.820340156555176 + ], + [ + "▁GDP", + -11.820343971252441 + ], + [ + "oferă", + -11.820352554321289 + ], + [ + "hlen", + -11.820490837097168 + ], + [ + "▁Wor", + -11.820520401000977 + ], + [ + "▁cheer", + -11.820586204528809 + ], + [ + "▁barely", + -11.820625305175781 + ], + [ + "cost", + -11.820646286010742 + ], + [ + "▁Really", + -11.820661544799805 + ], + [ + "kol", + -11.820721626281738 + ], + [ + "▁binding", + -11.821045875549316 + ], + [ + "euer", + -11.821136474609375 + ], + [ + "▁optimization", + -11.821158409118652 + ], + [ + "▁Designer", + -11.8211669921875 + ], + [ + "▁measuring", + -11.82117748260498 + ], + [ + "ncy", + -11.821516036987305 + ], + [ + "weise", + -11.821520805358887 + ], + [ + "DER", + -11.821850776672363 + ], + [ + "▁$7", + -11.821949005126953 + ], + [ + "▁Anfang", + -11.821954727172852 + ], + [ + 
"material", + -11.821967124938965 + ], + [ + "▁antique", + -11.822281837463379 + ], + [ + "▁Certificate", + -11.822294235229492 + ], + [ + "▁modest", + -11.822370529174805 + ], + [ + "ției", + -11.822427749633789 + ], + [ + "▁praise", + -11.82245922088623 + ], + [ + "▁Springs", + -11.822660446166992 + ], + [ + "▁organiza", + -11.823041915893555 + ], + [ + "jurul", + -11.823047637939453 + ], + [ + "▁plumbing", + -11.82341194152832 + ], + [ + "▁foster", + -11.823490142822266 + ], + [ + "▁Wy", + -11.823491096496582 + ], + [ + "▁Sab", + -11.823503494262695 + ], + [ + "▁overwhelming", + -11.823677062988281 + ], + [ + "▁matin", + -11.823812484741211 + ], + [ + "▁responded", + -11.82408332824707 + ], + [ + "▁confused", + -11.824150085449219 + ], + [ + "▁blessed", + -11.824280738830566 + ], + [ + "▁160", + -11.824295997619629 + ], + [ + "▁ingredient", + -11.824360847473145 + ], + [ + "▁confer", + -11.82448673248291 + ], + [ + "▁Gesundheit", + -11.824530601501465 + ], + [ + "▁bucket", + -11.824555397033691 + ], + [ + "kraft", + -11.824565887451172 + ], + [ + "lange", + -11.824630737304688 + ], + [ + "▁Kopf", + -11.824678421020508 + ], + [ + "▁Prize", + -11.824678421020508 + ], + [ + "▁authorized", + -11.824779510498047 + ], + [ + "▁tick", + -11.824803352355957 + ], + [ + "▁steal", + -11.824910163879395 + ], + [ + "Depending", + -11.824918746948242 + ], + [ + "Depuis", + -11.824952125549316 + ], + [ + "▁functie", + -11.82499885559082 + ], + [ + "▁developments", + -11.825053215026855 + ], + [ + "▁Christians", + -11.825311660766602 + ], + [ + "▁calculated", + -11.8256254196167 + ], + [ + "▁Leave", + -11.825672149658203 + ], + [ + "▁Jam", + -11.82573413848877 + ], + [ + "▁habitat", + -11.825760841369629 + ], + [ + "▁Sorry", + -11.825801849365234 + ], + [ + "▁oficial", + -11.825944900512695 + ], + [ + "▁allein", + -11.826079368591309 + ], + [ + "▁concentrate", + -11.82608413696289 + ], + [ + "dica", + -11.826302528381348 + ], + [ + "▁Convention", + -11.826476097106934 + ], + [ + "illes", + -11.826550483703613 + ], + [ + "▁fum", + -11.82664680480957 + ], + [ + "▁Tal", + -11.826651573181152 + ], + [ + "Europe", + -11.826899528503418 + ], + [ + "▁attachment", + -11.826949119567871 + ], + [ + "▁sensibil", + -11.826995849609375 + ], + [ + "▁clue", + -11.82715892791748 + ], + [ + "▁specialty", + -11.827203750610352 + ], + [ + "▁Cou", + -11.827229499816895 + ], + [ + "▁liste", + -11.827278137207031 + ], + [ + "▁Penn", + -11.827465057373047 + ], + [ + "TRA", + -11.827559471130371 + ], + [ + "▁Themen", + -11.827561378479004 + ], + [ + "▁motivated", + -11.827906608581543 + ], + [ + "▁camere", + -11.828017234802246 + ], + [ + "▁14,", + -11.828393936157227 + ], + [ + "▁attendance", + -11.828557968139648 + ], + [ + "atorii", + -11.828581809997559 + ], + [ + "chemistry", + -11.82873821258545 + ], + [ + "▁roofing", + -11.828959465026855 + ], + [ + "▁Links", + -11.829048156738281 + ], + [ + "▁trou", + -11.829103469848633 + ], + [ + "▁trucks", + -11.829136848449707 + ], + [ + "hilfe", + -11.829557418823242 + ], + [ + "▁(6", + -11.829599380493164 + ], + [ + "vapor", + -11.82964038848877 + ], + [ + "mad", + -11.829668045043945 + ], + [ + "▁Albert", + -11.829877853393555 + ], + [ + "▁FIG", + -11.830073356628418 + ], + [ + "▁Rand", + -11.830187797546387 + ], + [ + "▁Constitution", + -11.830219268798828 + ], + [ + "ambi", + -11.830294609069824 + ], + [ + "▁Syria", + -11.830307006835938 + ], + [ + "▁Fond", + -11.830477714538574 + ], + [ + "▁gouvernement", + -11.830594062805176 + ], + [ + "▁Active", + -11.830705642700195 + ], 
+ [ + "▁prints", + -11.830801963806152 + ], + [ + "▁weigh", + -11.8308687210083 + ], + [ + "▁Craft", + -11.831069946289062 + ], + [ + "▁projets", + -11.831247329711914 + ], + [ + "▁paste", + -11.831377029418945 + ], + [ + "anci", + -11.83139705657959 + ], + [ + "kie", + -11.831411361694336 + ], + [ + "▁gains", + -11.83165168762207 + ], + [ + "▁Record", + -11.831942558288574 + ], + [ + "▁beliefs", + -11.831954956054688 + ], + [ + "countless", + -11.831957817077637 + ], + [ + "▁tomatoes", + -11.831997871398926 + ], + [ + "arie", + -11.832082748413086 + ], + [ + "▁140", + -11.83211612701416 + ], + [ + "▁ethical", + -11.832229614257812 + ], + [ + "objectif", + -11.832279205322266 + ], + [ + "▁acestuia", + -11.832283973693848 + ], + [ + "▁Bluetooth", + -11.832398414611816 + ], + [ + "▁agriculture", + -11.832746505737305 + ], + [ + "uré", + -11.833027839660645 + ], + [ + "▁cale", + -11.833072662353516 + ], + [ + "▁articol", + -11.833073616027832 + ], + [ + "▁gum", + -11.833319664001465 + ], + [ + "▁vendor", + -11.833490371704102 + ], + [ + "ifié", + -11.833527565002441 + ], + [ + "▁peer", + -11.833662033081055 + ], + [ + "pod", + -11.834036827087402 + ], + [ + "▁utilized", + -11.834113121032715 + ], + [ + "▁Mü", + -11.834207534790039 + ], + [ + "owohl", + -11.834208488464355 + ], + [ + "hilst", + -11.834233283996582 + ], + [ + "frame", + -11.834260940551758 + ], + [ + "▁fridge", + -11.834822654724121 + ], + [ + "▁query", + -11.835108757019043 + ], + [ + "▁Survey", + -11.835227012634277 + ], + [ + "▁Hell", + -11.835247993469238 + ], + [ + "▁notification", + -11.83530044555664 + ], + [ + "TR", + -11.83538818359375 + ], + [ + "▁ultima", + -11.835505485534668 + ], + [ + "▁radiation", + -11.835631370544434 + ], + [ + "▁musicians", + -11.835821151733398 + ], + [ + "CAN", + -11.83595085144043 + ], + [ + "▁grocery", + -11.83607292175293 + ], + [ + "▁Sicherheit", + -11.83611011505127 + ], + [ + "▁Highway", + -11.836276054382324 + ], + [ + "▁Break", + -11.836285591125488 + ], + [ + "TED", + -11.836345672607422 + ], + [ + "ön", + -11.836352348327637 + ], + [ + "▁biological", + -11.836352348327637 + ], + [ + "qual", + -11.836397171020508 + ], + [ + "250", + -11.83641242980957 + ], + [ + "▁modify", + -11.836651802062988 + ], + [ + "▁Hit", + -11.836698532104492 + ], + [ + "▁Iar", + -11.836838722229004 + ], + [ + "aged", + -11.836884498596191 + ], + [ + "...)", + -11.83688735961914 + ], + [ + "▁contrat", + -11.836928367614746 + ], + [ + "▁centres", + -11.836956977844238 + ], + [ + "griff", + -11.836987495422363 + ], + [ + "Our", + -11.837233543395996 + ], + [ + "▁determination", + -11.837300300598145 + ], + [ + "▁variables", + -11.83742904663086 + ], + [ + "▁nuts", + -11.837472915649414 + ], + [ + "échange", + -11.837577819824219 + ], + [ + "extérieur", + -11.837631225585938 + ], + [ + "▁suflet", + -11.83764362335205 + ], + [ + "▁Scha", + -11.837752342224121 + ], + [ + "stück", + -11.837774276733398 + ], + [ + "▁Tau", + -11.837821960449219 + ], + [ + "▁participa", + -11.838008880615234 + ], + [ + "▁mad", + -11.838034629821777 + ], + [ + "▁relie", + -11.838051795959473 + ], + [ + "▁Fine", + -11.83808422088623 + ], + [ + "▁grape", + -11.838118553161621 + ], + [ + "▁wage", + -11.838141441345215 + ], + [ + "▁startup", + -11.838193893432617 + ], + [ + "▁blank", + -11.838194847106934 + ], + [ + "▁physique", + -11.838199615478516 + ], + [ + "▁punch", + -11.838233947753906 + ], + [ + "▁contacts", + -11.838321685791016 + ], + [ + "▁dezvolt", + -11.83835220336914 + ], + [ + "cross", + -11.838639259338379 + ], + [ + 
"▁TR", + -11.838652610778809 + ], + [ + "▁gener", + -11.838754653930664 + ], + [ + "▁indem", + -11.838823318481445 + ], + [ + "▁Stan", + -11.838839530944824 + ], + [ + "▁azi", + -11.838930130004883 + ], + [ + "▁Sel", + -11.838958740234375 + ], + [ + "▁Tot", + -11.83924674987793 + ], + [ + "vra", + -11.839341163635254 + ], + [ + "▁recruit", + -11.839482307434082 + ], + [ + "▁Yeah", + -11.839494705200195 + ], + [ + "/10", + -11.839507102966309 + ], + [ + "▁nail", + -11.83956241607666 + ], + [ + "▁Ky", + -11.839611053466797 + ], + [ + "▁beloved", + -11.839760780334473 + ], + [ + "operative", + -11.839823722839355 + ], + [ + "▁Tickets", + -11.83983325958252 + ], + [ + "▁tear", + -11.840229988098145 + ], + [ + "▁amp", + -11.840352058410645 + ], + [ + "▁04", + -11.840361595153809 + ], + [ + "▁illustrate", + -11.840361595153809 + ], + [ + "▁mac", + -11.840400695800781 + ], + [ + "▁receiver", + -11.840482711791992 + ], + [ + "atrice", + -11.840508460998535 + ], + [ + "▁souhait", + -11.840572357177734 + ], + [ + "▁Gewinn", + -11.840619087219238 + ], + [ + "▁Vit", + -11.840808868408203 + ], + [ + "roch", + -11.841202735900879 + ], + [ + "▁arata", + -11.841262817382812 + ], + [ + "▁Indiana", + -11.841364860534668 + ], + [ + "child", + -11.841516494750977 + ], + [ + "▁invested", + -11.84157657623291 + ], + [ + "▁Excellent", + -11.841625213623047 + ], + [ + "gori", + -11.841769218444824 + ], + [ + "▁thermal", + -11.841813087463379 + ], + [ + "Str", + -11.841973304748535 + ], + [ + "▁liver", + -11.84201717376709 + ], + [ + "miss", + -11.842035293579102 + ], + [ + "▁utiliser", + -11.842120170593262 + ], + [ + "▁prest", + -11.842445373535156 + ], + [ + "2016", + -11.842506408691406 + ], + [ + "isée", + -11.842508316040039 + ], + [ + "▁Index", + -11.842559814453125 + ], + [ + "▁arch", + -11.842639923095703 + ], + [ + "▁Toyota", + -11.842748641967773 + ], + [ + "▁YOUR", + -11.842782020568848 + ], + [ + "▁Mexican", + -11.842891693115234 + ], + [ + "▁gegenüber", + -11.842940330505371 + ], + [ + "▁cannabis", + -11.843033790588379 + ], + [ + "bis", + -11.843077659606934 + ], + [ + "vage", + -11.843083381652832 + ], + [ + "hall", + -11.843091011047363 + ], + [ + "fax", + -11.843137741088867 + ], + [ + "▁spoken", + -11.843232154846191 + ], + [ + "▁Zimmer", + -11.843544960021973 + ], + [ + "kauf", + -11.8436279296875 + ], + [ + "▁couleurs", + -11.843705177307129 + ], + [ + "▁NJ", + -11.844026565551758 + ], + [ + "▁Heritage", + -11.844318389892578 + ], + [ + "▁Pflege", + -11.844321250915527 + ], + [ + "luc", + -11.844361305236816 + ], + [ + "▁56", + -11.844489097595215 + ], + [ + "VP", + -11.844542503356934 + ], + [ + "▁cuvinte", + -11.844594955444336 + ], + [ + "▁Alliance", + -11.844614028930664 + ], + [ + "▁coco", + -11.844615936279297 + ], + [ + "▁leverage", + -11.844762802124023 + ], + [ + "auch", + -11.844844818115234 + ], + [ + "▁Cart", + -11.84506607055664 + ], + [ + "taux", + -11.84532642364502 + ], + [ + "east", + -11.84560775756836 + ], + [ + "▁decorating", + -11.84565258026123 + ], + [ + "tip", + -11.84565544128418 + ], + [ + "▁Communications", + -11.845780372619629 + ], + [ + "ACE", + -11.84580135345459 + ], + [ + "▁Consul", + -11.845993041992188 + ], + [ + "▁Swiss", + -11.846197128295898 + ], + [ + "inci", + -11.846230506896973 + ], + [ + "▁Fact", + -11.846312522888184 + ], + [ + "▁ajung", + -11.846321105957031 + ], + [ + "▁airline", + -11.846325874328613 + ], + [ + "▁kidney", + -11.846379280090332 + ], + [ + "▁Records", + -11.84642505645752 + ], + [ + "▁Olympic", + -11.846747398376465 + ], + [ + 
"▁dried", + -11.84719467163086 + ], + [ + "oivent", + -11.847333908081055 + ], + [ + "▁Adobe", + -11.847467422485352 + ], + [ + "▁powers", + -11.847748756408691 + ], + [ + "lande", + -11.847834587097168 + ], + [ + "▁relieve", + -11.847858428955078 + ], + [ + "ţine", + -11.847898483276367 + ], + [ + "▁gradually", + -11.847945213317871 + ], + [ + "mud", + -11.84811019897461 + ], + [ + "▁30,", + -11.848116874694824 + ], + [ + "▁plante", + -11.848133087158203 + ], + [ + "▁Hug", + -11.848225593566895 + ], + [ + "▁Focus", + -11.84853458404541 + ], + [ + "▁distinctive", + -11.848594665527344 + ], + [ + "▁Bab", + -11.848662376403809 + ], + [ + "tata", + -11.848679542541504 + ], + [ + "▁Nun", + -11.848797798156738 + ], + [ + "▁Eve", + -11.848811149597168 + ], + [ + "▁déc", + -11.848881721496582 + ], + [ + "▁Beitrag", + -11.84900951385498 + ], + [ + "▁devenit", + -11.849042892456055 + ], + [ + "driven", + -11.849250793457031 + ], + [ + "▁offerings", + -11.84933853149414 + ], + [ + "▁exc", + -11.84941577911377 + ], + [ + "encies", + -11.849576950073242 + ], + [ + "▁Neuro", + -11.849588394165039 + ], + [ + "scher", + -11.849604606628418 + ], + [ + "map", + -11.849703788757324 + ], + [ + "pending", + -11.849783897399902 + ], + [ + "▁courage", + -11.849799156188965 + ], + [ + "axe", + -11.849894523620605 + ], + [ + "▁Gesellschaft", + -11.849900245666504 + ], + [ + "▁ears", + -11.85000991821289 + ], + [ + "▁aider", + -11.850403785705566 + ], + [ + "▁Cast", + -11.85042667388916 + ], + [ + "fast", + -11.850442886352539 + ], + [ + "▁departe", + -11.850502014160156 + ], + [ + "▁oak", + -11.850507736206055 + ], + [ + "▁batch", + -11.850730895996094 + ], + [ + "▁Corporate", + -11.850762367248535 + ], + [ + "▁Ost", + -11.850895881652832 + ], + [ + "-14", + -11.850897789001465 + ], + [ + "▁Pie", + -11.85115909576416 + ], + [ + "▁ranking", + -11.851273536682129 + ], + [ + "clusion", + -11.851316452026367 + ], + [ + "▁costume", + -11.851347923278809 + ], + [ + "▁Knight", + -11.851449966430664 + ], + [ + "▁privat", + -11.851577758789062 + ], + [ + "▁Engineer", + -11.851593971252441 + ], + [ + "▁gens", + -11.8517427444458 + ], + [ + "physics", + -11.85176944732666 + ], + [ + "generating", + -11.851773262023926 + ], + [ + "directement", + -11.851786613464355 + ], + [ + "▁confidential", + -11.851810455322266 + ], + [ + "▁poet", + -11.851937294006348 + ], + [ + "▁monster", + -11.851944923400879 + ], + [ + "▁suppose", + -11.851984977722168 + ], + [ + "său", + -11.851996421813965 + ], + [ + "▁balls", + -11.852103233337402 + ], + [ + "▁substitute", + -11.852137565612793 + ], + [ + "▁simultaneously", + -11.852238655090332 + ], + [ + "▁specify", + -11.852272033691406 + ], + [ + "wald", + -11.852287292480469 + ], + [ + "▁collapse", + -11.852352142333984 + ], + [ + "dessus", + -11.852458953857422 + ], + [ + "▁vitr", + -11.852516174316406 + ], + [ + "▁recruitment", + -11.852607727050781 + ], + [ + "denken", + -11.852632522583008 + ], + [ + "▁candy", + -11.852691650390625 + ], + [ + "▁tourists", + -11.852721214294434 + ], + [ + "dimensional", + -11.852782249450684 + ], + [ + "conce", + -11.852814674377441 + ], + [ + "wechsel", + -11.852822303771973 + ], + [ + "▁passende", + -11.852971076965332 + ], + [ + "industrie", + -11.85299301147461 + ], + [ + "agne", + -11.853127479553223 + ], + [ + "▁warehouse", + -11.853233337402344 + ], + [ + "▁Jugend", + -11.853277206420898 + ], + [ + "▁Weise", + -11.853357315063477 + ], + [ + "▁Zone", + -11.853528022766113 + ], + [ + "▁licence", + -11.853550910949707 + ], + [ + "▁broker", + 
-11.853630065917969 + ], + [ + "▁Rolle", + -11.85365104675293 + ], + [ + "pton", + -11.853789329528809 + ], + [ + "▁preference", + -11.853846549987793 + ], + [ + "▁homeowners", + -11.853861808776855 + ], + [ + "▁Lum", + -11.85387134552002 + ], + [ + "▁Chairman", + -11.853879928588867 + ], + [ + "▁Pages", + -11.853998184204102 + ], + [ + "▁beam", + -11.854005813598633 + ], + [ + "▁coordinate", + -11.854158401489258 + ], + [ + "▁Tool", + -11.854212760925293 + ], + [ + "▁complexity", + -11.854272842407227 + ], + [ + "▁checks", + -11.854339599609375 + ], + [ + "▁Bedroom", + -11.854405403137207 + ], + [ + "minded", + -11.854538917541504 + ], + [ + "▁copiii", + -11.854694366455078 + ], + [ + "▁celebrating", + -11.85470199584961 + ], + [ + "zimmer", + -11.854759216308594 + ], + [ + "▁Imagine", + -11.854759216308594 + ], + [ + "▁decoration", + -11.854830741882324 + ], + [ + "team", + -11.855354309082031 + ], + [ + "▁împreună", + -11.855369567871094 + ], + [ + "▁publicly", + -11.855391502380371 + ], + [ + "▁centuries", + -11.855514526367188 + ], + [ + "▁Islands", + -11.855644226074219 + ], + [ + "▁ethnic", + -11.855663299560547 + ], + [ + "still", + -11.85576057434082 + ], + [ + "stieg", + -11.855823516845703 + ], + [ + "emia", + -11.855904579162598 + ], + [ + "tags", + -11.856026649475098 + ], + [ + "▁marche", + -11.856062889099121 + ], + [ + "▁migration", + -11.856096267700195 + ], + [ + "▁banner", + -11.85616683959961 + ], + [ + "▁macro", + -11.856378555297852 + ], + [ + "▁Edit", + -11.856379508972168 + ], + [ + "tran", + -11.85656452178955 + ], + [ + "ça", + -11.856597900390625 + ], + [ + "▁recycling", + -11.856670379638672 + ], + [ + "▁1,000", + -11.856673240661621 + ], + [ + "▁Quelle", + -11.856891632080078 + ], + [ + "▁Vel", + -11.85700511932373 + ], + [ + "▁Rit", + -11.857025146484375 + ], + [ + "▁Spaß", + -11.857046127319336 + ], + [ + "▁Corn", + -11.857074737548828 + ], + [ + "tracted", + -11.857177734375 + ], + [ + "cited", + -11.857185363769531 + ], + [ + "▁tablets", + -11.857202529907227 + ], + [ + "▁Display", + -11.857337951660156 + ], + [ + "▁persoana", + -11.857392311096191 + ], + [ + "Term", + -11.857410430908203 + ], + [ + "▁Vancouver", + -11.857537269592285 + ], + [ + "▁Gäste", + -11.857550621032715 + ], + [ + "determining", + -11.857608795166016 + ], + [ + "▁populations", + -11.85778522491455 + ], + [ + "aison", + -11.857873916625977 + ], + [ + "▁surgical", + -11.858072280883789 + ], + [ + "tale", + -11.858160018920898 + ], + [ + "ivi", + -11.858283042907715 + ], + [ + "▁Zur", + -11.858388900756836 + ], + [ + "esprit", + -11.858574867248535 + ], + [ + "▁Edge", + -11.858665466308594 + ], + [ + "dach", + -11.858760833740234 + ], + [ + "phi", + -11.858773231506348 + ], + [ + "▁suc", + -11.858841896057129 + ], + [ + "▁scrie", + -11.858848571777344 + ], + [ + "▁Ausbildung", + -11.858885765075684 + ], + [ + "▁51", + -11.85892391204834 + ], + [ + "ologi", + -11.858938217163086 + ], + [ + "▁correction", + -11.859049797058105 + ], + [ + "▁Wald", + -11.859078407287598 + ], + [ + "▁additionally", + -11.859131813049316 + ], + [ + "▁proche", + -11.859353065490723 + ], + [ + "▁classical", + -11.859477996826172 + ], + [ + "▁bringen", + -11.859490394592285 + ], + [ + "▁(10", + -11.859611511230469 + ], + [ + "▁Mile", + -11.859809875488281 + ], + [ + "lace", + -11.859885215759277 + ], + [ + "▁premi", + -11.85988712310791 + ], + [ + "▁constitute", + -11.860029220581055 + ], + [ + "▁bitter", + -11.860078811645508 + ], + [ + "▁Inform", + -11.860295295715332 + ], + [ + "▁corporations", + 
-11.860334396362305 + ], + [ + "▁Lisa", + -11.860494613647461 + ], + [ + "▁obligat", + -11.860685348510742 + ], + [ + "Throughout", + -11.860738754272461 + ], + [ + "▁Rs", + -11.860769271850586 + ], + [ + "▁Hair", + -11.860916137695312 + ], + [ + "▁supplements", + -11.86099624633789 + ], + [ + "▁motorcycle", + -11.861054420471191 + ], + [ + "escent", + -11.861132621765137 + ], + [ + "▁investi", + -11.861222267150879 + ], + [ + "▁continuously", + -11.861265182495117 + ], + [ + "▁Essen", + -11.861334800720215 + ], + [ + "▁precision", + -11.8613862991333 + ], + [ + "▁deficit", + -11.861461639404297 + ], + [ + "▁wallet", + -11.861481666564941 + ], + [ + "▁Bürger", + -11.861531257629395 + ], + [ + "chir", + -11.861574172973633 + ], + [ + "9)", + -11.86161994934082 + ], + [ + "▁Programme", + -11.861716270446777 + ], + [ + "▁simplement", + -11.86193561553955 + ], + [ + "MD", + -11.862093925476074 + ], + [ + "▁rouge", + -11.862096786499023 + ], + [ + "usion", + -11.862133979797363 + ], + [ + "▁stove", + -11.862208366394043 + ], + [ + "▁prospective", + -11.862224578857422 + ], + [ + "▁corp", + -11.86234188079834 + ], + [ + "▁impacts", + -11.862401008605957 + ], + [ + "▁bride", + -11.86266803741455 + ], + [ + "0.0", + -11.862788200378418 + ], + [ + "hid", + -11.862833976745605 + ], + [ + "▁warrant", + -11.862930297851562 + ], + [ + "▁Ice", + -11.8631010055542 + ], + [ + "▁sensible", + -11.863151550292969 + ], + [ + "▁vreo", + -11.863166809082031 + ], + [ + "spekt", + -11.863249778747559 + ], + [ + "▁appreciation", + -11.8633394241333 + ], + [ + "▁automation", + -11.863377571105957 + ], + [ + "Luc", + -11.86341381072998 + ], + [ + "teaches", + -11.863471031188965 + ], + [ + "▁fold", + -11.863506317138672 + ], + [ + "deutsche", + -11.863523483276367 + ], + [ + "▁assisted", + -11.86380386352539 + ], + [ + "▁straightforward", + -11.863932609558105 + ], + [ + "▁mechanic", + -11.864068031311035 + ], + [ + "observ", + -11.864169120788574 + ], + [ + "▁Schau", + -11.864195823669434 + ], + [ + "▁Recently", + -11.864301681518555 + ], + [ + "kers", + -11.86435604095459 + ], + [ + "▁Soft", + -11.864455223083496 + ], + [ + "muni", + -11.864537239074707 + ], + [ + "▁lie", + -11.864617347717285 + ], + [ + "▁Fat", + -11.864728927612305 + ], + [ + "cream", + -11.86476993560791 + ], + [ + "▁snack", + -11.864909172058105 + ], + [ + "▁juin", + -11.865068435668945 + ], + [ + "▁competent", + -11.865134239196777 + ], + [ + "▁Drug", + -11.865141868591309 + ], + [ + "▁Row", + -11.865302085876465 + ], + [ + "▁needle", + -11.865852355957031 + ], + [ + "▁convey", + -11.865900039672852 + ], + [ + "▁voie", + -11.86600399017334 + ], + [ + "▁Hon", + -11.866190910339355 + ], + [ + "▁ebook", + -11.866194725036621 + ], + [ + "▁veteran", + -11.866209030151367 + ], + [ + "▁statistical", + -11.866217613220215 + ], + [ + "190", + -11.866312980651855 + ], + [ + "▁munca", + -11.866402626037598 + ], + [ + "▁venues", + -11.866438865661621 + ], + [ + "▁Viel", + -11.866604804992676 + ], + [ + "▁décor", + -11.866799354553223 + ], + [ + "▁répond", + -11.8670015335083 + ], + [ + "▁produsele", + -11.86700439453125 + ], + [ + "ruc", + -11.867009162902832 + ], + [ + "▁drops", + -11.867011070251465 + ], + [ + "▁autant", + -11.867311477661133 + ], + [ + "▁Fahrzeug", + -11.867313385009766 + ], + [ + "▁hills", + -11.86735725402832 + ], + [ + "ference", + -11.867414474487305 + ], + [ + "▁Glück", + -11.86742115020752 + ], + [ + "▁Pac", + -11.867480278015137 + ], + [ + "▁permettr", + -11.867568969726562 + ], + [ + "▁mouvement", + -11.867713928222656 + ], + 
[ + "établissement", + -11.867859840393066 + ], + [ + "▁Parc", + -11.867874145507812 + ], + [ + "▁solving", + -11.867900848388672 + ], + [ + "▁jail", + -11.867972373962402 + ], + [ + "▁junk", + -11.867980003356934 + ], + [ + "▁jeux", + -11.868091583251953 + ], + [ + "▁rôle", + -11.868107795715332 + ], + [ + "▁cache", + -11.868124961853027 + ], + [ + "▁Answer", + -11.86832046508789 + ], + [ + "wir", + -11.868706703186035 + ], + [ + "option", + -11.868732452392578 + ], + [ + "▁Tiger", + -11.868739128112793 + ], + [ + "▁Ble", + -11.868793487548828 + ], + [ + "Mitglied", + -11.868797302246094 + ], + [ + "▁partial", + -11.868819236755371 + ], + [ + "▁Mercedes", + -11.86888313293457 + ], + [ + "tire", + -11.869001388549805 + ], + [ + "MENT", + -11.869091987609863 + ], + [ + "▁transit", + -11.869230270385742 + ], + [ + "▁cineva", + -11.869285583496094 + ], + [ + "▁Andrea", + -11.869294166564941 + ], + [ + "▁boundaries", + -11.869497299194336 + ], + [ + "script", + -11.870061874389648 + ], + [ + "▁Medi", + -11.870123863220215 + ], + [ + "schreiben", + -11.870203018188477 + ], + [ + "▁lobby", + -11.87035846710205 + ], + [ + "▁defendant", + -11.870406150817871 + ], + [ + "▁sq", + -11.870467185974121 + ], + [ + "▁forgotten", + -11.870569229125977 + ], + [ + "stimmung", + -11.870651245117188 + ], + [ + "hus", + -11.870665550231934 + ], + [ + "RY", + -11.870728492736816 + ], + [ + "▁Anderson", + -11.870748519897461 + ], + [ + "▁Dental", + -11.870828628540039 + ], + [ + "ject", + -11.87110710144043 + ], + [ + "▁Nutzer", + -11.871377944946289 + ], + [ + "▁Portland", + -11.871540069580078 + ], + [ + "scription", + -11.871636390686035 + ], + [ + "▁angel", + -11.871695518493652 + ], + [ + "▁monument", + -11.871748924255371 + ], + [ + "▁număr", + -11.871784210205078 + ], + [ + "▁Lane", + -11.871800422668457 + ], + [ + "▁Bai", + -11.871894836425781 + ], + [ + "But", + -11.871909141540527 + ], + [ + "▁calculate", + -11.872315406799316 + ], + [ + "▁provoca", + -11.87247371673584 + ], + [ + "▁votes", + -11.872493743896484 + ], + [ + "RNA", + -11.872503280639648 + ], + [ + "though", + -11.87259292602539 + ], + [ + "spor", + -11.872631072998047 + ], + [ + "▁connaissance", + -11.872695922851562 + ], + [ + "▁Anwendung", + -11.872932434082031 + ], + [ + "▁Kate", + -11.873123168945312 + ], + [ + "lob", + -11.87315845489502 + ], + [ + "▁Conf", + -11.873180389404297 + ], + [ + "bung", + -11.873212814331055 + ], + [ + "ander", + -11.873282432556152 + ], + [ + "▁functioning", + -11.873297691345215 + ], + [ + "▁sponsored", + -11.873324394226074 + ], + [ + "rav", + -11.873734474182129 + ], + [ + "▁resistant", + -11.873797416687012 + ], + [ + "tră", + -11.873916625976562 + ], + [ + "▁costly", + -11.873923301696777 + ], + [ + "▁Mars", + -11.873991012573242 + ], + [ + "▁tir", + -11.874075889587402 + ], + [ + "▁writes", + -11.874134063720703 + ], + [ + "▁Greg", + -11.874267578125 + ], + [ + "▁Question", + -11.874714851379395 + ], + [ + "▁corporation", + -11.87485408782959 + ], + [ + "▁lire", + -11.874991416931152 + ], + [ + "locked", + -11.875048637390137 + ], + [ + "8,", + -11.875092506408691 + ], + [ + "▁sagt", + -11.875301361083984 + ], + [ + "gaining", + -11.87536907196045 + ], + [ + "▁Pierre", + -11.875688552856445 + ], + [ + "verb", + -11.875725746154785 + ], + [ + "▁Barcelona", + -11.87578296661377 + ], + [ + "werte", + -11.876474380493164 + ], + [ + "▁disponible", + -11.87651538848877 + ], + [ + "▁urge", + -11.876521110534668 + ], + [ + "▁expecting", + -11.876572608947754 + ], + [ + "▁Girl", + -11.87662124633789 + ], + 
[ + "▁unlimited", + -11.876761436462402 + ], + [ + "watt", + -11.876788139343262 + ], + [ + "▁Möglichkeiten", + -11.876813888549805 + ], + [ + "▁schöne", + -11.876847267150879 + ], + [ + "rium", + -11.877076148986816 + ], + [ + "That", + -11.877272605895996 + ], + [ + "▁socio", + -11.877296447753906 + ], + [ + "▁Democrats", + -11.877351760864258 + ], + [ + "guten", + -11.877422332763672 + ], + [ + "▁Lou", + -11.877425193786621 + ], + [ + "ităţi", + -11.877559661865234 + ], + [ + "▁possibilité", + -11.877717018127441 + ], + [ + "▁adjustable", + -11.877938270568848 + ], + [ + "▁Salt", + -11.877967834472656 + ], + [ + "Thr", + -11.878021240234375 + ], + [ + "▁biseric", + -11.878056526184082 + ], + [ + "ieux", + -11.87808895111084 + ], + [ + "▁procur", + -11.8782377243042 + ], + [ + "▁credits", + -11.878250122070312 + ], + [ + "▁Netflix", + -11.878585815429688 + ], + [ + "doi", + -11.878605842590332 + ], + [ + "▁Jews", + -11.878663063049316 + ], + [ + "▁Ukraine", + -11.87873363494873 + ], + [ + "▁adevărat", + -11.878785133361816 + ], + [ + "▁Apply", + -11.878813743591309 + ], + [ + "▁coupons", + -11.878859519958496 + ], + [ + "▁Detroit", + -11.878881454467773 + ], + [ + "▁rue", + -11.878889083862305 + ], + [ + "anumite", + -11.878926277160645 + ], + [ + "ished", + -11.878973960876465 + ], + [ + "▁withdrawal", + -11.87915325164795 + ], + [ + "▁replacing", + -11.87917709350586 + ], + [ + "catching", + -11.879385948181152 + ], + [ + "▁climbing", + -11.879612922668457 + ], + [ + "▁Basic", + -11.879770278930664 + ], + [ + "▁inclus", + -11.879783630371094 + ], + [ + "scope", + -11.879887580871582 + ], + [ + "▁facem", + -11.879892349243164 + ], + [ + "▁plec", + -11.879904747009277 + ], + [ + "mäßig", + -11.879980087280273 + ], + [ + "▁tasty", + -11.880064010620117 + ], + [ + "▁tunnel", + -11.880074501037598 + ], + [ + "figured", + -11.88032341003418 + ], + [ + "gged", + -11.880390167236328 + ], + [ + "▁conditii", + -11.880599975585938 + ], + [ + "▁homework", + -11.880631446838379 + ], + [ + "volle", + -11.88063907623291 + ], + [ + "▁Gott", + -11.880807876586914 + ], + [ + "▁95", + -11.880969047546387 + ], + [ + "▁elect", + -11.881020545959473 + ], + [ + "▁blast", + -11.881043434143066 + ], + [ + "▁easiest", + -11.881248474121094 + ], + [ + "USE", + -11.881462097167969 + ], + [ + "concentr", + -11.881475448608398 + ], + [ + "orial", + -11.881596565246582 + ], + [ + "▁scroll", + -11.881638526916504 + ], + [ + "stead", + -11.881691932678223 + ], + [ + "▁hormone", + -11.881710052490234 + ], + [ + "▁starter", + -11.88179874420166 + ], + [ + "▁cald", + -11.881878852844238 + ], + [ + "▁wax", + -11.881895065307617 + ], + [ + "▁ridic", + -11.881900787353516 + ], + [ + "ously", + -11.881982803344727 + ], + [ + "maschine", + -11.882101058959961 + ], + [ + "licher", + -11.882399559020996 + ], + [ + "▁16,", + -11.882452964782715 + ], + [ + "▁hassle", + -11.882469177246094 + ], + [ + "semnat", + -11.882535934448242 + ], + [ + "▁pub", + -11.88260555267334 + ], + [ + "240", + -11.882800102233887 + ], + [ + "▁kits", + -11.882871627807617 + ], + [ + "▁Generation", + -11.88293743133545 + ], + [ + "▁merchant", + -11.883052825927734 + ], + [ + "▁Erd", + -11.883068084716797 + ], + [ + "▁café", + -11.883077621459961 + ], + [ + "hoff", + -11.88314151763916 + ], + [ + "▁WITH", + -11.883376121520996 + ], + [ + "▁gesch", + -11.883515357971191 + ], + [ + "▁Editor", + -11.883557319641113 + ], + [ + "▁treats", + -11.883609771728516 + ], + [ + "▁harsh", + -11.883711814880371 + ], + [ + "rome", + -11.883729934692383 + ], + [ + 
"▁Foreign", + -11.883928298950195 + ], + [ + "▁denied", + -11.883968353271484 + ], + [ + "▁Valentine", + -11.884014129638672 + ], + [ + "▁healthier", + -11.88408088684082 + ], + [ + "▁readily", + -11.884138107299805 + ], + [ + "nac", + -11.884190559387207 + ], + [ + "▁intake", + -11.884191513061523 + ], + [ + "▁puncte", + -11.884230613708496 + ], + [ + "erne", + -11.884431838989258 + ], + [ + "file", + -11.884668350219727 + ], + [ + "▁continually", + -11.884688377380371 + ], + [ + "door", + -11.884699821472168 + ], + [ + "▁imediat", + -11.884822845458984 + ], + [ + "▁accused", + -11.884833335876465 + ], + [ + "chy", + -11.884854316711426 + ], + [ + "▁wrapped", + -11.884861946105957 + ], + [ + "IES", + -11.884878158569336 + ], + [ + "▁terrace", + -11.884883880615234 + ], + [ + "mouth", + -11.884897232055664 + ], + [ + "▁defensive", + -11.884991645812988 + ], + [ + "▁Luci", + -11.88508129119873 + ], + [ + "▁significance", + -11.885107040405273 + ], + [ + "▁2007,", + -11.885213851928711 + ], + [ + "▁inclusion", + -11.885221481323242 + ], + [ + "▁rotation", + -11.885248184204102 + ], + [ + "hos", + -11.885283470153809 + ], + [ + "▁crea", + -11.885357856750488 + ], + [ + "üß", + -11.885903358459473 + ], + [ + "▁Install", + -11.885988235473633 + ], + [ + "▁dump", + -11.885998725891113 + ], + [ + "▁informations", + -11.886114120483398 + ], + [ + "▁Thi", + -11.886117935180664 + ], + [ + "▁85", + -11.886252403259277 + ], + [ + "dox", + -11.886283874511719 + ], + [ + "track", + -11.886436462402344 + ], + [ + "▁couples", + -11.886571884155273 + ], + [ + "▁Assembly", + -11.886594772338867 + ], + [ + "wagen", + -11.88672161102295 + ], + [ + "▁Hil", + -11.886723518371582 + ], + [ + "ières", + -11.886833190917969 + ], + [ + "▁Gabriel", + -11.886903762817383 + ], + [ + "▁patience", + -11.887053489685059 + ], + [ + "▁colored", + -11.887147903442383 + ], + [ + "▁separately", + -11.88715934753418 + ], + [ + "▁deployment", + -11.887166023254395 + ], + [ + "scape", + -11.887306213378906 + ], + [ + "▁Acum", + -11.8875150680542 + ], + [ + "▁länger", + -11.887518882751465 + ], + [ + "▁screens", + -11.887598991394043 + ], + [ + "▁prezenta", + -11.887630462646484 + ], + [ + "▁obicei", + -11.887638092041016 + ], + [ + "▁crisp", + -11.887758255004883 + ], + [ + "▁mechanisms", + -11.887771606445312 + ], + [ + "▁thirty", + -11.887786865234375 + ], + [ + "▁individually", + -11.887989044189453 + ], + [ + "▁internationally", + -11.887991905212402 + ], + [ + "lling", + -11.888050079345703 + ], + [ + "▁bureau", + -11.88843059539795 + ], + [ + "▁erfahren", + -11.88844108581543 + ], + [ + "TY", + -11.888553619384766 + ], + [ + "PF", + -11.888607025146484 + ], + [ + "wid", + -11.888752937316895 + ], + [ + "sell", + -11.888835906982422 + ], + [ + "▁Luke", + -11.888879776000977 + ], + [ + "▁Must", + -11.888916969299316 + ], + [ + "▁identical", + -11.888927459716797 + ], + [ + "▁Netherlands", + -11.888980865478516 + ], + [ + "▁investor", + -11.88905143737793 + ], + [ + "▁squad", + -11.889073371887207 + ], + [ + "▁21,", + -11.889143943786621 + ], + [ + "iko", + -11.889230728149414 + ], + [ + "▁departure", + -11.88937759399414 + ], + [ + "ega", + -11.889384269714355 + ], + [ + "uzi", + -11.889408111572266 + ], + [ + "▁lasa", + -11.889458656311035 + ], + [ + "bian", + -11.889525413513184 + ], + [ + "▁Madrid", + -11.889623641967773 + ], + [ + "▁Iowa", + -11.889806747436523 + ], + [ + "▁Yellow", + -11.890026092529297 + ], + [ + "conom", + -11.89004898071289 + ], + [ + "▁hint", + -11.890098571777344 + ], + [ + "NOW", + 
[Added file content continues: a SentencePiece-style vocabulary listing of ["token", log-probability] pairs, one entry per added diff line, with scores in this stretch running from about -11.890 down to -12.038.]
+ "▁dying", + -12.0380859375 + ], + [ + "▁HIV", + -12.038115501403809 + ], + [ + "],", + -12.038164138793945 + ], + [ + "alität", + -12.03818416595459 + ], + [ + "▁institute", + -12.038249015808105 + ], + [ + "mix", + -12.038433074951172 + ], + [ + "▁Regulation", + -12.038453102111816 + ], + [ + "▁pagina", + -12.03857707977295 + ], + [ + "▁Awesome", + -12.03860092163086 + ], + [ + "▁Official", + -12.03860092163086 + ], + [ + "▁Minute", + -12.038601875305176 + ], + [ + "▁dairy", + -12.038787841796875 + ], + [ + "▁carti", + -12.038881301879883 + ], + [ + "isk", + -12.039091110229492 + ], + [ + "▁thrilled", + -12.039138793945312 + ], + [ + "▁german", + -12.039172172546387 + ], + [ + "▁frustration", + -12.039228439331055 + ], + [ + "▁forums", + -12.03927230834961 + ], + [ + "command", + -12.039361000061035 + ], + [ + "▁router", + -12.039399147033691 + ], + [ + "▁Lösung", + -12.039423942565918 + ], + [ + "white", + -12.039470672607422 + ], + [ + "▁synthetic", + -12.039487838745117 + ], + [ + "▁retrouver", + -12.039554595947266 + ], + [ + "alle", + -12.039621353149414 + ], + [ + "daran", + -12.039653778076172 + ], + [ + "▁wahr", + -12.039697647094727 + ], + [ + "▁paths", + -12.039875984191895 + ], + [ + "▁unver", + -12.039962768554688 + ], + [ + "▁Environment", + -12.0400972366333 + ], + [ + "▁médecin", + -12.040510177612305 + ], + [ + "crypt", + -12.040572166442871 + ], + [ + "▁pursuit", + -12.040595054626465 + ], + [ + "flat", + -12.040611267089844 + ], + [ + "bron", + -12.040698051452637 + ], + [ + "▁Specialist", + -12.040852546691895 + ], + [ + "▁Vent", + -12.041157722473145 + ], + [ + "Gen", + -12.04132080078125 + ], + [ + "▁attraction", + -12.04132080078125 + ], + [ + "▁piese", + -12.041372299194336 + ], + [ + "CHE", + -12.041665077209473 + ], + [ + "fähig", + -12.04172420501709 + ], + [ + "▁28,", + -12.041773796081543 + ], + [ + "defender", + -12.041810989379883 + ], + [ + "▁stupid", + -12.04181957244873 + ], + [ + "enfin", + -12.04185962677002 + ], + [ + "▁composite", + -12.04207706451416 + ], + [ + "fragen", + -12.042202949523926 + ], + [ + "Part", + -12.042232513427734 + ], + [ + "may", + -12.042238235473633 + ], + [ + "▁Bucureşti", + -12.042248725891113 + ], + [ + "▁février", + -12.042248725891113 + ], + [ + "RED", + -12.042417526245117 + ], + [ + "▁makers", + -12.042462348937988 + ], + [ + "▁guns", + -12.042594909667969 + ], + [ + "▁pasta", + -12.042706489562988 + ], + [ + "STR", + -12.04271125793457 + ], + [ + "▁worthy", + -12.042760848999023 + ], + [ + "Poate", + -12.042783737182617 + ], + [ + "▁101", + -12.04286003112793 + ], + [ + "▁souhaitez", + -12.04299545288086 + ], + [ + "GN", + -12.043449401855469 + ], + [ + "drive", + -12.043499946594238 + ], + [ + "▁aveti", + -12.043582916259766 + ], + [ + "▁eventual", + -12.043591499328613 + ], + [ + "▁américain", + -12.043642044067383 + ], + [ + "▁Mine", + -12.043678283691406 + ], + [ + "▁sunset", + -12.043729782104492 + ], + [ + "▁Choice", + -12.043844223022461 + ], + [ + "▁offset", + -12.043944358825684 + ], + [ + "APP", + -12.04410457611084 + ], + [ + "▁suchen", + -12.044130325317383 + ], + [ + "▁aduc", + -12.044228553771973 + ], + [ + "▁Unternehmens", + -12.044342041015625 + ], + [ + "▁//", + -12.044651985168457 + ], + [ + "▁astept", + -12.044678688049316 + ], + [ + "▁Birthday", + -12.045061111450195 + ], + [ + "▁barn", + -12.045083999633789 + ], + [ + "apport", + -12.045105934143066 + ], + [ + "▁collar", + -12.045212745666504 + ], + [ + "▁gefunden", + -12.045294761657715 + ], + [ + "▁Hai", + -12.045429229736328 + ], + [ + "▁Soul", 
+ -12.045441627502441 + ], + [ + "ismus", + -12.045654296875 + ], + [ + "letzt", + -12.045754432678223 + ], + [ + "▁maker", + -12.045841217041016 + ], + [ + "▁executed", + -12.045857429504395 + ], + [ + "▁Forschung", + -12.045915603637695 + ], + [ + "▁täglich", + -12.045958518981934 + ], + [ + "▁tailor", + -12.045960426330566 + ], + [ + "▁headquarters", + -12.0460844039917 + ], + [ + "▁physicians", + -12.046112060546875 + ], + [ + "▁Scout", + -12.046126365661621 + ], + [ + "folgen", + -12.046175003051758 + ], + [ + "▁cycling", + -12.046184539794922 + ], + [ + "mindestens", + -12.04620361328125 + ], + [ + "▁joli", + -12.046216011047363 + ], + [ + "▁classification", + -12.046225547790527 + ], + [ + "▁Führung", + -12.046258926391602 + ], + [ + "▁peau", + -12.04629135131836 + ], + [ + "INT", + -12.046502113342285 + ], + [ + "▁Garage", + -12.046664237976074 + ], + [ + "teile", + -12.046714782714844 + ], + [ + "util", + -12.046716690063477 + ], + [ + "▁petrec", + -12.046751022338867 + ], + [ + "▁Nevada", + -12.046826362609863 + ], + [ + "▁laisser", + -12.04706859588623 + ], + [ + "▁territoire", + -12.047131538391113 + ], + [ + "▁fichier", + -12.047154426574707 + ], + [ + "▁Formula", + -12.047343254089355 + ], + [ + "scopul", + -12.047379493713379 + ], + [ + "▁Tee", + -12.047486305236816 + ], + [ + "▁Monte", + -12.047529220581055 + ], + [ + "▁pumpkin", + -12.04757022857666 + ], + [ + "▁picnic", + -12.047589302062988 + ], + [ + "▁occupation", + -12.047652244567871 + ], + [ + "▁numérique", + -12.047831535339355 + ], + [ + "linie", + -12.04786491394043 + ], + [ + "▁masina", + -12.048117637634277 + ], + [ + "▁Prä", + -12.048173904418945 + ], + [ + "▁dezvoltare", + -12.048177719116211 + ], + [ + "▁vient", + -12.048291206359863 + ], + [ + "▁ranks", + -12.048295021057129 + ], + [ + "▁Bruce", + -12.048420906066895 + ], + [ + "▁seara", + -12.048433303833008 + ], + [ + "▁hungry", + -12.048563003540039 + ], + [ + "▁resolved", + -12.048650741577148 + ], + [ + "paired", + -12.048735618591309 + ], + [ + "▁Congratulations", + -12.048881530761719 + ], + [ + "▁religi", + -12.048918724060059 + ], + [ + "sätze", + -12.04897689819336 + ], + [ + "▁Eat", + -12.049172401428223 + ], + [ + "▁dense", + -12.049442291259766 + ], + [ + "▁slice", + -12.049447059631348 + ], + [ + "▁mulți", + -12.049463272094727 + ], + [ + "▁vorbe", + -12.049517631530762 + ], + [ + "▁terminate", + -12.049779891967773 + ], + [ + "worm", + -12.049880981445312 + ], + [ + "ignon", + -12.0499267578125 + ], + [ + "▁Howard", + -12.049992561340332 + ], + [ + "▁toddler", + -12.050017356872559 + ], + [ + "▁waters", + -12.050033569335938 + ], + [ + "▁graduates", + -12.0501708984375 + ], + [ + "▁fundraising", + -12.050298690795898 + ], + [ + "06.", + -12.05031967163086 + ], + [ + "▁scent", + -12.050346374511719 + ], + [ + "▁CPU", + -12.050406455993652 + ], + [ + "▁Kid", + -12.05045223236084 + ], + [ + "▁Years", + -12.050460815429688 + ], + [ + "▁Oktober", + -12.05063533782959 + ], + [ + "filled", + -12.050726890563965 + ], + [ + "▁Laser", + -12.05079460144043 + ], + [ + "▁tut", + -12.051032066345215 + ], + [ + "ively", + -12.051101684570312 + ], + [ + "▁WiFi", + -12.051161766052246 + ], + [ + "standen", + -12.051176071166992 + ], + [ + "▁publié", + -12.051243782043457 + ], + [ + "▁explaining", + -12.051279067993164 + ], + [ + "trieb", + -12.051288604736328 + ], + [ + "▁Rapid", + -12.0513334274292 + ], + [ + "▁unterstützt", + -12.051352500915527 + ], + [ + "▁Sonnen", + -12.051401138305664 + ], + [ + "▁lenses", + -12.05141544342041 + ], + [ + "▁pressing", + 
-12.051477432250977 + ], + [ + "▁respected", + -12.051657676696777 + ], + [ + "adapted", + -12.051706314086914 + ], + [ + "Don", + -12.051726341247559 + ], + [ + "▁mun", + -12.051733016967773 + ], + [ + "MAR", + -12.05180835723877 + ], + [ + "▁seam", + -12.051852226257324 + ], + [ + "chev", + -12.052140235900879 + ], + [ + "▁Sozial", + -12.052424430847168 + ], + [ + "▁Arabia", + -12.052485466003418 + ], + [ + "▁equation", + -12.05257511138916 + ], + [ + "▁elevi", + -12.052780151367188 + ], + [ + "▁piata", + -12.052868843078613 + ], + [ + "JA", + -12.052873611450195 + ], + [ + "▁wholesale", + -12.052887916564941 + ], + [ + "▁faithful", + -12.05296516418457 + ], + [ + "legal", + -12.053092002868652 + ], + [ + "▁Brexit", + -12.053095817565918 + ], + [ + "vention", + -12.053120613098145 + ], + [ + "▁adhere", + -12.053221702575684 + ], + [ + "▁Associate", + -12.053257942199707 + ], + [ + "▁decorations", + -12.053272247314453 + ], + [ + "▁crois", + -12.053359985351562 + ], + [ + "buck", + -12.053370475769043 + ], + [ + "▁smartphones", + -12.053421020507812 + ], + [ + "Regardless", + -12.053427696228027 + ], + [ + "center", + -12.053434371948242 + ], + [ + "eiß", + -12.053481101989746 + ], + [ + "▁emotion", + -12.053584098815918 + ], + [ + "▁Gespräch", + -12.053797721862793 + ], + [ + "▁Avi", + -12.053963661193848 + ], + [ + "▁loft", + -12.054059982299805 + ], + [ + "▁Wissen", + -12.054391860961914 + ], + [ + "▁orchestra", + -12.05439567565918 + ], + [ + "▁gehören", + -12.054421424865723 + ], + [ + "▁Reich", + -12.054532051086426 + ], + [ + "▁abandoned", + -12.054548263549805 + ], + [ + "▁Lanka", + -12.054586410522461 + ], + [ + "pala", + -12.054832458496094 + ], + [ + "▁Stell", + -12.054838180541992 + ], + [ + "logged", + -12.054924964904785 + ], + [ + "terie", + -12.054935455322266 + ], + [ + "▁educa", + -12.054954528808594 + ], + [ + "1).", + -12.055097579956055 + ], + [ + "▁disponibil", + -12.055119514465332 + ], + [ + "IND", + -12.055197715759277 + ], + [ + "▁Pont", + -12.055288314819336 + ], + [ + "▁téléphone", + -12.055398941040039 + ], + [ + "▁rope", + -12.055595397949219 + ], + [ + "ève", + -12.055622100830078 + ], + [ + "▁Trainer", + -12.056062698364258 + ], + [ + "▁présence", + -12.0560941696167 + ], + [ + "▁Oscar", + -12.056121826171875 + ], + [ + "▁VR", + -12.056342124938965 + ], + [ + "▁Besucher", + -12.056357383728027 + ], + [ + "▁disponibles", + -12.056447982788086 + ], + [ + "▁gelten", + -12.056604385375977 + ], + [ + "▁ports", + -12.056645393371582 + ], + [ + "Invest", + -12.056693077087402 + ], + [ + "ésormais", + -12.056795120239258 + ], + [ + "schauen", + -12.056880950927734 + ], + [ + "▁Command", + -12.056958198547363 + ], + [ + "▁alternate", + -12.05709171295166 + ], + [ + "citation", + -12.05713939666748 + ], + [ + "évolution", + -12.05714225769043 + ], + [ + "▁Maine", + -12.057145118713379 + ], + [ + "pflege", + -12.057174682617188 + ], + [ + "2011", + -12.057343482971191 + ], + [ + "▁Ground", + -12.057364463806152 + ], + [ + "▁ghost", + -12.057418823242188 + ], + [ + "lebt", + -12.057530403137207 + ], + [ + "▁scenarios", + -12.057595252990723 + ], + [ + "▁mall", + -12.057634353637695 + ], + [ + "▁Kings", + -12.057653427124023 + ], + [ + "▁15%", + -12.057848930358887 + ], + [ + "▁Paint", + -12.057848930358887 + ], + [ + "FD", + -12.057849884033203 + ], + [ + "ugg", + -12.058011054992676 + ], + [ + "▁Leon", + -12.058023452758789 + ], + [ + "▁grows", + -12.058135032653809 + ], + [ + "▁pharmacy", + -12.058384895324707 + ], + [ + "▁situat", + -12.0584135055542 + ], + [ + 
"20,000", + -12.05855941772461 + ], + [ + "▁10,000", + -12.058760643005371 + ], + [ + "▁membre", + -12.058771133422852 + ], + [ + "▁facilement", + -12.058806419372559 + ], + [ + "▁Analytics", + -12.058915138244629 + ], + [ + "▁Marvel", + -12.058930397033691 + ], + [ + "▁survived", + -12.059097290039062 + ], + [ + "▁conviction", + -12.059124946594238 + ], + [ + "▁Produktion", + -12.059260368347168 + ], + [ + "▁professionally", + -12.059293746948242 + ], + [ + "▁contributor", + -12.059486389160156 + ], + [ + "▁Kurs", + -12.059503555297852 + ], + [ + "▁humor", + -12.059549331665039 + ], + [ + "▁cinci", + -12.059609413146973 + ], + [ + "▁Different", + -12.059670448303223 + ], + [ + "▁Verarbeitung", + -12.059800148010254 + ], + [ + "▁inexpensive", + -12.059800148010254 + ], + [ + "▁sortie", + -12.05980110168457 + ], + [ + "▁thankful", + -12.059951782226562 + ], + [ + "▁vacances", + -12.059978485107422 + ], + [ + "▁vergangen", + -12.059979438781738 + ], + [ + "▁wings", + -12.05998420715332 + ], + [ + "▁nano", + -12.06003475189209 + ], + [ + "▁touches", + -12.060088157653809 + ], + [ + "▁Notice", + -12.060348510742188 + ], + [ + "▁reprezinta", + -12.060466766357422 + ], + [ + "▁rewarding", + -12.060555458068848 + ], + [ + "▁Kurz", + -12.060580253601074 + ], + [ + "▁mega", + -12.060611724853516 + ], + [ + "▁secrets", + -12.060646057128906 + ], + [ + "▁vorher", + -12.060667037963867 + ], + [ + "▁crescut", + -12.06074333190918 + ], + [ + "▁coordination", + -12.060754776000977 + ], + [ + "▁dissertation", + -12.060863494873047 + ], + [ + "▁header", + -12.060873985290527 + ], + [ + "existent", + -12.061070442199707 + ], + [ + "thal", + -12.061185836791992 + ], + [ + "▁translate", + -12.061214447021484 + ], + [ + "vertrag", + -12.06124210357666 + ], + [ + "GU", + -12.06126594543457 + ], + [ + "▁Arthur", + -12.061315536499023 + ], + [ + "wahl", + -12.061534881591797 + ], + [ + "▁octobre", + -12.061573028564453 + ], + [ + "▁bother", + -12.06157398223877 + ], + [ + "▁pencil", + -12.061580657958984 + ], + [ + "▁Dyna", + -12.061604499816895 + ], + [ + "▁complimentary", + -12.061651229858398 + ], + [ + "écoute", + -12.061676979064941 + ], + [ + "PB", + -12.061722755432129 + ], + [ + "▁independently", + -12.061759948730469 + ], + [ + "▁targeting", + -12.061840057373047 + ], + [ + "fought", + -12.061944961547852 + ], + [ + "mental", + -12.062112808227539 + ], + [ + "▁Veranstaltung", + -12.062300682067871 + ], + [ + "▁tatsächlich", + -12.062314987182617 + ], + [ + "▁Features", + -12.0625 + ], + [ + "▁1920", + -12.062554359436035 + ], + [ + "▁Domain", + -12.062885284423828 + ], + [ + "▁rally", + -12.062901496887207 + ], + [ + "▁iunie", + -12.063036918640137 + ], + [ + "▁fabrics", + -12.063070297241211 + ], + [ + "▁mint", + -12.063331604003906 + ], + [ + "▁antioxidant", + -12.063347816467285 + ], + [ + "hut", + -12.063432693481445 + ], + [ + "EPA", + -12.063496589660645 + ], + [ + "▁rigid", + -12.063498497009277 + ], + [ + "▁evit", + -12.063549995422363 + ], + [ + "▁personnage", + -12.063977241516113 + ], + [ + "▁garanti", + -12.0640287399292 + ], + [ + "▁Hä", + -12.064042091369629 + ], + [ + "▁Days", + -12.064048767089844 + ], + [ + "boarding", + -12.064050674438477 + ], + [ + "jemand", + -12.064166069030762 + ], + [ + "▁Pos", + -12.064262390136719 + ], + [ + "▁wool", + -12.064288139343262 + ], + [ + "▁boom", + -12.064349174499512 + ], + [ + "▁wichtige", + -12.06447982788086 + ], + [ + "▁emerged", + -12.064517974853516 + ], + [ + "▁smoothly", + -12.064802169799805 + ], + [ + "▁Interview", + -12.064942359924316 + 
], + [ + "gemäß", + -12.06505012512207 + ], + [ + "▁suivi", + -12.065064430236816 + ], + [ + "▁missions", + -12.065129280090332 + ], + [ + "▁Kreis", + -12.065328598022461 + ], + [ + "century", + -12.065348625183105 + ], + [ + "▁tuned", + -12.065370559692383 + ], + [ + "isieren", + -12.065407752990723 + ], + [ + "▁Branch", + -12.065427780151367 + ], + [ + "▁Russell", + -12.065483093261719 + ], + [ + "▁**", + -12.065519332885742 + ], + [ + "▁Lehr", + -12.065617561340332 + ], + [ + "▁perspectives", + -12.065690040588379 + ], + [ + "▁handed", + -12.06570816040039 + ], + [ + "▁apporte", + -12.065743446350098 + ], + [ + "unta", + -12.065959930419922 + ], + [ + "▁contemplat", + -12.066255569458008 + ], + [ + "riel", + -12.06633472442627 + ], + [ + "▁freely", + -12.066341400146484 + ], + [ + "▁loyal", + -12.066451072692871 + ], + [ + "▁evolved", + -12.066518783569336 + ], + [ + "▁Cafe", + -12.066548347473145 + ], + [ + "▁assignments", + -12.066598892211914 + ], + [ + "▁Cream", + -12.066718101501465 + ], + [ + "▁Build", + -12.066731452941895 + ], + [ + "▁exams", + -12.066746711730957 + ], + [ + "▁graduation", + -12.066765785217285 + ], + [ + "▁Dining", + -12.066773414611816 + ], + [ + "inne", + -12.06684398651123 + ], + [ + "▁propriu", + -12.067055702209473 + ], + [ + "▁accordingly", + -12.067241668701172 + ], + [ + "▁seniors", + -12.067484855651855 + ], + [ + "▁sisters", + -12.067505836486816 + ], + [ + "formerly", + -12.067658424377441 + ], + [ + "▁fleur", + -12.067702293395996 + ], + [ + "▁alten", + -12.067802429199219 + ], + [ + "▁Gefühl", + -12.06797981262207 + ], + [ + "▁freeze", + -12.068222045898438 + ], + [ + "▁structured", + -12.068312644958496 + ], + [ + "▁reserved", + -12.068367004394531 + ], + [ + "stellt", + -12.068638801574707 + ], + [ + "▁foto", + -12.068668365478516 + ], + [ + "linger", + -12.06871223449707 + ], + [ + "▁profiter", + -12.068737030029297 + ], + [ + "▁trup", + -12.068862915039062 + ], + [ + "▁Hunter", + -12.068974494934082 + ], + [ + "▁widespread", + -12.069050788879395 + ], + [ + "entretien", + -12.069242477416992 + ], + [ + "▁Truck", + -12.06958293914795 + ], + [ + "Can", + -12.069656372070312 + ], + [ + "péri", + -12.06976318359375 + ], + [ + "▁>>", + -12.069926261901855 + ], + [ + "▁trains", + -12.070141792297363 + ], + [ + "▁faca", + -12.070149421691895 + ], + [ + "▁Patienten", + -12.070170402526855 + ], + [ + "▁scor", + -12.070361137390137 + ], + [ + "▁perceived", + -12.070384979248047 + ], + [ + "setzung", + -12.070393562316895 + ], + [ + "▁Robin", + -12.070558547973633 + ], + [ + "▁geboren", + -12.07060718536377 + ], + [ + "lons", + -12.070687294006348 + ], + [ + "inţa", + -12.070836067199707 + ], + [ + "glob", + -12.070887565612793 + ], + [ + "subsequently", + -12.07111930847168 + ], + [ + "▁vet", + -12.071170806884766 + ], + [ + "▁Holland", + -12.071328163146973 + ], + [ + "▁Clinical", + -12.071370124816895 + ], + [ + "▁uncertainty", + -12.071381568908691 + ], + [ + "hohen", + -12.071386337280273 + ], + [ + "uza", + -12.071431159973145 + ], + [ + "▁kleiner", + -12.071518898010254 + ], + [ + "▁substances", + -12.07155704498291 + ], + [ + "ados", + -12.071627616882324 + ], + [ + "wheel", + -12.07178020477295 + ], + [ + "▁cone", + -12.071990966796875 + ], + [ + "▁castig", + -12.072218894958496 + ], + [ + "▁Conditions", + -12.072242736816406 + ], + [ + "minus", + -12.072643280029297 + ], + [ + "▁permits", + -12.07265853881836 + ], + [ + "fond", + -12.072784423828125 + ], + [ + "▁reactions", + -12.07278823852539 + ], + [ + "▁Mario", + -12.072819709777832 + ], + [ 
+ "▁materiale", + -12.07291030883789 + ], + [ + "AH", + -12.072924613952637 + ], + [ + "▁juillet", + -12.073172569274902 + ], + [ + "▁juridic", + -12.073182106018066 + ], + [ + "▁dropping", + -12.073200225830078 + ], + [ + "expérience", + -12.073225021362305 + ], + [ + "▁depot", + -12.073345184326172 + ], + [ + "▁plea", + -12.073490142822266 + ], + [ + "dezvoltarea", + -12.073512077331543 + ], + [ + "▁Independent", + -12.07363224029541 + ], + [ + "▁Homes", + -12.073674201965332 + ], + [ + "▁crust", + -12.073808670043945 + ], + [ + "▁pillow", + -12.073899269104004 + ], + [ + "kreis", + -12.073920249938965 + ], + [ + "▁boiler", + -12.073928833007812 + ], + [ + "latin", + -12.073978424072266 + ], + [ + "▁stet", + -12.074131965637207 + ], + [ + "GH", + -12.074143409729004 + ], + [ + "▁absent", + -12.074334144592285 + ], + [ + "▁Directors", + -12.074501037597656 + ], + [ + "zwischen", + -12.07462215423584 + ], + [ + "▁comprendre", + -12.07465648651123 + ], + [ + "▁25,", + -12.074832916259766 + ], + [ + "▁pharmaceutical", + -12.075145721435547 + ], + [ + "▁placeholder", + -12.075174331665039 + ], + [ + "KI", + -12.075176239013672 + ], + [ + "▁români", + -12.07540225982666 + ], + [ + "▁Dollar", + -12.075509071350098 + ], + [ + "▁Operations", + -12.075525283813477 + ], + [ + "▁Dublin", + -12.075550079345703 + ], + [ + "▁drawings", + -12.0756196975708 + ], + [ + "▁respir", + -12.075769424438477 + ], + [ + "▁haul", + -12.0758056640625 + ], + [ + "Obviously", + -12.075864791870117 + ], + [ + "▁Beat", + -12.075864791870117 + ], + [ + "▁jeans", + -12.07590103149414 + ], + [ + "▁Masters", + -12.075927734375 + ], + [ + "▁bits", + -12.076213836669922 + ], + [ + "poți", + -12.076226234436035 + ], + [ + "▁asigur", + -12.076228141784668 + ], + [ + "▁intampla", + -12.076228141784668 + ], + [ + "▁marc", + -12.076282501220703 + ], + [ + "......", + -12.076404571533203 + ], + [ + "▁districts", + -12.076437950134277 + ], + [ + "cru", + -12.076457023620605 + ], + [ + "nav", + -12.076608657836914 + ], + [ + "huile", + -12.076644897460938 + ], + [ + "▁limitation", + -12.076647758483887 + ], + [ + "boat", + -12.076712608337402 + ], + [ + "IRE", + -12.076720237731934 + ], + [ + "Unis", + -12.07675838470459 + ], + [ + "dated", + -12.0769624710083 + ], + [ + "▁consultants", + -12.07699203491211 + ], + [ + "▁Josh", + -12.077007293701172 + ], + [ + "tanz", + -12.077184677124023 + ], + [ + "launching", + -12.0772066116333 + ], + [ + "▁browsing", + -12.077310562133789 + ], + [ + "▁incerc", + -12.077314376831055 + ], + [ + "▁27,", + -12.077375411987305 + ], + [ + "не", + -12.077398300170898 + ], + [ + "wig", + -12.077415466308594 + ], + [ + "▁spar", + -12.077458381652832 + ], + [ + "▁token", + -12.077547073364258 + ], + [ + "▁09", + -12.077548027038574 + ], + [ + "spa", + -12.07766056060791 + ], + [ + "ometer", + -12.07772159576416 + ], + [ + "▁riders", + -12.077869415283203 + ], + [ + "▁Drop", + -12.077898979187012 + ], + [ + "RN", + -12.078103065490723 + ], + [ + "▁pairs", + -12.07815933227539 + ], + [ + "▁psychology", + -12.078420639038086 + ], + [ + "▁Douglas", + -12.078437805175781 + ], + [ + "▁verwenden", + -12.078516960144043 + ], + [ + "▁(9", + -12.07857894897461 + ], + [ + "▁Rental", + -12.078728675842285 + ], + [ + "▁délai", + -12.078847885131836 + ], + [ + "▁sooner", + -12.078882217407227 + ], + [ + "▁bankruptcy", + -12.079109191894531 + ], + [ + "04.", + -12.079110145568848 + ], + [ + "abend", + -12.079194068908691 + ], + [ + "çon", + -12.079237937927246 + ], + [ + "▁Ple", + -12.079243659973145 + ], + [ + "fug", 
+ -12.079337120056152 + ], + [ + "▁Wohnung", + -12.079410552978516 + ], + [ + "▁Preise", + -12.079424858093262 + ], + [ + "▁Kay", + -12.079427719116211 + ], + [ + "▁notify", + -12.079474449157715 + ], + [ + "▁Brain", + -12.079534530639648 + ], + [ + "▁optical", + -12.079580307006836 + ], + [ + "▁modifications", + -12.079727172851562 + ], + [ + "▁repos", + -12.07999324798584 + ], + [ + "▁worksheet", + -12.0800142288208 + ], + [ + "continu", + -12.08005428314209 + ], + [ + "▁assumed", + -12.08059024810791 + ], + [ + "varying", + -12.080626487731934 + ], + [ + "feier", + -12.080643653869629 + ], + [ + "▁Freedom", + -12.080717086791992 + ], + [ + "▁Inhalte", + -12.080740928649902 + ], + [ + "▁observations", + -12.080755233764648 + ], + [ + "▁Gruppe", + -12.080791473388672 + ], + [ + "▁Cyber", + -12.080883979797363 + ], + [ + "hort", + -12.080889701843262 + ], + [ + "▁langue", + -12.080915451049805 + ], + [ + "führen", + -12.08110523223877 + ], + [ + "ganze", + -12.081254005432129 + ], + [ + "▁forte", + -12.081327438354492 + ], + [ + "▁Stefan", + -12.081376075744629 + ], + [ + "▁Jetzt", + -12.081463813781738 + ], + [ + "mehr", + -12.081489562988281 + ], + [ + "trip", + -12.081549644470215 + ], + [ + "▁poem", + -12.081583976745605 + ], + [ + "▁practitioners", + -12.081720352172852 + ], + [ + "▁connector", + -12.08177661895752 + ], + [ + "ECT", + -12.081794738769531 + ], + [ + "▁inseamna", + -12.081820487976074 + ], + [ + "addressing", + -12.081867218017578 + ], + [ + "▁beliebt", + -12.081908226013184 + ], + [ + "▁Mama", + -12.082002639770508 + ], + [ + "▁fade", + -12.08204460144043 + ], + [ + "messen", + -12.08205509185791 + ], + [ + "▁Visa", + -12.082080841064453 + ], + [ + "▁Meta", + -12.082154273986816 + ], + [ + "lene", + -12.082188606262207 + ], + [ + "▁remembered", + -12.082334518432617 + ], + [ + "/3", + -12.082337379455566 + ], + [ + "apte", + -12.082347869873047 + ], + [ + "▁uncomfortable", + -12.082364082336426 + ], + [ + "▁romance", + -12.08253002166748 + ], + [ + "▁réalis", + -12.082601547241211 + ], + [ + "▁Vincent", + -12.082706451416016 + ], + [ + "▁ABC", + -12.08275318145752 + ], + [ + "▁handicap", + -12.082756042480469 + ], + [ + "▁Shin", + -12.082801818847656 + ], + [ + "▁Hunde", + -12.082847595214844 + ], + [ + "▁Ach", + -12.083131790161133 + ], + [ + "▁Questions", + -12.083136558532715 + ], + [ + "▁particles", + -12.083226203918457 + ], + [ + "usch", + -12.083230018615723 + ], + [ + "▁SUV", + -12.083279609680176 + ], + [ + "▁Tous", + -12.083301544189453 + ], + [ + "▁empower", + -12.08336067199707 + ], + [ + "▁Yi", + -12.083446502685547 + ], + [ + "▁LinkedIn", + -12.083453178405762 + ], + [ + "▁Profile", + -12.083507537841797 + ], + [ + "▁surround", + -12.083553314208984 + ], + [ + "▁wh", + -12.083560943603516 + ], + [ + "▁Weiter", + -12.083577156066895 + ], + [ + "▁Weight", + -12.083672523498535 + ], + [ + "▁creatures", + -12.083807945251465 + ], + [ + "Especially", + -12.08381462097168 + ], + [ + "▁repede", + -12.08383560180664 + ], + [ + "▁albums", + -12.083885192871094 + ], + [ + "▁compatibil", + -12.0839204788208 + ], + [ + "▁Interesse", + -12.083929061889648 + ], + [ + "abili", + -12.084062576293945 + ], + [ + "▁roast", + -12.084310531616211 + ], + [ + "▁unii", + -12.084310531616211 + ], + [ + "▁Glad", + -12.084421157836914 + ], + [ + "▁enthusiasm", + -12.084539413452148 + ], + [ + "▁whisk", + -12.084547996520996 + ], + [ + "▁freezer", + -12.084712982177734 + ], + [ + "▁stolen", + -12.084715843200684 + ], + [ + "▁neighbour", + -12.084883689880371 + ], + [ + "▁sake", + 
-12.084967613220215 + ], + [ + "▁Effect", + -12.0850191116333 + ], + [ + "▁fighter", + -12.085044860839844 + ], + [ + "▁tranquil", + -12.085084915161133 + ], + [ + "▁organizer", + -12.085199356079102 + ], + [ + "pixel", + -12.085306167602539 + ], + [ + "▁Guest", + -12.085338592529297 + ], + [ + "▁Philipp", + -12.085369110107422 + ], + [ + "kunft", + -12.085382461547852 + ], + [ + "▁Meer", + -12.085409164428711 + ], + [ + "▁inviting", + -12.085432052612305 + ], + [ + "gänge", + -12.085450172424316 + ], + [ + "▁Position", + -12.085627555847168 + ], + [ + "giving", + -12.085693359375 + ], + [ + "▁marble", + -12.085807800292969 + ], + [ + "▁neg", + -12.085813522338867 + ], + [ + "▁Haar", + -12.085914611816406 + ], + [ + "Ein", + -12.086039543151855 + ], + [ + "▁buses", + -12.086187362670898 + ], + [ + "▁Lodge", + -12.086188316345215 + ], + [ + "soare", + -12.086319923400879 + ], + [ + "▁Barn", + -12.086409568786621 + ], + [ + "▁captain", + -12.086527824401855 + ], + [ + "▁Fix", + -12.08657169342041 + ], + [ + "ulate", + -12.086629867553711 + ], + [ + "ență", + -12.086709022521973 + ], + [ + "▁finances", + -12.086770057678223 + ], + [ + "▁VIP", + -12.086800575256348 + ], + [ + "▁Adams", + -12.086801528930664 + ], + [ + "▁spécialisé", + -12.086960792541504 + ], + [ + "▁fortunate", + -12.087236404418945 + ], + [ + "ility", + -12.087345123291016 + ], + [ + "▁democracy", + -12.08749771118164 + ], + [ + "shu", + -12.087580680847168 + ], + [ + "▁consiste", + -12.087624549865723 + ], + [ + "▁tort", + -12.087692260742188 + ], + [ + "▁branding", + -12.087793350219727 + ], + [ + "▁porch", + -12.08780288696289 + ], + [ + "UNI", + -12.087867736816406 + ], + [ + "▁placut", + -12.087915420532227 + ], + [ + "▁coupled", + -12.088058471679688 + ], + [ + "▁ministre", + -12.088187217712402 + ], + [ + "▁minerals", + -12.088335037231445 + ], + [ + "▁safer", + -12.088335990905762 + ], + [ + "▁outlets", + -12.088438034057617 + ], + [ + "▁caution", + -12.08864688873291 + ], + [ + "▁lightly", + -12.0886869430542 + ], + [ + "▁utilizator", + -12.088700294494629 + ], + [ + "▁Pala", + -12.088959693908691 + ], + [ + "▁doll", + -12.088961601257324 + ], + [ + "(1)", + -12.089065551757812 + ], + [ + "chol", + -12.089120864868164 + ], + [ + "▁Left", + -12.08919620513916 + ], + [ + "▁roulant", + -12.089277267456055 + ], + [ + "▁propune", + -12.089301109313965 + ], + [ + "▁Cred", + -12.089339256286621 + ], + [ + "▁negotiations", + -12.089362144470215 + ], + [ + "amba", + -12.089393615722656 + ], + [ + "▁grasp", + -12.089420318603516 + ], + [ + "▁Amsterdam", + -12.089451789855957 + ], + [ + "▁Zweck", + -12.08945369720459 + ], + [ + "▁conven", + -12.089563369750977 + ], + [ + "▁organizing", + -12.089574813842773 + ], + [ + "section", + -12.089618682861328 + ], + [ + "▁endeavor", + -12.089634895324707 + ], + [ + "▁basics", + -12.089722633361816 + ], + [ + "jud", + -12.089874267578125 + ], + [ + "▁yarn", + -12.090049743652344 + ], + [ + "▁shout", + -12.09009075164795 + ], + [ + "fällt", + -12.090285301208496 + ], + [ + "▁dragoste", + -12.09054946899414 + ], + [ + "▁Rein", + -12.090594291687012 + ], + [ + "Cal", + -12.090688705444336 + ], + [ + "▁deaths", + -12.090729713439941 + ], + [ + "▁24,", + -12.0907564163208 + ], + [ + "▁măr", + -12.090773582458496 + ], + [ + "server", + -12.090825080871582 + ], + [ + "▁explic", + -12.09085464477539 + ], + [ + "▁sufer", + -12.090903282165527 + ], + [ + "▁lucrări", + -12.091097831726074 + ], + [ + "▁Disease", + -12.091126441955566 + ], + [ + "▁prescribed", + -12.091194152832031 + ], + [ + 
"prozess", + -12.091285705566406 + ], + [ + "▁dessin", + -12.091343879699707 + ], + [ + "▁refuge", + -12.091473579406738 + ], + [ + "▁cope", + -12.091631889343262 + ], + [ + "pole", + -12.09196949005127 + ], + [ + "▁vacant", + -12.091984748840332 + ], + [ + "▁sezon", + -12.092035293579102 + ], + [ + "▁Carbon", + -12.092227935791016 + ], + [ + "▁goût", + -12.092233657836914 + ], + [ + "Ste", + -12.092320442199707 + ], + [ + "▁surroundings", + -12.092754364013672 + ], + [ + "definite", + -12.09284496307373 + ], + [ + "▁adaptation", + -12.093358993530273 + ], + [ + "cteur", + -12.0933837890625 + ], + [ + "System", + -12.093442916870117 + ], + [ + "▁Burg", + -12.093550682067871 + ], + [ + "▁retention", + -12.093579292297363 + ], + [ + "examen", + -12.093618392944336 + ], + [ + "▁adjustments", + -12.093668937683105 + ], + [ + "nies", + -12.094213485717773 + ], + [ + "▁RSS", + -12.094215393066406 + ], + [ + "▁Umwelt", + -12.094259262084961 + ], + [ + "▁strengths", + -12.094326972961426 + ], + [ + "loom", + -12.094401359558105 + ], + [ + "▁pics", + -12.094404220581055 + ], + [ + "phase", + -12.09443187713623 + ], + [ + "▁Poland", + -12.094472885131836 + ], + [ + "▁practicing", + -12.094558715820312 + ], + [ + "monetary", + -12.094756126403809 + ], + [ + "▁embodiment", + -12.094756126403809 + ], + [ + "▁jocuri", + -12.094846725463867 + ], + [ + "▁impreuna", + -12.094939231872559 + ], + [ + "▁Lyon", + -12.094985961914062 + ], + [ + "keeping", + -12.095157623291016 + ], + [ + "▁Starting", + -12.095202445983887 + ], + [ + "▁începe", + -12.095357894897461 + ], + [ + "▁clay", + -12.095440864562988 + ], + [ + "bildung", + -12.095444679260254 + ], + [ + "Technologie", + -12.095513343811035 + ], + [ + "toxic", + -12.095624923706055 + ], + [ + "▁gasit", + -12.095819473266602 + ], + [ + "rott", + -12.095870018005371 + ], + [ + "brook", + -12.095935821533203 + ], + [ + "▁wann", + -12.096029281616211 + ], + [ + "▁lined", + -12.09610366821289 + ], + [ + "▁Chelsea", + -12.096223831176758 + ], + [ + "▁Orlando", + -12.096224784851074 + ], + [ + "▁Otherwise", + -12.096267700195312 + ], + [ + "▁debit", + -12.096273422241211 + ], + [ + "▁entsprechend", + -12.09648323059082 + ], + [ + "nism", + -12.09654426574707 + ], + [ + "issen", + -12.09664535522461 + ], + [ + "▁rendez", + -12.096646308898926 + ], + [ + "▁processus", + -12.096745491027832 + ], + [ + "mbi", + -12.096890449523926 + ], + [ + "▁Graduate", + -12.096960067749023 + ], + [ + "▁cozy", + -12.097119331359863 + ], + [ + "▁Freunde", + -12.097320556640625 + ], + [ + "▁teme", + -12.097389221191406 + ], + [ + "▁bias", + -12.097548484802246 + ], + [ + "102", + -12.09756851196289 + ], + [ + "terrorism", + -12.09770679473877 + ], + [ + "threatening", + -12.097756385803223 + ], + [ + "ни", + -12.097776412963867 + ], + [ + "▁Sonntag", + -12.098062515258789 + ], + [ + "▁efect", + -12.098116874694824 + ], + [ + "▁prayers", + -12.098134994506836 + ], + [ + "▁backpack", + -12.09841537475586 + ], + [ + "?)", + -12.098489761352539 + ], + [ + "▁searches", + -12.098788261413574 + ], + [ + "ouverture", + -12.09880256652832 + ], + [ + "▁sustained", + -12.098865509033203 + ], + [ + "hawk", + -12.098869323730469 + ], + [ + "messe", + -12.098958969116211 + ], + [ + "▁prototype", + -12.098989486694336 + ], + [ + "▁stră", + -12.09903335571289 + ], + [ + "▁Neo", + -12.099040985107422 + ], + [ + "▁29,", + -12.099109649658203 + ], + [ + "izo", + -12.099306106567383 + ], + [ + "▁Anton", + -12.099333763122559 + ], + [ + "SIS", + -12.099564552307129 + ], + [ + "pendant", + 
-12.099617958068848 + ], + [ + "▁passive", + -12.099813461303711 + ], + [ + "▁Aaron", + -12.099824905395508 + ], + [ + "▁Karen", + -12.099831581115723 + ], + [ + "▁Bildung", + -12.09994888305664 + ], + [ + "ario", + -12.099949836730957 + ], + [ + "▁regulator", + -12.100006103515625 + ], + [ + "gruppe", + -12.100032806396484 + ], + [ + "stepped", + -12.100053787231445 + ], + [ + "▁interventions", + -12.10014533996582 + ], + [ + "▁rounds", + -12.100149154663086 + ], + [ + "▁Khan", + -12.10020637512207 + ], + [ + "▁railway", + -12.10028076171875 + ], + [ + "▁souvenir", + -12.100296974182129 + ], + [ + "▁Plans", + -12.100336074829102 + ], + [ + "aille", + -12.100372314453125 + ], + [ + "▁billing", + -12.100473403930664 + ], + [ + "▁Spiele", + -12.100541114807129 + ], + [ + "▁supermarket", + -12.100556373596191 + ], + [ + "▁flows", + -12.100625991821289 + ], + [ + "▁PayPal", + -12.100641250610352 + ], + [ + "▁tribe", + -12.10067081451416 + ], + [ + "anni", + -12.100780487060547 + ], + [ + "▁rides", + -12.100934982299805 + ], + [ + "▁Orleans", + -12.101009368896484 + ], + [ + "▁evaluated", + -12.101021766662598 + ], + [ + "founder", + -12.10106372833252 + ], + [ + "▁Feld", + -12.101212501525879 + ], + [ + "▁altele", + -12.10122299194336 + ], + [ + "▁thermo", + -12.101290702819824 + ], + [ + "ugh", + -12.101330757141113 + ], + [ + "▁adus", + -12.101375579833984 + ], + [ + "▁Taiwan", + -12.101396560668945 + ], + [ + "▁clause", + -12.101409912109375 + ], + [ + "oxi", + -12.101465225219727 + ], + [ + "alcool", + -12.101495742797852 + ], + [ + "▁Noi", + -12.101531982421875 + ], + [ + "rub", + -12.101540565490723 + ], + [ + "▁dosar", + -12.101582527160645 + ], + [ + "▁Nelson", + -12.101751327514648 + ], + [ + "fassung", + -12.102316856384277 + ], + [ + "▁Kill", + -12.102489471435547 + ], + [ + "▁Standards", + -12.102490425109863 + ], + [ + "▁upward", + -12.102653503417969 + ], + [ + "▁Coloring", + -12.102664947509766 + ], + [ + "Designed", + -12.102754592895508 + ], + [ + "▁Nou", + -12.10281753540039 + ], + [ + "▁borrow", + -12.102940559387207 + ], + [ + "▁Poll", + -12.10321044921875 + ], + [ + "▁antibiotic", + -12.103277206420898 + ], + [ + "▁fabrication", + -12.103388786315918 + ], + [ + "quo", + -12.103432655334473 + ], + [ + "▁crimes", + -12.103464126586914 + ], + [ + "▁nahe", + -12.103484153747559 + ], + [ + "▁aplicat", + -12.103565216064453 + ], + [ + "OST", + -12.1035737991333 + ], + [ + "▁Beijing", + -12.103599548339844 + ], + [ + "fight", + -12.103612899780273 + ], + [ + "▁lodge", + -12.103612899780273 + ], + [ + "dreh", + -12.103922843933105 + ], + [ + "▁harness", + -12.104036331176758 + ], + [ + "▁noiembrie", + -12.104151725769043 + ], + [ + "ounded", + -12.104161262512207 + ], + [ + "▁Imp", + -12.1041841506958 + ], + [ + "▁nächste", + -12.104275703430176 + ], + [ + "funktion", + -12.104476928710938 + ], + [ + "exploitation", + -12.104569435119629 + ], + [ + "▁Ready", + -12.10457706451416 + ], + [ + "▁Plate", + -12.104598999023438 + ], + [ + "▁octombrie", + -12.104706764221191 + ], + [ + "▁considerat", + -12.104982376098633 + ], + [ + "▁Xbox", + -12.105067253112793 + ], + [ + "mind", + -12.105107307434082 + ], + [ + "▁Lind", + -12.105111122131348 + ], + [ + "runde", + -12.105352401733398 + ], + [ + "mination", + -12.105374336242676 + ], + [ + "▁memori", + -12.105377197265625 + ], + [ + "▁cere", + -12.105389595031738 + ], + [ + "barkeit", + -12.105517387390137 + ], + [ + "▁găsi", + -12.105761528015137 + ], + [ + "2.1", + -12.105863571166992 + ], + [ + "▁Finding", + -12.105891227722168 + ], + 
[ + "▁static", + -12.106405258178711 + ], + [ + "court", + -12.106439590454102 + ], + [ + "▁Gem", + -12.106489181518555 + ], + [ + "▁pièce", + -12.106494903564453 + ], + [ + "▁reel", + -12.10651969909668 + ], + [ + "▁manuscript", + -12.106560707092285 + ], + [ + "▁complications", + -12.106578826904297 + ], + [ + "▁controlling", + -12.106585502624512 + ], + [ + "▁favour", + -12.106738090515137 + ], + [ + "▁advancement", + -12.106739044189453 + ], + [ + "▁Radi", + -12.106870651245117 + ], + [ + "▁faites", + -12.107076644897461 + ], + [ + "▁ordin", + -12.107131958007812 + ], + [ + "sorted", + -12.107152938842773 + ], + [ + "▁1982", + -12.10715389251709 + ], + [ + "▁brutal", + -12.107154846191406 + ], + [ + "▁Guy", + -12.107226371765137 + ], + [ + "▁accomplishment", + -12.107248306274414 + ], + [ + "▁wer", + -12.107329368591309 + ], + [ + "▁withdraw", + -12.107460975646973 + ], + [ + "abilitate", + -12.1075439453125 + ], + [ + "▁NBA", + -12.107625961303711 + ], + [ + "▁Benefit", + -12.107675552368164 + ], + [ + "▁divide", + -12.107824325561523 + ], + [ + "induced", + -12.107913970947266 + ], + [ + "▁văzut", + -12.108049392700195 + ], + [ + "▁peel", + -12.10807991027832 + ], + [ + "▁joints", + -12.108160972595215 + ], + [ + "▁enthalten", + -12.108301162719727 + ], + [ + "▁spy", + -12.108397483825684 + ], + [ + "▁occasional", + -12.108437538146973 + ], + [ + "warm", + -12.108514785766602 + ], + [ + "ême", + -12.108542442321777 + ], + [ + "▁Betriebs", + -12.108551979064941 + ], + [ + "▁Ioan", + -12.1087064743042 + ], + [ + "▁balloon", + -12.108809471130371 + ], + [ + "▁leap", + -12.108869552612305 + ], + [ + "pelled", + -12.109000205993652 + ], + [ + "▁realise", + -12.109073638916016 + ], + [ + "▁Retail", + -12.109118461608887 + ], + [ + "▁Farben", + -12.109151840209961 + ], + [ + "▁Kennedy", + -12.10916519165039 + ], + [ + "▁Firma", + -12.109196662902832 + ], + [ + "▁tineri", + -12.10934066772461 + ], + [ + "tub", + -12.109354019165039 + ], + [ + "PORT", + -12.109381675720215 + ], + [ + "▁stiff", + -12.109416007995605 + ], + [ + "▁notable", + -12.109476089477539 + ], + [ + "tler", + -12.109498023986816 + ], + [ + "▁utile", + -12.10958480834961 + ], + [ + "▁jouer", + -12.109674453735352 + ], + [ + "▁Primary", + -12.109735488891602 + ], + [ + "▁retailer", + -12.109764099121094 + ], + [ + "▁jederzeit", + -12.109808921813965 + ], + [ + "▁amend", + -12.109817504882812 + ], + [ + "▁sagte", + -12.109845161437988 + ], + [ + "atch", + -12.10995864868164 + ], + [ + "ution", + -12.110008239746094 + ], + [ + "once", + -12.110018730163574 + ], + [ + "ended", + -12.1100435256958 + ], + [ + "▁literary", + -12.11013126373291 + ], + [ + "▁wrist", + -12.110281944274902 + ], + [ + "vii", + -12.11036205291748 + ], + [ + "scriere", + -12.110367774963379 + ], + [ + "▁compassion", + -12.110443115234375 + ], + [ + "▁Milan", + -12.110474586486816 + ], + [ + "▁Dach", + -12.110490798950195 + ], + [ + "▁problèmes", + -12.110630989074707 + ], + [ + "▁Pré", + -12.110687255859375 + ], + [ + "▁Feder", + -12.110759735107422 + ], + [ + "Dr", + -12.110814094543457 + ], + [ + "Spr", + -12.110908508300781 + ], + [ + "▁né", + -12.110969543457031 + ], + [ + "François", + -12.111023902893066 + ], + [ + "▁Shu", + -12.111115455627441 + ], + [ + "▁poison", + -12.111154556274414 + ], + [ + "zier", + -12.111176490783691 + ], + [ + "▁attain", + -12.11124038696289 + ], + [ + "▁switching", + -12.111310958862305 + ], + [ + "▁vibration", + -12.111348152160645 + ], + [ + "▁Tablet", + -12.11136531829834 + ], + [ + "▁Lern", + -12.11148452758789 + 
], + [ + "offrir", + -12.111660957336426 + ], + [ + "123", + -12.11168098449707 + ], + [ + "cheapest", + -12.11173152923584 + ], + [ + "▁numărul", + -12.111764907836914 + ], + [ + "break", + -12.11180305480957 + ], + [ + "cyto", + -12.111836433410645 + ], + [ + "▁Mississippi", + -12.111955642700195 + ], + [ + "▁dragon", + -12.11207389831543 + ], + [ + "fir", + -12.112176895141602 + ], + [ + "▁fête", + -12.112180709838867 + ], + [ + "▁Wait", + -12.112350463867188 + ], + [ + "buy", + -12.112359046936035 + ], + [ + "având", + -12.112391471862793 + ], + [ + "▁Scar", + -12.112517356872559 + ], + [ + "▁Hund", + -12.112586975097656 + ], + [ + "bug", + -12.112807273864746 + ], + [ + "▁classique", + -12.112811088562012 + ], + [ + "▁tenant", + -12.112860679626465 + ], + [ + "▁Walt", + -12.11296272277832 + ], + [ + "▁timber", + -12.11296272277832 + ], + [ + "inscription", + -12.11300277709961 + ], + [ + "BD", + -12.113016128540039 + ], + [ + "▁Commissioner", + -12.113018989562988 + ], + [ + "▁casinos", + -12.11306095123291 + ], + [ + "▁prochain", + -12.113168716430664 + ], + [ + "▁rustic", + -12.11349868774414 + ], + [ + "▁Kent", + -12.113607406616211 + ], + [ + "▁Deci", + -12.113761901855469 + ], + [ + "ли", + -12.113855361938477 + ], + [ + "▁crossed", + -12.113861083984375 + ], + [ + "▁delightful", + -12.113869667053223 + ], + [ + "▁metres", + -12.113872528076172 + ], + [ + "▁scandal", + -12.113906860351562 + ], + [ + "▁activitate", + -12.113986015319824 + ], + [ + "▁nimeni", + -12.114009857177734 + ], + [ + "ease", + -12.11402416229248 + ], + [ + "▁revenues", + -12.1140775680542 + ], + [ + "▁partially", + -12.114187240600586 + ], + [ + "AE", + -12.114263534545898 + ], + [ + "nique", + -12.114410400390625 + ], + [ + "▁fixtures", + -12.114426612854004 + ], + [ + "▁pupils", + -12.114694595336914 + ], + [ + "Lib", + -12.11471176147461 + ], + [ + "analyse", + -12.114739418029785 + ], + [ + "▁Oracle", + -12.114767074584961 + ], + [ + "troph", + -12.114859580993652 + ], + [ + "▁detected", + -12.114879608154297 + ], + [ + "▁servant", + -12.11507797241211 + ], + [ + "▁badly", + -12.115121841430664 + ], + [ + "comparing", + -12.115150451660156 + ], + [ + "abs", + -12.115238189697266 + ], + [ + "▁fotografi", + -12.115443229675293 + ], + [ + "▁Million", + -12.115541458129883 + ], + [ + "▁Gordon", + -12.11557388305664 + ], + [ + "▁Smok", + -12.115592002868652 + ], + [ + "▁Essay", + -12.11565113067627 + ], + [ + "eptic", + -12.115665435791016 + ], + [ + "▁Transportation", + -12.115728378295898 + ], + [ + "/2019", + -12.115767478942871 + ], + [ + "▁alignment", + -12.115778923034668 + ], + [ + "▁laut", + -12.11578369140625 + ], + [ + "stände", + -12.115791320800781 + ], + [ + "▁concerts", + -12.115811347961426 + ], + [ + "▁weekends", + -12.11589241027832 + ], + [ + "▁obstacles", + -12.115941047668457 + ], + [ + "wür", + -12.115964889526367 + ], + [ + "▁Fisher", + -12.116219520568848 + ], + [ + "▁supervisor", + -12.116242408752441 + ], + [ + "▁traders", + -12.116262435913086 + ], + [ + "▁scary", + -12.116484642028809 + ], + [ + "▁Grove", + -12.116538047790527 + ], + [ + "▁expose", + -12.116583824157715 + ], + [ + "▁enemies", + -12.116630554199219 + ], + [ + "▁Lux", + -12.11667537689209 + ], + [ + "▁Berufs", + -12.11672306060791 + ], + [ + "▁Sheet", + -12.116780281066895 + ], + [ + "▁Natürlich", + -12.116819381713867 + ], + [ + "▁examined", + -12.116886138916016 + ], + [ + "pursuing", + -12.116920471191406 + ], + [ + "▁pools", + -12.116923332214355 + ], + [ + "▁Thompson", + -12.117005348205566 + ], + [ + "▁SAP", + 
-12.117010116577148 + ], + [ + "claiming", + -12.117053985595703 + ], + [ + "buried", + -12.117055892944336 + ], + [ + "assurance", + -12.117138862609863 + ], + [ + "▁sandwich", + -12.117195129394531 + ], + [ + "uber", + -12.117310523986816 + ], + [ + "▁laisse", + -12.117321968078613 + ], + [ + "peak", + -12.117348670959473 + ], + [ + "spring", + -12.1173677444458 + ], + [ + "▁august", + -12.117369651794434 + ], + [ + "▁benötigt", + -12.11738109588623 + ], + [ + "▁achievements", + -12.117470741271973 + ], + [ + "coala", + -12.117478370666504 + ], + [ + "▁scr", + -12.117842674255371 + ], + [ + "gesagt", + -12.118122100830078 + ], + [ + "▁envelope", + -12.118141174316406 + ], + [ + "▁mapping", + -12.118169784545898 + ], + [ + "▁Suche", + -12.118298530578613 + ], + [ + "first", + -12.118329048156738 + ], + [ + "▁Quin", + -12.118447303771973 + ], + [ + "räu", + -12.118561744689941 + ], + [ + "▁răs", + -12.118583679199219 + ], + [ + "chemical", + -12.118597984313965 + ], + [ + "dad", + -12.118927955627441 + ], + [ + "formation", + -12.118983268737793 + ], + [ + "▁cushion", + -12.119026184082031 + ], + [ + "▁Maß", + -12.119046211242676 + ], + [ + "07.", + -12.119184494018555 + ], + [ + "▁perioadă", + -12.119257926940918 + ], + [ + "▁Wunsch", + -12.11925983428955 + ], + [ + "▁joi", + -12.119423866271973 + ], + [ + "▁$25", + -12.119482040405273 + ], + [ + "▁uploaded", + -12.11952018737793 + ], + [ + "▁hobby", + -12.119633674621582 + ], + [ + "▁septembrie", + -12.119633674621582 + ], + [ + "▁Dimension", + -12.119634628295898 + ], + [ + "▁domeniu", + -12.119661331176758 + ], + [ + "▁Tourism", + -12.119747161865234 + ], + [ + "▁fais", + -12.119800567626953 + ], + [ + "aches", + -12.119919776916504 + ], + [ + "neck", + -12.119969367980957 + ], + [ + "▁Chip", + -12.119982719421387 + ], + [ + "▁Tisch", + -12.1199951171875 + ], + [ + "▁Pai", + -12.120006561279297 + ], + [ + "▁Butter", + -12.120083808898926 + ], + [ + "▁altor", + -12.120133399963379 + ], + [ + "cultural", + -12.120182991027832 + ], + [ + "▁bases", + -12.12028980255127 + ], + [ + "▁Christopher", + -12.120396614074707 + ], + [ + "Kindle", + -12.120401382446289 + ], + [ + "▁bathrooms", + -12.12049388885498 + ], + [ + "▁civilian", + -12.12052059173584 + ], + [ + "▁Architecture", + -12.12058162689209 + ], + [ + "heiten", + -12.120641708374023 + ], + [ + "otte", + -12.120763778686523 + ], + [ + "ри", + -12.120784759521484 + ], + [ + "wash", + -12.120792388916016 + ], + [ + "▁evenimente", + -12.12086296081543 + ], + [ + "lade", + -12.121132850646973 + ], + [ + "▁ermöglicht", + -12.121140480041504 + ], + [ + "Port", + -12.121149063110352 + ], + [ + "▁Horn", + -12.12119197845459 + ], + [ + "▁Housing", + -12.121232032775879 + ], + [ + "▁Profit", + -12.121304512023926 + ], + [ + "▁stressed", + -12.12136459350586 + ], + [ + "▁70%", + -12.121431350708008 + ], + [ + "laying", + -12.121458053588867 + ], + [ + "▁specialize", + -12.121490478515625 + ], + [ + "▁Published", + -12.121519088745117 + ], + [ + "corp", + -12.121554374694824 + ], + [ + "▁revision", + -12.121611595153809 + ], + [ + "▁sail", + -12.121804237365723 + ], + [ + "courtesy", + -12.121909141540527 + ], + [ + "tax", + -12.1219482421875 + ], + [ + "▁perfekt", + -12.122018814086914 + ], + [ + "▁Risk", + -12.122088432312012 + ], + [ + "▁chaleur", + -12.122129440307617 + ], + [ + "ych", + -12.122132301330566 + ], + [ + "▁spine", + -12.12218189239502 + ], + [ + "▁holders", + -12.122264862060547 + ], + [ + "▁Speaking", + -12.122271537780762 + ], + [ + "▁Bernard", + -12.122400283813477 + ], + [ + 
"incarc", + -12.122532844543457 + ], + [ + "shalb", + -12.122639656066895 + ], + [ + "Potrivit", + -12.12264633178711 + ], + [ + "arising", + -12.122654914855957 + ], + [ + "▁kingdom", + -12.122665405273438 + ], + [ + "▁potato", + -12.122766494750977 + ], + [ + "▁promoted", + -12.122814178466797 + ], + [ + "▁judges", + -12.1228609085083 + ], + [ + "▁naturelle", + -12.122992515563965 + ], + [ + "▁Kindern", + -12.123022079467773 + ], + [ + "schicht", + -12.123047828674316 + ], + [ + "▁Drag", + -12.123066902160645 + ], + [ + "atta", + -12.123132705688477 + ], + [ + "soient", + -12.123249053955078 + ], + [ + "INS", + -12.12336540222168 + ], + [ + "▁legislative", + -12.123642921447754 + ], + [ + "▁teens", + -12.123785018920898 + ], + [ + "▁Fotos", + -12.123842239379883 + ], + [ + "▁illustrations", + -12.12392520904541 + ], + [ + "möglichkeiten", + -12.12415599822998 + ], + [ + "Votre", + -12.124194145202637 + ], + [ + "▁tarif", + -12.124195098876953 + ], + [ + "cli", + -12.124488830566406 + ], + [ + "▁landlord", + -12.12473201751709 + ], + [ + "cine", + -12.124743461608887 + ], + [ + "▁bot", + -12.124798774719238 + ], + [ + "enhancing", + -12.12491226196289 + ], + [ + "▁März", + -12.12491226196289 + ], + [ + "▁succès", + -12.125106811523438 + ], + [ + "▁disclose", + -12.125120162963867 + ], + [ + "▁Geräte", + -12.125321388244629 + ], + [ + "▁Magn", + -12.125422477722168 + ], + [ + "dessous", + -12.12580680847168 + ], + [ + "▁miracle", + -12.125862121582031 + ], + [ + "▁travailler", + -12.125933647155762 + ], + [ + "▁herb", + -12.125945091247559 + ], + [ + "-01", + -12.126049041748047 + ], + [ + "litre", + -12.126104354858398 + ], + [ + "▁tău", + -12.126120567321777 + ], + [ + "ACC", + -12.126190185546875 + ], + [ + "▁diminu", + -12.126275062561035 + ], + [ + "itzer", + -12.126317024230957 + ], + [ + "▁personenbezogen", + -12.126395225524902 + ], + [ + "▁Pure", + -12.126436233520508 + ], + [ + "▁influences", + -12.12668228149414 + ], + [ + "ană", + -12.126765251159668 + ], + [ + "▁proposer", + -12.126856803894043 + ], + [ + "▁longest", + -12.12692642211914 + ], + [ + "euses", + -12.127080917358398 + ], + [ + "/1", + -12.127487182617188 + ], + [ + "hafte", + -12.127716064453125 + ], + [ + "▁Dich", + -12.127761840820312 + ], + [ + "▁candle", + -12.128026962280273 + ], + [ + "ouche", + -12.128191947937012 + ], + [ + "installation", + -12.128241539001465 + ], + [ + "▁Includes", + -12.128280639648438 + ], + [ + "▁entfernt", + -12.12831974029541 + ], + [ + "traf", + -12.128499031066895 + ], + [ + "▁None", + -12.128508567810059 + ], + [ + "▁produc", + -12.128510475158691 + ], + [ + "held", + -12.128519058227539 + ], + [ + "graphic", + -12.128531455993652 + ], + [ + "▁demographic", + -12.128584861755371 + ], + [ + "ingham", + -12.1287841796875 + ], + [ + "schul", + -12.128812789916992 + ], + [ + "▁sneak", + -12.128843307495117 + ], + [ + "laub", + -12.128889083862305 + ], + [ + "▁thickness", + -12.12911605834961 + ], + [ + "▁killer", + -12.129297256469727 + ], + [ + "▁entsprechende", + -12.129344940185547 + ], + [ + "▁theft", + -12.129396438598633 + ], + [ + "▁Jerusalem", + -12.129457473754883 + ], + [ + "Adapt", + -12.129495620727539 + ], + [ + "▁updating", + -12.129497528076172 + ], + [ + "tete", + -12.12954330444336 + ], + [ + "▁warming", + -12.129701614379883 + ], + [ + "anlage", + -12.129739761352539 + ], + [ + "▁lenders", + -12.129814147949219 + ], + [ + "mobile", + -12.130008697509766 + ], + [ + "▁Package", + -12.130080223083496 + ], + [ + "▁Volume", + -12.130152702331543 + ], + [ + "---", + 
-12.130167007446289 + ], + [ + "▁Others", + -12.130173683166504 + ], + [ + "content", + -12.130188941955566 + ], + [ + "tement", + -12.130253791809082 + ], + [ + "bildet", + -12.13027572631836 + ], + [ + "▁washer", + -12.13053035736084 + ], + [ + "▁freelance", + -12.130623817443848 + ], + [ + "▁fein", + -12.130753517150879 + ], + [ + "▁catering", + -12.130851745605469 + ], + [ + "▁warmth", + -12.130911827087402 + ], + [ + "▁Month", + -12.131103515625 + ], + [ + "▁Federation", + -12.131134033203125 + ], + [ + "▁editorial", + -12.13121223449707 + ], + [ + "▁Shopping", + -12.131241798400879 + ], + [ + "▁efort", + -12.131296157836914 + ], + [ + "▁damp", + -12.131314277648926 + ], + [ + "▁declined", + -12.131332397460938 + ], + [ + "▁1978", + -12.13135051727295 + ], + [ + "6,000", + -12.131355285644531 + ], + [ + "location", + -12.131551742553711 + ], + [ + "▁blogger", + -12.131572723388672 + ], + [ + "▁goodness", + -12.131826400756836 + ], + [ + "▁Purchase", + -12.132119178771973 + ], + [ + "▁suspended", + -12.132159233093262 + ], + [ + "▁assessed", + -12.132201194763184 + ], + [ + "rada", + -12.132286071777344 + ], + [ + "▁Lac", + -12.132291793823242 + ], + [ + "▁angeboten", + -12.13235092163086 + ], + [ + "▁Wetter", + -12.132370948791504 + ], + [ + "ores", + -12.13243579864502 + ], + [ + "▁fourni", + -12.132476806640625 + ], + [ + "▁retire", + -12.13269329071045 + ], + [ + "▁Baptist", + -12.132741928100586 + ], + [ + "▁Saison", + -12.13277530670166 + ], + [ + "Bar", + -12.132794380187988 + ], + [ + "▁dossier", + -12.132979393005371 + ], + [ + "brow", + -12.133044242858887 + ], + [ + "▁Kaffee", + -12.133071899414062 + ], + [ + "-25", + -12.133463859558105 + ], + [ + "▁festivals", + -12.133599281311035 + ], + [ + "▁sellers", + -12.133716583251953 + ], + [ + "Ü", + -12.13393783569336 + ], + [ + "▁publisher", + -12.133960723876953 + ], + [ + "▁Designs", + -12.133970260620117 + ], + [ + "▁putut", + -12.13400936126709 + ], + [ + "▁Built", + -12.134417533874512 + ], + [ + "▁recreational", + -12.134476661682129 + ], + [ + "▁european", + -12.134514808654785 + ], + [ + "▁binary", + -12.134631156921387 + ], + [ + "▁Nieder", + -12.134764671325684 + ], + [ + "taking", + -12.1348237991333 + ], + [ + "▁Lots", + -12.13494873046875 + ], + [ + "▁recognised", + -12.135031700134277 + ], + [ + "ssant", + -12.135063171386719 + ], + [ + "ITE", + -12.135271072387695 + ], + [ + "oom", + -12.135298728942871 + ], + [ + "▁Kre", + -12.135310173034668 + ], + [ + "▁pipes", + -12.135631561279297 + ], + [ + "▁hinge", + -12.135653495788574 + ], + [ + "▁enterprises", + -12.135664939880371 + ], + [ + "▁texts", + -12.13583755493164 + ], + [ + "Organiz", + -12.136080741882324 + ], + [ + "▁suivre", + -12.136124610900879 + ], + [ + "noc", + -12.136157989501953 + ], + [ + "fair", + -12.136194229125977 + ], + [ + "▁darkness", + -12.136305809020996 + ], + [ + "▁Whi", + -12.13631534576416 + ], + [ + "natural", + -12.136321067810059 + ], + [ + "Bas", + -12.136422157287598 + ], + [ + "▁tribute", + -12.136443138122559 + ], + [ + "▁Naţional", + -12.136573791503906 + ], + [ + "hara", + -12.136622428894043 + ], + [ + "▁catégorie", + -12.136697769165039 + ], + [ + "▁Schedule", + -12.136698722839355 + ], + [ + "▁lernen", + -12.13671875 + ], + [ + "▁Plastic", + -12.136725425720215 + ], + [ + "▁giveaway", + -12.13675594329834 + ], + [ + "▁Ideen", + -12.136906623840332 + ], + [ + "▁circa", + -12.13718032836914 + ], + [ + "▁lice", + -12.137242317199707 + ], + [ + "▁Meinung", + -12.137264251708984 + ], + [ + "▁beside", + -12.137566566467285 + ], + 
[ + "▁vazut", + -12.137673377990723 + ], + [ + "strom", + -12.137749671936035 + ], + [ + "boro", + -12.137775421142578 + ], + [ + "▁Soon", + -12.137796401977539 + ], + [ + "dozens", + -12.137896537780762 + ], + [ + "▁Arena", + -12.137943267822266 + ], + [ + "▁viața", + -12.137989044189453 + ], + [ + "▁Impact", + -12.138082504272461 + ], + [ + "current", + -12.138106346130371 + ], + [ + "FM", + -12.138117790222168 + ], + [ + "▁coil", + -12.138657569885254 + ], + [ + "gold", + -12.138679504394531 + ], + [ + "▁spate", + -12.138679504394531 + ], + [ + "1.4", + -12.13875675201416 + ], + [ + "solution", + -12.138769149780273 + ], + [ + "▁Wayne", + -12.138835906982422 + ], + [ + "▁queen", + -12.138898849487305 + ], + [ + "illion", + -12.139022827148438 + ], + [ + "greifen", + -12.139127731323242 + ], + [ + "▁Bil", + -12.139174461364746 + ], + [ + "rote", + -12.139185905456543 + ], + [ + "END", + -12.13918685913086 + ], + [ + "äl", + -12.139206886291504 + ], + [ + "▁reçu", + -12.139378547668457 + ], + [ + "flower", + -12.139495849609375 + ], + [ + "▁draws", + -12.139519691467285 + ], + [ + "plant", + -12.139605522155762 + ], + [ + "2010", + -12.139702796936035 + ], + [ + "▁oper", + -12.139762878417969 + ], + [ + "▁conserve", + -12.139777183532715 + ], + [ + "▁sprinkle", + -12.13984203338623 + ], + [ + "mode", + -12.139924049377441 + ], + [ + "▁lifting", + -12.139941215515137 + ], + [ + "▁Institution", + -12.139951705932617 + ], + [ + "Când", + -12.14001750946045 + ], + [ + "Aus", + -12.140048027038574 + ], + [ + "▁fears", + -12.140054702758789 + ], + [ + "▁appointments", + -12.140079498291016 + ], + [ + "oarele", + -12.140162467956543 + ], + [ + "▁duck", + -12.140193939208984 + ], + [ + "▁stadium", + -12.140213012695312 + ], + [ + "▁vezi", + -12.140227317810059 + ], + [ + "▁lap", + -12.140315055847168 + ], + [ + "▁proceeds", + -12.140382766723633 + ], + [ + "geschlossen", + -12.140412330627441 + ], + [ + "▁tren", + -12.140478134155273 + ], + [ + "VS", + -12.140536308288574 + ], + [ + "▁vais", + -12.140800476074219 + ], + [ + "ținut", + -12.140859603881836 + ], + [ + "▁Concert", + -12.140928268432617 + ], + [ + "▁planting", + -12.141008377075195 + ], + [ + "▁honour", + -12.141069412231445 + ], + [ + "▁gras", + -12.141071319580078 + ], + [ + "woo", + -12.141092300415039 + ], + [ + "▁Hero", + -12.141282081604004 + ], + [ + "▁stimulate", + -12.14134407043457 + ], + [ + "▁überhaupt", + -12.141426086425781 + ], + [ + "▁bounce", + -12.14148235321045 + ], + [ + "oodle", + -12.14151382446289 + ], + [ + "▁packs", + -12.141576766967773 + ], + [ + "▁Poker", + -12.14158821105957 + ], + [ + "▁acea", + -12.141684532165527 + ], + [ + "▁parish", + -12.141754150390625 + ], + [ + "-24", + -12.141766548156738 + ], + [ + "▁iTunes", + -12.141874313354492 + ], + [ + "▁lumière", + -12.141948699951172 + ], + [ + "third", + -12.142024993896484 + ], + [ + "▁dynamics", + -12.142038345336914 + ], + [ + "Unless", + -12.142162322998047 + ], + [ + "▁immense", + -12.142416000366211 + ], + [ + "▁Sec", + -12.142781257629395 + ], + [ + "lois", + -12.143009185791016 + ], + [ + "époque", + -12.14302921295166 + ], + [ + "NB", + -12.143139839172363 + ], + [ + "written", + -12.143210411071777 + ], + [ + "▁logement", + -12.143226623535156 + ], + [ + "submitting", + -12.143295288085938 + ], + [ + "▁Quand", + -12.14331340789795 + ], + [ + "▁foi", + -12.143322944641113 + ], + [ + "▁catalogue", + -12.143351554870605 + ], + [ + "nova", + -12.14343547821045 + ], + [ + "▁prezentat", + -12.143527030944824 + ], + [ + "▁tart", + -12.143877983093262 
+ ], + [ + "те", + -12.143912315368652 + ], + [ + "hack", + -12.143916130065918 + ], + [ + "▁Politic", + -12.144003868103027 + ], + [ + "▁18,", + -12.144048690795898 + ], + [ + "▁ignored", + -12.144145965576172 + ], + [ + "▁spoon", + -12.144245147705078 + ], + [ + "▁Joy", + -12.144280433654785 + ], + [ + "▁reside", + -12.144482612609863 + ], + [ + ".99", + -12.144488334655762 + ], + [ + "lytic", + -12.144625663757324 + ], + [ + "▁bogat", + -12.144643783569336 + ], + [ + "▁nurses", + -12.144845008850098 + ], + [ + "▁funcţi", + -12.145029067993164 + ], + [ + "▁produselor", + -12.145038604736328 + ], + [ + "▁Associates", + -12.145069122314453 + ], + [ + "Est", + -12.14511489868164 + ], + [ + "▁peanut", + -12.145187377929688 + ], + [ + "▁résultat", + -12.145257949829102 + ], + [ + "08.", + -12.145424842834473 + ], + [ + "▁Astro", + -12.145439147949219 + ], + [ + "▁personnelle", + -12.145527839660645 + ], + [ + "320", + -12.145668983459473 + ], + [ + "▁Grab", + -12.145748138427734 + ], + [ + "éco", + -12.145801544189453 + ], + [ + "▁clasic", + -12.145857810974121 + ], + [ + "offre", + -12.14588451385498 + ], + [ + "▁idee", + -12.14589786529541 + ], + [ + "▁cheat", + -12.146259307861328 + ], + [ + "▁Flug", + -12.146286964416504 + ], + [ + "▁1500", + -12.146413803100586 + ], + [ + "▁kurze", + -12.14643383026123 + ], + [ + "With", + -12.146512985229492 + ], + [ + "▁Half", + -12.146575927734375 + ], + [ + "▁disciplines", + -12.146642684936523 + ], + [ + "sorption", + -12.14669132232666 + ], + [ + "▁greutate", + -12.146927833557129 + ], + [ + "mä", + -12.146940231323242 + ], + [ + "▁Literatur", + -12.146956443786621 + ], + [ + "3/", + -12.147016525268555 + ], + [ + "4.0", + -12.147095680236816 + ], + [ + "▁déco", + -12.147119522094727 + ], + [ + "▁Fuß", + -12.147233963012695 + ], + [ + "▁Deutsche", + -12.147289276123047 + ], + [ + "▁abundance", + -12.14746379852295 + ], + [ + "▁Luther", + -12.14750862121582 + ], + [ + "▁nutritional", + -12.147562980651855 + ], + [ + "▁Jude", + -12.147687911987305 + ], + [ + "AY", + -12.14786148071289 + ], + [ + "▁chore", + -12.147916793823242 + ], + [ + "▁Kro", + -12.148006439208984 + ], + [ + "▁alin", + -12.14801025390625 + ], + [ + "lösung", + -12.148030281066895 + ], + [ + "▁geworden", + -12.148238182067871 + ], + [ + "▁sociaux", + -12.148255348205566 + ], + [ + "▁Spark", + -12.1486177444458 + ], + [ + "▁phenomenon", + -12.148624420166016 + ], + [ + "ICA", + -12.148805618286133 + ], + [ + "▁Ran", + -12.148836135864258 + ], + [ + "▁Schwarz", + -12.148959159851074 + ], + [ + "▁1983", + -12.148985862731934 + ], + [ + "ет", + -12.148990631103516 + ], + [ + "möglich", + -12.149084091186523 + ], + [ + "vocation", + -12.149087905883789 + ], + [ + "▁Organic", + -12.14926815032959 + ], + [ + "Oh", + -12.149408340454102 + ], + [ + "▁blockchain", + -12.149422645568848 + ], + [ + "▁Bă", + -12.149515151977539 + ], + [ + "▁Bass", + -12.14953899383545 + ], + [ + "enie", + -12.149687767028809 + ], + [ + "▁rêve", + -12.149807929992676 + ], + [ + "▁Rap", + -12.149986267089844 + ], + [ + "▁democratic", + -12.150044441223145 + ], + [ + "▁Chart", + -12.150167465209961 + ], + [ + "▁Voi", + -12.150189399719238 + ], + [ + "process", + -12.150263786315918 + ], + [ + "▁preach", + -12.150389671325684 + ], + [ + "tient", + -12.150456428527832 + ], + [ + "▁Train", + -12.150468826293945 + ], + [ + "▁Reihe", + -12.150472640991211 + ], + [ + "help", + -12.150514602661133 + ], + [ + "1.6", + -12.150547981262207 + ], + [ + "▁cazuri", + -12.150547981262207 + ], + [ + "▁chap", + -12.150559425354004 
+ ], + [ + "aktiv", + -12.150632858276367 + ], + [ + "▁2006.", + -12.15079116821289 + ], + [ + "iene", + -12.150849342346191 + ], + [ + "▁BBQ", + -12.150969505310059 + ], + [ + "dauer", + -12.151028633117676 + ], + [ + "2).", + -12.151226997375488 + ], + [ + "▁Monat", + -12.151277542114258 + ], + [ + "Generally", + -12.151285171508789 + ], + [ + "▁bracelet", + -12.151336669921875 + ], + [ + "▁cartoon", + -12.151349067687988 + ], + [ + "▁pui", + -12.151488304138184 + ], + [ + "temp", + -12.151506423950195 + ], + [ + "▁Particip", + -12.151555061340332 + ], + [ + "▁dumneavoastră", + -12.151725769042969 + ], + [ + "▁Gin", + -12.151824951171875 + ], + [ + "iunile", + -12.151829719543457 + ], + [ + "reise", + -12.151849746704102 + ], + [ + "▁einzige", + -12.15189266204834 + ], + [ + "ANCE", + -12.15192985534668 + ], + [ + "▁humble", + -12.151951789855957 + ], + [ + "claim", + -12.152093887329102 + ], + [ + "LV", + -12.152143478393555 + ], + [ + "▁confiance", + -12.152270317077637 + ], + [ + "▁Trading", + -12.152535438537598 + ], + [ + "▁Fabric", + -12.152770042419434 + ], + [ + "▁Duke", + -12.152851104736328 + ], + [ + "spieler", + -12.152937889099121 + ], + [ + "▁reject", + -12.152987480163574 + ], + [ + "▁crise", + -12.153170585632324 + ], + [ + "▁borders", + -12.153196334838867 + ], + [ + "▁Vehicle", + -12.153279304504395 + ], + [ + "zeiten", + -12.153481483459473 + ], + [ + "enrolled", + -12.153514862060547 + ], + [ + "venue", + -12.153555870056152 + ], + [ + "▁forests", + -12.153564453125 + ], + [ + "vascular", + -12.15358829498291 + ], + [ + "▁phrases", + -12.153661727905273 + ], + [ + "▁receptor", + -12.15368366241455 + ], + [ + "schied", + -12.153687477111816 + ], + [ + "▁soirée", + -12.153785705566406 + ], + [ + "▁partener", + -12.153987884521484 + ], + [ + "▁Jobs", + -12.15417194366455 + ], + [ + "▁segments", + -12.154216766357422 + ], + [ + "▁violate", + -12.154438972473145 + ], + [ + "▁viable", + -12.154500007629395 + ], + [ + "▁encountered", + -12.154533386230469 + ], + [ + "▁travelers", + -12.154552459716797 + ], + [ + "▁împ", + -12.154679298400879 + ], + [ + "▁convince", + -12.154693603515625 + ], + [ + "▁mailing", + -12.154693603515625 + ], + [ + "▁Zahn", + -12.154698371887207 + ], + [ + "attend", + -12.15477466583252 + ], + [ + "▁eBay", + -12.154836654663086 + ], + [ + "▁Emergency", + -12.154844284057617 + ], + [ + "wirtschaft", + -12.154882431030273 + ], + [ + "▁scholars", + -12.154947280883789 + ], + [ + "▁considerably", + -12.155118942260742 + ], + [ + "▁combo", + -12.1551513671875 + ], + [ + "hiver", + -12.155198097229004 + ], + [ + "▁mysterious", + -12.15522575378418 + ], + [ + "▁Degree", + -12.155234336853027 + ], + [ + "▁fate", + -12.155242919921875 + ], + [ + "▁transplant", + -12.155281066894531 + ], + [ + "▁samedi", + -12.155400276184082 + ], + [ + "unit", + -12.155519485473633 + ], + [ + "▁moyenne", + -12.155611991882324 + ], + [ + "▁Liverpool", + -12.155614852905273 + ], + [ + "▁Champions", + -12.155728340148926 + ], + [ + "zzle", + -12.155824661254883 + ], + [ + "▁arena", + -12.156228065490723 + ], + [ + "▁Pipe", + -12.15633487701416 + ], + [ + "▁waterproof", + -12.156356811523438 + ], + [ + "▁eternal", + -12.156463623046875 + ], + [ + "Whenever", + -12.156503677368164 + ], + [ + "▁Hop", + -12.156535148620605 + ], + [ + "▁Betrieb", + -12.156816482543945 + ], + [ + "gne", + -12.15692138671875 + ], + [ + "▁spe", + -12.156975746154785 + ], + [ + "▁Corner", + -12.157078742980957 + ], + [ + "▁devenir", + -12.157118797302246 + ], + [ + "ambiance", + -12.157144546508789 + 
], + [ + "▁Graham", + -12.157200813293457 + ], + [ + "▁desires", + -12.157289505004883 + ], + [ + "▁Applications", + -12.157291412353516 + ], + [ + "▁genutzt", + -12.157477378845215 + ], + [ + "tek", + -12.157612800598145 + ], + [ + "▁Career", + -12.157641410827637 + ], + [ + "▁staple", + -12.157695770263672 + ], + [ + "▁Dodge", + -12.157817840576172 + ], + [ + "▁strictly", + -12.157889366149902 + ], + [ + "▁Gruppen", + -12.157952308654785 + ], + [ + "▁Finanz", + -12.157981872558594 + ], + [ + "▁sporting", + -12.15809440612793 + ], + [ + "▁Wieder", + -12.158127784729004 + ], + [ + "anny", + -12.158208847045898 + ], + [ + "▁bucura", + -12.158233642578125 + ], + [ + "▁Pest", + -12.15824031829834 + ], + [ + "▁circles", + -12.158246994018555 + ], + [ + "▁richtige", + -12.158309936523438 + ], + [ + "▁cycles", + -12.158379554748535 + ], + [ + "static", + -12.15845012664795 + ], + [ + "lasting", + -12.15847396850586 + ], + [ + "▁calcium", + -12.158549308776855 + ], + [ + "▁digest", + -12.158697128295898 + ], + [ + "Enfin", + -12.158865928649902 + ], + [ + "▁stressful", + -12.158951759338379 + ], + [ + "▁schemes", + -12.158981323242188 + ], + [ + "▁décision", + -12.158987045288086 + ], + [ + "▁comercial", + -12.15907096862793 + ], + [ + "işti", + -12.159098625183105 + ], + [ + "▁Comic", + -12.15910816192627 + ], + [ + "▁extensions", + -12.159140586853027 + ], + [ + "▁Sieg", + -12.159168243408203 + ], + [ + "▁pine", + -12.15919017791748 + ], + [ + "ieß", + -12.159272193908691 + ], + [ + "▁Images", + -12.159427642822266 + ], + [ + "▁Mensch", + -12.159668922424316 + ], + [ + "Pap", + -12.159773826599121 + ], + [ + "▁crops", + -12.15994930267334 + ], + [ + "▁sheep", + -12.159996032714844 + ], + [ + "▁istoric", + -12.160001754760742 + ], + [ + "▁Assessment", + -12.160035133361816 + ], + [ + "▁mounting", + -12.16035270690918 + ], + [ + "wirken", + -12.160469055175781 + ], + [ + "▁augment", + -12.160469055175781 + ], + [ + "▁picioare", + -12.160542488098145 + ], + [ + "organisme", + -12.160590171813965 + ], + [ + "▁Monitor", + -12.16060733795166 + ], + [ + "▁celles", + -12.160642623901367 + ], + [ + "▁Maison", + -12.160709381103516 + ], + [ + "notified", + -12.160783767700195 + ], + [ + "▁chew", + -12.160831451416016 + ], + [ + "▁bleu", + -12.16083812713623 + ], + [ + "dow", + -12.160844802856445 + ], + [ + "▁Grav", + -12.16097354888916 + ], + [ + "▁curtains", + -12.160975456237793 + ], + [ + "▁Campus", + -12.161076545715332 + ], + [ + "▁controversial", + -12.161087036132812 + ], + [ + "▁soutien", + -12.161189079284668 + ], + [ + "▁Dell", + -12.1613187789917 + ], + [ + "▁instrumental", + -12.161431312561035 + ], + [ + "▁Nan", + -12.161514282226562 + ], + [ + "▁prom", + -12.161520957946777 + ], + [ + "▁spatial", + -12.161523818969727 + ], + [ + "Similarly", + -12.161558151245117 + ], + [ + "▁Gala", + -12.161601066589355 + ], + [ + "ultimul", + -12.16162109375 + ], + [ + "▁Vom", + -12.161761283874512 + ], + [ + "▁Foot", + -12.161784172058105 + ], + [ + "bike", + -12.1618013381958 + ], + [ + "▁acids", + -12.161979675292969 + ], + [ + "entend", + -12.162002563476562 + ], + [ + "ivă", + -12.162040710449219 + ], + [ + "▁Weitere", + -12.162124633789062 + ], + [ + "▁vitamins", + -12.162131309509277 + ], + [ + "▁enhancement", + -12.16234016418457 + ], + [ + "▁Cruise", + -12.162367820739746 + ], + [ + "assemble", + -12.162385940551758 + ], + [ + "▁spécifique", + -12.162459373474121 + ], + [ + "affaires", + -12.16261100769043 + ], + [ + "▁indispensable", + -12.1626558303833 + ], + [ + "▁logistics", + 
-12.16283130645752 + ], + [ + "▁manche", + -12.162919044494629 + ], + [ + "▁dealt", + -12.16297435760498 + ], + [ + "▁favorable", + -12.163036346435547 + ], + [ + "▁unwanted", + -12.163047790527344 + ], + [ + "▁handmade", + -12.163065910339355 + ], + [ + "▁Regi", + -12.163102149963379 + ], + [ + "safe", + -12.163134574890137 + ], + [ + "persoanele", + -12.163202285766602 + ], + [ + "▁destinat", + -12.163252830505371 + ], + [ + "▁Maxi", + -12.163299560546875 + ], + [ + "▁salmon", + -12.163454055786133 + ], + [ + "wag", + -12.163578033447266 + ], + [ + "210", + -12.163769721984863 + ], + [ + "▁warned", + -12.163865089416504 + ], + [ + "läuft", + -12.16386604309082 + ], + [ + "agging", + -12.163931846618652 + ], + [ + "▁responsabil", + -12.16398811340332 + ], + [ + "▁presse", + -12.164271354675293 + ], + [ + "▁amis", + -12.164305686950684 + ], + [ + "▁rolls", + -12.164377212524414 + ], + [ + "control", + -12.164405822753906 + ], + [ + "▁Manufacturer", + -12.164422988891602 + ], + [ + "hnen", + -12.164449691772461 + ], + [ + "▁buget", + -12.164546012878418 + ], + [ + "OW", + -12.16467571258545 + ], + [ + "etro", + -12.164745330810547 + ], + [ + "▁communauté", + -12.164837837219238 + ], + [ + "unci", + -12.164944648742676 + ], + [ + "▁Chine", + -12.164952278137207 + ], + [ + "combines", + -12.16501235961914 + ], + [ + "▁learners", + -12.165046691894531 + ], + [ + "STE", + -12.165055274963379 + ], + [ + "ckel", + -12.16511344909668 + ], + [ + "Service", + -12.165169715881348 + ], + [ + "▁veröffentlicht", + -12.165209770202637 + ], + [ + "besides", + -12.165266036987305 + ], + [ + "getragen", + -12.165349960327148 + ], + [ + "▁opponent", + -12.165521621704102 + ], + [ + "▁volum", + -12.165533065795898 + ], + [ + "▁confusing", + -12.165802001953125 + ], + [ + "invasive", + -12.165813446044922 + ], + [ + "▁conseils", + -12.165881156921387 + ], + [ + "▁vibe", + -12.165928840637207 + ], + [ + "View", + -12.166062355041504 + ], + [ + "oară", + -12.166086196899414 + ], + [ + "Link", + -12.166261672973633 + ], + [ + "▁holy", + -12.166261672973633 + ], + [ + "▁crema", + -12.16629409790039 + ], + [ + "▁Michelle", + -12.166303634643555 + ], + [ + "▁Wien", + -12.166383743286133 + ], + [ + "▁undertake", + -12.166404724121094 + ], + [ + "▁Photograph", + -12.166421890258789 + ], + [ + "humain", + -12.16645336151123 + ], + [ + "▁Hang", + -12.166545867919922 + ], + [ + "designed", + -12.16657829284668 + ], + [ + "▁analyses", + -12.166614532470703 + ], + [ + "▁compose", + -12.166653633117676 + ], + [ + "▁substantially", + -12.166765213012695 + ], + [ + "▁marking", + -12.166772842407227 + ], + [ + "▁campagne", + -12.166826248168945 + ], + [ + "▁$15", + -12.166828155517578 + ], + [ + "pharma", + -12.166972160339355 + ], + [ + "▁playoff", + -12.1669921875 + ], + [ + "▁momentum", + -12.167091369628906 + ], + [ + "Temp", + -12.16714096069336 + ], + [ + "▁vinegar", + -12.167143821716309 + ], + [ + "▁descriptions", + -12.167581558227539 + ], + [ + "christ", + -12.167656898498535 + ], + [ + "wore", + -12.16773509979248 + ], + [ + "ITY", + -12.167768478393555 + ], + [ + "stehen", + -12.167771339416504 + ], + [ + "▁insulation", + -12.1677827835083 + ], + [ + "grav", + -12.167842864990234 + ], + [ + "2.2", + -12.167887687683105 + ], + [ + "▁Explore", + -12.168028831481934 + ], + [ + "▁dye", + -12.168127059936523 + ], + [ + "stair", + -12.168155670166016 + ], + [ + "artisan", + -12.168207168579102 + ], + [ + "▁zoom", + -12.168285369873047 + ], + [ + "▁turkey", + -12.168573379516602 + ], + [ + "▁locksmith", + 
-12.168577194213867 + ], + [ + "▁sewing", + -12.168610572814941 + ], + [ + "▁modeling", + -12.168627738952637 + ], + [ + "lied", + -12.16870403289795 + ], + [ + "adel", + -12.168773651123047 + ], + [ + "▁Going", + -12.168785095214844 + ], + [ + "WH", + -12.168798446655273 + ], + [ + "▁deserves", + -12.168919563293457 + ], + [ + "▁arriving", + -12.168960571289062 + ], + [ + "OFF", + -12.169039726257324 + ], + [ + "torului", + -12.169109344482422 + ], + [ + "ucked", + -12.16921615600586 + ], + [ + "▁approached", + -12.169351577758789 + ], + [ + "▁élevé", + -12.169354438781738 + ], + [ + "▁quotidien", + -12.169416427612305 + ], + [ + "▁derzeit", + -12.16942024230957 + ], + [ + "nutzt", + -12.169656753540039 + ], + [ + "science", + -12.169729232788086 + ], + [ + "▁Emma", + -12.169841766357422 + ], + [ + "▁builds", + -12.169879913330078 + ], + [ + "▁Logo", + -12.169949531555176 + ], + [ + "▁clouds", + -12.170061111450195 + ], + [ + "inflammatory", + -12.170141220092773 + ], + [ + "țiuni", + -12.170199394226074 + ], + [ + "▁Cisco", + -12.17025089263916 + ], + [ + "▁würden", + -12.170254707336426 + ], + [ + "▁Shaw", + -12.170256614685059 + ], + [ + "▁Ell", + -12.170266151428223 + ], + [ + "avance", + -12.1703519821167 + ], + [ + "anglais", + -12.170365333557129 + ], + [ + "weil", + -12.170368194580078 + ], + [ + "▁singura", + -12.170464515686035 + ], + [ + "ACK", + -12.170489311218262 + ], + [ + "likewise", + -12.170522689819336 + ], + [ + "ographie", + -12.170646667480469 + ], + [ + "liegen", + -12.17088508605957 + ], + [ + "▁Crow", + -12.170964241027832 + ], + [ + "▁unic", + -12.171187400817871 + ], + [ + "▁Ale", + -12.171241760253906 + ], + [ + "▁păstr", + -12.17125129699707 + ], + [ + "▁informal", + -12.171337127685547 + ], + [ + "650", + -12.17136287689209 + ], + [ + "Benz", + -12.171489715576172 + ], + [ + "▁antenna", + -12.171540260314941 + ], + [ + "▁pagini", + -12.171552658081055 + ], + [ + "▁lansat", + -12.171561241149902 + ], + [ + "▁Fans", + -12.171576499938965 + ], + [ + "taine", + -12.171822547912598 + ], + [ + "JO", + -12.171853065490723 + ], + [ + "▁Tips", + -12.172091484069824 + ], + [ + "cir", + -12.172130584716797 + ], + [ + "nou", + -12.172384262084961 + ], + [ + "▁planted", + -12.17241382598877 + ], + [ + "▁steering", + -12.172423362731934 + ], + [ + "▁Waren", + -12.172475814819336 + ], + [ + "▁clearance", + -12.172515869140625 + ], + [ + "▁Moscow", + -12.172516822814941 + ], + [ + "▁Faith", + -12.172534942626953 + ], + [ + "▁Pizza", + -12.172572135925293 + ], + [ + "▁Tank", + -12.17273998260498 + ], + [ + "QUE", + -12.172783851623535 + ], + [ + "▁studii", + -12.172804832458496 + ], + [ + "éné", + -12.172829627990723 + ], + [ + "▁guerre", + -12.1728515625 + ], + [ + "▁celebr", + -12.173083305358887 + ], + [ + "▁Factory", + -12.173111915588379 + ], + [ + "▁Browse", + -12.173198699951172 + ], + [ + "▁Request", + -12.17323112487793 + ], + [ + "▁taxpayer", + -12.173311233520508 + ], + [ + "▁assert", + -12.173562049865723 + ], + [ + "unternehmen", + -12.173588752746582 + ], + [ + "▁Ergebnis", + -12.173687934875488 + ], + [ + "▁Antwort", + -12.173727035522461 + ], + [ + "▁Photography", + -12.173808097839355 + ], + [ + "▁plă", + -12.173866271972656 + ], + [ + "IME", + -12.173982620239258 + ], + [ + "▁prochaine", + -12.174074172973633 + ], + [ + "ajouter", + -12.174103736877441 + ], + [ + "▁buffet", + -12.174227714538574 + ], + [ + "▁pixels", + -12.174239158630371 + ], + [ + "▁pledge", + -12.174250602722168 + ], + [ + "▁Inhalt", + -12.17435359954834 + ], + [ + "▁chase", + 
-12.174384117126465 + ], + [ + "Flow", + -12.174493789672852 + ], + [ + "▁melodi", + -12.174872398376465 + ], + [ + "▁Abu", + -12.174991607666016 + ], + [ + "▁1979", + -12.175042152404785 + ], + [ + "▁Photos", + -12.175042152404785 + ], + [ + "▁qualifications", + -12.175148963928223 + ], + [ + "▁zis", + -12.175213813781738 + ], + [ + "IAL", + -12.175354957580566 + ], + [ + "▁lender", + -12.175390243530273 + ], + [ + "▁indiferent", + -12.175494194030762 + ], + [ + "▁behaviors", + -12.175506591796875 + ], + [ + "▁flowing", + -12.175531387329102 + ], + [ + "▁zweite", + -12.1756010055542 + ], + [ + "abl", + -12.175765037536621 + ], + [ + "Schw", + -12.176004409790039 + ], + [ + "opi", + -12.176030158996582 + ], + [ + "ggi", + -12.176164627075195 + ], + [ + "▁depart", + -12.176314353942871 + ], + [ + "▁garde", + -12.17640209197998 + ], + [ + "▁tuition", + -12.176490783691406 + ], + [ + "fälle", + -12.17650032043457 + ], + [ + "▁determina", + -12.17652702331543 + ], + [ + "▁spice", + -12.176627159118652 + ], + [ + "▁petites", + -12.176777839660645 + ], + [ + "kot", + -12.176973342895508 + ], + [ + "▁intersection", + -12.177242279052734 + ], + [ + "hak", + -12.177248001098633 + ], + [ + "▁autumn", + -12.177284240722656 + ], + [ + "▁verbunden", + -12.177284240722656 + ], + [ + "▁ferme", + -12.177287101745605 + ], + [ + "PN", + -12.17733097076416 + ], + [ + "▁insurer", + -12.177390098571777 + ], + [ + "arten", + -12.177401542663574 + ], + [ + "▁Turkish", + -12.177715301513672 + ], + [ + "▁shoulders", + -12.177732467651367 + ], + [ + "=>", + -12.177742004394531 + ], + [ + "▁Nike", + -12.177760124206543 + ], + [ + "uire", + -12.177763938903809 + ], + [ + "▁Chile", + -12.177811622619629 + ], + [ + "jon", + -12.177842140197754 + ], + [ + "▁fragrance", + -12.177884101867676 + ], + [ + "▁bean", + -12.177908897399902 + ], + [ + "ips", + -12.178108215332031 + ], + [ + "assuming", + -12.178191184997559 + ], + [ + "liens", + -12.178215026855469 + ], + [ + "tocmai", + -12.178267478942871 + ], + [ + "▁60%", + -12.178301811218262 + ], + [ + "ipped", + -12.178384780883789 + ], + [ + "DIS", + -12.178473472595215 + ], + [ + "▁predicted", + -12.178537368774414 + ], + [ + "▁Picture", + -12.178555488586426 + ], + [ + "Bahn", + -12.178796768188477 + ], + [ + "104", + -12.178854942321777 + ], + [ + "tended", + -12.178958892822266 + ], + [ + "▁approve", + -12.179031372070312 + ], + [ + "▁magasin", + -12.17908000946045 + ], + [ + "▁mindset", + -12.179208755493164 + ], + [ + "rase", + -12.179363250732422 + ], + [ + "grand", + -12.179469108581543 + ], + [ + "▁Principal", + -12.17947769165039 + ], + [ + "▁informații", + -12.17959976196289 + ], + [ + "▁legătur", + -12.179628372192383 + ], + [ + "▁Farb", + -12.179692268371582 + ], + [ + "▁Dieu", + -12.179710388183594 + ], + [ + "▁alliance", + -12.180378913879395 + ], + [ + "weiligen", + -12.180397987365723 + ], + [ + "▁Câ", + -12.18048095703125 + ], + [ + "▁counseling", + -12.180521011352539 + ], + [ + "▁traveled", + -12.180533409118652 + ], + [ + "▁translated", + -12.180558204650879 + ], + [ + "▁carne", + -12.180679321289062 + ], + [ + "aked", + -12.180707931518555 + ], + [ + "▁LCD", + -12.180868148803711 + ], + [ + "▁Folge", + -12.180909156799316 + ], + [ + "▁Erfahrungen", + -12.18093204498291 + ], + [ + "▁1981", + -12.18106460571289 + ], + [ + "▁răspuns", + -12.181075096130371 + ], + [ + "itori", + -12.18117618560791 + ], + [ + "▁elementary", + -12.181200981140137 + ], + [ + "▁vorbei", + -12.18127727508545 + ], + [ + "▁cargo", + -12.181361198425293 + ], + [ + 
"disciplinary", + -12.18140983581543 + ], + [ + "WR", + -12.181492805480957 + ], + [ + "▁counterpart", + -12.18162727355957 + ], + [ + "family", + -12.181641578674316 + ], + [ + "▁viață", + -12.181644439697266 + ], + [ + "▁Definition", + -12.18167495727539 + ], + [ + "▁Cow", + -12.18171501159668 + ], + [ + "fällig", + -12.182003021240234 + ], + [ + "▁Sicht", + -12.182025909423828 + ], + [ + "▁mum", + -12.182145118713379 + ], + [ + "▁Mediterranean", + -12.182275772094727 + ], + [ + "nev", + -12.182278633117676 + ], + [ + "bü", + -12.182293891906738 + ], + [ + "▁slave", + -12.182293891906738 + ], + [ + "schnitt", + -12.18233871459961 + ], + [ + "▁firme", + -12.182430267333984 + ], + [ + "▁spill", + -12.182454109191895 + ], + [ + "▁wages", + -12.182592391967773 + ], + [ + "▁refine", + -12.182615280151367 + ], + [ + "▁upgraded", + -12.182632446289062 + ], + [ + "▁gospel", + -12.182698249816895 + ], + [ + "▁quartier", + -12.182744979858398 + ], + [ + "▁#2", + -12.182772636413574 + ], + [ + "▁Situation", + -12.18298625946045 + ], + [ + "▁suggesting", + -12.183075904846191 + ], + [ + "▁acne", + -12.183113098144531 + ], + [ + "▁Murray", + -12.183337211608887 + ], + [ + "▁Ian", + -12.183469772338867 + ], + [ + "hören", + -12.183489799499512 + ], + [ + "bia", + -12.183603286743164 + ], + [ + "▁Bewegung", + -12.183684349060059 + ], + [ + "▁abzu", + -12.18379020690918 + ], + [ + "reveals", + -12.183795928955078 + ], + [ + "friend", + -12.184025764465332 + ], + [ + "▁Connecticut", + -12.18407917022705 + ], + [ + "▁Testament", + -12.184151649475098 + ], + [ + "▁Lit", + -12.184199333190918 + ], + [ + "▁Ship", + -12.184209823608398 + ], + [ + "▁minunat", + -12.184344291687012 + ], + [ + "▁Moving", + -12.184346199035645 + ], + [ + "▁Device", + -12.184486389160156 + ], + [ + "▁Bake", + -12.18453598022461 + ], + [ + "▁qualification", + -12.184633255004883 + ], + [ + "▁challenged", + -12.184640884399414 + ], + [ + "▁Hinweis", + -12.184721946716309 + ], + [ + "▁sechs", + -12.184769630432129 + ], + [ + "та", + -12.184903144836426 + ], + [ + "120", + -12.184904098510742 + ], + [ + "licht", + -12.184940338134766 + ], + [ + "▁supervision", + -12.185022354125977 + ], + [ + "▁milestone", + -12.18503189086914 + ], + [ + "zeig", + -12.185050964355469 + ], + [ + "▁emphasize", + -12.185224533081055 + ], + [ + "▁complain", + -12.185232162475586 + ], + [ + "sack", + -12.185341835021973 + ], + [ + "▁rebuild", + -12.185445785522461 + ], + [ + "projekt", + -12.18548583984375 + ], + [ + "▁saint", + -12.185644149780273 + ], + [ + "lette", + -12.185752868652344 + ], + [ + "rade", + -12.18580150604248 + ], + [ + "▁pacient", + -12.185893058776855 + ], + [ + "signed", + -12.186169624328613 + ], + [ + "▁mil", + -12.186261177062988 + ], + [ + "cali", + -12.186266899108887 + ], + [ + "▁brochure", + -12.186487197875977 + ], + [ + "▁Bulgaria", + -12.186488151550293 + ], + [ + "Har", + -12.186623573303223 + ], + [ + "DH", + -12.186697006225586 + ], + [ + "▁jumping", + -12.186712265014648 + ], + [ + "ären", + -12.186732292175293 + ], + [ + "▁tactics", + -12.186911582946777 + ], + [ + "▁soleil", + -12.187030792236328 + ], + [ + "lessness", + -12.18705940246582 + ], + [ + "steigen", + -12.187085151672363 + ], + [ + "▁Brief", + -12.187117576599121 + ], + [ + "▁Oz", + -12.18718433380127 + ], + [ + "credit", + -12.187239646911621 + ], + [ + "glass", + -12.187241554260254 + ], + [ + "▁Baltimore", + -12.187292098999023 + ], + [ + "varies", + -12.187445640563965 + ], + [ + "sourced", + -12.187575340270996 + ], + [ + "▁documented", + 
-12.187604904174805 + ], + [ + "▁devine", + -12.187664985656738 + ], + [ + "möglichst", + -12.187732696533203 + ], + [ + "▁früher", + -12.187756538391113 + ], + [ + "outefois", + -12.18790054321289 + ], + [ + "▁Engagement", + -12.187934875488281 + ], + [ + "▁anumit", + -12.18806266784668 + ], + [ + "▁1930", + -12.188186645507812 + ], + [ + "▁Aufgaben", + -12.188214302062988 + ], + [ + "▁lineup", + -12.188227653503418 + ], + [ + "▁Cad", + -12.188349723815918 + ], + [ + "améliorer", + -12.188437461853027 + ], + [ + "▁februarie", + -12.188499450683594 + ], + [ + "▁cancellation", + -12.188529968261719 + ], + [ + "▁locks", + -12.188577651977539 + ], + [ + "▁modèles", + -12.188711166381836 + ], + [ + "▁breakdown", + -12.188748359680176 + ], + [ + "Ticket", + -12.188810348510742 + ], + [ + "▁Chen", + -12.188855171203613 + ], + [ + "▁Competition", + -12.188910484313965 + ], + [ + "▁median", + -12.18896770477295 + ], + [ + "rische", + -12.189159393310547 + ], + [ + "▁multipli", + -12.189269065856934 + ], + [ + "▁Belgium", + -12.189305305480957 + ], + [ + "▁Physical", + -12.189308166503906 + ], + [ + "▁parameter", + -12.189432144165039 + ], + [ + "▁carrot", + -12.189435005187988 + ], + [ + "▁mandat", + -12.189617156982422 + ], + [ + "▁towel", + -12.189697265625 + ], + [ + "▁insured", + -12.189825057983398 + ], + [ + "PRI", + -12.189868927001953 + ], + [ + "etter", + -12.189915657043457 + ], + [ + "▁Oder", + -12.190083503723145 + ], + [ + "argued", + -12.190171241760254 + ], + [ + "FB", + -12.190196990966797 + ], + [ + "versicherung", + -12.190197944641113 + ], + [ + "abila", + -12.190251350402832 + ], + [ + "▁Coin", + -12.190324783325195 + ], + [ + "around", + -12.19050121307373 + ], + [ + "▁Lorsqu", + -12.190773963928223 + ], + [ + "valent", + -12.190918922424316 + ], + [ + "▁weltweit", + -12.19092082977295 + ], + [ + "Mod", + -12.191039085388184 + ], + [ + "▁defect", + -12.191044807434082 + ], + [ + "ibly", + -12.191136360168457 + ], + [ + "▁Juan", + -12.191153526306152 + ], + [ + "▁Jur", + -12.191171646118164 + ], + [ + "large", + -12.191307067871094 + ], + [ + "▁indicators", + -12.191461563110352 + ], + [ + "invest", + -12.19168472290039 + ], + [ + "▁rehabilitation", + -12.191705703735352 + ], + [ + "nag", + -12.191823959350586 + ], + [ + "▁Grundlage", + -12.191829681396484 + ], + [ + "▁Strategy", + -12.192131042480469 + ], + [ + "▁supérieur", + -12.192173957824707 + ], + [ + "▁orbit", + -12.192281723022461 + ], + [ + "▁Auftrag", + -12.192360877990723 + ], + [ + "▁Verb", + -12.192441940307617 + ], + [ + "ANA", + -12.19256591796875 + ], + [ + "▁trimis", + -12.192611694335938 + ], + [ + "▁Rub", + -12.192704200744629 + ], + [ + "institu", + -12.192732810974121 + ], + [ + "▁inspect", + -12.1927490234375 + ], + [ + "▁Princess", + -12.192757606506348 + ], + [ + "especially", + -12.192777633666992 + ], + [ + "▁combinations", + -12.192793846130371 + ], + [ + "▁gaze", + -12.192842483520508 + ], + [ + "elemente", + -12.192970275878906 + ], + [ + "deal", + -12.192980766296387 + ], + [ + "polis", + -12.193157196044922 + ], + [ + "shaw", + -12.193168640136719 + ], + [ + "▁Republicans", + -12.193203926086426 + ], + [ + "aded", + -12.193244934082031 + ], + [ + "▁Louisiana", + -12.193364143371582 + ], + [ + "▁Ville", + -12.193368911743164 + ], + [ + "▁afterwards", + -12.193389892578125 + ], + [ + "ONG", + -12.193608283996582 + ], + [ + "▁dryer", + -12.193636894226074 + ], + [ + "▁Manhattan", + -12.19374942779541 + ], + [ + "▁recomanda", + -12.19412612915039 + ], + [ + "▁juca", + -12.194253921508789 + ], + [ + 
"▁Crown", + -12.194260597229004 + ], + [ + "▁flesh", + -12.194347381591797 + ], + [ + "sichtig", + -12.194358825683594 + ], + [ + "▁rempli", + -12.19437026977539 + ], + [ + "▁deposits", + -12.19438362121582 + ], + [ + "▁Voll", + -12.194599151611328 + ], + [ + "▁analysts", + -12.194672584533691 + ], + [ + "▁Krieg", + -12.19484806060791 + ], + [ + "▁Rosa", + -12.19495964050293 + ], + [ + "▁Supply", + -12.194964408874512 + ], + [ + "GF", + -12.19497013092041 + ], + [ + "idad", + -12.195098876953125 + ], + [ + "▁flush", + -12.195103645324707 + ], + [ + "▁circular", + -12.195355415344238 + ], + [ + "▁național", + -12.195379257202148 + ], + [ + "▁lorsqu", + -12.195441246032715 + ], + [ + "▁analyst", + -12.195459365844727 + ], + [ + "▁Jahrhundert", + -12.195586204528809 + ], + [ + "▁biology", + -12.195713996887207 + ], + [ + "copy", + -12.195733070373535 + ], + [ + "▁bringt", + -12.195765495300293 + ], + [ + "▁Gospel", + -12.195780754089355 + ], + [ + "▁sorgen", + -12.195842742919922 + ], + [ + "zeichnung", + -12.196181297302246 + ], + [ + "chair", + -12.196197509765625 + ], + [ + "EB", + -12.19636344909668 + ], + [ + "▁Beth", + -12.1964111328125 + ], + [ + "115", + -12.196416854858398 + ], + [ + "▁Neue", + -12.196479797363281 + ], + [ + "▁faible", + -12.196599960327148 + ], + [ + "▁methodology", + -12.196603775024414 + ], + [ + "spiele", + -12.196647644042969 + ], + [ + "▁cherry", + -12.196727752685547 + ], + [ + "▁Mak", + -12.196802139282227 + ], + [ + "▁volet", + -12.196982383728027 + ], + [ + "funk", + -12.197196006774902 + ], + [ + "▁aktuelle", + -12.197372436523438 + ], + [ + "▁Yahoo", + -12.197408676147461 + ], + [ + "▁Zusammenarbeit", + -12.197669982910156 + ], + [ + "▁Serve", + -12.197754859924316 + ], + [ + "▁simpler", + -12.197978019714355 + ], + [ + "intégr", + -12.197990417480469 + ], + [ + "ndlich", + -12.198083877563477 + ], + [ + "▁actress", + -12.198320388793945 + ], + [ + "▁reuse", + -12.198332786560059 + ], + [ + "▁reviewing", + -12.198405265808105 + ], + [ + "statt", + -12.198457717895508 + ], + [ + "▁diving", + -12.198469161987305 + ], + [ + "▁Național", + -12.198677062988281 + ], + [ + "voi", + -12.19873332977295 + ], + [ + "Disc", + -12.198812484741211 + ], + [ + "▁Mineral", + -12.19886302947998 + ], + [ + "▁emit", + -12.199007034301758 + ], + [ + "witz", + -12.199078559875488 + ], + [ + "▁forgot", + -12.19909954071045 + ], + [ + "▁dim", + -12.199115753173828 + ], + [ + "upper", + -12.19947624206543 + ], + [ + "sichtlich", + -12.19949722290039 + ], + [ + "▁parcours", + -12.199670791625977 + ], + [ + "8:00", + -12.199697494506836 + ], + [ + "▁keyword", + -12.199701309204102 + ], + [ + "▁upgrades", + -12.199763298034668 + ], + [ + "kunden", + -12.200177192687988 + ], + [ + "▁Seg", + -12.200257301330566 + ], + [ + "▁Circle", + -12.200289726257324 + ], + [ + "▁ginger", + -12.200336456298828 + ], + [ + "mment", + -12.200516700744629 + ], + [ + "▁expenditure", + -12.200655937194824 + ], + [ + "▁parle", + -12.200693130493164 + ], + [ + "▁Counsel", + -12.200722694396973 + ], + [ + "▁Gui", + -12.200722694396973 + ], + [ + "resident", + -12.20103645324707 + ], + [ + "▁benchmark", + -12.20103931427002 + ], + [ + "▁Elektro", + -12.201064109802246 + ], + [ + "▁réalité", + -12.201064109802246 + ], + [ + "▁ridiculous", + -12.201067924499512 + ], + [ + "▁necklace", + -12.20108699798584 + ], + [ + "nian", + -12.201117515563965 + ], + [ + "▁Move", + -12.20113468170166 + ], + [ + "▁elevated", + -12.201204299926758 + ], + [ + "WE", + -12.201281547546387 + ], + [ + "▁Drum", + -12.20132064819336 
+ ], + [ + "▁Delivery", + -12.201350212097168 + ], + [ + "indicating", + -12.201452255249023 + ], + [ + "▁Benjamin", + -12.201472282409668 + ], + [ + "▁Samuel", + -12.2014741897583 + ], + [ + "bene", + -12.201666831970215 + ], + [ + "▁experienta", + -12.201676368713379 + ], + [ + "▁rocket", + -12.201839447021484 + ], + [ + "▁fossil", + -12.201883316040039 + ], + [ + "▁festive", + -12.20193099975586 + ], + [ + "▁conscience", + -12.201964378356934 + ], + [ + "▁bacon", + -12.202136993408203 + ], + [ + "▁aero", + -12.202159881591797 + ], + [ + "public", + -12.202187538146973 + ], + [ + "▁zic", + -12.202218055725098 + ], + [ + "ombre", + -12.202356338500977 + ], + [ + "▁Drain", + -12.202550888061523 + ], + [ + "7.5", + -12.202672004699707 + ], + [ + "▁Deutschen", + -12.202703475952148 + ], + [ + "reportedly", + -12.202754974365234 + ], + [ + "▁Français", + -12.203105926513672 + ], + [ + "▁enzyme", + -12.203106880187988 + ], + [ + "▁inquiry", + -12.203117370605469 + ], + [ + "▁presque", + -12.203193664550781 + ], + [ + "▁Airlines", + -12.203228950500488 + ], + [ + "▁Salon", + -12.203237533569336 + ], + [ + "▁Volunteer", + -12.203310012817383 + ], + [ + "▁modular", + -12.203349113464355 + ], + [ + "ón", + -12.203364372253418 + ], + [ + "NH", + -12.203449249267578 + ], + [ + "▁souhaite", + -12.203516960144043 + ], + [ + "social", + -12.203659057617188 + ], + [ + "▁Include", + -12.203729629516602 + ], + [ + "▁Decor", + -12.2037992477417 + ], + [ + "dded", + -12.203965187072754 + ], + [ + "▁Außen", + -12.203969955444336 + ], + [ + "rendu", + -12.20412540435791 + ], + [ + "▁MBA", + -12.204150199890137 + ], + [ + "▁columns", + -12.204155921936035 + ], + [ + "▁Wing", + -12.204436302185059 + ], + [ + "▁landmark", + -12.204442977905273 + ], + [ + "schritt", + -12.204594612121582 + ], + [ + "▁désir", + -12.204630851745605 + ], + [ + "(5)", + -12.204680442810059 + ], + [ + "▁réseaux", + -12.204693794250488 + ], + [ + "income", + -12.204710960388184 + ], + [ + "▁revised", + -12.204819679260254 + ], + [ + "HY", + -12.204863548278809 + ], + [ + "▁Explorer", + -12.204873085021973 + ], + [ + "▁Lam", + -12.204877853393555 + ], + [ + "▁almond", + -12.204910278320312 + ], + [ + "▁faux", + -12.204910278320312 + ], + [ + "opt", + -12.204923629760742 + ], + [ + "Out", + -12.204939842224121 + ], + [ + "▁virtue", + -12.205025672912598 + ], + [ + "▁Chocolate", + -12.205151557922363 + ], + [ + "▁spannend", + -12.205305099487305 + ], + [ + "▁spices", + -12.205327033996582 + ], + [ + "▁Climate", + -12.205560684204102 + ], + [ + "▁Residential", + -12.205560684204102 + ], + [ + "gung", + -12.205700874328613 + ], + [ + "▁filtr", + -12.20606803894043 + ], + [ + "circ", + -12.206123352050781 + ], + [ + "sisted", + -12.206172943115234 + ], + [ + "▁dedicat", + -12.206243515014648 + ], + [ + "▁foil", + -12.206387519836426 + ], + [ + "▁uita", + -12.206392288208008 + ], + [ + "▁lié", + -12.206402778625488 + ], + [ + "▁Demo", + -12.206409454345703 + ], + [ + "▁spoil", + -12.2064208984375 + ], + [ + "Cu", + -12.206448554992676 + ], + [ + "naut", + -12.206525802612305 + ], + [ + "▁configured", + -12.206535339355469 + ], + [ + "UK", + -12.206543922424316 + ], + [ + "▁disagree", + -12.20656967163086 + ], + [ + "Medic", + -12.206767082214355 + ], + [ + "cosm", + -12.207074165344238 + ], + [ + "Toute", + -12.207109451293945 + ], + [ + "▁beneficia", + -12.207170486450195 + ], + [ + "fassen", + -12.207327842712402 + ], + [ + "▁bail", + -12.207337379455566 + ], + [ + "igue", + -12.207439422607422 + ], + [ + "▁Mă", + -12.20744800567627 + ], + 
[ + "▁strips", + -12.20748519897461 + ], + [ + "▁Dritte", + -12.207537651062012 + ], + [ + "▁putere", + -12.207597732543945 + ], + [ + "Play", + -12.20763111114502 + ], + [ + "▁Samstag", + -12.207632064819336 + ], + [ + "▁households", + -12.207791328430176 + ], + [ + "▁persistent", + -12.207914352416992 + ], + [ + "uben", + -12.207942962646484 + ], + [ + "Web", + -12.20809555053711 + ], + [ + "▁scenery", + -12.20820140838623 + ], + [ + "▁défini", + -12.208257675170898 + ], + [ + "news", + -12.208337783813477 + ], + [ + "eira", + -12.208428382873535 + ], + [ + "▁Mumbai", + -12.208438873291016 + ], + [ + "▁Ward", + -12.208558082580566 + ], + [ + "▁ladder", + -12.2086181640625 + ], + [ + "▁plaque", + -12.208623886108398 + ], + [ + "nés", + -12.208639144897461 + ], + [ + "▁condamn", + -12.20864486694336 + ], + [ + "▁attribute", + -12.208687782287598 + ], + [ + "atti", + -12.20873737335205 + ], + [ + "▁Emily", + -12.208953857421875 + ], + [ + "▁pleine", + -12.20896053314209 + ], + [ + "▁automatisch", + -12.209004402160645 + ], + [ + "ifies", + -12.209052085876465 + ], + [ + "onna", + -12.209104537963867 + ], + [ + "▁inject", + -12.209157943725586 + ], + [ + "▁evolve", + -12.209297180175781 + ], + [ + "▁breeze", + -12.209299087524414 + ], + [ + "▁montre", + -12.209415435791016 + ], + [ + "▁memorial", + -12.209425926208496 + ], + [ + "ämlich", + -12.209465026855469 + ], + [ + "NBC", + -12.209589958190918 + ], + [ + "▁1940", + -12.209836959838867 + ], + [ + "▁trouvé", + -12.209892272949219 + ], + [ + "when", + -12.209914207458496 + ], + [ + "▁Büro", + -12.209959983825684 + ], + [ + "▁probability", + -12.209978103637695 + ], + [ + "cute", + -12.21006965637207 + ], + [ + "▁sturdy", + -12.210078239440918 + ], + [ + "AMP", + -12.210165023803711 + ], + [ + "▁Constantin", + -12.210283279418945 + ], + [ + "▁batter", + -12.21037483215332 + ], + [ + "▁bist", + -12.210470199584961 + ], + [ + "▁streams", + -12.210528373718262 + ], + [ + "rushing", + -12.21057415008545 + ], + [ + "▁shaft", + -12.21065902709961 + ], + [ + "▁proprii", + -12.210722923278809 + ], + [ + "émi", + -12.21074390411377 + ], + [ + "online", + -12.210817337036133 + ], + [ + "▁vanity", + -12.210870742797852 + ], + [ + "▁mural", + -12.210878372192383 + ], + [ + "▁distinguish", + -12.210905075073242 + ], + [ + "▁niciun", + -12.211191177368164 + ], + [ + "▁européenne", + -12.211252212524414 + ], + [ + "▁secretary", + -12.211289405822754 + ], + [ + "▁gaps", + -12.211492538452148 + ], + [ + "▁realm", + -12.211499214172363 + ], + [ + "▁elastic", + -12.211504936218262 + ], + [ + "▁Avoid", + -12.211519241333008 + ], + [ + "▁mauvais", + -12.211931228637695 + ], + [ + "▁innovations", + -12.212663650512695 + ], + [ + "▁suprem", + -12.212776184082031 + ], + [ + "▁vederea", + -12.212817192077637 + ], + [ + "wenden", + -12.212892532348633 + ], + [ + "-40", + -12.213075637817383 + ], + [ + "prenant", + -12.213155746459961 + ], + [ + "utilisateur", + -12.213210105895996 + ], + [ + "▁Oliver", + -12.213228225708008 + ], + [ + "111", + -12.21326732635498 + ], + [ + "▁manifestation", + -12.213382720947266 + ], + [ + "▁Rachel", + -12.213458061218262 + ], + [ + "agog", + -12.21348762512207 + ], + [ + "▁seamless", + -12.213534355163574 + ], + [ + "▁Employee", + -12.213576316833496 + ], + [ + "▁dimanche", + -12.213582038879395 + ], + [ + "▁banii", + -12.213631629943848 + ], + [ + "▁Ruth", + -12.213781356811523 + ], + [ + "▁Roy", + -12.21385383605957 + ], + [ + "▁homeless", + -12.2139253616333 + ], + [ + "▁Lower", + -12.213932037353516 + ], + [ + "health", + 
-12.21393871307373 + ], + [ + "▁atenti", + -12.2140474319458 + ], + [ + "▁touched", + -12.214183807373047 + ], + [ + "May", + -12.214195251464844 + ], + [ + "▁Buc", + -12.214225769042969 + ], + [ + "▁explored", + -12.214393615722656 + ], + [ + "▁declare", + -12.214461326599121 + ], + [ + "▁garment", + -12.214469909667969 + ], + [ + "▁buzz", + -12.214483261108398 + ], + [ + "▁rappel", + -12.214662551879883 + ], + [ + "▁uscat", + -12.214903831481934 + ], + [ + "▁Hyper", + -12.214914321899414 + ], + [ + "Etat", + -12.215007781982422 + ], + [ + "▁Titel", + -12.215035438537598 + ], + [ + "product", + -12.215191841125488 + ], + [ + "woman", + -12.215280532836914 + ], + [ + "▁Gab", + -12.215450286865234 + ], + [ + "▁advances", + -12.215615272521973 + ], + [ + "2/", + -12.215753555297852 + ], + [ + "prone", + -12.215770721435547 + ], + [ + "kö", + -12.215986251831055 + ], + [ + "▁counting", + -12.21599292755127 + ], + [ + "Sollte", + -12.216043472290039 + ], + [ + "▁Konzept", + -12.216063499450684 + ], + [ + "▁backgrounds", + -12.216153144836426 + ], + [ + "jährige", + -12.216154098510742 + ], + [ + "▁Alltag", + -12.216187477111816 + ], + [ + "▁metrics", + -12.21619701385498 + ], + [ + "▁illustrated", + -12.216222763061523 + ], + [ + "▁Charge", + -12.21631908416748 + ], + [ + "▁thoughtful", + -12.216423034667969 + ], + [ + "gesetz", + -12.216527938842773 + ], + [ + "pfen", + -12.216611862182617 + ], + [ + "▁déroul", + -12.216713905334473 + ], + [ + "▁checkout", + -12.216876029968262 + ], + [ + "quette", + -12.216936111450195 + ], + [ + "▁pierdut", + -12.2170991897583 + ], + [ + "▁Seat", + -12.217140197753906 + ], + [ + "▁linen", + -12.217193603515625 + ], + [ + "archiv", + -12.217245101928711 + ], + [ + "arna", + -12.217254638671875 + ], + [ + "importe", + -12.21742057800293 + ], + [ + "▁PHP", + -12.217496871948242 + ], + [ + "▁Parents", + -12.217503547668457 + ], + [ + "▁Birmingham", + -12.217513084411621 + ], + [ + "▁Integr", + -12.217588424682617 + ], + [ + "▁Mason", + -12.217607498168945 + ], + [ + "zieht", + -12.217781066894531 + ], + [ + "▁camps", + -12.217803001403809 + ], + [ + "OG", + -12.21786117553711 + ], + [ + "▁syrup", + -12.217927932739258 + ], + [ + "▁Cookies", + -12.217928886413574 + ], + [ + "▁Comfort", + -12.217955589294434 + ], + [ + "ută", + -12.217976570129395 + ], + [ + "abia", + -12.217979431152344 + ], + [ + "zeci", + -12.218003273010254 + ], + [ + "▁Gardens", + -12.218009948730469 + ], + [ + "▁incidents", + -12.218149185180664 + ], + [ + "▁participat", + -12.218235969543457 + ], + [ + "▁glimpse", + -12.218342781066895 + ], + [ + "5.5", + -12.218437194824219 + ], + [ + "▁dealers", + -12.218469619750977 + ], + [ + "▁Grande", + -12.218565940856934 + ], + [ + "▁raid", + -12.218944549560547 + ], + [ + "owing", + -12.21903133392334 + ], + [ + "▁contrary", + -12.219109535217285 + ], + [ + "Earlier", + -12.219138145446777 + ], + [ + "tien", + -12.21916389465332 + ], + [ + "drop", + -12.219169616699219 + ], + [ + "▁angajat", + -12.219359397888184 + ], + [ + "▁procesul", + -12.219515800476074 + ], + [ + "▁focal", + -12.219564437866211 + ], + [ + "▁impart", + -12.219703674316406 + ], + [ + "▁Abschluss", + -12.219749450683594 + ], + [ + "carui", + -12.219830513000488 + ], + [ + "insul", + -12.220277786254883 + ], + [ + "▁creamy", + -12.220283508300781 + ], + [ + "eille", + -12.22032356262207 + ], + [ + "suppl", + -12.220335960388184 + ], + [ + "▁Heaven", + -12.220471382141113 + ], + [ + "éna", + -12.220667839050293 + ], + [ + "▁swap", + -12.220739364624023 + ], + [ + "▁vreau", + 
-12.220762252807617 + ], + [ + "▁Bryan", + -12.220809936523438 + ], + [ + "▁Zug", + -12.220815658569336 + ], + [ + "▁glance", + -12.220848083496094 + ], + [ + "▁elimin", + -12.220900535583496 + ], + [ + "▁yeux", + -12.221084594726562 + ], + [ + "wehr", + -12.221238136291504 + ], + [ + "2.5", + -12.221287727355957 + ], + [ + "▁poses", + -12.221364974975586 + ], + [ + "▁parcel", + -12.221585273742676 + ], + [ + "▁Apartment", + -12.221749305725098 + ], + [ + "▁NASA", + -12.221768379211426 + ], + [ + "▁bénéfici", + -12.22187614440918 + ], + [ + "▁Umgebung", + -12.221890449523926 + ], + [ + "asia", + -12.221946716308594 + ], + [ + "abi", + -12.221967697143555 + ], + [ + "coup", + -12.222002983093262 + ], + [ + "synchron", + -12.222017288208008 + ], + [ + "▁Sicherheits", + -12.222029685974121 + ], + [ + "bic", + -12.222076416015625 + ], + [ + "▁distract", + -12.222148895263672 + ], + [ + "▁rentals", + -12.222163200378418 + ], + [ + "constru", + -12.222290992736816 + ], + [ + "curs", + -12.222345352172852 + ], + [ + "genannten", + -12.222386360168457 + ], + [ + "▁Shanghai", + -12.222501754760742 + ], + [ + "▁vague", + -12.222504615783691 + ], + [ + "▁Leather", + -12.22250747680664 + ], + [ + "▁Vintage", + -12.222532272338867 + ], + [ + "pointing", + -12.22259521484375 + ], + [ + "avant", + -12.22268295288086 + ], + [ + "gues", + -12.222949028015137 + ], + [ + "sweise", + -12.22302532196045 + ], + [ + "▁Greater", + -12.223065376281738 + ], + [ + "fig", + -12.22310733795166 + ], + [ + "▁Blut", + -12.223217964172363 + ], + [ + "▁Stellen", + -12.22326946258545 + ], + [ + "▁isolation", + -12.22337818145752 + ], + [ + "▁overhead", + -12.22338581085205 + ], + [ + "▁wondered", + -12.223508834838867 + ], + [ + "essai", + -12.223609924316406 + ], + [ + "aves", + -12.2236328125 + ], + [ + "▁Shore", + -12.223637580871582 + ], + [ + "▁INC", + -12.223709106445312 + ], + [ + "rufen", + -12.223980903625488 + ], + [ + "▁magnifique", + -12.224069595336914 + ], + [ + "▁intéressant", + -12.224072456359863 + ], + [ + "▁tanks", + -12.224075317382812 + ], + [ + "▁Tun", + -12.224367141723633 + ], + [ + "▁approaching", + -12.224390029907227 + ], + [ + "▁relay", + -12.224479675292969 + ], + [ + "▁Küche", + -12.224529266357422 + ], + [ + "describing", + -12.224587440490723 + ], + [ + "▁Certification", + -12.224588394165039 + ], + [ + "▁Breakfast", + -12.224597930908203 + ], + [ + "▁Frame", + -12.224891662597656 + ], + [ + "▁Stoff", + -12.224909782409668 + ], + [ + "▁victime", + -12.224924087524414 + ], + [ + "Observ", + -12.224943161010742 + ], + [ + "▁gutter", + -12.224989891052246 + ], + [ + "standard", + -12.225220680236816 + ], + [ + "▁Sci", + -12.225244522094727 + ], + [ + "▁sept", + -12.225377082824707 + ], + [ + "▁Potter", + -12.225423812866211 + ], + [ + "letter", + -12.22577953338623 + ], + [ + "▁tobacco", + -12.225852012634277 + ], + [ + "▁threatened", + -12.22591781616211 + ], + [ + "MW", + -12.225936889648438 + ], + [ + "▁Cher", + -12.225944519042969 + ], + [ + "0.1", + -12.225957870483398 + ], + [ + "mitted", + -12.22596263885498 + ], + [ + "zustellen", + -12.225967407226562 + ], + [ + "dominated", + -12.226165771484375 + ], + [ + "/16", + -12.22623348236084 + ], + [ + "POS", + -12.226317405700684 + ], + [ + "▁Zin", + -12.226373672485352 + ], + [ + "▁Okay", + -12.226381301879883 + ], + [ + "▁projected", + -12.226405143737793 + ], + [ + "▁selber", + -12.226548194885254 + ], + [ + "▁proiectului", + -12.2266206741333 + ], + [ + "▁Shell", + -12.226683616638184 + ], + [ + "▁cartridge", + -12.226706504821777 + ], + 
[ + "Message", + -12.2267484664917 + ], + [ + "haben", + -12.226799964904785 + ], + [ + "▁slides", + -12.226829528808594 + ], + [ + "▁gleichzeitig", + -12.226886749267578 + ], + [ + "▁Racing", + -12.227051734924316 + ], + [ + "▁20,", + -12.227070808410645 + ], + [ + "▁separat", + -12.227094650268555 + ], + [ + "▁repeatedly", + -12.227110862731934 + ], + [ + "▁casting", + -12.22728157043457 + ], + [ + "▁sacred", + -12.227283477783203 + ], + [ + "verfahren", + -12.227387428283691 + ], + [ + "▁echilibr", + -12.227514266967773 + ], + [ + "▁rebel", + -12.2277250289917 + ], + [ + "säu", + -12.227794647216797 + ], + [ + "ummy", + -12.227815628051758 + ], + [ + "▁backing", + -12.227889060974121 + ], + [ + "▁sponsors", + -12.227912902832031 + ], + [ + "▁Stress", + -12.22802448272705 + ], + [ + "▁Rules", + -12.228083610534668 + ], + [ + "▁render", + -12.228241920471191 + ], + [ + "▁funktioniert", + -12.228384971618652 + ], + [ + "▁Pearl", + -12.228472709655762 + ], + [ + "▁Scho", + -12.228527069091797 + ], + [ + "schwer", + -12.228595733642578 + ], + [ + "▁descoperit", + -12.228702545166016 + ], + [ + "holen", + -12.228720664978027 + ], + [ + "imposed", + -12.228960990905762 + ], + [ + "▁appearing", + -12.228968620300293 + ], + [ + "▁höher", + -12.229082107543945 + ], + [ + "▁Victorian", + -12.229111671447754 + ], + [ + "▁founding", + -12.229155540466309 + ], + [ + "▁Polish", + -12.229239463806152 + ], + [ + "▁anume", + -12.229248046875 + ], + [ + "Box", + -12.229488372802734 + ], + [ + "▁intrat", + -12.229598999023438 + ], + [ + "▁Inspiration", + -12.229610443115234 + ], + [ + "▁Canyon", + -12.229625701904297 + ], + [ + "▁Franklin", + -12.22974681854248 + ], + [ + "▁susceptible", + -12.22982120513916 + ], + [ + "trap", + -12.229839324951172 + ], + [ + "▁Roma", + -12.23000717163086 + ], + [ + "▁ethics", + -12.230009078979492 + ], + [ + "▁Privat", + -12.230027198791504 + ], + [ + "▁journalists", + -12.230090141296387 + ], + [ + "▁Universität", + -12.230246543884277 + ], + [ + "▁conditioner", + -12.230308532714844 + ], + [ + "folge", + -12.230327606201172 + ], + [ + "kirche", + -12.230416297912598 + ], + [ + "gehalten", + -12.230530738830566 + ], + [ + "midi", + -12.230570793151855 + ], + [ + "▁radar", + -12.230619430541992 + ], + [ + "▁Yard", + -12.230775833129883 + ], + [ + "▁professionnelle", + -12.230863571166992 + ], + [ + "▁Orchestra", + -12.230870246887207 + ], + [ + "▁immigrants", + -12.230870246887207 + ], + [ + "▁refined", + -12.230929374694824 + ], + [ + "▁Bishop", + -12.231036186218262 + ], + [ + "string", + -12.231095314025879 + ], + [ + "▁majoritatea", + -12.231231689453125 + ], + [ + "▁workflow", + -12.23123836517334 + ], + [ + "▁întreg", + -12.231306076049805 + ], + [ + "went", + -12.231563568115234 + ], + [ + "▁trat", + -12.231689453125 + ], + [ + "felul", + -12.23176383972168 + ], + [ + "▁hardwood", + -12.231821060180664 + ], + [ + "▁Task", + -12.231867790222168 + ], + [ + "branded", + -12.231921195983887 + ], + [ + "▁cinq", + -12.231966018676758 + ], + [ + "▁curb", + -12.232041358947754 + ], + [ + "▁Discount", + -12.232043266296387 + ], + [ + "▁Episode", + -12.232131958007812 + ], + [ + "▁Knowledge", + -12.232144355773926 + ], + [ + "▁tricky", + -12.232173919677734 + ], + [ + "▁characteristic", + -12.232233047485352 + ], + [ + "▁plata", + -12.23226261138916 + ], + [ + "▁Labour", + -12.23232650756836 + ], + [ + "▁Tha", + -12.232372283935547 + ], + [ + "▁Liefer", + -12.232430458068848 + ], + [ + "▁Reader", + -12.232471466064453 + ], + [ + "▁Linda", + -12.232521057128906 + ], + [ + 
"ittlerweile", + -12.232552528381348 + ], + [ + "defining", + -12.232564926147461 + ], + [ + "▁delayed", + -12.232635498046875 + ], + [ + "▁Bewertung", + -12.232674598693848 + ], + [ + "▁Unique", + -12.232791900634766 + ], + [ + "▁Champion", + -12.232866287231445 + ], + [ + "2008", + -12.232897758483887 + ], + [ + "▁conclu", + -12.232934951782227 + ], + [ + "▁câștig", + -12.2329740524292 + ], + [ + "▁scheduling", + -12.2329740524292 + ], + [ + "▁sailing", + -12.233116149902344 + ], + [ + "▁Storm", + -12.23318862915039 + ], + [ + "▁Stil", + -12.23320198059082 + ], + [ + "▁Album", + -12.233211517333984 + ], + [ + "▁ultime", + -12.233343124389648 + ], + [ + "url", + -12.233369827270508 + ], + [ + "▁terrific", + -12.23339557647705 + ], + [ + "▁remedy", + -12.233396530151367 + ], + [ + "▁Around", + -12.233592987060547 + ], + [ + "▁Kni", + -12.233756065368652 + ], + [ + "etty", + -12.23376750946045 + ], + [ + "Managing", + -12.233809471130371 + ], + [ + "▁Bedeutung", + -12.233816146850586 + ], + [ + "▁earthquake", + -12.233817100524902 + ], + [ + "▁Telefon", + -12.233818054199219 + ], + [ + "▁Upper", + -12.233869552612305 + ], + [ + "▁validation", + -12.233892440795898 + ], + [ + "-22", + -12.233997344970703 + ], + [ + "▁queue", + -12.23401165008545 + ], + [ + "tinde", + -12.234025001525879 + ], + [ + "built", + -12.234047889709473 + ], + [ + "▁voix", + -12.234125137329102 + ], + [ + "▁Resource", + -12.234126091003418 + ], + [ + "ţiuni", + -12.234143257141113 + ], + [ + "▁satisfying", + -12.234299659729004 + ], + [ + "▁Kohl", + -12.234441757202148 + ], + [ + "▁Materials", + -12.234618186950684 + ], + [ + "▁esp", + -12.234732627868652 + ], + [ + "enseignement", + -12.234773635864258 + ], + [ + "danach", + -12.234883308410645 + ], + [ + "peux", + -12.234932899475098 + ], + [ + "▁deployed", + -12.235113143920898 + ], + [ + "▁1976", + -12.235126495361328 + ], + [ + "ușor", + -12.235334396362305 + ], + [ + "élection", + -12.235380172729492 + ], + [ + "ettes", + -12.235437393188477 + ], + [ + "▁Madison", + -12.235506057739258 + ], + [ + "108", + -12.235685348510742 + ], + [ + "berger", + -12.235696792602539 + ], + [ + "▁pedal", + -12.235702514648438 + ], + [ + "▁quasi", + -12.235820770263672 + ], + [ + "▁lend", + -12.235843658447266 + ], + [ + "VER", + -12.235940933227539 + ], + [ + "▁chapters", + -12.236002922058105 + ], + [ + "▁idei", + -12.23600959777832 + ], + [ + "Deine", + -12.236034393310547 + ], + [ + "▁endure", + -12.236092567443848 + ], + [ + "▁Studios", + -12.236259460449219 + ], + [ + "structure", + -12.236274719238281 + ], + [ + "▁puiss", + -12.236370086669922 + ], + [ + "▁Morning", + -12.236443519592285 + ], + [ + "guide", + -12.236462593078613 + ], + [ + "▁Wave", + -12.236617088317871 + ], + [ + "▁banque", + -12.236879348754883 + ], + [ + "änd", + -12.236912727355957 + ], + [ + "oubli", + -12.237070083618164 + ], + [ + "▁mixer", + -12.237125396728516 + ], + [ + "▁remedi", + -12.237210273742676 + ], + [ + "▁scop", + -12.237421989440918 + ], + [ + "▁Rosen", + -12.237561225891113 + ], + [ + "▁spital", + -12.23773193359375 + ], + [ + "blau", + -12.237811088562012 + ], + [ + "▁financiar", + -12.237865447998047 + ], + [ + "avour", + -12.237871170043945 + ], + [ + "Def", + -12.238025665283203 + ], + [ + "▁socket", + -12.238076210021973 + ], + [ + "▁occurring", + -12.238360404968262 + ], + [ + "▁munci", + -12.238368034362793 + ], + [ + "▁realiza", + -12.238426208496094 + ], + [ + "▁beating", + -12.2384614944458 + ], + [ + "▁Phillip", + -12.238490104675293 + ], + [ + "▁courant", + 
-12.238509178161621 + ], + [ + "Auto", + -12.238608360290527 + ], + [ + "▁Lager", + -12.238685607910156 + ], + [ + "▁folos", + -12.238696098327637 + ], + [ + "▁moyens", + -12.238770484924316 + ], + [ + "▁Ec", + -12.238780975341797 + ], + [ + "▁Strip", + -12.238788604736328 + ], + [ + "sparen", + -12.238848686218262 + ], + [ + "▁Nintendo", + -12.238886833190918 + ], + [ + "▁Murphy", + -12.238912582397461 + ], + [ + "▁flux", + -12.239034652709961 + ], + [ + "▁mots", + -12.239034652709961 + ], + [ + "▁rechts", + -12.239045143127441 + ], + [ + "▁cardio", + -12.239142417907715 + ], + [ + "avoiding", + -12.239343643188477 + ], + [ + "érer", + -12.239453315734863 + ], + [ + "hiel", + -12.239461898803711 + ], + [ + "▁rezistent", + -12.239521980285645 + ], + [ + "close", + -12.23954963684082 + ], + [ + "hésitez", + -12.239596366882324 + ], + [ + "Hz", + -12.239631652832031 + ], + [ + "▁elaborate", + -12.239689826965332 + ], + [ + "▁permanently", + -12.239709854125977 + ], + [ + "▁Pittsburgh", + -12.239734649658203 + ], + [ + "▁counties", + -12.239819526672363 + ], + [ + "▁bookmark", + -12.239919662475586 + ], + [ + "▁Label", + -12.239965438842773 + ], + [ + "▁Freude", + -12.239974021911621 + ], + [ + "▁preferat", + -12.239986419677734 + ], + [ + "▁Mein", + -12.239995002746582 + ], + [ + "▁Crew", + -12.240218162536621 + ], + [ + "▁clips", + -12.240253448486328 + ], + [ + "8,000", + -12.240263938903809 + ], + [ + "▁recognise", + -12.240311622619629 + ], + [ + "ință", + -12.240365028381348 + ], + [ + "▁prieteni", + -12.240447044372559 + ], + [ + "Heute", + -12.240522384643555 + ], + [ + "ancienne", + -12.240534782409668 + ], + [ + "▁annoying", + -12.240583419799805 + ], + [ + "▁awful", + -12.240704536437988 + ], + [ + "▁Comments", + -12.240774154663086 + ], + [ + "▁musician", + -12.240830421447754 + ], + [ + "▁Elite", + -12.241023063659668 + ], + [ + "▁patri", + -12.241024017333984 + ], + [ + "▁Coupon", + -12.241037368774414 + ], + [ + "▁Farbe", + -12.241097450256348 + ], + [ + "▁contribui", + -12.241110801696777 + ], + [ + "hari", + -12.241294860839844 + ], + [ + "▁activitati", + -12.24161148071289 + ], + [ + "▁Traum", + -12.2416410446167 + ], + [ + "1.8", + -12.24170207977295 + ], + [ + "▁Healthcare", + -12.24172306060791 + ], + [ + "▁refresh", + -12.241943359375 + ], + [ + "▁Maha", + -12.242060661315918 + ], + [ + "▁dép", + -12.242082595825195 + ], + [ + "▁Studien", + -12.242314338684082 + ], + [ + "▁spectacol", + -12.242378234863281 + ], + [ + "impro", + -12.24254035949707 + ], + [ + "▁commentaire", + -12.242544174194336 + ], + [ + "ported", + -12.242570877075195 + ], + [ + "▁reclam", + -12.242612838745117 + ], + [ + "▁Verkauf", + -12.242634773254395 + ], + [ + "▁newspapers", + -12.242661476135254 + ], + [ + "▁iubit", + -12.242838859558105 + ], + [ + "▁Kenne", + -12.242844581604004 + ], + [ + "▁Consultant", + -12.242958068847656 + ], + [ + "▁stau", + -12.242986679077148 + ], + [ + "TON", + -12.243057250976562 + ], + [ + "▁Fehler", + -12.243070602416992 + ], + [ + "▁lettre", + -12.243167877197266 + ], + [ + "▁investigator", + -12.243172645568848 + ], + [ + "▁quantities", + -12.243184089660645 + ], + [ + "ogram", + -12.243208885192871 + ], + [ + "avaient", + -12.24323844909668 + ], + [ + "▁reducere", + -12.243265151977539 + ], + [ + "Lite", + -12.243402481079102 + ], + [ + "kurs", + -12.243443489074707 + ], + [ + "pré", + -12.24383544921875 + ], + [ + "pap", + -12.243898391723633 + ], + [ + "▁Männer", + -12.243983268737793 + ], + [ + "▁gauche", + -12.244022369384766 + ], + [ + "▁ähnlich", + 
-12.244027137756348 + ], + [ + "▁sunlight", + -12.244063377380371 + ], + [ + "▁rester", + -12.24422550201416 + ], + [ + "jumped", + -12.244586944580078 + ], + [ + "▁exclusiv", + -12.24463176727295 + ], + [ + "▁electoral", + -12.244640350341797 + ], + [ + "▁Portal", + -12.244650840759277 + ], + [ + "ulent", + -12.244688987731934 + ], + [ + "▁sonst", + -12.24474048614502 + ], + [ + "entraîne", + -12.24483585357666 + ], + [ + "▁repas", + -12.244837760925293 + ], + [ + "▁redus", + -12.244858741760254 + ], + [ + "aku", + -12.244866371154785 + ], + [ + "▁Graphic", + -12.245251655578613 + ], + [ + "▁geringe", + -12.24539566040039 + ], + [ + "plätze", + -12.245474815368652 + ], + [ + "Trebuie", + -12.245479583740234 + ], + [ + "▁rezultate", + -12.245479583740234 + ], + [ + "▁configure", + -12.245683670043945 + ], + [ + "▁PV", + -12.245834350585938 + ], + [ + "▁insect", + -12.246109962463379 + ], + [ + "▁Reviews", + -12.246129035949707 + ], + [ + "releasing", + -12.246186256408691 + ], + [ + "▁appliance", + -12.246246337890625 + ], + [ + "▁oferte", + -12.246482849121094 + ], + [ + "▁WILL", + -12.246484756469727 + ], + [ + "rion", + -12.246499061584473 + ], + [ + "▁Cole", + -12.246582984924316 + ], + [ + "▁1975", + -12.246650695800781 + ], + [ + "Admin", + -12.24677848815918 + ], + [ + "▁parade", + -12.246800422668457 + ], + [ + "▁mélange", + -12.24692153930664 + ], + [ + "▁shortage", + -12.247007369995117 + ], + [ + "▁Measure", + -12.247400283813477 + ], + [ + "anchmal", + -12.24742603302002 + ], + [ + "▁transfers", + -12.247432708740234 + ], + [ + "▁sistemului", + -12.247573852539062 + ], + [ + "▁deschide", + -12.247819900512695 + ], + [ + "▁Künstler", + -12.247821807861328 + ], + [ + "▁Plain", + -12.247848510742188 + ], + [ + "▁messaging", + -12.247855186462402 + ], + [ + "▁metabolism", + -12.247879981994629 + ], + [ + "fill", + -12.248031616210938 + ], + [ + "▁Bomb", + -12.24814224243164 + ], + [ + "usine", + -12.248208045959473 + ], + [ + "▁restart", + -12.248233795166016 + ], + [ + "▁Discussion", + -12.248336791992188 + ], + [ + "smith", + -12.248472213745117 + ], + [ + "▁Bh", + -12.248607635498047 + ], + [ + "▁sap", + -12.248689651489258 + ], + [ + "Moo", + -12.248714447021484 + ], + [ + "▁indirect", + -12.248785972595215 + ], + [ + "▁eingesetzt", + -12.248863220214844 + ], + [ + "▁Hip", + -12.248870849609375 + ], + [ + "▁iulie", + -12.249113082885742 + ], + [ + "▁atac", + -12.249201774597168 + ], + [ + "▁passport", + -12.2492036819458 + ], + [ + "▁Egyptian", + -12.249290466308594 + ], + [ + "▁soluți", + -12.249349594116211 + ], + [ + "▁cakes", + -12.249356269836426 + ], + [ + "▁Fellow", + -12.24949836730957 + ], + [ + "▁collision", + -12.249533653259277 + ], + [ + "▁abundant", + -12.249961853027344 + ], + [ + "▁Wonder", + -12.24997329711914 + ], + [ + "▁theories", + -12.249991416931152 + ], + [ + "landed", + -12.250046730041504 + ], + [ + "▁meantime", + -12.2500638961792 + ], + [ + "schlüsse", + -12.25022029876709 + ], + [ + "▁helicopter", + -12.25039005279541 + ], + [ + "Voici", + -12.250479698181152 + ], + [ + "▁Honey", + -12.25049877166748 + ], + [ + "▁deleted", + -12.250511169433594 + ], + [ + "▁Projekte", + -12.250523567199707 + ], + [ + "▁gasi", + -12.2506742477417 + ], + [ + "applique", + -12.25068473815918 + ], + [ + "TAL", + -12.250699043273926 + ], + [ + "notch", + -12.250699996948242 + ], + [ + "▁Response", + -12.250818252563477 + ], + [ + "▁deveni", + -12.250818252563477 + ], + [ + "▁regulate", + -12.250829696655273 + ], + [ + "▁vegetarian", + -12.25083065032959 + ], + [ + 
"▁Pastor", + -12.250880241394043 + ], + [ + "▁Strong", + -12.250940322875977 + ], + [ + "▁élèves", + -12.251055717468262 + ], + [ + "▁alimente", + -12.25113582611084 + ], + [ + "graphy", + -12.251181602478027 + ], + [ + "▁spirits", + -12.251266479492188 + ], + [ + "▁Cau", + -12.251282691955566 + ], + [ + "determin", + -12.251304626464844 + ], + [ + "arilor", + -12.251382827758789 + ], + [ + "▁masura", + -12.251470565795898 + ], + [ + "RAN", + -12.251500129699707 + ], + [ + "marked", + -12.251564979553223 + ], + [ + "cuba", + -12.251602172851562 + ], + [ + "omni", + -12.251609802246094 + ], + [ + "▁detox", + -12.251662254333496 + ], + [ + "▁quartz", + -12.251741409301758 + ], + [ + "▁Bug", + -12.25177001953125 + ], + [ + "▁Sugar", + -12.25185775756836 + ], + [ + "▁opponents", + -12.25197982788086 + ], + [ + "▁solved", + -12.25207805633545 + ], + [ + "semn", + -12.252257347106934 + ], + [ + "▁Prepare", + -12.252558708190918 + ], + [ + "ffel", + -12.252586364746094 + ], + [ + "▁Highlight", + -12.252608299255371 + ], + [ + "▁curent", + -12.252618789672852 + ], + [ + "▁praktisch", + -12.252626419067383 + ], + [ + "▁lending", + -12.252676963806152 + ], + [ + "▁minority", + -12.252752304077148 + ], + [ + "Free", + -12.252970695495605 + ], + [ + "business", + -12.252997398376465 + ], + [ + "▁outlook", + -12.253097534179688 + ], + [ + "▁assessments", + -12.253168106079102 + ], + [ + "▁Brother", + -12.253266334533691 + ], + [ + "▁partager", + -12.25326919555664 + ], + [ + "▁Brun", + -12.25329303741455 + ], + [ + "▁pedestrian", + -12.25339412689209 + ], + [ + "anța", + -12.253413200378418 + ], + [ + "▁recycled", + -12.253457069396973 + ], + [ + "▁quicker", + -12.253626823425293 + ], + [ + "▁lamps", + -12.253683090209961 + ], + [ + "▁nationally", + -12.253813743591309 + ], + [ + "▁Supplier", + -12.253823280334473 + ], + [ + "ograph", + -12.253936767578125 + ], + [ + "engage", + -12.253981590270996 + ], + [ + "▁Marg", + -12.254131317138672 + ], + [ + "▁aplicare", + -12.254181861877441 + ], + [ + "▁scared", + -12.254194259643555 + ], + [ + "▁accredited", + -12.254255294799805 + ], + [ + "▁outils", + -12.25436019897461 + ], + [ + "▁bâtiment", + -12.254446029663086 + ], + [ + "▁existed", + -12.254586219787598 + ], + [ + "gegangen", + -12.254619598388672 + ], + [ + "▁elevation", + -12.25463581085205 + ], + [ + "▁Tradition", + -12.254670143127441 + ], + [ + "▁Gericht", + -12.254677772521973 + ], + [ + "hub", + -12.254680633544922 + ], + [ + "strahl", + -12.25473690032959 + ], + [ + "build", + -12.254796981811523 + ], + [ + "▁Customers", + -12.25487232208252 + ], + [ + "klasse", + -12.254890441894531 + ], + [ + "▁pierre", + -12.254895210266113 + ], + [ + "(2)", + -12.255006790161133 + ], + [ + "Life", + -12.255125999450684 + ], + [ + "▁bachelor", + -12.25513744354248 + ], + [ + "▁quad", + -12.255195617675781 + ], + [ + "▁dispozitiv", + -12.25523567199707 + ], + [ + "106", + -12.255266189575195 + ], + [ + "▁suburb", + -12.255495071411133 + ], + [ + "▁1977", + -12.255586624145508 + ], + [ + "▁Alzheimer", + -12.255973815917969 + ], + [ + "▁spicy", + -12.255988121032715 + ], + [ + "▁spreading", + -12.256002426147461 + ], + [ + "nötigen", + -12.256078720092773 + ], + [ + "▁novels", + -12.256104469299316 + ], + [ + "▁responsabilité", + -12.256141662597656 + ], + [ + "▁Bud", + -12.256332397460938 + ], + [ + "▁desirable", + -12.256407737731934 + ], + [ + "TOR", + -12.256444931030273 + ], + [ + "five", + -12.256547927856445 + ], + [ + "▁Firmen", + -12.256860733032227 + ], + [ + "oeuvre", + -12.257075309753418 + ], + 
[ + "grass", + -12.257233619689941 + ], + [ + "▁practically", + -12.257277488708496 + ], + [ + "▁runners", + -12.257281303405762 + ], + [ + "▁mothers", + -12.257341384887695 + ], + [ + "Shop", + -12.257345199584961 + ], + [ + "▁Chicken", + -12.257408142089844 + ], + [ + "▁License", + -12.257593154907227 + ], + [ + "▁Bach", + -12.25765323638916 + ], + [ + "earliest", + -12.257729530334473 + ], + [ + "▁replica", + -12.25774097442627 + ], + [ + "▁haunt", + -12.257833480834961 + ], + [ + "▁materi", + -12.257854461669922 + ], + [ + "▁Finland", + -12.257893562316895 + ], + [ + "▁europene", + -12.257919311523438 + ], + [ + "abilă", + -12.257944107055664 + ], + [ + "cati", + -12.258007049560547 + ], + [ + "▁cholesterol", + -12.258132934570312 + ], + [ + "...).", + -12.258151054382324 + ], + [ + "cardi", + -12.25838565826416 + ], + [ + "▁(12", + -12.258387565612793 + ], + [ + "analyzed", + -12.258506774902344 + ], + [ + "▁respondents", + -12.258591651916504 + ], + [ + "▁höchste", + -12.258646011352539 + ], + [ + "▁Kern", + -12.258647918701172 + ], + [ + "▁knapp", + -12.258781433105469 + ], + [ + "▁Someone", + -12.258955001831055 + ], + [ + "▁équipé", + -12.258997917175293 + ], + [ + "credited", + -12.259106636047363 + ], + [ + "▁numar", + -12.259163856506348 + ], + [ + "▁Ace", + -12.259185791015625 + ], + [ + "zentrum", + -12.2592191696167 + ], + [ + "nehmer", + -12.259270668029785 + ], + [ + "arrivée", + -12.259282112121582 + ], + [ + "ELE", + -12.259291648864746 + ], + [ + "clean", + -12.259418487548828 + ], + [ + "Boost", + -12.259538650512695 + ], + [ + "call", + -12.259575843811035 + ], + [ + "▁Polizei", + -12.259659767150879 + ], + [ + "▁Januar", + -12.259663581848145 + ], + [ + "▁Tile", + -12.259681701660156 + ], + [ + "▁traduc", + -12.259744644165039 + ], + [ + "▁promptly", + -12.259773254394531 + ], + [ + "limit", + -12.259809494018555 + ], + [ + "▁recharge", + -12.2598237991333 + ], + [ + "▁wipe", + -12.259862899780273 + ], + [ + "▁Norway", + -12.26001262664795 + ], + [ + "▁Municipal", + -12.260077476501465 + ], + [ + "▁medieval", + -12.260117530822754 + ], + [ + "▁Treat", + -12.26021671295166 + ], + [ + "Orient", + -12.260283470153809 + ], + [ + "▁Stewart", + -12.260294914245605 + ], + [ + "▁lol", + -12.26039981842041 + ], + [ + "appartement", + -12.260522842407227 + ], + [ + "▁payer", + -12.260655403137207 + ], + [ + "▁splash", + -12.260723114013672 + ], + [ + "doubtedly", + -12.260726928710938 + ], + [ + "dry", + -12.260846138000488 + ], + [ + "▁Forex", + -12.260939598083496 + ], + [ + "▁Edinburgh", + -12.260943412780762 + ], + [ + "▁Traditional", + -12.261032104492188 + ], + [ + "▁1968", + -12.261134147644043 + ], + [ + "▁glow", + -12.261248588562012 + ], + [ + "Alternatively", + -12.261265754699707 + ], + [ + "▁partly", + -12.261354446411133 + ], + [ + "égi", + -12.261401176452637 + ], + [ + "▁Prices", + -12.261640548706055 + ], + [ + "haupt", + -12.261651992797852 + ], + [ + "▁sentences", + -12.261711120605469 + ], + [ + "ouvre", + -12.261735916137695 + ], + [ + "▁Liter", + -12.261746406555176 + ], + [ + "▁Important", + -12.2620267868042 + ], + [ + "▁Collins", + -12.262077331542969 + ], + [ + "▁reproduce", + -12.262106895446777 + ], + [ + "▁selten", + -12.262124061584473 + ], + [ + "▁Mitte", + -12.262170791625977 + ], + [ + "OA", + -12.262174606323242 + ], + [ + "▁Sister", + -12.262358665466309 + ], + [ + "▁responding", + -12.262385368347168 + ], + [ + "▁ballot", + -12.262455940246582 + ], + [ + "▁Nutrition", + -12.262460708618164 + ], + [ + "occurrence", + -12.26246452331543 + ], + 
[ + "Atunci", + -12.262604713439941 + ], + [ + "▁hockey", + -12.262680053710938 + ], + [ + "▁undertaking", + -12.262697219848633 + ], + [ + "▁educators", + -12.262885093688965 + ], + [ + "▁Swedish", + -12.262893676757812 + ], + [ + "▁Recovery", + -12.262894630432129 + ], + [ + "▁circum", + -12.262910842895508 + ], + [ + "▁chains", + -12.263084411621094 + ], + [ + "▁genug", + -12.263113021850586 + ], + [ + "▁Pil", + -12.263227462768555 + ], + [ + "▁farms", + -12.263265609741211 + ], + [ + "▁simplicity", + -12.263336181640625 + ], + [ + "-21", + -12.263399124145508 + ], + [ + "▁partition", + -12.263493537902832 + ], + [ + "▁Relations", + -12.26360034942627 + ], + [ + "zentrale", + -12.263794898986816 + ], + [ + "lapse", + -12.263855934143066 + ], + [ + "▁toast", + -12.263862609863281 + ], + [ + "▁citi", + -12.263946533203125 + ], + [ + "▁longtemps", + -12.263984680175781 + ], + [ + "maj", + -12.264448165893555 + ], + [ + "▁Cin", + -12.264483451843262 + ], + [ + "zeichen", + -12.264504432678223 + ], + [ + "▁Zoo", + -12.264567375183105 + ], + [ + "▁frisch", + -12.264570236206055 + ], + [ + "▁permettra", + -12.264595031738281 + ], + [ + "▁Liberty", + -12.264642715454102 + ], + [ + "▁playground", + -12.264873504638672 + ], + [ + "▁Mate", + -12.265031814575195 + ], + [ + "▁evolving", + -12.265066146850586 + ], + [ + "national", + -12.265207290649414 + ], + [ + "▁signifie", + -12.265279769897461 + ], + [ + "▁Related", + -12.265292167663574 + ], + [ + "NES", + -12.265337944030762 + ], + [ + "euil", + -12.265473365783691 + ], + [ + "▁struggles", + -12.265542030334473 + ], + [ + "▁instinct", + -12.265628814697266 + ], + [ + "arbre", + -12.26608943939209 + ], + [ + "▁commands", + -12.266222953796387 + ], + [ + "▁frumoase", + -12.26637077331543 + ], + [ + "▁watches", + -12.266779899597168 + ], + [ + "NM", + -12.266804695129395 + ], + [ + "▁influential", + -12.266807556152344 + ], + [ + "▁gewesen", + -12.266901969909668 + ], + [ + "▁Pictures", + -12.267224311828613 + ], + [ + "▁HVAC", + -12.267242431640625 + ], + [ + "▁skate", + -12.26732063293457 + ], + [ + "▁Robot", + -12.267327308654785 + ], + [ + "▁Boys", + -12.267404556274414 + ], + [ + "▁Mutter", + -12.267425537109375 + ], + [ + "▁marques", + -12.267539024353027 + ], + [ + "utiliser", + -12.267793655395508 + ], + [ + "▁amazed", + -12.267799377441406 + ], + [ + "ächtig", + -12.26783275604248 + ], + [ + "▁Success", + -12.267870903015137 + ], + [ + "gramm", + -12.267956733703613 + ], + [ + "▁1972", + -12.267956733703613 + ], + [ + "▁marina", + -12.268269538879395 + ], + [ + "▁lou", + -12.268321990966797 + ], + [ + "▁précis", + -12.268380165100098 + ], + [ + "ographic", + -12.268482208251953 + ], + [ + "people", + -12.26848316192627 + ], + [ + "fahr", + -12.268547058105469 + ], + [ + "▁Contemporary", + -12.268550872802734 + ], + [ + "▁frustrating", + -12.26858139038086 + ], + [ + "chide", + -12.268704414367676 + ], + [ + "1.5", + -12.268807411193848 + ], + [ + "▁ankle", + -12.268850326538086 + ], + [ + "▁proximity", + -12.268986701965332 + ], + [ + "▁Leute", + -12.269006729125977 + ], + [ + "UA", + -12.269031524658203 + ], + [ + "union", + -12.269131660461426 + ], + [ + "▁recovered", + -12.269133567810059 + ], + [ + "▁sword", + -12.269216537475586 + ], + [ + "▁Mut", + -12.26923942565918 + ], + [ + "▁Rin", + -12.269360542297363 + ], + [ + "▁lectures", + -12.26942253112793 + ], + [ + "▁licensing", + -12.269423484802246 + ], + [ + "MAC", + -12.269498825073242 + ], + [ + "▁commute", + -12.269776344299316 + ], + [ + "Acesta", + -12.269858360290527 + ], + [ 
+ "▁Koch", + -12.270088195800781 + ], + [ + "▁depozit", + -12.270119667053223 + ], + [ + "▁erstmal", + -12.270163536071777 + ], + [ + "arhi", + -12.270271301269531 + ], + [ + "▁Normal", + -12.270462036132812 + ], + [ + "EZ", + -12.270464897155762 + ], + [ + "ărilor", + -12.270986557006836 + ], + [ + "▁favoris", + -12.271041870117188 + ], + [ + "▁$9", + -12.271050453186035 + ], + [ + "▁Lawrence", + -12.271172523498535 + ], + [ + "▁fixing", + -12.271200180053711 + ], + [ + "▁researching", + -12.271288871765137 + ], + [ + "▁Pant", + -12.271467208862305 + ], + [ + "▁candid", + -12.271490097045898 + ], + [ + "▁Arkansas", + -12.27160930633545 + ], + [ + "▁bitcoin", + -12.271612167358398 + ], + [ + "ва", + -12.271645545959473 + ], + [ + "▁Finger", + -12.271692276000977 + ], + [ + "▁SRL", + -12.271718978881836 + ], + [ + "Arg", + -12.271797180175781 + ], + [ + "trade", + -12.271903991699219 + ], + [ + "▁extraction", + -12.271941184997559 + ], + [ + "▁footprint", + -12.2720308303833 + ], + [ + "▁folosite", + -12.272085189819336 + ], + [ + "▁Flex", + -12.272184371948242 + ], + [ + "▁dys", + -12.272294998168945 + ], + [ + "▁Wright", + -12.272343635559082 + ], + [ + "▁multitude", + -12.272378921508789 + ], + [ + "▁Chu", + -12.272494316101074 + ], + [ + "▁Jerry", + -12.27249526977539 + ], + [ + "▁notebook", + -12.272722244262695 + ], + [ + "▁SIM", + -12.272932052612305 + ], + [ + "dietary", + -12.272963523864746 + ], + [ + "▁polished", + -12.272984504699707 + ], + [ + "▁carriers", + -12.272993087768555 + ], + [ + "▁cardiac", + -12.27299976348877 + ], + [ + "▁burned", + -12.273038864135742 + ], + [ + "▁sealed", + -12.273062705993652 + ], + [ + "▁pumps", + -12.273224830627441 + ], + [ + "▁consumed", + -12.273233413696289 + ], + [ + "▁Teaching", + -12.273446083068848 + ], + [ + "▁daughters", + -12.27348518371582 + ], + [ + "serviciile", + -12.273600578308105 + ], + [ + "▁Teams", + -12.273690223693848 + ], + [ + "▁avoided", + -12.273903846740723 + ], + [ + "▁compagnie", + -12.274019241333008 + ], + [ + "▁mașin", + -12.274024963378906 + ], + [ + "▁Sean", + -12.27418041229248 + ], + [ + "▁arunc", + -12.274208068847656 + ], + [ + "kräfte", + -12.274238586425781 + ], + [ + "vani", + -12.274255752563477 + ], + [ + "Metall", + -12.27437973022461 + ], + [ + "2009", + -12.274449348449707 + ], + [ + "moi", + -12.274688720703125 + ], + [ + "▁THAT", + -12.274700164794922 + ], + [ + "▁Ny", + -12.274809837341309 + ], + [ + "▁countertops", + -12.274860382080078 + ], + [ + "Pod", + -12.274938583374023 + ], + [ + "amente", + -12.274943351745605 + ], + [ + "▁offshore", + -12.275001525878906 + ], + [ + "luti", + -12.275087356567383 + ], + [ + "parked", + -12.275160789489746 + ], + [ + "ajout", + -12.275247573852539 + ], + [ + "Shirt", + -12.275328636169434 + ], + [ + "▁3/4", + -12.275389671325684 + ], + [ + "▁gratuite", + -12.27543830871582 + ], + [ + "mètres", + -12.27557373046875 + ], + [ + "▁Wish", + -12.2755765914917 + ], + [ + "▁holistic", + -12.27558422088623 + ], + [ + "gren", + -12.275607109069824 + ], + [ + "compiled", + -12.275660514831543 + ], + [ + "▁innocent", + -12.275779724121094 + ], + [ + "▁sorte", + -12.275787353515625 + ], + [ + "▁insulin", + -12.275792121887207 + ], + [ + "▁Academic", + -12.275996208190918 + ], + [ + "▁acrylic", + -12.27600383758545 + ], + [ + "▁hinzu", + -12.27616024017334 + ], + [ + "▁compression", + -12.27619457244873 + ], + [ + "▁viral", + -12.276220321655273 + ], + [ + "▁stereo", + -12.2764892578125 + ], + [ + "▁Concept", + -12.276542663574219 + ], + [ + "▁Margaret", + 
-12.276659965515137 + ], + [ + "▁consolidation", + -12.276875495910645 + ], + [ + "Figure", + -12.277058601379395 + ], + [ + "zzo", + -12.277061462402344 + ], + [ + "▁Egg", + -12.277098655700684 + ], + [ + "weiterhin", + -12.277213096618652 + ], + [ + "▁Vista", + -12.277252197265625 + ], + [ + "▁necessity", + -12.277316093444824 + ], + [ + "▁kayak", + -12.277490615844727 + ], + [ + "▁consensus", + -12.277535438537598 + ], + [ + "▁Katz", + -12.277602195739746 + ], + [ + "▁Warren", + -12.277640342712402 + ], + [ + "▁custody", + -12.277755737304688 + ], + [ + "++", + -12.277759552001953 + ], + [ + "▁paiement", + -12.277782440185547 + ], + [ + "▁foul", + -12.277878761291504 + ], + [ + "Chaque", + -12.277934074401855 + ], + [ + "▁Syrian", + -12.277998924255371 + ], + [ + "▁photographers", + -12.278056144714355 + ], + [ + "▁dismiss", + -12.278270721435547 + ], + [ + "▁Gaz", + -12.278526306152344 + ], + [ + "▁développer", + -12.278529167175293 + ], + [ + "▁Dakota", + -12.27863883972168 + ], + [ + "▁cardiovascular", + -12.278642654418945 + ], + [ + "▁tattoo", + -12.278858184814453 + ], + [ + "▁Lighting", + -12.278918266296387 + ], + [ + "▁nowhere", + -12.278940200805664 + ], + [ + "vada", + -12.27895450592041 + ], + [ + "▁Favor", + -12.279084205627441 + ], + [ + "ruled", + -12.2791748046875 + ], + [ + "▁Dating", + -12.2793550491333 + ], + [ + "gain", + -12.279963493347168 + ], + [ + "rism", + -12.28016471862793 + ], + [ + "coloured", + -12.280169486999512 + ], + [ + "▁refugees", + -12.280184745788574 + ], + [ + "▁Schm", + -12.2803955078125 + ], + [ + "▁happily", + -12.280402183532715 + ], + [ + "▁specification", + -12.280607223510742 + ], + [ + "WM", + -12.280736923217773 + ], + [ + "▁intro", + -12.280823707580566 + ], + [ + "rack", + -12.28097915649414 + ], + [ + "characterized", + -12.28107738494873 + ], + [ + "▁externe", + -12.281136512756348 + ], + [ + "▁arrives", + -12.28114128112793 + ], + [ + "WO", + -12.281181335449219 + ], + [ + "bericht", + -12.281233787536621 + ], + [ + "▁delays", + -12.281242370605469 + ], + [ + "▁Flight", + -12.281256675720215 + ], + [ + "1-3", + -12.281524658203125 + ], + [ + "▁Singh", + -12.281548500061035 + ], + [ + "▁shifting", + -12.281651496887207 + ], + [ + "▁dashboard", + -12.281729698181152 + ], + [ + "▁lieux", + -12.281781196594238 + ], + [ + "▁validate", + -12.281901359558105 + ], + [ + "▁uniquement", + -12.281963348388672 + ], + [ + "clip", + -12.28199291229248 + ], + [ + "cov", + -12.282132148742676 + ], + [ + "▁tendance", + -12.282215118408203 + ], + [ + "èle", + -12.282258033752441 + ], + [ + "▁incepe", + -12.282261848449707 + ], + [ + "▁chunk", + -12.282585144042969 + ], + [ + "▁Nr", + -12.28266716003418 + ], + [ + "▁Montana", + -12.282674789428711 + ], + [ + "▁sticks", + -12.28277587890625 + ], + [ + "▁caps", + -12.28309154510498 + ], + [ + "▁Jimmy", + -12.283167839050293 + ], + [ + "▁Levi", + -12.283285140991211 + ], + [ + "▁cables", + -12.28345012664795 + ], + [ + "▁SB", + -12.283550262451172 + ], + [ + "▁thème", + -12.2836275100708 + ], + [ + "ADA", + -12.283672332763672 + ], + [ + "▁garant", + -12.283686637878418 + ], + [ + "▁Joint", + -12.283820152282715 + ], + [ + "▁partage", + -12.28398323059082 + ], + [ + "schreib", + -12.284119606018066 + ], + [ + "ether", + -12.28420352935791 + ], + [ + "▁Klima", + -12.284303665161133 + ], + [ + "▁medicines", + -12.284317016601562 + ], + [ + "▁pH", + -12.284320831298828 + ], + [ + "Architect", + -12.284378051757812 + ], + [ + "știi", + -12.284396171569824 + ], + [ + "▁retrouve", + -12.284700393676758 + ], + 
[ + "▁posture", + -12.284753799438477 + ], + [ + "Feature", + -12.284773826599121 + ], + [ + "▁drying", + -12.284884452819824 + ], + [ + "trifft", + -12.28488826751709 + ], + [ + "ibi", + -12.285079002380371 + ], + [ + "▁rezerv", + -12.285116195678711 + ], + [ + "▁Vă", + -12.28518009185791 + ], + [ + "▁Speaker", + -12.285282135009766 + ], + [ + "▁illustration", + -12.285319328308105 + ], + [ + "oooo", + -12.285419464111328 + ], + [ + "▁initiated", + -12.285518646240234 + ], + [ + "PK", + -12.285545349121094 + ], + [ + "▁algorithms", + -12.285630226135254 + ], + [ + "▁zice", + -12.285757064819336 + ], + [ + "WI", + -12.28581428527832 + ], + [ + "urgence", + -12.285823822021484 + ], + [ + "▁bloggers", + -12.285887718200684 + ], + [ + "▁realitate", + -12.285894393920898 + ], + [ + "eks", + -12.28598690032959 + ], + [ + "▁cushions", + -12.286149024963379 + ], + [ + "▁Kri", + -12.286224365234375 + ], + [ + "▁réalisation", + -12.286396026611328 + ], + [ + "▁Photoshop", + -12.286407470703125 + ], + [ + "cret", + -12.286462783813477 + ], + [ + "faire", + -12.286613464355469 + ], + [ + "▁Cei", + -12.286782264709473 + ], + [ + "ICO", + -12.286789894104004 + ], + [ + "Contin", + -12.28681755065918 + ], + [ + "▁Builder", + -12.286916732788086 + ], + [ + "look", + -12.28698444366455 + ], + [ + "▁tenants", + -12.287023544311523 + ], + [ + "▁gloves", + -12.287113189697266 + ], + [ + "Day", + -12.287169456481934 + ], + [ + "firmly", + -12.28725814819336 + ], + [ + "CIA", + -12.287352561950684 + ], + [ + "▁TVA", + -12.28741455078125 + ], + [ + "▁notifications", + -12.287446975708008 + ], + [ + "▁Higher", + -12.287459373474121 + ], + [ + "▁Weihnachts", + -12.287491798400879 + ], + [ + "▁blur", + -12.287755012512207 + ], + [ + "ов", + -12.288087844848633 + ], + [ + "feder", + -12.288159370422363 + ], + [ + "▁explosion", + -12.288171768188477 + ], + [ + "▁Fenster", + -12.288189888000488 + ], + [ + "▁junge", + -12.288225173950195 + ], + [ + "▁Highland", + -12.288230895996094 + ], + [ + "▁Lü", + -12.288290023803711 + ], + [ + "▁Alba", + -12.28832721710205 + ], + [ + "▁Dort", + -12.288338661193848 + ], + [ + "▁recruiting", + -12.28835391998291 + ], + [ + "▁Multiple", + -12.288549423217773 + ], + [ + "▁animated", + -12.288604736328125 + ], + [ + "▁Virgin", + -12.288637161254883 + ], + [ + "1000", + -12.288676261901855 + ], + [ + "▁resin", + -12.288700103759766 + ], + [ + "▁matrix", + -12.288826942443848 + ], + [ + "irri", + -12.289011001586914 + ], + [ + "▁chiffre", + -12.28904914855957 + ], + [ + "▁Corps", + -12.289252281188965 + ], + [ + "▁advocacy", + -12.28927230834961 + ], + [ + "▁pozitiv", + -12.289274215698242 + ], + [ + "▁pouss", + -12.289451599121094 + ], + [ + "événement", + -12.28950309753418 + ], + [ + "▁pielii", + -12.289717674255371 + ], + [ + "onnais", + -12.289750099182129 + ], + [ + "▁Statement", + -12.289754867553711 + ], + [ + "crimin", + -12.289868354797363 + ], + [ + "hidrat", + -12.289942741394043 + ], + [ + "▁Jugendliche", + -12.290057182312012 + ], + [ + "TRI", + -12.290223121643066 + ], + [ + "erra", + -12.290240287780762 + ], + [ + "chat", + -12.290321350097656 + ], + [ + "▁traits", + -12.290359497070312 + ], + [ + "▁incentives", + -12.29038143157959 + ], + [ + "▁accelerate", + -12.290568351745605 + ], + [ + "woven", + -12.290633201599121 + ], + [ + "UST", + -12.290688514709473 + ], + [ + "▁premiers", + -12.290717124938965 + ], + [ + "▁Ferien", + -12.290755271911621 + ], + [ + "▁mariage", + -12.290796279907227 + ], + [ + "▁financially", + -12.290801048278809 + ], + [ + "gesellschaft", + 
-12.290863037109375 + ], + [ + "▁situaţi", + -12.290865898132324 + ], + [ + "▁quoted", + -12.291373252868652 + ], + [ + "▁periodic", + -12.291421890258789 + ], + [ + "▁chaos", + -12.291543960571289 + ], + [ + "▁remodel", + -12.29159927368164 + ], + [ + "▁Contractor", + -12.291641235351562 + ], + [ + "▁recuper", + -12.291729927062988 + ], + [ + "▁driveway", + -12.291755676269531 + ], + [ + "▁entertain", + -12.291765213012695 + ], + [ + "▁condus", + -12.291769027709961 + ], + [ + "▁chefs", + -12.29184341430664 + ], + [ + "pak", + -12.291866302490234 + ], + [ + "▁possède", + -12.291948318481445 + ], + [ + "▁outreach", + -12.291984558105469 + ], + [ + "▁navig", + -12.292036056518555 + ], + [ + "▁renewal", + -12.292071342468262 + ], + [ + "▁Rice", + -12.292309761047363 + ], + [ + "▁Czech", + -12.292398452758789 + ], + [ + "▁entstehen", + -12.292445182800293 + ], + [ + "▁droite", + -12.292448997497559 + ], + [ + "▁Investor", + -12.292497634887695 + ], + [ + "▁Soci", + -12.29250431060791 + ], + [ + "▁scalp", + -12.292622566223145 + ], + [ + "▁politiques", + -12.292815208435059 + ], + [ + "▁plaintiff", + -12.292841911315918 + ], + [ + "extending", + -12.29287052154541 + ], + [ + "▁paperwork", + -12.29300594329834 + ], + [ + "vizi", + -12.293142318725586 + ], + [ + "assisting", + -12.29317569732666 + ], + [ + "local", + -12.293272972106934 + ], + [ + "▁Wear", + -12.293323516845703 + ], + [ + "▁descend", + -12.293340682983398 + ], + [ + "▁Wikipedia", + -12.293513298034668 + ], + [ + "▁Consiliului", + -12.293516159057617 + ], + [ + "▁Nokia", + -12.293540000915527 + ], + [ + "▁facult", + -12.293560028076172 + ], + [ + "▁altogether", + -12.293851852416992 + ], + [ + "▁rankings", + -12.29391860961914 + ], + [ + "▁downloading", + -12.293953895568848 + ], + [ + "QU", + -12.294007301330566 + ], + [ + "▁Olive", + -12.294041633605957 + ], + [ + "▁backdrop", + -12.294110298156738 + ], + [ + "▁recomandat", + -12.294116020202637 + ], + [ + "▁Faculty", + -12.294184684753418 + ], + [ + "ANS", + -12.294220924377441 + ], + [ + "▁fracture", + -12.294225692749023 + ], + [ + "job", + -12.29448127746582 + ], + [ + "▁anticipate", + -12.294525146484375 + ], + [ + "▁drift", + -12.294543266296387 + ], + [ + "▁Marco", + -12.294632911682129 + ], + [ + "▁witnessed", + -12.294700622558594 + ], + [ + "▁comprend", + -12.294974327087402 + ], + [ + "▁bulb", + -12.29504680633545 + ], + [ + "▁shallow", + -12.295059204101562 + ], + [ + "stärke", + -12.295063972473145 + ], + [ + "▁Jessica", + -12.295080184936523 + ], + [ + "▁démarche", + -12.29508113861084 + ], + [ + "▁traditionally", + -12.29508113861084 + ], + [ + "Deputy", + -12.295093536376953 + ], + [ + "▁rivers", + -12.295260429382324 + ], + [ + "▁livraison", + -12.29531192779541 + ], + [ + "▁lacking", + -12.295421600341797 + ], + [ + "▁remodeling", + -12.295426368713379 + ], + [ + "▁acesteia", + -12.295514106750488 + ], + [ + "▁grosse", + -12.295669555664062 + ], + [ + "▁propus", + -12.295833587646484 + ], + [ + "lessly", + -12.29587459564209 + ], + [ + "▁Kredit", + -12.295931816101074 + ], + [ + "reputable", + -12.295981407165527 + ], + [ + "▁Sell", + -12.2960205078125 + ], + [ + "▁Crime", + -12.296111106872559 + ], + [ + "Ent", + -12.296310424804688 + ], + [ + "finity", + -12.296422004699707 + ], + [ + "▁Complex", + -12.296500205993652 + ], + [ + "easing", + -12.296638488769531 + ], + [ + "dynamic", + -12.296670913696289 + ], + [ + "▁eaten", + -12.296727180480957 + ], + [ + "gezogen", + -12.296734809875488 + ], + [ + "▁2004,", + -12.296774864196777 + ], + [ + "▁Muslims", + 
-12.296822547912598 + ], + [ + "▁Sprache", + -12.296883583068848 + ], + [ + "▁Truth", + -12.296927452087402 + ], + [ + "▁guarantees", + -12.296928405761719 + ], + [ + "/5", + -12.29712963104248 + ], + [ + "”).", + -12.297135353088379 + ], + [ + "▁Medium", + -12.2972993850708 + ], + [ + "▁décidé", + -12.297445297241211 + ], + [ + "▁balcony", + -12.29747200012207 + ], + [ + "leuchte", + -12.297502517700195 + ], + [ + "hik", + -12.297849655151367 + ], + [ + "▁Agriculture", + -12.298221588134766 + ], + [ + "▁securities", + -12.298221588134766 + ], + [ + "Probably", + -12.298224449157715 + ], + [ + "▁macar", + -12.29824161529541 + ], + [ + "▁Signal", + -12.298399925231934 + ], + [ + "lake", + -12.298677444458008 + ], + [ + "▁compétences", + -12.298726081848145 + ], + [ + "▁proprietary", + -12.298812866210938 + ], + [ + "allons", + -12.298850059509277 + ], + [ + "▁belongs", + -12.298916816711426 + ], + [ + "▁missile", + -12.298958778381348 + ], + [ + "țiune", + -12.298999786376953 + ], + [ + "▁Integration", + -12.299116134643555 + ], + [ + "▁testimony", + -12.299120903015137 + ], + [ + "▁wesentlich", + -12.299142837524414 + ], + [ + "▁donors", + -12.299152374267578 + ], + [ + "▁pivot", + -12.299202919006348 + ], + [ + "▁Uber", + -12.299219131469727 + ], + [ + "▁databases", + -12.299281120300293 + ], + [ + "▁studi", + -12.299317359924316 + ], + [ + "totdeauna", + -12.299351692199707 + ], + [ + "▁briefly", + -12.299449920654297 + ], + [ + "▁livr", + -12.29952335357666 + ], + [ + "▁CRM", + -12.299581527709961 + ], + [ + "gone", + -12.299697875976562 + ], + [ + "10)", + -12.299761772155762 + ], + [ + "▁zilele", + -12.299920082092285 + ], + [ + "Basically", + -12.300008773803711 + ], + [ + "▁medie", + -12.300041198730469 + ], + [ + "spotted", + -12.30006217956543 + ], + [ + "▁troubles", + -12.30009937286377 + ], + [ + "▁acknowledged", + -12.300176620483398 + ], + [ + "350", + -12.300185203552246 + ], + [ + "LB", + -12.300273895263672 + ], + [ + "Phy", + -12.30038833618164 + ], + [ + "natal", + -12.300397872924805 + ], + [ + "illé", + -12.300445556640625 + ], + [ + "bilder", + -12.300625801086426 + ], + [ + "▁apples", + -12.300636291503906 + ], + [ + "graphical", + -12.300889015197754 + ], + [ + "organiser", + -12.301024436950684 + ], + [ + "▁ochii", + -12.301040649414062 + ], + [ + "glas", + -12.301178932189941 + ], + [ + "CAP", + -12.301180839538574 + ], + [ + "▁Doors", + -12.301331520080566 + ], + [ + "▁Eis", + -12.30156135559082 + ], + [ + "tipuri", + -12.301590919494629 + ], + [ + "▁Worth", + -12.301684379577637 + ], + [ + "izează", + -12.301719665527344 + ], + [ + "nunț", + -12.30180549621582 + ], + [ + "▁Trip", + -12.30186653137207 + ], + [ + "ISS", + -12.301976203918457 + ], + [ + "efficient", + -12.30201530456543 + ], + [ + "Luckily", + -12.302099227905273 + ], + [ + "▁vase", + -12.302133560180664 + ], + [ + "▁gay", + -12.302343368530273 + ], + [ + "▁certificates", + -12.302434921264648 + ], + [ + "riad", + -12.302549362182617 + ], + [ + "stab", + -12.302570343017578 + ], + [ + "affiche", + -12.302604675292969 + ], + [ + "▁iPod", + -12.302645683288574 + ], + [ + "▁aștept", + -12.302726745605469 + ], + [ + "▁$500", + -12.302751541137695 + ], + [ + "▁Catherine", + -12.302952766418457 + ], + [ + "▁Circuit", + -12.302957534790039 + ], + [ + "▁ranch", + -12.303045272827148 + ], + [ + "▁consequence", + -12.303118705749512 + ], + [ + "listened", + -12.303131103515625 + ], + [ + "▁Options", + -12.303187370300293 + ], + [ + "feed", + -12.30318832397461 + ], + [ + "▁adviser", + -12.303248405456543 + ], 
+ [ + "▁présenter", + -12.30333423614502 + ], + [ + "substant", + -12.30337905883789 + ], + [ + "▁Flag", + -12.303604125976562 + ], + [ + "▁Keith", + -12.30366325378418 + ], + [ + "▁inima", + -12.303709983825684 + ], + [ + "▁substrate", + -12.30373764038086 + ], + [ + "▁charger", + -12.303803443908691 + ], + [ + "▁reporter", + -12.303844451904297 + ], + [ + "ütz", + -12.304068565368652 + ], + [ + "▁unten", + -12.30417537689209 + ], + [ + "▁sympa", + -12.304542541503906 + ], + [ + "▁defeated", + -12.304600715637207 + ], + [ + "ändig", + -12.304644584655762 + ], + [ + "individu", + -12.304747581481934 + ], + [ + "▁Straßen", + -12.304774284362793 + ], + [ + "▁Nepal", + -12.304791450500488 + ], + [ + "million", + -12.304803848266602 + ], + [ + "▁Cake", + -12.30499267578125 + ], + [ + "▁investigations", + -12.30526065826416 + ], + [ + "▁inspector", + -12.3054780960083 + ], + [ + "▁Campbell", + -12.305486679077148 + ], + [ + "▁consommation", + -12.305489540100098 + ], + [ + "▁Ministerul", + -12.305628776550293 + ], + [ + "Advisory", + -12.305749893188477 + ], + [ + "▁Leistungs", + -12.305939674377441 + ], + [ + "▁Pull", + -12.306157112121582 + ], + [ + "▁lover", + -12.306194305419922 + ], + [ + "▁trunk", + -12.306380271911621 + ], + [ + "▁folosesc", + -12.30639934539795 + ], + [ + "pom", + -12.306558609008789 + ], + [ + "wunder", + -12.306794166564941 + ], + [ + "▁happier", + -12.306801795959473 + ], + [ + "▁embark", + -12.30689525604248 + ], + [ + "▁mediul", + -12.3069486618042 + ], + [ + "riff", + -12.306973457336426 + ], + [ + "▁copilul", + -12.307039260864258 + ], + [ + "ommage", + -12.307126998901367 + ], + [ + "rechnung", + -12.307218551635742 + ], + [ + "NU", + -12.307220458984375 + ], + [ + "▁fellowship", + -12.307395935058594 + ], + [ + "▁Mental", + -12.307403564453125 + ], + [ + "▁fever", + -12.3074312210083 + ], + [ + "▁silly", + -12.307547569274902 + ], + [ + "Object", + -12.30756664276123 + ], + [ + "NV", + -12.307591438293457 + ], + [ + "от", + -12.30774974822998 + ], + [ + "▁Strand", + -12.307762145996094 + ], + [ + "▁Exist", + -12.30777359008789 + ], + [ + "warum", + -12.307832717895508 + ], + [ + "CY", + -12.307848930358887 + ], + [ + "kä", + -12.307856559753418 + ], + [ + "!!!!!", + -12.307869911193848 + ], + [ + "▁moarte", + -12.30793571472168 + ], + [ + "▁waterfall", + -12.308024406433105 + ], + [ + "left", + -12.30815601348877 + ], + [ + "▁Nursing", + -12.308225631713867 + ], + [ + "▁invalid", + -12.30826187133789 + ], + [ + "struktur", + -12.308385848999023 + ], + [ + "Allerdings", + -12.30838680267334 + ], + [ + "étranger", + -12.30838680267334 + ], + [ + "▁prost", + -12.308517456054688 + ], + [ + "▁Parent", + -12.308562278747559 + ], + [ + "▁întreag", + -12.308611869812012 + ], + [ + "▁compensate", + -12.308871269226074 + ], + [ + "▁sometime", + -12.308955192565918 + ], + [ + "graduate", + -12.308968544006348 + ], + [ + "▁Carter", + -12.30898380279541 + ], + [ + "▁crap", + -12.308998107910156 + ], + [ + "▁mathematics", + -12.309067726135254 + ], + [ + "resemble", + -12.309069633483887 + ], + [ + "Dame", + -12.309152603149414 + ], + [ + "▁Swa", + -12.309198379516602 + ], + [ + "▁celebrity", + -12.309239387512207 + ], + [ + "▁verified", + -12.309338569641113 + ], + [ + "▁Behind", + -12.309349060058594 + ], + [ + "carbon", + -12.309432983398438 + ], + [ + "▁gateway", + -12.309490203857422 + ], + [ + "▁ambitious", + -12.30952262878418 + ], + [ + "▁Wellness", + -12.30966567993164 + ], + [ + "30,000", + -12.30968189239502 + ], + [ + "defined", + -12.309929847717285 + ], + [ + 
"specializes", + -12.310121536254883 + ], + [ + "▁Chase", + -12.310199737548828 + ], + [ + "HF", + -12.310233116149902 + ], + [ + "ABLE", + -12.310348510742188 + ], + [ + "▁Ehr", + -12.310467720031738 + ], + [ + "▁régime", + -12.310480117797852 + ], + [ + "▁awake", + -12.310487747192383 + ], + [ + "▁seafood", + -12.310487747192383 + ], + [ + "leading", + -12.310554504394531 + ], + [ + "▁Rule", + -12.310602188110352 + ], + [ + "verkehr", + -12.310726165771484 + ], + [ + "erem", + -12.310737609863281 + ], + [ + "▁1973", + -12.310795783996582 + ], + [ + "personal", + -12.311171531677246 + ], + [ + "ența", + -12.311330795288086 + ], + [ + "apprend", + -12.311396598815918 + ], + [ + "faisant", + -12.311420440673828 + ], + [ + "▁Sounds", + -12.31151008605957 + ], + [ + "▁Launch", + -12.31151294708252 + ], + [ + "half", + -12.311636924743652 + ], + [ + "▁verre", + -12.311859130859375 + ], + [ + "▁Regular", + -12.31207275390625 + ], + [ + "▁Nancy", + -12.312142372131348 + ], + [ + "quelles", + -12.312161445617676 + ], + [ + "▁erhält", + -12.312169075012207 + ], + [ + "▁socks", + -12.3121919631958 + ], + [ + "lamp", + -12.312387466430664 + ], + [ + "▁durchgeführt", + -12.312472343444824 + ], + [ + "▁advertise", + -12.31260871887207 + ], + [ + "powered", + -12.312653541564941 + ], + [ + "▁concur", + -12.312699317932129 + ], + [ + "▁ressources", + -12.31293773651123 + ], + [ + "▁allocation", + -12.312986373901367 + ], + [ + "chon", + -12.313041687011719 + ], + [ + "▁Larry", + -12.313177108764648 + ], + [ + "lässig", + -12.313254356384277 + ], + [ + "OLD", + -12.313493728637695 + ], + [ + "itty", + -12.313599586486816 + ], + [ + "▁immuno", + -12.313645362854004 + ], + [ + "▁(+", + -12.313651084899902 + ], + [ + "▁Essential", + -12.313674926757812 + ], + [ + "▁semaines", + -12.313719749450684 + ], + [ + "Ru", + -12.31375503540039 + ], + [ + "▁Gear", + -12.313764572143555 + ], + [ + "völlig", + -12.313850402832031 + ], + [ + "liga", + -12.31391716003418 + ], + [ + "▁Neg", + -12.314082145690918 + ], + [ + "▁gratitude", + -12.31408977508545 + ], + [ + "aventure", + -12.314108848571777 + ], + [ + "▁frustrated", + -12.314115524291992 + ], + [ + "▁retrait", + -12.31422233581543 + ], + [ + "▁statut", + -12.314231872558594 + ], + [ + "550", + -12.31434440612793 + ], + [ + "ла", + -12.314428329467773 + ], + [ + "risto", + -12.314448356628418 + ], + [ + "WAY", + -12.314607620239258 + ], + [ + "▁pigment", + -12.314652442932129 + ], + [ + "Selon", + -12.314715385437012 + ], + [ + "stil", + -12.3148775100708 + ], + [ + "▁Marin", + -12.315055847167969 + ], + [ + "ashi", + -12.315085411071777 + ], + [ + "▁contine", + -12.31519889831543 + ], + [ + "▁Economics", + -12.315200805664062 + ], + [ + "both", + -12.3152437210083 + ], + [ + "▁Dou", + -12.31527328491211 + ], + [ + "Fel", + -12.315373420715332 + ], + [ + "UNT", + -12.315434455871582 + ], + [ + "▁grandmother", + -12.31548023223877 + ], + [ + "▁domicile", + -12.315678596496582 + ], + [ + "▁buffer", + -12.31574535369873 + ], + [ + "▁fuse", + -12.315815925598145 + ], + [ + "▁dosage", + -12.315821647644043 + ], + [ + "▁Nici", + -12.315839767456055 + ], + [ + "▁worries", + -12.315908432006836 + ], + [ + "▁Rail", + -12.3159818649292 + ], + [ + "uneori", + -12.315990447998047 + ], + [ + "▁Sierra", + -12.316030502319336 + ], + [ + "▁porni", + -12.316032409667969 + ], + [ + "▁NOTE", + -12.316056251525879 + ], + [ + "▁tendency", + -12.316065788269043 + ], + [ + "Set", + -12.316256523132324 + ], + [ + "▁Hof", + -12.31629753112793 + ], + [ + "▁Ruhe", + -12.316300392150879 + 
], + [ + "harm", + -12.316360473632812 + ], + [ + "▁Developer", + -12.316367149353027 + ], + [ + "suing", + -12.316400527954102 + ], + [ + "persönlichen", + -12.31658935546875 + ], + [ + "▁agréable", + -12.316596031188965 + ], + [ + "commissioned", + -12.316696166992188 + ], + [ + "▁1974", + -12.31672191619873 + ], + [ + "▁1969", + -12.316758155822754 + ], + [ + "▁regl", + -12.316996574401855 + ], + [ + "▁terror", + -12.317042350769043 + ], + [ + "▁température", + -12.317051887512207 + ], + [ + "▁Archiv", + -12.31706714630127 + ], + [ + "▁Military", + -12.317140579223633 + ], + [ + "▁König", + -12.317290306091309 + ], + [ + "▁forex", + -12.31737232208252 + ], + [ + "wiki", + -12.31745719909668 + ], + [ + "thetic", + -12.317506790161133 + ], + [ + "alaturi", + -12.317974090576172 + ], + [ + "▁montant", + -12.3179931640625 + ], + [ + "▁maladie", + -12.318044662475586 + ], + [ + "gust", + -12.318151473999023 + ], + [ + "▁demander", + -12.318164825439453 + ], + [ + "avocat", + -12.318191528320312 + ], + [ + "▁sci", + -12.318192481994629 + ], + [ + "▁Wireless", + -12.318214416503906 + ], + [ + "▁Dein", + -12.318220138549805 + ], + [ + "▁trio", + -12.3183012008667 + ], + [ + "▁Same", + -12.318395614624023 + ], + [ + "Datei", + -12.318464279174805 + ], + [ + "▁alerg", + -12.318578720092773 + ], + [ + "crowded", + -12.318657875061035 + ], + [ + "▁Punkt", + -12.318853378295898 + ], + [ + "▁sanctions", + -12.318864822387695 + ], + [ + "stating", + -12.318922996520996 + ], + [ + "▁discusse", + -12.318949699401855 + ], + [ + "▁Eigen", + -12.319068908691406 + ], + [ + "▁sănătate", + -12.31911563873291 + ], + [ + "▁correspondence", + -12.319211959838867 + ], + [ + "cred", + -12.319331169128418 + ], + [ + "VG", + -12.319347381591797 + ], + [ + "▁différence", + -12.319347381591797 + ], + [ + "▁Montreal", + -12.319391250610352 + ], + [ + "▁masini", + -12.319398880004883 + ], + [ + "iata", + -12.319487571716309 + ], + [ + "▁sampling", + -12.319574356079102 + ], + [ + "▁Gib", + -12.319831848144531 + ], + [ + "▁sheer", + -12.319944381713867 + ], + [ + "330", + -12.319947242736816 + ], + [ + "CHI", + -12.319990158081055 + ], + [ + "▁damn", + -12.320030212402344 + ], + [ + "▁Advisor", + -12.320201873779297 + ], + [ + "Typically", + -12.320302963256836 + ], + [ + "ssé", + -12.320352554321289 + ], + [ + "quart", + -12.320361137390137 + ], + [ + "chete", + -12.320385932922363 + ], + [ + "▁Puerto", + -12.32049560546875 + ], + [ + "2-1", + -12.32050609588623 + ], + [ + "NN", + -12.320674896240234 + ], + [ + "▁styling", + -12.320707321166992 + ], + [ + "rud", + -12.320777893066406 + ], + [ + "од", + -12.320856094360352 + ], + [ + "▁Hydro", + -12.320941925048828 + ], + [ + "▁Cable", + -12.320961952209473 + ], + [ + "video", + -12.320974349975586 + ], + [ + "▁Wirkung", + -12.321194648742676 + ], + [ + "▁noble", + -12.321270942687988 + ], + [ + "▁Sonder", + -12.32129192352295 + ], + [ + "mati", + -12.321317672729492 + ], + [ + "850", + -12.321395874023438 + ], + [ + "▁Richmond", + -12.32143497467041 + ], + [ + "▁niciodată", + -12.321442604064941 + ], + [ + "AO", + -12.321527481079102 + ], + [ + "▁altered", + -12.321648597717285 + ], + [ + "▁(15", + -12.32168960571289 + ], + [ + "▁Motiv", + -12.322052001953125 + ], + [ + "AKE", + -12.322089195251465 + ], + [ + "▁bestimmte", + -12.322172164916992 + ], + [ + "6.5", + -12.322176933288574 + ], + [ + "hectare", + -12.322333335876465 + ], + [ + "atorită", + -12.322335243225098 + ], + [ + "▁phases", + -12.322447776794434 + ], + [ + "▁Nova", + -12.322566032409668 + ], + [ + 
"ordinateur", + -12.322579383850098 + ], + [ + "▁corrupt", + -12.322813034057617 + ], + [ + "error", + -12.322895050048828 + ], + [ + "▁attacked", + -12.323005676269531 + ], + [ + "▁Kirche", + -12.323019981384277 + ], + [ + "heir", + -12.323040962219238 + ], + [ + "Das", + -12.323254585266113 + ], + [ + "▁anxious", + -12.323258399963379 + ], + [ + "▁Doc", + -12.323386192321777 + ], + [ + "▁Roth", + -12.323415756225586 + ], + [ + "▁Cine", + -12.32388687133789 + ], + [ + "▁auditor", + -12.324418067932129 + ], + [ + "▁beverage", + -12.324586868286133 + ], + [ + "▁précédent", + -12.324637413024902 + ], + [ + "▁deploy", + -12.324837684631348 + ], + [ + "▁accessibility", + -12.324843406677246 + ], + [ + "▁cage", + -12.324885368347168 + ], + [ + "▁Contra", + -12.324934005737305 + ], + [ + "Best", + -12.324952125549316 + ], + [ + "iji", + -12.324972152709961 + ], + [ + "▁père", + -12.325060844421387 + ], + [ + "▁scenic", + -12.32511043548584 + ], + [ + "synthesis", + -12.325165748596191 + ], + [ + "ßen", + -12.32534408569336 + ], + [ + "▁Videos", + -12.325482368469238 + ], + [ + "▁refus", + -12.325484275817871 + ], + [ + "stimmen", + -12.3255615234375 + ], + [ + "▁sleek", + -12.325577735900879 + ], + [ + "artige", + -12.32563591003418 + ], + [ + "mari", + -12.32568359375 + ], + [ + "▁excelent", + -12.325740814208984 + ], + [ + "▁negativ", + -12.325806617736816 + ], + [ + "▁blocking", + -12.32590103149414 + ], + [ + "spricht", + -12.326001167297363 + ], + [ + "▁discomfort", + -12.32602310180664 + ], + [ + "▁stratégie", + -12.32602310180664 + ], + [ + "▁Datenschutz", + -12.326078414916992 + ], + [ + "curg", + -12.326128005981445 + ], + [ + "▁lapte", + -12.326432228088379 + ], + [ + "▁acasă", + -12.326491355895996 + ], + [ + "▁ausschließlich", + -12.32653522491455 + ], + [ + "▁unbedingt", + -12.326802253723145 + ], + [ + "▁Linie", + -12.32689380645752 + ], + [ + "▁subscribers", + -12.327019691467285 + ], + [ + "109", + -12.32702350616455 + ], + [ + "▁Waste", + -12.32712173461914 + ], + [ + "▁Planung", + -12.327231407165527 + ], + [ + "▁visually", + -12.32734489440918 + ], + [ + "utilizarea", + -12.327370643615723 + ], + [ + "uba", + -12.327381134033203 + ], + [ + "▁fifteen", + -12.327411651611328 + ], + [ + "▁légère", + -12.327411651611328 + ], + [ + "ința", + -12.327446937561035 + ], + [ + "▁tolerance", + -12.327460289001465 + ], + [ + "▁piscine", + -12.327536582946777 + ], + [ + "▁nails", + -12.327569007873535 + ], + [ + "▁accus", + -12.327693939208984 + ], + [ + "▁coeur", + -12.327773094177246 + ], + [ + "freie", + -12.327849388122559 + ], + [ + "enţă", + -12.32812213897705 + ], + [ + "▁glucose", + -12.328336715698242 + ], + [ + "▁Jar", + -12.32838249206543 + ], + [ + "▁commencer", + -12.328387260437012 + ], + [ + "▁eliminating", + -12.328414916992188 + ], + [ + "▁mutation", + -12.32844352722168 + ], + [ + "▁afirma", + -12.328444480895996 + ], + [ + "▁Consulting", + -12.328454971313477 + ], + [ + "adia", + -12.328543663024902 + ], + [ + "zog", + -12.328604698181152 + ], + [ + "▁pielea", + -12.328658103942871 + ], + [ + "rton", + -12.328706741333008 + ], + [ + "exercice", + -12.3287935256958 + ], + [ + "namely", + -12.328847885131836 + ], + [ + "▁ajutor", + -12.3289155960083 + ], + [ + "▁markers", + -12.328917503356934 + ], + [ + "▁gardening", + -12.328932762145996 + ], + [ + "Karte", + -12.329038619995117 + ], + [ + "▁Pump", + -12.329142570495605 + ], + [ + "▁Dual", + -12.329169273376465 + ], + [ + "▁pratiques", + -12.329349517822266 + ], + [ + "▁behavioral", + -12.329358100891113 + ], + [ + 
"▁construire", + -12.329511642456055 + ], + [ + "▁Leonard", + -12.329596519470215 + ], + [ + "ediglich", + -12.329630851745605 + ], + [ + "ubbed", + -12.3297758102417 + ], + [ + "NK", + -12.329792022705078 + ], + [ + "shell", + -12.329912185668945 + ], + [ + "▁persönliche", + -12.329996109008789 + ], + [ + "ecuring", + -12.329998970031738 + ], + [ + "beaten", + -12.33000373840332 + ], + [ + "ALE", + -12.330053329467773 + ], + [ + "▁puppy", + -12.33023452758789 + ], + [ + "▁capac", + -12.33027458190918 + ], + [ + "▁seventh", + -12.330394744873047 + ], + [ + "▁nursery", + -12.330400466918945 + ], + [ + "▁Rum", + -12.330419540405273 + ], + [ + "▁exquisite", + -12.330423355102539 + ], + [ + "▁Legi", + -12.330483436584473 + ], + [ + "▁persist", + -12.330497741699219 + ], + [ + "bacterial", + -12.330548286437988 + ], + [ + "▁cereal", + -12.330572128295898 + ], + [ + "▁principe", + -12.330693244934082 + ], + [ + "chip", + -12.330766677856445 + ], + [ + "rush", + -12.330832481384277 + ], + [ + "▁funnel", + -12.330904006958008 + ], + [ + "▁calitatea", + -12.331024169921875 + ], + [ + "ibă", + -12.33104419708252 + ], + [ + "▁reign", + -12.331086158752441 + ], + [ + "▁congregation", + -12.331120491027832 + ], + [ + "▁obtine", + -12.331270217895508 + ], + [ + "▁découverte", + -12.331286430358887 + ], + [ + "▁gama", + -12.331315040588379 + ], + [ + "▁judec", + -12.33132553100586 + ], + [ + "Plan", + -12.331351280212402 + ], + [ + "▁gesture", + -12.331539154052734 + ], + [ + "öffentlichen", + -12.331644058227539 + ], + [ + "▁imported", + -12.331693649291992 + ], + [ + "▁rotate", + -12.331747055053711 + ], + [ + "blown", + -12.331756591796875 + ], + [ + "▁Protein", + -12.331827163696289 + ], + [ + "parfaitement", + -12.331832885742188 + ], + [ + "ondo", + -12.331868171691895 + ], + [ + "ologists", + -12.331890106201172 + ], + [ + "▁neighborhoods", + -12.331989288330078 + ], + [ + "▁Pope", + -12.33202075958252 + ], + [ + "▁museums", + -12.332194328308105 + ], + [ + "▁porter", + -12.332330703735352 + ], + [ + "▁kiss", + -12.332335472106934 + ], + [ + "pdf", + -12.332354545593262 + ], + [ + "sided", + -12.332359313964844 + ], + [ + "▁gern", + -12.332395553588867 + ], + [ + "bedingungen", + -12.332496643066406 + ], + [ + "▁Ride", + -12.332582473754883 + ], + [ + "Apoi", + -12.332584381103516 + ], + [ + "▁bestehen", + -12.332603454589844 + ], + [ + "5\"", + -12.33285903930664 + ], + [ + "bob", + -12.332862854003906 + ], + [ + "ficient", + -12.33303165435791 + ], + [ + "premise", + -12.333086967468262 + ], + [ + "▁Clip", + -12.333112716674805 + ], + [ + "▁concours", + -12.333213806152344 + ], + [ + "olar", + -12.333281517028809 + ], + [ + "▁Centr", + -12.333356857299805 + ], + [ + "outlined", + -12.333429336547852 + ], + [ + "▁observa", + -12.333511352539062 + ], + [ + "▁negotiate", + -12.333537101745605 + ], + [ + "▁Partnership", + -12.33358383178711 + ], + [ + "clock", + -12.333662033081055 + ], + [ + "roasted", + -12.333755493164062 + ], + [ + "Pourquoi", + -12.33391284942627 + ], + [ + "▁Marshall", + -12.334005355834961 + ], + [ + "▁Gerade", + -12.334052085876465 + ], + [ + "▁pachet", + -12.334160804748535 + ], + [ + "▁preliminary", + -12.334162712097168 + ], + [ + "▁tragic", + -12.334200859069824 + ], + [ + "author", + -12.334268569946289 + ], + [ + "▁Gov", + -12.334309577941895 + ], + [ + "▁comunic", + -12.334403991699219 + ], + [ + "▁coordinator", + -12.334410667419434 + ], + [ + "YA", + -12.33445930480957 + ], + [ + "▁Steam", + -12.33476734161377 + ], + [ + "▁Nag", + -12.334796905517578 + ], + [ + 
"▁Kara", + -12.334851264953613 + ], + [ + "▁Gang", + -12.334858894348145 + ], + [ + "aurez", + -12.334868431091309 + ], + [ + "▁horrible", + -12.334869384765625 + ], + [ + "▁Luxury", + -12.335076332092285 + ], + [ + "▁encouragement", + -12.335169792175293 + ], + [ + "▁conceptual", + -12.335250854492188 + ], + [ + "▁constituent", + -12.335431098937988 + ], + [ + "nvelop", + -12.335494041442871 + ], + [ + "ucc", + -12.335500717163086 + ], + [ + "▁conçu", + -12.335542678833008 + ], + [ + "pfel", + -12.33559513092041 + ], + [ + "special", + -12.335700988769531 + ], + [ + "▁Growth", + -12.335834503173828 + ], + [ + "cada", + -12.335916519165039 + ], + [ + "▁oamenilor", + -12.335976600646973 + ], + [ + "▁vendredi", + -12.336021423339844 + ], + [ + "▁coupe", + -12.336055755615234 + ], + [ + "▁Danke", + -12.336134910583496 + ], + [ + "reflects", + -12.336181640625 + ], + [ + "▁girlfriend", + -12.336273193359375 + ], + [ + "▁diffuse", + -12.336325645446777 + ], + [ + "HER", + -12.336328506469727 + ], + [ + "storing", + -12.336464881896973 + ], + [ + "ailing", + -12.336591720581055 + ], + [ + "▁Desi", + -12.336601257324219 + ], + [ + "stitution", + -12.336832046508789 + ], + [ + "▁adun", + -12.336844444274902 + ], + [ + "▁Partie", + -12.336869239807129 + ], + [ + "▁tissues", + -12.336958885192871 + ], + [ + "▁discovering", + -12.337154388427734 + ], + [ + "Jacques", + -12.337178230285645 + ], + [ + "lungs", + -12.33724594116211 + ], + [ + "▁Handy", + -12.337261199951172 + ], + [ + "centric", + -12.337285995483398 + ], + [ + "slav", + -12.337442398071289 + ], + [ + "▁sights", + -12.337560653686523 + ], + [ + "▁Category", + -12.337644577026367 + ], + [ + "▁Einrichtung", + -12.337957382202148 + ], + [ + "▁Robinson", + -12.33804702758789 + ], + [ + "▁Terra", + -12.338150978088379 + ], + [ + "▁creep", + -12.338167190551758 + ], + [ + "▁Lob", + -12.338184356689453 + ], + [ + "001", + -12.33820629119873 + ], + [ + "kop", + -12.338208198547363 + ], + [ + "Emb", + -12.338292121887207 + ], + [ + "▁forgive", + -12.338391304016113 + ], + [ + "▁icons", + -12.33847427368164 + ], + [ + "electric", + -12.3385009765625 + ], + [ + "▁faucet", + -12.338516235351562 + ], + [ + "▁invisible", + -12.3386812210083 + ], + [ + "sprach", + -12.338801383972168 + ], + [ + "▁beachten", + -12.33881664276123 + ], + [ + "rahm", + -12.338833808898926 + ], + [ + "▁Teacher", + -12.338919639587402 + ], + [ + "Fab", + -12.339070320129395 + ], + [ + "▁joue", + -12.339101791381836 + ], + [ + "▁Popular", + -12.339120864868164 + ], + [ + "▁Februar", + -12.339171409606934 + ], + [ + "sound", + -12.339251518249512 + ], + [ + "▁(0", + -12.339317321777344 + ], + [ + "▁Compare", + -12.33938980102539 + ], + [ + "▁pads", + -12.339455604553223 + ], + [ + "270", + -12.339498519897461 + ], + [ + "ousse", + -12.339548110961914 + ], + [ + "▁UAE", + -12.339786529541016 + ], + [ + "izări", + -12.339787483215332 + ], + [ + "▁bonuses", + -12.33993911743164 + ], + [ + "▁switches", + -12.3400239944458 + ], + [ + "▁Brothers", + -12.340166091918945 + ], + [ + "▁environmentally", + -12.340171813964844 + ], + [ + "vista", + -12.340264320373535 + ], + [ + "▁intentions", + -12.3402738571167 + ], + [ + "▁Terri", + -12.340301513671875 + ], + [ + "▁diabet", + -12.34030532836914 + ], + [ + "▁prese", + -12.340333938598633 + ], + [ + "▁parcurs", + -12.340389251708984 + ], + [ + "Warum", + -12.340449333190918 + ], + [ + "▁credentials", + -12.340455055236816 + ], + [ + "▁PLA", + -12.34046459197998 + ], + [ + "▁instruct", + -12.340470314025879 + ], + [ + "▁benefic", + 
-12.340633392333984 + ], + [ + "write", + -12.340675354003906 + ], + [ + "▁poids", + -12.340773582458496 + ], + [ + "▁Anspruch", + -12.340923309326172 + ], + [ + "▁avocado", + -12.340923309326172 + ], + [ + "▁inevitable", + -12.340923309326172 + ], + [ + "▁poorly", + -12.340950965881348 + ], + [ + "karte", + -12.340994834899902 + ], + [ + "▁Publishing", + -12.340999603271484 + ], + [ + "odată", + -12.341140747070312 + ], + [ + "▁scientifique", + -12.341157913208008 + ], + [ + "▁lăsa", + -12.341262817382812 + ], + [ + "▁secol", + -12.34131908416748 + ], + [ + "▁nevertheless", + -12.341392517089844 + ], + [ + "SAT", + -12.341597557067871 + ], + [ + "280", + -12.341651916503906 + ], + [ + "▁prevederi", + -12.341670989990234 + ], + [ + "▁chrome", + -12.342002868652344 + ], + [ + "institut", + -12.342267036437988 + ], + [ + "richtigen", + -12.34228515625 + ], + [ + "▁grief", + -12.342338562011719 + ], + [ + "▁penalties", + -12.342373847961426 + ], + [ + "▁Bayern", + -12.34238052368164 + ], + [ + "▁caramel", + -12.342473983764648 + ], + [ + "Now", + -12.342495918273926 + ], + [ + "Stiftung", + -12.342576026916504 + ], + [ + "country", + -12.342737197875977 + ], + [ + "dication", + -12.34278678894043 + ], + [ + "▁Chor", + -12.342801094055176 + ], + [ + "▁rămâne", + -12.342936515808105 + ], + [ + "▁TOP", + -12.34300708770752 + ], + [ + "▁complète", + -12.34301471710205 + ], + [ + "▁Marian", + -12.34302806854248 + ], + [ + "▁Avant", + -12.343121528625488 + ], + [ + "▁Shower", + -12.343156814575195 + ], + [ + "treu", + -12.34316349029541 + ], + [ + "▁chop", + -12.34321403503418 + ], + [ + "▁comfortably", + -12.343220710754395 + ], + [ + "▁autism", + -12.34323787689209 + ], + [ + "▁Sind", + -12.34328556060791 + ], + [ + "▁(20", + -12.343340873718262 + ], + [ + "▁Cinema", + -12.343414306640625 + ], + [ + "compania", + -12.343606948852539 + ], + [ + "▁Lex", + -12.343622207641602 + ], + [ + "▁Sofa", + -12.343716621398926 + ], + [ + "dru", + -12.343753814697266 + ], + [ + "▁verification", + -12.343770027160645 + ], + [ + "▁Immer", + -12.343825340270996 + ], + [ + "lomb", + -12.343829154968262 + ], + [ + "meric", + -12.34385871887207 + ], + [ + "▁slower", + -12.34398365020752 + ], + [ + "▁propag", + -12.344090461730957 + ], + [ + "Inter", + -12.344097137451172 + ], + [ + "selling", + -12.34418773651123 + ], + [ + "▁Bright", + -12.344269752502441 + ], + [ + "condition", + -12.344280242919922 + ], + [ + "PDF", + -12.344291687011719 + ], + [ + "oyez", + -12.344391822814941 + ], + [ + "▁Fried", + -12.344420433044434 + ], + [ + "▁Nazi", + -12.34443187713623 + ], + [ + "▁Buffalo", + -12.344447135925293 + ], + [ + "▁Sue", + -12.344449043273926 + ], + [ + "▁Rhein", + -12.34468936920166 + ], + [ + "▁Klaus", + -12.344889640808105 + ], + [ + "▁indiqu", + -12.344963073730469 + ], + [ + "echte", + -12.344996452331543 + ], + [ + "▁frecvent", + -12.345165252685547 + ], + [ + "▁conveniently", + -12.345187187194824 + ], + [ + "▁Moi", + -12.345197677612305 + ], + [ + "▁greenhouse", + -12.345220565795898 + ], + [ + "▁rédui", + -12.34524154663086 + ], + [ + "▁lengthy", + -12.34542179107666 + ], + [ + "verband", + -12.345534324645996 + ], + [ + "inţă", + -12.345622062683105 + ], + [ + "▁rigorous", + -12.345625877380371 + ], + [ + "▁Finish", + -12.34580135345459 + ], + [ + "▁FBI", + -12.346052169799805 + ], + [ + "cultura", + -12.346083641052246 + ], + [ + "▁compartment", + -12.346110343933105 + ], + [ + "▁pretend", + -12.346117973327637 + ], + [ + "▁assembled", + -12.346212387084961 + ], + [ + "▁Nie", + -12.34639835357666 + 
], + [ + "fession", + -12.34640884399414 + ], + [ + "▁£2", + -12.34642219543457 + ], + [ + "algré", + -12.3468017578125 + ], + [ + "▁anterior", + -12.346817970275879 + ], + [ + "▁Wissenschaft", + -12.34683609008789 + ], + [ + "▁Harbor", + -12.346923828125 + ], + [ + "lix", + -12.346985816955566 + ], + [ + "=\"", + -12.347049713134766 + ], + [ + "▁breathtaking", + -12.34705638885498 + ], + [ + "▁Stern", + -12.34708309173584 + ], + [ + "▁Internetseite", + -12.347132682800293 + ], + [ + "▁locker", + -12.347216606140137 + ], + [ + "▁feather", + -12.34726619720459 + ], + [ + "Serv", + -12.347297668457031 + ], + [ + "▁snake", + -12.347332000732422 + ], + [ + "▁Border", + -12.347396850585938 + ], + [ + "▁undergo", + -12.347518920898438 + ], + [ + "▁petrol", + -12.347558975219727 + ], + [ + "▁dealership", + -12.3475923538208 + ], + [ + "▁commander", + -12.347596168518066 + ], + [ + "▁Monate", + -12.347599983215332 + ], + [ + "▁Guardian", + -12.347665786743164 + ], + [ + "▁Todd", + -12.347774505615234 + ], + [ + "Ann", + -12.347825050354004 + ], + [ + "ibilité", + -12.347918510437012 + ], + [ + "▁Quarter", + -12.347987174987793 + ], + [ + "▁portray", + -12.348097801208496 + ], + [ + "▁Tai", + -12.34813404083252 + ], + [ + "▁strikes", + -12.348224639892578 + ], + [ + "illage", + -12.348381042480469 + ], + [ + "▁IRS", + -12.348417282104492 + ], + [ + "▁lupta", + -12.348455429077148 + ], + [ + "▁Sper", + -12.348493576049805 + ], + [ + "PRO", + -12.348530769348145 + ], + [ + "▁Export", + -12.348549842834473 + ], + [ + "▁crypto", + -12.348587989807129 + ], + [ + "▁barbecue", + -12.348692893981934 + ], + [ + "▁portions", + -12.348787307739258 + ], + [ + "▁explicit", + -12.348793983459473 + ], + [ + "▁angenehm", + -12.348834037780762 + ], + [ + "▁marathon", + -12.348946571350098 + ], + [ + "▁apartament", + -12.348982810974121 + ], + [ + "▁Eva", + -12.349079132080078 + ], + [ + "plate", + -12.349181175231934 + ], + [ + "viel", + -12.34925365447998 + ], + [ + "FIN", + -12.34926986694336 + ], + [ + "dependent", + -12.34935188293457 + ], + [ + "▁cercet", + -12.34942626953125 + ], + [ + "▁midnight", + -12.349499702453613 + ], + [ + "copie", + -12.349563598632812 + ], + [ + "▁companii", + -12.349621772766113 + ], + [ + "▁tenu", + -12.349660873413086 + ], + [ + "1/2", + -12.349662780761719 + ], + [ + "2.4", + -12.349693298339844 + ], + [ + "abri", + -12.349699974060059 + ], + [ + "▁warn", + -12.34980297088623 + ], + [ + "▁luggage", + -12.349875450134277 + ], + [ + "numarul", + -12.349968910217285 + ], + [ + "▁contour", + -12.350014686584473 + ], + [ + "▁Ghost", + -12.350016593933105 + ], + [ + "Angaben", + -12.35012435913086 + ], + [ + "▁unemployment", + -12.350296020507812 + ], + [ + "▁rău", + -12.350380897521973 + ], + [ + "▁dispatch", + -12.350445747375488 + ], + [ + "investissement", + -12.350547790527344 + ], + [ + "▁passt", + -12.35057258605957 + ], + [ + "▁Germania", + -12.350578308105469 + ], + [ + "▁webpage", + -12.350651741027832 + ], + [ + "▁reservations", + -12.350688934326172 + ], + [ + "▁Kai", + -12.350743293762207 + ], + [ + "▁Cav", + -12.350890159606934 + ], + [ + "▁Patient", + -12.351109504699707 + ], + [ + "ер", + -12.351213455200195 + ], + [ + "▁Belle", + -12.351236343383789 + ], + [ + "▁Nashville", + -12.351296424865723 + ], + [ + "▁Talent", + -12.351332664489746 + ], + [ + "ouvrage", + -12.351364135742188 + ], + [ + "▁bekommt", + -12.351365089416504 + ], + [ + "USA", + -12.351430892944336 + ], + [ + "CES", + -12.351432800292969 + ], + [ + "▁Peru", + -12.351499557495117 + ], + [ + 
"▁erkennen", + -12.35153579711914 + ], + [ + "prinde", + -12.351569175720215 + ], + [ + "▁constitution", + -12.351922035217285 + ], + [ + "itatile", + -12.351998329162598 + ], + [ + "bah", + -12.352147102355957 + ], + [ + "▁avail", + -12.352148056030273 + ], + [ + "▁disponibile", + -12.352149963378906 + ], + [ + "hér", + -12.352258682250977 + ], + [ + "ол", + -12.352411270141602 + ], + [ + "▁startups", + -12.352435111999512 + ], + [ + "▁carton", + -12.352485656738281 + ], + [ + "▁Newsletter", + -12.35251235961914 + ], + [ + "éti", + -12.352560997009277 + ], + [ + "▁investigating", + -12.352779388427734 + ], + [ + "itul", + -12.352925300598145 + ], + [ + "touch", + -12.352962493896484 + ], + [ + "Sport", + -12.353137016296387 + ], + [ + "AME", + -12.353203773498535 + ], + [ + "MIN", + -12.353222846984863 + ], + [ + "metry", + -12.353371620178223 + ], + [ + "icy", + -12.353492736816406 + ], + [ + "▁Luna", + -12.35351848602295 + ], + [ + "▁asthma", + -12.353614807128906 + ], + [ + "▁conduc", + -12.35365104675293 + ], + [ + "▁Ari", + -12.35369873046875 + ], + [ + "trust", + -12.353832244873047 + ], + [ + "▁defines", + -12.353894233703613 + ], + [ + "▁Blend", + -12.353927612304688 + ], + [ + "azo", + -12.353989601135254 + ], + [ + "▁sweep", + -12.354169845581055 + ], + [ + "lope", + -12.354331016540527 + ], + [ + "ţinut", + -12.35439682006836 + ], + [ + "WD", + -12.354503631591797 + ], + [ + "▁appetite", + -12.354619979858398 + ], + [ + "▁Seed", + -12.354753494262695 + ], + [ + "Friend", + -12.354854583740234 + ], + [ + "▁repet", + -12.354876518249512 + ], + [ + "▁throat", + -12.354936599731445 + ], + [ + "philosoph", + -12.355141639709473 + ], + [ + "▁connaître", + -12.355156898498535 + ], + [ + "▁Counter", + -12.355299949645996 + ], + [ + "▁Anforderungen", + -12.35533332824707 + ], + [ + "▁Polit", + -12.355363845825195 + ], + [ + "▁Weather", + -12.3554048538208 + ], + [ + "bow", + -12.355423927307129 + ], + [ + "▁recreation", + -12.355484008789062 + ], + [ + "▁culinary", + -12.355571746826172 + ], + [ + "▁plage", + -12.355609893798828 + ], + [ + "▁Cruz", + -12.355659484863281 + ], + [ + "▁equip", + -12.355668067932129 + ], + [ + "▁Recent", + -12.355697631835938 + ], + [ + "LED", + -12.355767250061035 + ], + [ + "▁steak", + -12.355772972106934 + ], + [ + "▁belly", + -12.355880737304688 + ], + [ + "photo", + -12.356130599975586 + ], + [ + "▁lakes", + -12.35623836517334 + ], + [ + "▁intact", + -12.356287956237793 + ], + [ + "▁spiral", + -12.356386184692383 + ], + [ + "▁Billy", + -12.356468200683594 + ], + [ + "▁Understanding", + -12.356534957885742 + ], + [ + "▁Lay", + -12.356558799743652 + ], + [ + "▁roster", + -12.356632232666016 + ], + [ + "▁admire", + -12.356647491455078 + ], + [ + "▁android", + -12.356732368469238 + ], + [ + "▁technician", + -12.356734275817871 + ], + [ + "gène", + -12.356818199157715 + ], + [ + "motiv", + -12.356954574584961 + ], + [ + "▁Boat", + -12.356988906860352 + ], + [ + "▁genießen", + -12.357000350952148 + ], + [ + "▁Geschmack", + -12.357001304626465 + ], + [ + "▁heroes", + -12.3570556640625 + ], + [ + "▁1800", + -12.357137680053711 + ], + [ + "numeroase", + -12.35776138305664 + ], + [ + "▁anschließend", + -12.357802391052246 + ], + [ + "▁Spur", + -12.357813835144043 + ], + [ + "▁clarify", + -12.35784912109375 + ], + [ + "▁warmer", + -12.357889175415039 + ], + [ + "▁Ranch", + -12.357955932617188 + ], + [ + "▁simti", + -12.358024597167969 + ], + [ + "Thank", + -12.35838508605957 + ], + [ + "▁freight", + -12.358434677124023 + ], + [ + "▁administrators", + 
-12.358453750610352 + ], + [ + "Reg", + -12.358588218688965 + ], + [ + "Această", + -12.358670234680176 + ], + [ + "▁legume", + -12.358741760253906 + ], + [ + "▁utilizare", + -12.358786582946777 + ], + [ + "CON", + -12.358904838562012 + ], + [ + "urgi", + -12.358917236328125 + ], + [ + "▁Gesicht", + -12.358920097351074 + ], + [ + "▁counselor", + -12.358954429626465 + ], + [ + "▁mondiale", + -12.359009742736816 + ], + [ + "helm", + -12.359137535095215 + ], + [ + "▁Promo", + -12.359156608581543 + ], + [ + "▁Schweiz", + -12.35917854309082 + ], + [ + "Ich", + -12.35929012298584 + ], + [ + "▁intalni", + -12.359295845031738 + ], + [ + "▁Bloom", + -12.359318733215332 + ], + [ + "▁Score", + -12.359362602233887 + ], + [ + "▁Fruit", + -12.35944652557373 + ], + [ + "▁constraints", + -12.359447479248047 + ], + [ + "▁farmer", + -12.359745979309082 + ], + [ + "▁précise", + -12.359807014465332 + ], + [ + "evaluating", + -12.359868049621582 + ], + [ + "▁Period", + -12.359891891479492 + ], + [ + "byte", + -12.359893798828125 + ], + [ + "wah", + -12.360025405883789 + ], + [ + "Mac", + -12.360123634338379 + ], + [ + "iron", + -12.360197067260742 + ], + [ + "′", + -12.360337257385254 + ], + [ + "▁tehnic", + -12.360539436340332 + ], + [ + "▁legat", + -12.36054515838623 + ], + [ + "▁Pilot", + -12.360574722290039 + ], + [ + "▁Carpet", + -12.36064624786377 + ], + [ + "TEN", + -12.360812187194824 + ], + [ + "▁shareholders", + -12.36082649230957 + ], + [ + "vină", + -12.360880851745605 + ], + [ + "▁parole", + -12.360939979553223 + ], + [ + "ătă", + -12.360984802246094 + ], + [ + "bbing", + -12.361000061035156 + ], + [ + "▁switched", + -12.361002922058105 + ], + [ + "▁Petro", + -12.361010551452637 + ], + [ + "▁Vertrags", + -12.36111831665039 + ], + [ + "cham", + -12.361178398132324 + ], + [ + "wang", + -12.361284255981445 + ], + [ + "▁Bean", + -12.36139965057373 + ], + [ + "minister", + -12.361442565917969 + ], + [ + "▁Wu", + -12.361522674560547 + ], + [ + "▁Olympics", + -12.361539840698242 + ], + [ + "tipul", + -12.361542701721191 + ], + [ + "▁Citi", + -12.36166763305664 + ], + [ + "▁Fold", + -12.361873626708984 + ], + [ + "▁Partei", + -12.361940383911133 + ], + [ + "▁centrale", + -12.361984252929688 + ], + [ + "île", + -12.362032890319824 + ], + [ + "pflicht", + -12.362175941467285 + ], + [ + "heli", + -12.362398147583008 + ], + [ + "▁erwartet", + -12.362414360046387 + ], + [ + "▁oferta", + -12.362458229064941 + ], + [ + "▁NHS", + -12.36246395111084 + ], + [ + "annon", + -12.362570762634277 + ], + [ + "▁Rud", + -12.362701416015625 + ], + [ + "▁Stuttgart", + -12.362737655639648 + ], + [ + "▁rămas", + -12.362746238708496 + ], + [ + "▁eliminated", + -12.36275577545166 + ], + [ + "▁hiding", + -12.362797737121582 + ], + [ + "▁cadeau", + -12.362832069396973 + ], + [ + "▁mock", + -12.363115310668945 + ], + [ + "▁elder", + -12.363333702087402 + ], + [ + "▁Liz", + -12.363364219665527 + ], + [ + "aji", + -12.363544464111328 + ], + [ + "▁endlich", + -12.363653182983398 + ], + [ + "sufficient", + -12.363668441772461 + ], + [ + "▁zusätzliche", + -12.363712310791016 + ], + [ + "scient", + -12.363757133483887 + ], + [ + "▁Adjust", + -12.363883972167969 + ], + [ + "▁incentive", + -12.363945007324219 + ], + [ + "▁Papa", + -12.364012718200684 + ], + [ + "▁Pharma", + -12.364041328430176 + ], + [ + "▁conflicts", + -12.364107131958008 + ], + [ + "zählen", + -12.364113807678223 + ], + [ + "▁chien", + -12.364118576049805 + ], + [ + "KB", + -12.36413288116455 + ], + [ + "ultimi", + -12.364188194274902 + ], + [ + "▁Jul", + 
-12.36421012878418 + ], + [ + "▁Male", + -12.36422061920166 + ], + [ + "▁viewer", + -12.36427116394043 + ], + [ + "▁Sector", + -12.364328384399414 + ], + [ + "▁REAL", + -12.364344596862793 + ], + [ + "▁arbitr", + -12.36436939239502 + ], + [ + "resistant", + -12.364399909973145 + ], + [ + "▁Bristol", + -12.364423751831055 + ], + [ + "▁shy", + -12.364540100097656 + ], + [ + "SW", + -12.364593505859375 + ], + [ + "▁Kirk", + -12.36460018157959 + ], + [ + "centrul", + -12.364653587341309 + ], + [ + "▁Venezuela", + -12.364657402038574 + ], + [ + "▁communicating", + -12.364657402038574 + ], + [ + "▁Chemical", + -12.364663124084473 + ], + [ + "▁surprises", + -12.364843368530273 + ], + [ + "▁Jamie", + -12.364933967590332 + ], + [ + "▁Heavy", + -12.364965438842773 + ], + [ + "▁turnover", + -12.36498737335205 + ], + [ + "▁étudiants", + -12.365114212036133 + ], + [ + "welcher", + -12.365124702453613 + ], + [ + "▁preturi", + -12.365200996398926 + ], + [ + "▁Mono", + -12.365283966064453 + ], + [ + "▁paddle", + -12.365309715270996 + ], + [ + "▁accountability", + -12.365364074707031 + ], + [ + "OUS", + -12.365592956542969 + ], + [ + "▁marketers", + -12.365762710571289 + ], + [ + "fection", + -12.365900993347168 + ], + [ + "▁Outside", + -12.365921020507812 + ], + [ + "▁Jefferson", + -12.366114616394043 + ], + [ + "oaie", + -12.36617660522461 + ], + [ + "tenue", + -12.366275787353516 + ], + [ + "HU", + -12.366329193115234 + ], + [ + "Très", + -12.36639404296875 + ], + [ + "valoarea", + -12.36642837524414 + ], + [ + "103", + -12.366482734680176 + ], + [ + "▁Privacy", + -12.366580963134766 + ], + [ + "▁Leistungen", + -12.366598129272461 + ], + [ + "(3)", + -12.36662483215332 + ], + [ + "▁études", + -12.366734504699707 + ], + [ + "sko", + -12.366750717163086 + ], + [ + "drum", + -12.366822242736816 + ], + [ + "▁lamb", + -12.366842269897461 + ], + [ + "▁nicio", + -12.367094993591309 + ], + [ + "▁NATO", + -12.367104530334473 + ], + [ + "▁Freitag", + -12.367178916931152 + ], + [ + "▁precedent", + -12.367178916931152 + ], + [ + "▁partenaires", + -12.367202758789062 + ], + [ + "▁companiei", + -12.367234230041504 + ], + [ + "▁Plaza", + -12.367249488830566 + ], + [ + "▁disruption", + -12.367274284362793 + ], + [ + "▁violations", + -12.367338180541992 + ], + [ + "▁Reference", + -12.367446899414062 + ], + [ + "▁habitants", + -12.36770248413086 + ], + [ + "▁compost", + -12.36776351928711 + ], + [ + "▁citoyen", + -12.367785453796387 + ], + [ + "▁Historical", + -12.367857933044434 + ], + [ + "vollen", + -12.36793327331543 + ], + [ + "▁Eck", + -12.36815357208252 + ], + [ + "▁lumii", + -12.368180274963379 + ], + [ + "▁reusit", + -12.368278503417969 + ], + [ + "genic", + -12.368307113647461 + ], + [ + "Why", + -12.368436813354492 + ], + [ + "ASE", + -12.368474006652832 + ], + [ + "▁athlete", + -12.36854076385498 + ], + [ + "▁Spitze", + -12.368559837341309 + ], + [ + "▁schimbat", + -12.368566513061523 + ], + [ + "▁anonymous", + -12.368850708007812 + ], + [ + "jedes", + -12.368856430053711 + ], + [ + "exclu", + -12.368874549865723 + ], + [ + "factor", + -12.369199752807617 + ], + [ + "▁Dezember", + -12.369231224060059 + ], + [ + "▁scientist", + -12.369373321533203 + ], + [ + "▁likelihood", + -12.36947250366211 + ], + [ + "▁Rhode", + -12.369488716125488 + ], + [ + "▁Balance", + -12.369521141052246 + ], + [ + "istoria", + -12.36959457397461 + ], + [ + "▁Neil", + -12.369780540466309 + ], + [ + "▁bush", + -12.369919776916504 + ], + [ + "▁Ergebnisse", + -12.369935989379883 + ], + [ + "▁Sinn", + -12.369956016540527 + ], + [ + 
"▁spezielle", + -12.370128631591797 + ], + [ + "▁jucat", + -12.37015438079834 + ], + [ + "▁spite", + -12.370179176330566 + ], + [ + "▁Ultimate", + -12.370365142822266 + ], + [ + "▁fructe", + -12.370401382446289 + ], + [ + "▁asleep", + -12.370441436767578 + ], + [ + "▁Goal", + -12.370539665222168 + ], + [ + "▁PAR", + -12.370631217956543 + ], + [ + "▁rows", + -12.370705604553223 + ], + [ + "▁Fol", + -12.3709135055542 + ], + [ + "▁durata", + -12.370945930480957 + ], + [ + "▁traditionnel", + -12.37100887298584 + ], + [ + "▁tema", + -12.37122917175293 + ], + [ + "▁crédit", + -12.371232986450195 + ], + [ + "smallest", + -12.371358871459961 + ], + [ + "▁amino", + -12.371358871459961 + ], + [ + "▁elephant", + -12.371405601501465 + ], + [ + "▁tubes", + -12.371685028076172 + ], + [ + "▁Verwendung", + -12.371719360351562 + ], + [ + "▁Excellence", + -12.371889114379883 + ], + [ + "▁utilities", + -12.371962547302246 + ], + [ + "frau", + -12.372111320495605 + ], + [ + "▁poze", + -12.3721342086792 + ], + [ + "août", + -12.372307777404785 + ], + [ + "ango", + -12.372514724731445 + ], + [ + "give", + -12.372532844543457 + ], + [ + "▁appelé", + -12.372576713562012 + ], + [ + "▁yeast", + -12.372671127319336 + ], + [ + "▁enrollment", + -12.372676849365234 + ], + [ + "organiz", + -12.3727445602417 + ], + [ + "▁asociat", + -12.372753143310547 + ], + [ + "▁cattle", + -12.372772216796875 + ], + [ + "▁Solution", + -12.372798919677734 + ], + [ + "evoke", + -12.372807502746582 + ], + [ + "▁Hampshire", + -12.372857093811035 + ], + [ + "▁yeah", + -12.372878074645996 + ], + [ + "▁Argentina", + -12.372928619384766 + ], + [ + "▁abnormal", + -12.373022079467773 + ], + [ + "▁Heights", + -12.373082160949707 + ], + [ + "▁Mitchell", + -12.373099327087402 + ], + [ + "▁Quad", + -12.373350143432617 + ], + [ + "▁textures", + -12.373382568359375 + ], + [ + "▁coalition", + -12.373384475708008 + ], + [ + "▁dataset", + -12.37338924407959 + ], + [ + "World", + -12.373438835144043 + ], + [ + "ständ", + -12.373456001281738 + ], + [ + "▁groove", + -12.373476028442383 + ], + [ + "▁emotionally", + -12.373562812805176 + ], + [ + "▁preciz", + -12.373636245727539 + ], + [ + "kte", + -12.373741149902344 + ], + [ + "berechtigt", + -12.373828887939453 + ], + [ + "▁1971", + -12.373888969421387 + ], + [ + "grandes", + -12.373907089233398 + ], + [ + "▁Broadway", + -12.37391185760498 + ], + [ + "▁comunicat", + -12.373994827270508 + ], + [ + "nui", + -12.37402629852295 + ], + [ + "GER", + -12.374079704284668 + ], + [ + "pick", + -12.374125480651855 + ], + [ + "inscrit", + -12.37414264678955 + ], + [ + "▁Gross", + -12.374258995056152 + ], + [ + "▁McDonald", + -12.374310493469238 + ], + [ + "▁Zero", + -12.374330520629883 + ], + [ + "▁Halb", + -12.374341011047363 + ], + [ + "▁caractère", + -12.374553680419922 + ], + [ + "▁doctrine", + -12.374553680419922 + ], + [ + "▁Sinne", + -12.37458610534668 + ], + [ + "MLS", + -12.374594688415527 + ], + [ + "▁réel", + -12.374759674072266 + ], + [ + "▁Ful", + -12.37476921081543 + ], + [ + "limiting", + -12.37483024597168 + ], + [ + "▁Gan", + -12.374870300292969 + ], + [ + "▁exclude", + -12.37490463256836 + ], + [ + "imba", + -12.374974250793457 + ], + [ + "rolul", + -12.374991416931152 + ], + [ + "▁veggies", + -12.375059127807617 + ], + [ + "▁fasci", + -12.375092506408691 + ], + [ + "▁oval", + -12.375173568725586 + ], + [ + "▁contacter", + -12.375221252441406 + ], + [ + "▁linking", + -12.375279426574707 + ], + [ + "▁knit", + -12.375308990478516 + ], + [ + "▁enroll", + -12.375504493713379 + ], + [ + "▁dédié", + 
-12.375533103942871 + ], + [ + "▁renting", + -12.375541687011719 + ], + [ + "▁genera", + -12.37567138671875 + ], + [ + "citing", + -12.375691413879395 + ], + [ + "▁bend", + -12.375700950622559 + ], + [ + "guin", + -12.375752449035645 + ], + [ + "▁caregiver", + -12.375768661499023 + ], + [ + "▁könnt", + -12.375791549682617 + ], + [ + "▁Scripture", + -12.375795364379883 + ], + [ + "▁Mic", + -12.375899314880371 + ], + [ + "▁Denmark", + -12.37590217590332 + ], + [ + "▁qualifying", + -12.375917434692383 + ], + [ + "▁costumes", + -12.375958442687988 + ], + [ + "▁dwelling", + -12.37601375579834 + ], + [ + "▁recrut", + -12.376099586486816 + ], + [ + "▁bedding", + -12.37618637084961 + ], + [ + "gesprochen", + -12.376253128051758 + ], + [ + "▁editors", + -12.376386642456055 + ], + [ + "/12", + -12.37657642364502 + ], + [ + "▁cumparat", + -12.376583099365234 + ], + [ + "fiction", + -12.376730918884277 + ], + [ + "▁spinal", + -12.376740455627441 + ], + [ + "▁pathway", + -12.376799583435059 + ], + [ + "▁vârst", + -12.37683391571045 + ], + [ + "mba", + -12.376874923706055 + ], + [ + "▁enthusiastic", + -12.37692642211914 + ], + [ + "▁Watt", + -12.37697982788086 + ], + [ + "symptom", + -12.376992225646973 + ], + [ + "▁pup", + -12.37712287902832 + ], + [ + "▁glorious", + -12.377225875854492 + ], + [ + "▁fața", + -12.377228736877441 + ], + [ + "▁prohibited", + -12.377256393432617 + ], + [ + "vergleich", + -12.377286911010742 + ], + [ + "▁suspected", + -12.377334594726562 + ], + [ + "▁Railway", + -12.377381324768066 + ], + [ + "▁Aujourd", + -12.377469062805176 + ], + [ + "▁Patients", + -12.377476692199707 + ], + [ + "▁séance", + -12.377501487731934 + ], + [ + "▁contraire", + -12.377503395080566 + ], + [ + "▁cuvânt", + -12.37771224975586 + ], + [ + "▁trotzdem", + -12.37773609161377 + ], + [ + "émission", + -12.377795219421387 + ], + [ + "▁bore", + -12.37782096862793 + ], + [ + "▁safeguard", + -12.377851486206055 + ], + [ + "▁galleries", + -12.37820053100586 + ], + [ + "cron", + -12.378268241882324 + ], + [ + "▁Rica", + -12.378335952758789 + ], + [ + "fläche", + -12.37839126586914 + ], + [ + "▁Slow", + -12.37842082977295 + ], + [ + "▁vara", + -12.378549575805664 + ], + [ + "▁Swan", + -12.378564834594727 + ], + [ + "▁compounds", + -12.378564834594727 + ], + [ + "▁Slo", + -12.378621101379395 + ], + [ + "▁accommodations", + -12.378621101379395 + ], + [ + "▁Putin", + -12.378708839416504 + ], + [ + "▁undertaken", + -12.378767967224121 + ], + [ + "▁prépar", + -12.37879467010498 + ], + [ + "▁gandi", + -12.37881088256836 + ], + [ + "sediul", + -12.378924369812012 + ], + [ + "▁Nathan", + -12.379143714904785 + ], + [ + "▁fountain", + -12.379173278808594 + ], + [ + "▁mère", + -12.379194259643555 + ], + [ + "fatty", + -12.379201889038086 + ], + [ + "▁concentrated", + -12.379241943359375 + ], + [ + "richtung", + -12.379300117492676 + ], + [ + "▁appropriately", + -12.37955379486084 + ], + [ + "107", + -12.379631996154785 + ], + [ + "▁shark", + -12.379735946655273 + ], + [ + "▁Topic", + -12.379867553710938 + ], + [ + "▁Ausstellung", + -12.379880905151367 + ], + [ + "▁SUA", + -12.380267143249512 + ], + [ + "SER", + -12.380359649658203 + ], + [ + "▁Nicole", + -12.38039779663086 + ], + [ + "▁utilisateurs", + -12.380620956420898 + ], + [ + "▁Brazilian", + -12.380753517150879 + ], + [ + "▁continut", + -12.380865097045898 + ], + [ + "▁sanatate", + -12.380881309509277 + ], + [ + "faudra", + -12.380882263183594 + ], + [ + "nahm", + -12.380938529968262 + ], + [ + "▁Specific", + -12.381153106689453 + ], + [ + "aiba", + 
-12.381199836730957 + ], + [ + "cepând", + -12.381296157836914 + ], + [ + "▁Beer", + -12.381366729736328 + ], + [ + "roni", + -12.381616592407227 + ], + [ + "kay", + -12.381636619567871 + ], + [ + "▁gravity", + -12.381844520568848 + ], + [ + "▁verfügt", + -12.381856918334961 + ], + [ + "7:30", + -12.381878852844238 + ], + [ + "▁Players", + -12.381945610046387 + ], + [ + "▁Industries", + -12.38198184967041 + ], + [ + "punkte", + -12.382119178771973 + ], + [ + "▁yacht", + -12.382135391235352 + ], + [ + "-04", + -12.382149696350098 + ], + [ + "onné", + -12.382192611694336 + ], + [ + "▁Cards", + -12.382221221923828 + ], + [ + "▁fete", + -12.382420539855957 + ], + [ + "breaking", + -12.38257884979248 + ], + [ + "baum", + -12.382621765136719 + ], + [ + "nada", + -12.382651329040527 + ], + [ + "▁geplant", + -12.382750511169434 + ], + [ + "genuinely", + -12.382766723632812 + ], + [ + "talk", + -12.382871627807617 + ], + [ + "▁disadvantage", + -12.382920265197754 + ], + [ + "▁shutter", + -12.383003234863281 + ], + [ + "virus", + -12.38302230834961 + ], + [ + "▁cricket", + -12.38308048248291 + ], + [ + "▁comenzi", + -12.383102416992188 + ], + [ + "hier", + -12.383170127868652 + ], + [ + "▁aufzu", + -12.383198738098145 + ], + [ + "▁Rez", + -12.38321304321289 + ], + [ + "▁conclusions", + -12.383329391479492 + ], + [ + "▁Wang", + -12.383509635925293 + ], + [ + "Darüber", + -12.383524894714355 + ], + [ + "▁CSS", + -12.383573532104492 + ], + [ + "CW", + -12.383780479431152 + ], + [ + "▁Chr", + -12.383790969848633 + ], + [ + "▁traded", + -12.383843421936035 + ], + [ + "▁Schon", + -12.384265899658203 + ], + [ + "mped", + -12.38429069519043 + ], + [ + "▁alloy", + -12.384385108947754 + ], + [ + "AVE", + -12.38451099395752 + ], + [ + "▁imagery", + -12.384542465209961 + ], + [ + "▁resurse", + -12.38479995727539 + ], + [ + "▁Thunder", + -12.384834289550781 + ], + [ + "▁schimbare", + -12.384860038757324 + ], + [ + "▁Youtube", + -12.38499927520752 + ], + [ + "▁Monster", + -12.385189056396484 + ], + [ + "phil", + -12.385234832763672 + ], + [ + "▁bébé", + -12.385284423828125 + ], + [ + "Creating", + -12.385428428649902 + ], + [ + "ănă", + -12.385466575622559 + ], + [ + "▁Staat", + -12.385504722595215 + ], + [ + "adică", + -12.385531425476074 + ], + [ + "▁boyfriend", + -12.385552406311035 + ], + [ + "▁Winner", + -12.385594367980957 + ], + [ + "▁disputes", + -12.385653495788574 + ], + [ + "▁lush", + -12.3856840133667 + ], + [ + "▁CMS", + -12.385719299316406 + ], + [ + "▁locaux", + -12.385725021362305 + ], + [ + "▁Verfahren", + -12.38576889038086 + ], + [ + "▁Café", + -12.385786056518555 + ], + [ + "▁Vorstand", + -12.385870933532715 + ], + [ + "▁lucrat", + -12.385960578918457 + ], + [ + "▁Root", + -12.38602352142334 + ], + [ + "▁decis", + -12.386059761047363 + ], + [ + "▁Shadow", + -12.386062622070312 + ], + [ + "▁countryside", + -12.386067390441895 + ], + [ + "▁analiza", + -12.386114120483398 + ], + [ + "obos", + -12.38616943359375 + ], + [ + "opera", + -12.386175155639648 + ], + [ + "actu", + -12.386207580566406 + ], + [ + "▁Songs", + -12.3864164352417 + ], + [ + "reifen", + -12.38648509979248 + ], + [ + "▁hilft", + -12.386650085449219 + ], + [ + "region", + -12.386727333068848 + ], + [ + "▁categoria", + -12.387001991271973 + ], + [ + "capturing", + -12.38701343536377 + ], + [ + "▁1967", + -12.387025833129883 + ], + [ + "▁optimized", + -12.387032508850098 + ], + [ + "▁Dim", + -12.387353897094727 + ], + [ + "▁adapté", + -12.387447357177734 + ], + [ + "zeichnet", + -12.387524604797363 + ], + [ + "▁strada", + 
-12.387625694274902 + ], + [ + "fulness", + -12.38774585723877 + ], + [ + "▁technically", + -12.38774585723877 + ], + [ + "▁marker", + -12.387757301330566 + ], + [ + "▁vizita", + -12.387808799743652 + ], + [ + "▁imperative", + -12.387986183166504 + ], + [ + "▁pensé", + -12.38802719116211 + ], + [ + "▁drilling", + -12.388030052185059 + ], + [ + "ISA", + -12.38818073272705 + ], + [ + "▁Massage", + -12.388201713562012 + ], + [ + "▁Terry", + -12.388238906860352 + ], + [ + "▁pourtant", + -12.38835334777832 + ], + [ + "▁declaration", + -12.388440132141113 + ], + [ + "▁instructors", + -12.388453483581543 + ], + [ + "Eventually", + -12.38847827911377 + ], + [ + "▁banned", + -12.38847827911377 + ], + [ + "MAT", + -12.388520240783691 + ], + [ + "▁medici", + -12.38856315612793 + ], + [ + "▁Warm", + -12.388615608215332 + ], + [ + "▁trec", + -12.388731002807617 + ], + [ + "▁ecran", + -12.388763427734375 + ], + [ + "▁goat", + -12.388838768005371 + ], + [ + "▁manipulation", + -12.388850212097168 + ], + [ + "▁mayor", + -12.388898849487305 + ], + [ + "▁unterwegs", + -12.388975143432617 + ], + [ + "▁journals", + -12.3890380859375 + ], + [ + "▁hedge", + -12.389239311218262 + ], + [ + "Merc", + -12.389300346374512 + ], + [ + "▁joueurs", + -12.389411926269531 + ], + [ + "▁Religion", + -12.3894624710083 + ], + [ + "▁Mountains", + -12.389477729797363 + ], + [ + "▁renewed", + -12.389497756958008 + ], + [ + "▁Limit", + -12.389543533325195 + ], + [ + "ikea", + -12.389771461486816 + ], + [ + "▁utiliza", + -12.38977336883545 + ], + [ + "sogenannte", + -12.389808654785156 + ], + [ + "0.2", + -12.389836311340332 + ], + [ + "▁Organ", + -12.38987922668457 + ], + [ + "▁Shakespeare", + -12.389952659606934 + ], + [ + "▁Maintenance", + -12.38995361328125 + ], + [ + "▁Wärme", + -12.389954566955566 + ], + [ + "▁Northwest", + -12.390060424804688 + ], + [ + "▁numit", + -12.390106201171875 + ], + [ + "▁mica", + -12.390165328979492 + ], + [ + "turm", + -12.390168190002441 + ], + [ + "▁motivate", + -12.390250205993652 + ], + [ + "▁Staats", + -12.390355110168457 + ], + [ + "optimum", + -12.390487670898438 + ], + [ + "▁sortir", + -12.390546798706055 + ], + [ + "▁Asset", + -12.390555381774902 + ], + [ + "▁hervorragend", + -12.390692710876465 + ], + [ + "▁commentary", + -12.39071273803711 + ], + [ + "▁actuellement", + -12.390732765197754 + ], + [ + "NER", + -12.390765190124512 + ], + [ + "NL", + -12.390789985656738 + ], + [ + "ritt", + -12.390803337097168 + ], + [ + "▁Wirtschafts", + -12.390813827514648 + ], + [ + "träger", + -12.390840530395508 + ], + [ + "▁Versand", + -12.390870094299316 + ], + [ + "▁nostri", + -12.390953063964844 + ], + [ + "▁enorm", + -12.391227722167969 + ], + [ + "▁whale", + -12.391260147094727 + ], + [ + "▁Aufgabe", + -12.391277313232422 + ], + [ + "▁unfair", + -12.391291618347168 + ], + [ + "▁Cord", + -12.391315460205078 + ], + [ + "incorporating", + -12.39134693145752 + ], + [ + "luck", + -12.39157772064209 + ], + [ + "Afrique", + -12.39168643951416 + ], + [ + "▁coated", + -12.391857147216797 + ], + [ + "▁india", + -12.391908645629883 + ], + [ + "▁temporarily", + -12.39193058013916 + ], + [ + "▁ciuda", + -12.392097473144531 + ], + [ + "▁coral", + -12.392184257507324 + ], + [ + "▁wirkt", + -12.392203330993652 + ], + [ + "▁folding", + -12.392309188842773 + ], + [ + "wichtigsten", + -12.392398834228516 + ], + [ + "impacted", + -12.392422676086426 + ], + [ + "▁wählen", + -12.392423629760742 + ], + [ + "▁differentiate", + -12.392492294311523 + ], + [ + "▁froid", + -12.392544746398926 + ], + [ + "▁hug", + 
-12.39255142211914 + ], + [ + "▁construi", + -12.39255428314209 + ], + [ + "▁membru", + -12.392603874206543 + ], + [ + "▁masculin", + -12.392667770385742 + ], + [ + "partisan", + -12.392711639404297 + ], + [ + "▁schimba", + -12.392725944519043 + ], + [ + "▁economies", + -12.392827987670898 + ], + [ + "▁Abraham", + -12.392914772033691 + ], + [ + "wesen", + -12.393013954162598 + ], + [ + "enia", + -12.393026351928711 + ], + [ + "▁answering", + -12.393080711364746 + ], + [ + "▁activități", + -12.39309024810791 + ], + [ + "▁mémoire", + -12.393160820007324 + ], + [ + "▁versucht", + -12.393305778503418 + ], + [ + "ember", + -12.39333438873291 + ], + [ + "▁instala", + -12.39334774017334 + ], + [ + "▁eligibility", + -12.393407821655273 + ], + [ + "▁enjoyment", + -12.393409729003906 + ], + [ + "▁Arme", + -12.39350414276123 + ], + [ + "although", + -12.393534660339355 + ], + [ + "▁encompass", + -12.393596649169922 + ], + [ + "▁zufrieden", + -12.393658638000488 + ], + [ + "Script", + -12.393691062927246 + ], + [ + "KG", + -12.39385986328125 + ], + [ + "▁adhesive", + -12.393902778625488 + ], + [ + "▁Verkehrs", + -12.393908500671387 + ], + [ + "▁monitored", + -12.394103050231934 + ], + [ + "▁Conservation", + -12.394148826599121 + ], + [ + "hav", + -12.394156455993652 + ], + [ + "▁Above", + -12.394174575805664 + ], + [ + "▁Former", + -12.394241333007812 + ], + [ + "▁Certain", + -12.394250869750977 + ], + [ + "saving", + -12.394311904907227 + ], + [ + "▁Pun", + -12.394390106201172 + ], + [ + "▁awkward", + -12.394397735595703 + ], + [ + "▁Pretty", + -12.394410133361816 + ], + [ + "▁scanning", + -12.394417762756348 + ], + [ + "layer", + -12.394527435302734 + ], + [ + "motor", + -12.39453125 + ], + [ + "▁beginnt", + -12.39455795288086 + ], + [ + "▁affiliated", + -12.394681930541992 + ], + [ + "▁archives", + -12.394686698913574 + ], + [ + "▁sunshine", + -12.394892692565918 + ], + [ + "kha", + -12.394988059997559 + ], + [ + "▁investigated", + -12.395149230957031 + ], + [ + "▁fantas", + -12.395277976989746 + ], + [ + "▁united", + -12.395355224609375 + ], + [ + "allegedly", + -12.395373344421387 + ], + [ + "▁Eugen", + -12.3955078125 + ], + [ + "▁proprie", + -12.395843505859375 + ], + [ + "uca", + -12.396183013916016 + ], + [ + "DES", + -12.396187782287598 + ], + [ + "ştii", + -12.396190643310547 + ], + [ + "▁Running", + -12.39620590209961 + ], + [ + "lbstverständlich", + -12.396248817443848 + ], + [ + "index", + -12.396300315856934 + ], + [ + "▁studiu", + -12.396512031555176 + ], + [ + "URE", + -12.396553039550781 + ], + [ + "gültig", + -12.396627426147461 + ], + [ + "▁lundi", + -12.396649360656738 + ], + [ + "▁Zucker", + -12.396650314331055 + ], + [ + "▁positively", + -12.396721839904785 + ], + [ + "folgenden", + -12.396758079528809 + ], + [ + "anță", + -12.396800994873047 + ], + [ + "▁clan", + -12.396866798400879 + ], + [ + "▁literacy", + -12.396879196166992 + ], + [ + "▁ober", + -12.39699935913086 + ], + [ + "John", + -12.397003173828125 + ], + [ + "greg", + -12.39700984954834 + ], + [ + "▁titlu", + -12.397049903869629 + ], + [ + "▁ţări", + -12.39707088470459 + ], + [ + "Bra", + -12.397100448608398 + ], + [ + "▁Evans", + -12.397164344787598 + ], + [ + "modern", + -12.397172927856445 + ], + [ + "▁hauteur", + -12.397353172302246 + ], + [ + "refers", + -12.397416114807129 + ], + [ + "▁plasma", + -12.397575378417969 + ], + [ + "▁optic", + -12.397595405578613 + ], + [ + "▁shampoo", + -12.397619247436523 + ], + [ + "▁cheek", + -12.397727966308594 + ], + [ + "opted", + -12.397741317749023 + ], + [ + "▁persönlich", 
+ -12.397832870483398 + ], + [ + "▁1945", + -12.398118019104004 + ], + [ + "ICI", + -12.398193359375 + ], + [ + "biotic", + -12.398222923278809 + ], + [ + "▁Beruf", + -12.398372650146484 + ], + [ + "▁trez", + -12.398383140563965 + ], + [ + "▁diploma", + -12.398388862609863 + ], + [ + "nahmen", + -12.398421287536621 + ], + [ + "▁curl", + -12.398625373840332 + ], + [ + "▁agricole", + -12.398824691772461 + ], + [ + "▁recomand", + -12.398844718933105 + ], + [ + "▁pediatric", + -12.398862838745117 + ], + [ + "Fiecare", + -12.39887523651123 + ], + [ + "Anlage", + -12.398906707763672 + ], + [ + "weiß", + -12.398974418640137 + ], + [ + "elecommunication", + -12.39898681640625 + ], + [ + "hog", + -12.399184226989746 + ], + [ + "▁Stamp", + -12.399364471435547 + ], + [ + "▁Tipp", + -12.399369239807129 + ], + [ + "▁kindness", + -12.399415969848633 + ], + [ + "▁Marina", + -12.399577140808105 + ], + [ + "▁Gleich", + -12.39963436126709 + ], + [ + "▁grij", + -12.39970588684082 + ], + [ + "▁desperate", + -12.39974594116211 + ], + [ + "▁recordings", + -12.399842262268066 + ], + [ + "▁neglect", + -12.399861335754395 + ], + [ + "▁inherent", + -12.400035858154297 + ], + [ + "▁Rezept", + -12.400138854980469 + ], + [ + "▁soins", + -12.400164604187012 + ], + [ + "▁brut", + -12.400250434875488 + ], + [ + "▁revolutionary", + -12.400495529174805 + ], + [ + "▁liberté", + -12.400530815124512 + ], + [ + "cours", + -12.400945663452148 + ], + [ + "▁Similar", + -12.401247024536133 + ], + [ + "▁cheveux", + -12.40136432647705 + ], + [ + "▁ieftin", + -12.401599884033203 + ], + [ + "▁promovare", + -12.40160846710205 + ], + [ + "▁grains", + -12.401729583740234 + ], + [ + "ти", + -12.401749610900879 + ], + [ + "▁fonctionnement", + -12.401789665222168 + ], + [ + "▁Coming", + -12.401832580566406 + ], + [ + "▁analytical", + -12.401847839355469 + ], + [ + "▁simplify", + -12.401856422424316 + ], + [ + "▁chambres", + -12.401893615722656 + ], + [ + "▁fifty", + -12.401930809020996 + ], + [ + "jour", + -12.402070999145508 + ], + [ + "▁(17", + -12.402194023132324 + ], + [ + "cărui", + -12.402292251586914 + ], + [ + "▁harmony", + -12.402352333068848 + ], + [ + "grin", + -12.402355194091797 + ], + [ + "▁drunk", + -12.402359962463379 + ], + [ + "260", + -12.402374267578125 + ], + [ + "3-5", + -12.40243148803711 + ], + [ + "▁articole", + -12.402442932128906 + ], + [ + "▁flooding", + -12.402482986450195 + ], + [ + "halle", + -12.402580261230469 + ], + [ + "▁defects", + -12.40276050567627 + ], + [ + "▁rifle", + -12.402839660644531 + ], + [ + "▁Boc", + -12.402843475341797 + ], + [ + "▁Athletic", + -12.40284538269043 + ], + [ + "▁acordat", + -12.40292739868164 + ], + [ + "AIR", + -12.402969360351562 + ], + [ + "▁entwickeln", + -12.403104782104492 + ], + [ + "▁Advance", + -12.403188705444336 + ], + [ + "▁Heil", + -12.403216361999512 + ], + [ + "Stainless", + -12.403345108032227 + ], + [ + "▁Psychology", + -12.40337085723877 + ], + [ + "▁omul", + -12.403435707092285 + ], + [ + "▁Arbeiten", + -12.403494834899902 + ], + [ + "▁rabbit", + -12.403495788574219 + ], + [ + "▁méta", + -12.40351390838623 + ], + [ + "ismul", + -12.403534889221191 + ], + [ + "▁Herausforderung", + -12.403594970703125 + ], + [ + "▁Euch", + -12.403654098510742 + ], + [ + "geschichte", + -12.40390682220459 + ], + [ + "▁Milk", + -12.404057502746582 + ], + [ + "▁pregăt", + -12.404065132141113 + ], + [ + "▁Standort", + -12.404141426086426 + ], + [ + "Val", + -12.404180526733398 + ], + [ + "▁Ronald", + -12.404350280761719 + ], + [ + "▁Werbe", + -12.404558181762695 + ], + [ + 
"▁restrict", + -12.404658317565918 + ], + [ + "▁tablespoon", + -12.404844284057617 + ], + [ + "▁Amendment", + -12.404845237731934 + ], + [ + "▁Johnny", + -12.404914855957031 + ], + [ + "▁lively", + -12.404938697814941 + ], + [ + "ORD", + -12.405147552490234 + ], + [ + "▁mulţi", + -12.40523624420166 + ], + [ + "èrent", + -12.405241012573242 + ], + [ + "Every", + -12.405277252197266 + ], + [ + "eignet", + -12.405296325683594 + ], + [ + "GD", + -12.40546989440918 + ], + [ + "▁Ghana", + -12.405628204345703 + ], + [ + "▁wealthy", + -12.40576171875 + ], + [ + "▁advocates", + -12.405818939208984 + ], + [ + "▁Campaign", + -12.40584659576416 + ], + [ + "▁posters", + -12.405964851379395 + ], + [ + "flug", + -12.406011581420898 + ], + [ + "▁métier", + -12.406139373779297 + ], + [ + "kir", + -12.406148910522461 + ], + [ + "bond", + -12.406176567077637 + ], + [ + "datorita", + -12.406188011169434 + ], + [ + "▁Hochzeit", + -12.406230926513672 + ], + [ + "▁effectué", + -12.406271934509277 + ], + [ + "▁angles", + -12.40654182434082 + ], + [ + "▁Electrical", + -12.406705856323242 + ], + [ + "▁Administrator", + -12.40674114227295 + ], + [ + "▁spur", + -12.407389640808105 + ], + [ + "▁größere", + -12.407444953918457 + ], + [ + "woke", + -12.407515525817871 + ], + [ + "▁gewinnen", + -12.407689094543457 + ], + [ + "▁ajută", + -12.407712936401367 + ], + [ + "▁ventilation", + -12.407853126525879 + ], + [ + "▁viaţa", + -12.407853126525879 + ], + [ + "▁Dinner", + -12.408079147338867 + ], + [ + "respond", + -12.408095359802246 + ], + [ + "▁OEM", + -12.408120155334473 + ], + [ + "▁affair", + -12.4081392288208 + ], + [ + "▁öffentlich", + -12.408143043518066 + ], + [ + "ENS", + -12.408209800720215 + ], + [ + "▁Cent", + -12.408224105834961 + ], + [ + "▁făc", + -12.408267974853516 + ], + [ + "▁Doppel", + -12.408285140991211 + ], + [ + "▁fericit", + -12.408363342285156 + ], + [ + "▁coordon", + -12.40845775604248 + ], + [ + "geht", + -12.408547401428223 + ], + [ + "▁perfekte", + -12.408610343933105 + ], + [ + "▁sportive", + -12.408700942993164 + ], + [ + "▁proiectul", + -12.40870189666748 + ], + [ + "▁deadly", + -12.408804893493652 + ], + [ + "Geschäft", + -12.408822059631348 + ], + [ + "▁inspirational", + -12.408854484558105 + ], + [ + "+1", + -12.409013748168945 + ], + [ + "▁pearl", + -12.409022331237793 + ], + [ + "▁scrub", + -12.409036636352539 + ], + [ + "▁scheint", + -12.409079551696777 + ], + [ + "poo", + -12.409147262573242 + ], + [ + "▁Pier", + -12.409220695495605 + ], + [ + "▁commented", + -12.409285545349121 + ], + [ + "lute", + -12.409302711486816 + ], + [ + "▁cancelled", + -12.409488677978516 + ], + [ + "Win", + -12.409605979919434 + ], + [ + "▁payroll", + -12.409781455993652 + ], + [ + "▁varsta", + -12.409881591796875 + ], + [ + "stuffed", + -12.410097122192383 + ], + [ + "▁beads", + -12.410138130187988 + ], + [ + "▁poems", + -12.410356521606445 + ], + [ + "pokesman", + -12.410399436950684 + ], + [ + "▁checklist", + -12.410523414611816 + ], + [ + "▁Mich", + -12.410636901855469 + ], + [ + "GEN", + -12.410676002502441 + ], + [ + "▁Lau", + -12.410783767700195 + ], + [ + "▁stie", + -12.410965919494629 + ], + [ + "▁Lovely", + -12.4110107421875 + ], + [ + "▁Anschluss", + -12.411062240600586 + ], + [ + "▁personaj", + -12.41108226776123 + ], + [ + "▁ausgestattet", + -12.411121368408203 + ], + [ + "▁beginners", + -12.411163330078125 + ], + [ + "▁noon", + -12.411189079284668 + ], + [ + "▁celule", + -12.41128921508789 + ], + [ + "Trans", + -12.411324501037598 + ], + [ + "boot", + -12.411331176757812 + ], + [ + 
"▁drumul", + -12.41136646270752 + ], + [ + "gruppen", + -12.41140079498291 + ], + [ + "étend", + -12.41140365600586 + ], + [ + "▁risques", + -12.411405563354492 + ], + [ + "acclaimed", + -12.411447525024414 + ], + [ + "▁celelalte", + -12.411617279052734 + ], + [ + "▁condiţii", + -12.411620140075684 + ], + [ + "▁skiing", + -12.411685943603516 + ], + [ + "▁optimale", + -12.411689758300781 + ], + [ + "technology", + -12.411773681640625 + ], + [ + "▁renew", + -12.411784172058105 + ], + [ + "Cloud", + -12.41179084777832 + ], + [ + "▁damaging", + -12.411905288696289 + ], + [ + "GT", + -12.412219047546387 + ], + [ + "▁Reform", + -12.41230583190918 + ], + [ + "vedem", + -12.412349700927734 + ], + [ + "▁indicat", + -12.412461280822754 + ], + [ + "▁Maker", + -12.412467002868652 + ], + [ + "▁lichid", + -12.412582397460938 + ], + [ + "3.1", + -12.412614822387695 + ], + [ + "păt", + -12.412620544433594 + ], + [ + "lumina", + -12.41264820098877 + ], + [ + "▁Situ", + -12.412806510925293 + ], + [ + "▁Archives", + -12.412857055664062 + ], + [ + "▁allergies", + -12.41287899017334 + ], + [ + "▁Cameron", + -12.412883758544922 + ], + [ + "▁Immun", + -12.412899017333984 + ], + [ + "wissenschaftlich", + -12.41301441192627 + ], + [ + "▁supplémentaire", + -12.413128852844238 + ], + [ + "▁puterea", + -12.413261413574219 + ], + [ + "Lab", + -12.413331985473633 + ], + [ + "inspired", + -12.413384437561035 + ], + [ + "▁shrink", + -12.413403511047363 + ], + [ + "▁voit", + -12.413426399230957 + ], + [ + "▁chopped", + -12.413467407226562 + ], + [ + "▁Franz", + -12.413537979125977 + ], + [ + "oku", + -12.413652420043945 + ], + [ + "▁suppress", + -12.413673400878906 + ], + [ + "▁impress", + -12.413751602172852 + ], + [ + "▁Liga", + -12.413755416870117 + ], + [ + "▁Eight", + -12.41378402709961 + ], + [ + "720", + -12.413795471191406 + ], + [ + "▁securely", + -12.413870811462402 + ], + [ + "KU", + -12.413934707641602 + ], + [ + "modell", + -12.413992881774902 + ], + [ + "Ensure", + -12.414154052734375 + ], + [ + "größte", + -12.414204597473145 + ], + [ + "▁réuni", + -12.414215087890625 + ], + [ + "▁Internal", + -12.41423225402832 + ], + [ + "▁Punkte", + -12.414320945739746 + ], + [ + "▁replicate", + -12.414412498474121 + ], + [ + "▁spreadsheet", + -12.414434432983398 + ], + [ + "▁Hindu", + -12.414549827575684 + ], + [ + "▁Cham", + -12.414578437805176 + ], + [ + "nati", + -12.414670944213867 + ], + [ + "imply", + -12.414679527282715 + ], + [ + "funded", + -12.414894104003906 + ], + [ + "▁charitable", + -12.414896011352539 + ], + [ + "▁imagined", + -12.415014266967773 + ], + [ + "hausen", + -12.41517448425293 + ], + [ + "Keeping", + -12.415239334106445 + ], + [ + "▁attitudes", + -12.415287971496582 + ], + [ + "esque", + -12.415365219116211 + ], + [ + "▁Tennis", + -12.415409088134766 + ], + [ + "Jeremy", + -12.415410041809082 + ], + [ + "▁majeur", + -12.415475845336914 + ], + [ + "▁stii", + -12.4155912399292 + ], + [ + "▁herbal", + -12.415790557861328 + ], + [ + "▁cauta", + -12.41580867767334 + ], + [ + "▁voluntary", + -12.415828704833984 + ], + [ + "wohl", + -12.415877342224121 + ], + [ + "▁ideea", + -12.41588306427002 + ], + [ + "▁WW", + -12.415899276733398 + ], + [ + "▁erneut", + -12.416010856628418 + ], + [ + "größten", + -12.416094779968262 + ], + [ + "Grâce", + -12.416159629821777 + ], + [ + "▁Köln", + -12.416193008422852 + ], + [ + "▁mobilier", + -12.416199684143066 + ], + [ + "▁fool", + -12.416254043579102 + ], + [ + "▁Calcul", + -12.416295051574707 + ], + [ + "attaque", + -12.41637897491455 + ], + [ + "▁digestive", + 
-12.41656494140625 + ], + [ + "performance", + -12.416647911071777 + ], + [ + "▁homeowner", + -12.41675853729248 + ], + [ + "▁hunger", + -12.4169282913208 + ], + [ + "2.3", + -12.41696834564209 + ], + [ + "▁Sort", + -12.417085647583008 + ], + [ + "▁Dennis", + -12.41723918914795 + ], + [ + "▁certificat", + -12.417250633239746 + ], + [ + "▁Canal", + -12.417337417602539 + ], + [ + "▁Yesterday", + -12.417424201965332 + ], + [ + "▁sausage", + -12.417499542236328 + ], + [ + "▁perdu", + -12.417736053466797 + ], + [ + "ösen", + -12.417741775512695 + ], + [ + "▁preserved", + -12.417750358581543 + ], + [ + "▁trendy", + -12.4177885055542 + ], + [ + "▁iubire", + -12.417935371398926 + ], + [ + "▁grandfather", + -12.417961120605469 + ], + [ + "▁shoppers", + -12.41820240020752 + ], + [ + "▁verschieden", + -12.418252944946289 + ], + [ + "▁gagner", + -12.41826343536377 + ], + [ + "▁lucra", + -12.418437004089355 + ], + [ + "metru", + -12.418464660644531 + ], + [ + "buz", + -12.418469429016113 + ], + [ + "▁flourish", + -12.418484687805176 + ], + [ + "affin", + -12.418523788452148 + ], + [ + "▁Pflanzen", + -12.41858196258545 + ], + [ + "agh", + -12.418588638305664 + ], + [ + "▁Gill", + -12.418660163879395 + ], + [ + "▁Kä", + -12.418671607971191 + ], + [ + "▁Wege", + -12.41876220703125 + ], + [ + "▁Liberal", + -12.418929100036621 + ], + [ + "▁Glasgow", + -12.418944358825684 + ], + [ + "Objekt", + -12.4189453125 + ], + [ + "▁Huawei", + -12.4189453125 + ], + [ + "appropri", + -12.418986320495605 + ], + [ + "▁genius", + -12.419037818908691 + ], + [ + "▁brokers", + -12.419068336486816 + ], + [ + "▁themed", + -12.41918659210205 + ], + [ + "▁barre", + -12.419210433959961 + ], + [ + "1.7", + -12.419219017028809 + ], + [ + "▁Electro", + -12.419303894042969 + ], + [ + "▁umbrella", + -12.419333457946777 + ], + [ + "▁advisory", + -12.419417381286621 + ], + [ + "▁comport", + -12.419421195983887 + ], + [ + "▁neuer", + -12.419452667236328 + ], + [ + "▁Wick", + -12.419568061828613 + ], + [ + "wak", + -12.419618606567383 + ], + [ + "▁Woman", + -12.419695854187012 + ], + [ + "▁lesser", + -12.419843673706055 + ], + [ + "▁replied", + -12.419987678527832 + ], + [ + "▁représente", + -12.420050621032715 + ], + [ + "▁thé", + -12.420135498046875 + ], + [ + "Deutsch", + -12.420428276062012 + ], + [ + "Cat", + -12.420483589172363 + ], + [ + "▁équipes", + -12.420534133911133 + ], + [ + "▁spider", + -12.420578956604004 + ], + [ + "▁Gaming", + -12.420589447021484 + ], + [ + "▁Liste", + -12.420592308044434 + ], + [ + "▁affection", + -12.420639038085938 + ], + [ + "lipsa", + -12.420982360839844 + ], + [ + "▁Spider", + -12.420987129211426 + ], + [ + "▁Julia", + -12.421034812927246 + ], + [ + "anlagen", + -12.421159744262695 + ], + [ + "Kon", + -12.421363830566406 + ], + [ + "nței", + -12.421368598937988 + ], + [ + "▁Verwaltung", + -12.421483993530273 + ], + [ + "▁raspuns", + -12.421489715576172 + ], + [ + "samt", + -12.421491622924805 + ], + [ + "▁creștere", + -12.421512603759766 + ], + [ + "▁decorate", + -12.421701431274414 + ], + [ + "▁Chain", + -12.422021865844727 + ], + [ + "ów", + -12.422050476074219 + ], + [ + "0-0", + -12.422104835510254 + ], + [ + "▁Cran", + -12.422407150268555 + ], + [ + "▁streak", + -12.42242431640625 + ], + [ + "ор", + -12.422517776489258 + ], + [ + "▁căuta", + -12.422754287719727 + ], + [ + "wende", + -12.422801971435547 + ], + [ + "▁haine", + -12.42280387878418 + ], + [ + "▁landscaping", + -12.423009872436523 + ], + [ + "▁historian", + -12.423016548156738 + ], + [ + "▁grandchildren", + -12.423033714294434 + ], + 
[ + "▁crawl", + -12.423056602478027 + ], + [ + "▁Cub", + -12.423239707946777 + ], + [ + "▁nécessaires", + -12.423515319824219 + ], + [ + "▁swift", + -12.42352294921875 + ], + [ + "▁calculation", + -12.423656463623047 + ], + [ + "▁acteurs", + -12.423715591430664 + ], + [ + "VT", + -12.423752784729004 + ], + [ + "▁Hristos", + -12.423778533935547 + ], + [ + "▁slices", + -12.423850059509277 + ], + [ + "See", + -12.424203872680664 + ], + [ + "▁Bran", + -12.424233436584473 + ], + [ + "Symbol", + -12.424449920654297 + ], + [ + "▁allowance", + -12.424492835998535 + ], + [ + "▁Effective", + -12.424537658691406 + ], + [ + "▁Wünsche", + -12.424539566040039 + ], + [ + "▁shiny", + -12.424569129943848 + ], + [ + "▁professionalism", + -12.424715995788574 + ], + [ + "/6", + -12.424970626831055 + ], + [ + "▁terrasse", + -12.425087928771973 + ], + [ + "▁researcher", + -12.425156593322754 + ], + [ + "▁fragile", + -12.425203323364258 + ], + [ + "▁greeting", + -12.425274848937988 + ], + [ + "freien", + -12.4253511428833 + ], + [ + "▁valuation", + -12.425372123718262 + ], + [ + "▁incur", + -12.425386428833008 + ], + [ + "▁Zwischen", + -12.425559997558594 + ], + [ + "▁comfy", + -12.425569534301758 + ], + [ + "▁méthode", + -12.42569351196289 + ], + [ + "▁Pirate", + -12.425816535949707 + ], + [ + "▁Moto", + -12.425822257995605 + ], + [ + "(6)", + -12.425823211669922 + ], + [ + "▁devin", + -12.42582893371582 + ], + [ + "▁civic", + -12.425837516784668 + ], + [ + "usage", + -12.425889015197754 + ], + [ + "▁istorie", + -12.425945281982422 + ], + [ + "▁piste", + -12.425955772399902 + ], + [ + "▁Rug", + -12.426091194152832 + ], + [ + "pä", + -12.426129341125488 + ], + [ + "▁matur", + -12.426148414611816 + ], + [ + "CAS", + -12.426155090332031 + ], + [ + "TIC", + -12.42618465423584 + ], + [ + "▁Reduce", + -12.426234245300293 + ], + [ + "▁commemorat", + -12.426321983337402 + ], + [ + "▁cease", + -12.42653751373291 + ], + [ + "unterschiedliche", + -12.42656421661377 + ], + [ + "▁cinnamon", + -12.426581382751465 + ], + [ + "▁Font", + -12.426583290100098 + ], + [ + "▁justify", + -12.426751136779785 + ], + [ + "deteriorat", + -12.426797866821289 + ], + [ + "▁Schön", + -12.42684555053711 + ], + [ + "plain", + -12.426993370056152 + ], + [ + "frist", + -12.427002906799316 + ], + [ + "▁helmet", + -12.42712116241455 + ], + [ + "▁statute", + -12.42721939086914 + ], + [ + "accept", + -12.427236557006836 + ], + [ + "▁1,5", + -12.42724323272705 + ], + [ + "▁recon", + -12.42724323272705 + ], + [ + "▁Möbel", + -12.427348136901855 + ], + [ + "▁idées", + -12.427367210388184 + ], + [ + "automat", + -12.427552223205566 + ], + [ + "Team", + -12.42758846282959 + ], + [ + "▁performers", + -12.427688598632812 + ], + [ + "▁microphone", + -12.427722930908203 + ], + [ + "impotriva", + -12.427775382995605 + ], + [ + "▁pillows", + -12.42780876159668 + ], + [ + "▁accountable", + -12.427812576293945 + ], + [ + "▁strings", + -12.42782974243164 + ], + [ + "hydrate", + -12.427835464477539 + ], + [ + "▁Yan", + -12.427865028381348 + ], + [ + "starea", + -12.427918434143066 + ], + [ + "▁présenté", + -12.42793083190918 + ], + [ + "▁extensively", + -12.428048133850098 + ], + [ + "äst", + -12.428114891052246 + ], + [ + "▁correlation", + -12.428115844726562 + ], + [ + "bespoke", + -12.428119659423828 + ], + [ + "▁creste", + -12.428196907043457 + ], + [ + "▁Armenia", + -12.428248405456543 + ], + [ + "nose", + -12.428426742553711 + ], + [ + "▁strengthening", + -12.428604125976562 + ], + [ + "▁Horizon", + -12.428627014160156 + ], + [ + "▁obesity", + 
-12.428627967834473 + ], + [ + "seasoned", + -12.428686141967773 + ], + [ + "▁screenshot", + -12.428736686706543 + ], + [ + "girl", + -12.42875862121582 + ], + [ + "▁hardest", + -12.428826332092285 + ], + [ + "▁weakness", + -12.428855895996094 + ], + [ + "effectuer", + -12.429012298583984 + ], + [ + "▁Florence", + -12.429034233093262 + ], + [ + "▁Europene", + -12.429062843322754 + ], + [ + "triggered", + -12.429333686828613 + ], + [ + "Apparently", + -12.42939567565918 + ], + [ + "▁diagnose", + -12.42943286895752 + ], + [ + "rushed", + -12.429494857788086 + ], + [ + "▁trotz", + -12.429516792297363 + ], + [ + "▁spécial", + -12.429680824279785 + ], + [ + "▁lumi", + -12.429783821105957 + ], + [ + "7:00", + -12.429877281188965 + ], + [ + "▁publicat", + -12.429903984069824 + ], + [ + "ос", + -12.430086135864258 + ], + [ + "▁hue", + -12.430136680603027 + ], + [ + "▁termination", + -12.430139541625977 + ], + [ + "▁Nam", + -12.430240631103516 + ], + [ + "Well", + -12.430376052856445 + ], + [ + "▁Extract", + -12.430441856384277 + ], + [ + "atiile", + -12.43062686920166 + ], + [ + "▁vivid", + -12.43076229095459 + ], + [ + "hrs", + -12.430858612060547 + ], + [ + "▁povesti", + -12.430984497070312 + ], + [ + "stehenden", + -12.430988311767578 + ], + [ + "▁informieren", + -12.431070327758789 + ], + [ + "employed", + -12.431133270263672 + ], + [ + "▁armor", + -12.431180953979492 + ], + [ + "▁Columbus", + -12.431191444396973 + ], + [ + "Registr", + -12.431200981140137 + ], + [ + "▁Kamera", + -12.431203842163086 + ], + [ + "▁ugly", + -12.431203842163086 + ], + [ + "outil", + -12.431234359741211 + ], + [ + "▁evenly", + -12.43134593963623 + ], + [ + "lungul", + -12.431349754333496 + ], + [ + "koch", + -12.431439399719238 + ], + [ + "▁Dig", + -12.431450843811035 + ], + [ + "purely", + -12.431489944458008 + ], + [ + "▁Surf", + -12.431560516357422 + ], + [ + "rilla", + -12.431628227233887 + ], + [ + "▁Watson", + -12.43171215057373 + ], + [ + "trug", + -12.431719779968262 + ], + [ + "figuring", + -12.431784629821777 + ], + [ + "▁competitor", + -12.431807518005371 + ], + [ + "▁humid", + -12.431889533996582 + ], + [ + "▁Lawyer", + -12.43189811706543 + ], + [ + "Added", + -12.43205451965332 + ], + [ + "▁salva", + -12.432056427001953 + ], + [ + "▁drainage", + -12.4321870803833 + ], + [ + "Featuring", + -12.432220458984375 + ], + [ + "▁Pel", + -12.43234634399414 + ], + [ + "▁acasa", + -12.432611465454102 + ], + [ + "▁expectation", + -12.43265438079834 + ], + [ + "gibt", + -12.432663917541504 + ], + [ + "▁marginal", + -12.432831764221191 + ], + [ + "ceni", + -12.433028221130371 + ], + [ + "▁européen", + -12.433065414428711 + ], + [ + "clav", + -12.433090209960938 + ], + [ + "▁Shot", + -12.433167457580566 + ], + [ + "commun", + -12.43322467803955 + ], + [ + "▁Calendar", + -12.433247566223145 + ], + [ + "▁trek", + -12.433348655700684 + ], + [ + "rechtliche", + -12.433406829833984 + ], + [ + "▁Perry", + -12.43342399597168 + ], + [ + "▁surge", + -12.433484077453613 + ], + [ + "geschäft", + -12.433504104614258 + ], + [ + "paced", + -12.433793067932129 + ], + [ + "depend", + -12.433871269226074 + ], + [ + "▁Sache", + -12.433947563171387 + ], + [ + "▁Example", + -12.433998107910156 + ], + [ + "▁lider", + -12.434118270874023 + ], + [ + "▁nochmal", + -12.434240341186523 + ], + [ + "▁Present", + -12.434243202209473 + ], + [ + "KW", + -12.434335708618164 + ], + [ + "prompted", + -12.434350967407227 + ], + [ + "logique", + -12.434444427490234 + ], + [ + "Université", + -12.434466361999512 + ], + [ + "lith", + -12.434489250183105 
+ ], + [ + "▁Gefahr", + -12.434579849243164 + ], + [ + "▁Acid", + -12.434625625610352 + ], + [ + "objets", + -12.434791564941406 + ], + [ + "▁societies", + -12.434791564941406 + ], + [ + "▁distraction", + -12.434816360473633 + ], + [ + "▁puissance", + -12.434934616088867 + ], + [ + "▁alleviat", + -12.435026168823242 + ], + [ + "▁Capitol", + -12.435050010681152 + ], + [ + "▁Heim", + -12.435129165649414 + ], + [ + "judicial", + -12.435230255126953 + ], + [ + "▁nowadays", + -12.435309410095215 + ], + [ + "▁Hammer", + -12.435317039489746 + ], + [ + "▁metallic", + -12.435327529907227 + ], + [ + "▁distr", + -12.435388565063477 + ], + [ + "▁dispos", + -12.435397148132324 + ], + [ + "profile", + -12.435408592224121 + ], + [ + "▁Nicolas", + -12.435602188110352 + ], + [ + "▁presa", + -12.435760498046875 + ], + [ + "augh", + -12.43578052520752 + ], + [ + "schuss", + -12.435787200927734 + ], + [ + "▁Diana", + -12.436062812805176 + ], + [ + "4-5", + -12.436097145080566 + ], + [ + "▁Chapel", + -12.43612003326416 + ], + [ + "▁zahar", + -12.436150550842285 + ], + [ + "âmb", + -12.4362154006958 + ], + [ + "▁Tarif", + -12.436264991760254 + ], + [ + "▁devastating", + -12.436339378356934 + ], + [ + "6:00", + -12.4364013671875 + ], + [ + "▁100,000", + -12.43645191192627 + ], + [ + "NIC", + -12.436580657958984 + ], + [ + "▁Lucas", + -12.436612129211426 + ], + [ + "▁bequem", + -12.436662673950195 + ], + [ + "▁Motion", + -12.436698913574219 + ], + [ + "7,000", + -12.436701774597168 + ], + [ + "▁malware", + -12.436708450317383 + ], + [ + "▁avenue", + -12.436723709106445 + ], + [ + "▁manger", + -12.436747550964355 + ], + [ + "▁Queensland", + -12.436857223510742 + ], + [ + "▁Papier", + -12.436861991882324 + ], + [ + "▁Increase", + -12.436880111694336 + ], + [ + "▁implies", + -12.436954498291016 + ], + [ + "▁äußer", + -12.43697452545166 + ], + [ + "▁Meine", + -12.436980247497559 + ], + [ + "Reuters", + -12.437155723571777 + ], + [ + "▁Belt", + -12.437232971191406 + ], + [ + "Educat", + -12.437251091003418 + ], + [ + "▁Aktion", + -12.437355041503906 + ], + [ + "schläge", + -12.437372207641602 + ], + [ + "▁înregistrat", + -12.437426567077637 + ], + [ + "▁Ortho", + -12.43756103515625 + ], + [ + "▁bulbs", + -12.437761306762695 + ], + [ + "kap", + -12.437793731689453 + ], + [ + "▁peinture", + -12.437901496887207 + ], + [ + "▁Lounge", + -12.437907218933105 + ], + [ + "▁Tampa", + -12.438008308410645 + ], + [ + "ifiziert", + -12.438100814819336 + ], + [ + "kinder", + -12.438172340393066 + ], + [ + "▁comparativ", + -12.438281059265137 + ], + [ + "häuser", + -12.438323974609375 + ], + [ + "incarn", + -12.438363075256348 + ], + [ + "▁amazon", + -12.438464164733887 + ], + [ + "▁Southeast", + -12.438505172729492 + ], + [ + "▁economical", + -12.438667297363281 + ], + [ + "▁broth", + -12.438697814941406 + ], + [ + "▁Secure", + -12.438750267028809 + ], + [ + "damals", + -12.438875198364258 + ], + [ + "▁Elementary", + -12.438921928405762 + ], + [ + "▁Wildlife", + -12.438995361328125 + ], + [ + "▁Jewel", + -12.439001083374023 + ], + [ + "▁protocols", + -12.439297676086426 + ], + [ + "▁zbor", + -12.4393892288208 + ], + [ + "▁enthusiasts", + -12.439398765563965 + ], + [ + "▁Mirror", + -12.439444541931152 + ], + [ + "▁soak", + -12.439537048339844 + ], + [ + "▁Sad", + -12.439574241638184 + ], + [ + "▁dishwasher", + -12.439957618713379 + ], + [ + "▁vollständig", + -12.440186500549316 + ], + [ + "▁Vermont", + -12.440407752990723 + ], + [ + "▁caut", + -12.440449714660645 + ], + [ + "▁fournisseur", + -12.440475463867188 + ], + [ + 
"▁Concrete", + -12.44047737121582 + ], + [ + "▁Instant", + -12.440595626831055 + ], + [ + "▁reveni", + -12.440597534179688 + ], + [ + "▁Surface", + -12.44059944152832 + ], + [ + "zumindest", + -12.440713882446289 + ], + [ + "▁feast", + -12.440725326538086 + ], + [ + "▁stretching", + -12.440803527832031 + ], + [ + "ERA", + -12.440997123718262 + ], + [ + "▁Scholarship", + -12.441020965576172 + ], + [ + "▁vineyard", + -12.4410400390625 + ], + [ + "▁régulièrement", + -12.441083908081055 + ], + [ + "▁patches", + -12.441093444824219 + ], + [ + "▁Gamb", + -12.44113540649414 + ], + [ + "▁Vereins", + -12.441152572631836 + ], + [ + "ège", + -12.441372871398926 + ], + [ + "▁constitutional", + -12.441411018371582 + ], + [ + "erreur", + -12.441413879394531 + ], + [ + "▁Colombia", + -12.441514015197754 + ], + [ + "UF", + -12.441618919372559 + ], + [ + "aider", + -12.441665649414062 + ], + [ + "cision", + -12.44180965423584 + ], + [ + "▁publishers", + -12.441913604736328 + ], + [ + "▁prelua", + -12.441967964172363 + ], + [ + "▁keiner", + -12.441990852355957 + ], + [ + "▁amid", + -12.442020416259766 + ], + [ + "▁quantitative", + -12.442031860351562 + ], + [ + "▁decay", + -12.442058563232422 + ], + [ + "▁distinguished", + -12.4420747756958 + ], + [ + "▁Gründe", + -12.442209243774414 + ], + [ + "▁statului", + -12.442362785339355 + ], + [ + "CAT", + -12.442436218261719 + ], + [ + "allow", + -12.442481994628906 + ], + [ + "▁mathematical", + -12.442550659179688 + ], + [ + "▁tragedy", + -12.44255542755127 + ], + [ + "▁heels", + -12.442609786987305 + ], + [ + "opia", + -12.44265365600586 + ], + [ + "▁merger", + -12.4428071975708 + ], + [ + "dispositif", + -12.442813873291016 + ], + [ + "▁pneu", + -12.44283390045166 + ], + [ + "elte", + -12.443058013916016 + ], + [ + "▁Introduction", + -12.443070411682129 + ], + [ + "▁biscuit", + -12.443134307861328 + ], + [ + "▁leftover", + -12.443275451660156 + ], + [ + "▁tester", + -12.443314552307129 + ], + [ + "▁Terre", + -12.443380355834961 + ], + [ + "▁Oui", + -12.44338321685791 + ], + [ + "▁rar", + -12.443520545959473 + ], + [ + "▁beverages", + -12.443666458129883 + ], + [ + "▁parenting", + -12.443892478942871 + ], + [ + "1-0", + -12.444053649902344 + ], + [ + "▁Barry", + -12.44417667388916 + ], + [ + "▁Lynn", + -12.444209098815918 + ], + [ + "▁Tyler", + -12.444262504577637 + ], + [ + "▁fotbal", + -12.44437026977539 + ], + [ + "dron", + -12.444475173950195 + ], + [ + "▁donor", + -12.44455623626709 + ], + [ + "▁drape", + -12.444558143615723 + ], + [ + "▁positioning", + -12.444963455200195 + ], + [ + "▁Tang", + -12.445006370544434 + ], + [ + "▁overwhelmed", + -12.445161819458008 + ], + [ + "▁perte", + -12.445192337036133 + ], + [ + "▁blender", + -12.445302963256836 + ], + [ + "TG", + -12.445467948913574 + ], + [ + "GHz", + -12.445490837097168 + ], + [ + "▁administrat", + -12.445719718933105 + ], + [ + "▁glaube", + -12.445771217346191 + ], + [ + "Char", + -12.445947647094727 + ], + [ + "impression", + -12.44627571105957 + ], + [ + "proving", + -12.446297645568848 + ], + [ + "▁Inner", + -12.446434020996094 + ], + [ + "root", + -12.446501731872559 + ], + [ + "▁Gedanken", + -12.446508407592773 + ], + [ + "▁underway", + -12.446596145629883 + ], + [ + "coat", + -12.44660758972168 + ], + [ + "▁thereof", + -12.446663856506348 + ], + [ + "rius", + -12.446700096130371 + ], + [ + "▁intermediate", + -12.446751594543457 + ], + [ + "gmail", + -12.446869850158691 + ], + [ + "114", + -12.446893692016602 + ], + [ + "▁interfere", + -12.446908950805664 + ], + [ + "▁Found", + 
-12.446930885314941 + ], + [ + "LF", + -12.447071075439453 + ], + [ + "▁equality", + -12.447099685668945 + ], + [ + "▁concurrent", + -12.44710636138916 + ], + [ + "akh", + -12.447107315063477 + ], + [ + "▁touching", + -12.44715690612793 + ], + [ + "▁curiosity", + -12.447235107421875 + ], + [ + "▁rendering", + -12.447263717651367 + ], + [ + "▁1964", + -12.447442054748535 + ], + [ + "sorge", + -12.447468757629395 + ], + [ + "ARC", + -12.447505950927734 + ], + [ + "▁Desktop", + -12.44752311706543 + ], + [ + "▁Tak", + -12.44760799407959 + ], + [ + "filtration", + -12.447651863098145 + ], + [ + "▁gates", + -12.4478759765625 + ], + [ + "Sehr", + -12.44791316986084 + ], + [ + "▁spatiu", + -12.44798755645752 + ], + [ + "▁Leg", + -12.448103904724121 + ], + [ + "▁aviation", + -12.448277473449707 + ], + [ + "wandel", + -12.44827938079834 + ], + [ + "▁Shar", + -12.448323249816895 + ], + [ + "▁Volks", + -12.448409080505371 + ], + [ + "maz", + -12.448698997497559 + ], + [ + "governmental", + -12.44874095916748 + ], + [ + "euros", + -12.448819160461426 + ], + [ + "avantage", + -12.448823928833008 + ], + [ + "sitzt", + -12.448856353759766 + ], + [ + "IER", + -12.448920249938965 + ], + [ + "▁Theory", + -12.44894027709961 + ], + [ + "Cependant", + -12.44907283782959 + ], + [ + "▁Teachers", + -12.449080467224121 + ], + [ + "anspruch", + -12.449095726013184 + ], + [ + "▁afecta", + -12.449139595031738 + ], + [ + "enko", + -12.449193000793457 + ], + [ + "▁breeding", + -12.449198722839355 + ], + [ + "▁Peak", + -12.449457168579102 + ], + [ + "▁găsit", + -12.449516296386719 + ], + [ + "▁măsuri", + -12.4495267868042 + ], + [ + "edia", + -12.449625968933105 + ], + [ + "biz", + -12.449640274047852 + ], + [ + "zum", + -12.449776649475098 + ], + [ + "▁schwierig", + -12.449847221374512 + ], + [ + "Sense", + -12.450050354003906 + ], + [ + "▁Jump", + -12.450081825256348 + ], + [ + "▁cocktails", + -12.450108528137207 + ], + [ + "abhängig", + -12.45012378692627 + ], + [ + "realised", + -12.450140953063965 + ], + [ + "▁programul", + -12.450214385986328 + ], + [ + "▁prévu", + -12.450238227844238 + ], + [ + "▁twitter", + -12.450372695922852 + ], + [ + "Union", + -12.450400352478027 + ], + [ + "▁Marathon", + -12.45040225982666 + ], + [ + "▁Christianity", + -12.450432777404785 + ], + [ + "▁Alberta", + -12.450811386108398 + ], + [ + "einheit", + -12.45097827911377 + ], + [ + "▁wellbeing", + -12.450982093811035 + ], + [ + "phen", + -12.451166152954102 + ], + [ + "▁Charleston", + -12.451180458068848 + ], + [ + "▁uncover", + -12.451323509216309 + ], + [ + "▁humaine", + -12.451464653015137 + ], + [ + "▁bleeding", + -12.451531410217285 + ], + [ + "▁manipul", + -12.451532363891602 + ], + [ + "▁humidity", + -12.451570510864258 + ], + [ + "▁Puis", + -12.451748847961426 + ], + [ + "▁aktuell", + -12.451922416687012 + ], + [ + "▁Nissan", + -12.451943397521973 + ], + [ + "▁Eisen", + -12.45202922821045 + ], + [ + "treiben", + -12.452059745788574 + ], + [ + "cios", + -12.452073097229004 + ], + [ + "ikh", + -12.452381134033203 + ], + [ + "acquiring", + -12.452466011047363 + ], + [ + "▁Wallpaper", + -12.452488899230957 + ], + [ + "▁rond", + -12.452558517456055 + ], + [ + "▁Doug", + -12.45267391204834 + ], + [ + "sourcing", + -12.452696800231934 + ], + [ + "▁1900", + -12.452825546264648 + ], + [ + "▁buni", + -12.452913284301758 + ], + [ + "vest", + -12.452916145324707 + ], + [ + "▁Bangladesh", + -12.452990531921387 + ], + [ + "Home", + -12.453160285949707 + ], + [ + "▁wrinkle", + -12.453252792358398 + ], + [ + "rado", + -12.453290939331055 + ], + 
[ + "▁Pain", + -12.45334243774414 + ], + [ + "▁herzlich", + -12.453354835510254 + ], + [ + "MRI", + -12.453426361083984 + ], + [ + "UG", + -12.453631401062012 + ], + [ + "▁Desk", + -12.453679084777832 + ], + [ + "▁remarc", + -12.453718185424805 + ], + [ + "▁sodium", + -12.453857421875 + ], + [ + "▁Jede", + -12.453892707824707 + ], + [ + "▁réelle", + -12.453959465026855 + ], + [ + "▁Polar", + -12.454068183898926 + ], + [ + "▁activists", + -12.454273223876953 + ], + [ + "lasted", + -12.454300880432129 + ], + [ + "Some", + -12.45432186126709 + ], + [ + "ISE", + -12.454338073730469 + ], + [ + "▁peine", + -12.454671859741211 + ], + [ + "▁crude", + -12.454852104187012 + ], + [ + "Maur", + -12.454916954040527 + ], + [ + "▁forcing", + -12.454933166503906 + ], + [ + "▁politici", + -12.454970359802246 + ], + [ + "▁condiții", + -12.454988479614258 + ], + [ + "▁Saving", + -12.454999923706055 + ], + [ + "▁descoperi", + -12.455020904541016 + ], + [ + "avenir", + -12.455055236816406 + ], + [ + "Akt", + -12.455069541931152 + ], + [ + "▁vocabulary", + -12.45509147644043 + ], + [ + "▁pont", + -12.455168724060059 + ], + [ + "West", + -12.45518970489502 + ], + [ + "lenk", + -12.455278396606445 + ], + [ + "▁Verbraucher", + -12.455367088317871 + ], + [ + "affects", + -12.455448150634766 + ], + [ + "▁Flower", + -12.455543518066406 + ], + [ + "▁Nebraska", + -12.455617904663086 + ], + [ + "▁assortment", + -12.455618858337402 + ], + [ + "hock", + -12.455619812011719 + ], + [ + "▁discounted", + -12.455803871154785 + ], + [ + "▁Sensor", + -12.455840110778809 + ], + [ + "Lie", + -12.45588207244873 + ], + [ + "▁Volkswagen", + -12.455887794494629 + ], + [ + "isseur", + -12.455888748168945 + ], + [ + "indice", + -12.455936431884766 + ], + [ + "▁scanner", + -12.455986022949219 + ], + [ + "fashioned", + -12.456040382385254 + ], + [ + "▁postal", + -12.456141471862793 + ], + [ + "ouvrir", + -12.45615291595459 + ], + [ + "▁seminars", + -12.45622444152832 + ], + [ + "ioase", + -12.456232070922852 + ], + [ + "▁Stanley", + -12.456260681152344 + ], + [ + "Various", + -12.456335067749023 + ], + [ + "essentiel", + -12.45650577545166 + ], + [ + "▁administered", + -12.456693649291992 + ], + [ + "▁concession", + -12.456748008728027 + ], + [ + "▁mould", + -12.456789016723633 + ], + [ + "▁strongest", + -12.456826210021973 + ], + [ + "Erlebnis", + -12.456933975219727 + ], + [ + "▁ehemalige", + -12.456933975219727 + ], + [ + "▁Tale", + -12.457234382629395 + ], + [ + "▁Buyer", + -12.457353591918945 + ], + [ + "ück", + -12.457578659057617 + ], + [ + "▁Kommentar", + -12.457720756530762 + ], + [ + "▁Schrift", + -12.457756996154785 + ], + [ + "Design", + -12.457792282104492 + ], + [ + "▁stirring", + -12.457937240600586 + ], + [ + "▁towels", + -12.457987785339355 + ], + [ + "▁$30", + -12.458101272583008 + ], + [ + "sprache", + -12.458279609680176 + ], + [ + "▁Regierung", + -12.458346366882324 + ], + [ + "▁nachhaltig", + -12.458406448364258 + ], + [ + "▁électronique", + -12.458515167236328 + ], + [ + "▁Andrei", + -12.458587646484375 + ], + [ + "because", + -12.458647727966309 + ], + [ + "informatique", + -12.458650588989258 + ], + [ + "IGHT", + -12.4586820602417 + ], + [ + "stepping", + -12.4586820602417 + ], + [ + "▁gris", + -12.458748817443848 + ], + [ + "vious", + -12.458773612976074 + ], + [ + "▁upside", + -12.4591064453125 + ], + [ + "▁Examples", + -12.459108352661133 + ], + [ + "IU", + -12.459110260009766 + ], + [ + "▁princess", + -12.459111213684082 + ], + [ + "spielen", + -12.45921516418457 + ], + [ + "legung", + -12.45950984954834 + ], 
+ [ + "▁reflecting", + -12.4597806930542 + ], + [ + "▁Processing", + -12.459939002990723 + ], + [ + "▁jungle", + -12.460033416748047 + ], + [ + "▁insects", + -12.46006965637207 + ], + [ + "▁Sibiu", + -12.460220336914062 + ], + [ + "160", + -12.460259437561035 + ], + [ + "▁interessante", + -12.460267066955566 + ], + [ + "▁multimedia", + -12.460455894470215 + ], + [ + "essel", + -12.46049690246582 + ], + [ + "/18", + -12.460647583007812 + ], + [ + "nière", + -12.460683822631836 + ], + [ + "ministru", + -12.46072006225586 + ], + [ + "▁implants", + -12.460826873779297 + ], + [ + "▁Settings", + -12.461360931396484 + ], + [ + "▁invaluable", + -12.461432456970215 + ], + [ + "stains", + -12.461448669433594 + ], + [ + "onym", + -12.461518287658691 + ], + [ + "▁searched", + -12.461570739746094 + ], + [ + "▁disappointment", + -12.461628913879395 + ], + [ + "▁Iranian", + -12.461630821228027 + ], + [ + "▁questionnaire", + -12.461630821228027 + ], + [ + "Founder", + -12.46178913116455 + ], + [ + "▁Bericht", + -12.461792945861816 + ], + [ + "▁youngest", + -12.461896896362305 + ], + [ + "▁Automatic", + -12.461956024169922 + ], + [ + "▁plecat", + -12.46203327178955 + ], + [ + "geber", + -12.462119102478027 + ], + [ + "soweit", + -12.462124824523926 + ], + [ + "▁unfold", + -12.462236404418945 + ], + [ + "▁befinden", + -12.462274551391602 + ], + [ + "▁susţin", + -12.462637901306152 + ], + [ + "▁Mack", + -12.462675094604492 + ], + [ + "▁dificil", + -12.462757110595703 + ], + [ + "enseigne", + -12.463038444519043 + ], + [ + "▁vitamine", + -12.463047981262207 + ], + [ + "▁Memory", + -12.463092803955078 + ], + [ + "ripping", + -12.463129043579102 + ], + [ + "drin", + -12.463146209716797 + ], + [ + "3.2", + -12.463278770446777 + ], + [ + "▁verstehen", + -12.463287353515625 + ], + [ + "▁scaun", + -12.46341323852539 + ], + [ + "▁procédure", + -12.46380615234375 + ], + [ + "▁molecules", + -12.463911056518555 + ], + [ + "▁Anzahl", + -12.46391487121582 + ], + [ + "▁yogurt", + -12.464071273803711 + ], + [ + "▁Dominic", + -12.464113235473633 + ], + [ + "▁shocked", + -12.464156150817871 + ], + [ + "▁zilei", + -12.464269638061523 + ], + [ + "▁Heiz", + -12.464412689208984 + ], + [ + "▁Educational", + -12.464571952819824 + ], + [ + "BN", + -12.464577674865723 + ], + [ + "analyzing", + -12.464601516723633 + ], + [ + "hair", + -12.464676856994629 + ], + [ + "spiegel", + -12.464871406555176 + ], + [ + "▁illusion", + -12.464889526367188 + ], + [ + "BG", + -12.46505355834961 + ], + [ + "deductible", + -12.46513557434082 + ], + [ + "▁adj", + -12.4651460647583 + ], + [ + "▁accessory", + -12.465166091918945 + ], + [ + "▁Draw", + -12.465167999267578 + ], + [ + "▁airlines", + -12.46518611907959 + ], + [ + "▁satisfai", + -12.46536636352539 + ], + [ + "▁architects", + -12.465447425842285 + ], + [ + "istische", + -12.465508460998535 + ], + [ + "▁Healthy", + -12.465539932250977 + ], + [ + "großer", + -12.465669631958008 + ], + [ + "▁comunicare", + -12.465764999389648 + ], + [ + "▁Meyer", + -12.46577262878418 + ], + [ + "▁reproduction", + -12.465882301330566 + ], + [ + "▁Manufacturing", + -12.465929985046387 + ], + [ + "immobilier", + -12.465930938720703 + ], + [ + "▁Unterschied", + -12.465958595275879 + ], + [ + "▁cumpara", + -12.466029167175293 + ], + [ + "▁duplicate", + -12.466094017028809 + ], + [ + "▁(16", + -12.466096878051758 + ], + [ + "▁detector", + -12.466279983520508 + ], + [ + "▁observat", + -12.466387748718262 + ], + [ + "▁1965", + -12.466682434082031 + ], + [ + "▁Fantasy", + -12.466728210449219 + ], + [ + "▁brauchen", + 
-12.466728210449219 + ], + [ + "▁Participants", + -12.466780662536621 + ], + [ + "▁décide", + -12.466817855834961 + ], + [ + "▁kicke", + -12.466819763183594 + ], + [ + "▁SSL", + -12.466885566711426 + ], + [ + "360", + -12.466989517211914 + ], + [ + "Anim", + -12.467019081115723 + ], + [ + "▁cupcake", + -12.467031478881836 + ], + [ + "▁Lamb", + -12.467107772827148 + ], + [ + "▁Sä", + -12.467155456542969 + ], + [ + "ntă", + -12.46738052368164 + ], + [ + "▁Pig", + -12.467421531677246 + ], + [ + "1,000", + -12.467677116394043 + ], + [ + "nhof", + -12.467782020568848 + ], + [ + "▁discret", + -12.467947959899902 + ], + [ + "▁deloc", + -12.467991828918457 + ], + [ + "▁Bücher", + -12.467999458312988 + ], + [ + "chor", + -12.468042373657227 + ], + [ + "course", + -12.468070030212402 + ], + [ + "▁cough", + -12.468076705932617 + ], + [ + "▁erstellt", + -12.468087196350098 + ], + [ + "▁Than", + -12.468097686767578 + ], + [ + "stätte", + -12.46812915802002 + ], + [ + "▁exceptionally", + -12.468162536621094 + ], + [ + "▁semnal", + -12.468186378479004 + ], + [ + "▁Interessen", + -12.468329429626465 + ], + [ + "ле", + -12.468356132507324 + ], + [ + "xx", + -12.468402862548828 + ], + [ + "▁Veterans", + -12.468422889709473 + ], + [ + "▁Kreuz", + -12.468683242797852 + ], + [ + "▁Nachricht", + -12.468701362609863 + ], + [ + "treated", + -12.468894004821777 + ], + [ + "▁tide", + -12.469230651855469 + ], + [ + "▁nonetheless", + -12.469390869140625 + ], + [ + "▁Subject", + -12.469439506530762 + ], + [ + "▁Stau", + -12.469440460205078 + ], + [ + "▁stickers", + -12.469463348388672 + ], + [ + "Alp", + -12.46950912475586 + ], + [ + "▁flagship", + -12.469541549682617 + ], + [ + "▁trimite", + -12.469619750976562 + ], + [ + "▁polyester", + -12.469664573669434 + ], + [ + "▁locui", + -12.469671249389648 + ], + [ + "▁chili", + -12.46968936920166 + ], + [ + "▁Browser", + -12.469808578491211 + ], + [ + "sieg", + -12.469809532165527 + ], + [ + "▁Arabic", + -12.469876289367676 + ], + [ + "blich", + -12.47001838684082 + ], + [ + "▁wunderbar", + -12.470090866088867 + ], + [ + "▁furnishings", + -12.470210075378418 + ], + [ + "rtie", + -12.470243453979492 + ], + [ + "8.5", + -12.470742225646973 + ], + [ + "▁Sponsor", + -12.471016883850098 + ], + [ + "▁glitter", + -12.471280097961426 + ], + [ + "▁piaț", + -12.471402168273926 + ], + [ + "▁interviewed", + -12.471519470214844 + ], + [ + "▁Statistics", + -12.471529006958008 + ], + [ + "▁cerc", + -12.47154712677002 + ], + [ + "augmentation", + -12.47155475616455 + ], + [ + "▁Navi", + -12.471558570861816 + ], + [ + "▁Begriff", + -12.47156047821045 + ], + [ + "▁știu", + -12.471596717834473 + ], + [ + "▁unabhängig", + -12.471778869628906 + ], + [ + "▁könnten", + -12.471978187561035 + ], + [ + "▁travaille", + -12.472000122070312 + ], + [ + "▁companie", + -12.472027778625488 + ], + [ + "▁Scientific", + -12.472061157226562 + ], + [ + "▁Outlook", + -12.472091674804688 + ], + [ + "▁fairy", + -12.472158432006836 + ], + [ + "zam", + -12.472282409667969 + ], + [ + "bak", + -12.472448348999023 + ], + [ + "▁Traffic", + -12.472596168518066 + ], + [ + "gerät", + -12.472671508789062 + ], + [ + "▁freezing", + -12.472701072692871 + ], + [ + "▁broadband", + -12.4727201461792 + ], + [ + "110", + -12.47279167175293 + ], + [ + "▁revenu", + -12.472887992858887 + ], + [ + "listed", + -12.472900390625 + ], + [ + "▁Rico", + -12.472941398620605 + ], + [ + "Laure", + -12.472990036010742 + ], + [ + "ATA", + -12.473112106323242 + ], + [ + "▁participer", + -12.47313117980957 + ], + [ + "▁sponsorship", + 
-12.473235130310059 + ], + [ + "▁distress", + -12.473286628723145 + ], + [ + "▁Brisbane", + -12.47339916229248 + ], + [ + "schönen", + -12.473437309265137 + ], + [ + "▁fizice", + -12.473465919494629 + ], + [ + "▁Political", + -12.47362232208252 + ], + [ + "uhr", + -12.473657608032227 + ], + [ + "▁procedura", + -12.473713874816895 + ], + [ + "▁hervor", + -12.473770141601562 + ], + [ + "melted", + -12.473776817321777 + ], + [ + "▁Emp", + -12.47384262084961 + ], + [ + "▁Ernährung", + -12.4739351272583 + ], + [ + "▁Pendant", + -12.473944664001465 + ], + [ + "▁recipients", + -12.474047660827637 + ], + [ + "Claude", + -12.474133491516113 + ], + [ + "▁regimen", + -12.47415828704834 + ], + [ + "expo", + -12.474346160888672 + ], + [ + "adevăr", + -12.47437858581543 + ], + [ + "▁critically", + -12.474440574645996 + ], + [ + "▁grabbe", + -12.474468231201172 + ], + [ + "▁Kann", + -12.474474906921387 + ], + [ + "▁directeur", + -12.474613189697266 + ], + [ + "gator", + -12.474908828735352 + ], + [ + "problem", + -12.474910736083984 + ], + [ + "scribe", + -12.474913597106934 + ], + [ + "▁exig", + -12.474920272827148 + ], + [ + "Tri", + -12.474969863891602 + ], + [ + "▁aqua", + -12.475631713867188 + ], + [ + "appréci", + -12.47569465637207 + ], + [ + "▁viaţă", + -12.47571849822998 + ], + [ + "▁dominate", + -12.475865364074707 + ], + [ + "disc", + -12.475889205932617 + ], + [ + "▁conseiller", + -12.47603988647461 + ], + [ + "▁shuttle", + -12.476180076599121 + ], + [ + "▁Status", + -12.47623062133789 + ], + [ + "▁ausreichend", + -12.476371765136719 + ], + [ + "▁spät", + -12.476411819458008 + ], + [ + "▁remainder", + -12.476417541503906 + ], + [ + "wett", + -12.476430892944336 + ], + [ + "schlossen", + -12.476491928100586 + ], + [ + "PAC", + -12.476505279541016 + ], + [ + "▁suprafata", + -12.476617813110352 + ], + [ + "5.000", + -12.476673126220703 + ], + [ + "supplying", + -12.47673225402832 + ], + [ + "▁uniquely", + -12.476905822753906 + ], + [ + "▁retard", + -12.476929664611816 + ], + [ + "▁Bang", + -12.477006912231445 + ], + [ + "ieuse", + -12.477087020874023 + ], + [ + "▁Ted", + -12.477248191833496 + ], + [ + "▁ermöglichen", + -12.47732925415039 + ], + [ + "▁builders", + -12.477380752563477 + ], + [ + "▁proximité", + -12.477423667907715 + ], + [ + "▁unforgettable", + -12.477423667907715 + ], + [ + "256", + -12.477446556091309 + ], + [ + "fähigkeit", + -12.477550506591797 + ], + [ + "▁procurement", + -12.477561950683594 + ], + [ + "▁Gewicht", + -12.477693557739258 + ], + [ + "▁potentiel", + -12.47778606414795 + ], + [ + "▁topping", + -12.478300094604492 + ], + [ + "▁canada", + -12.478304862976074 + ], + [ + "▁Destin", + -12.478355407714844 + ], + [ + "▁Knowing", + -12.478411674499512 + ], + [ + "▁retained", + -12.478426933288574 + ], + [ + "▁zinc", + -12.478470802307129 + ], + [ + "▁worrying", + -12.478655815124512 + ], + [ + "faţa", + -12.478676795959473 + ], + [ + "▁initi", + -12.478837966918945 + ], + [ + "ORI", + -12.4788818359375 + ], + [ + "▁refuz", + -12.478921890258789 + ], + [ + "bruch", + -12.479202270507812 + ], + [ + "▁impun", + -12.479233741760254 + ], + [ + "▁persoană", + -12.479308128356934 + ], + [ + "EAR", + -12.479347229003906 + ], + [ + "bedarf", + -12.479368209838867 + ], + [ + "▁Gebiet", + -12.47940731048584 + ], + [ + "▁Roof", + -12.479436874389648 + ], + [ + "▁negligence", + -12.47957706451416 + ], + [ + "security", + -12.479618072509766 + ], + [ + "▁accesorii", + -12.479641914367676 + ], + [ + "▁unclear", + -12.479667663574219 + ], + [ + "▁securitate", + -12.479848861694336 + ], + 
[ + "▁spotlight", + -12.479896545410156 + ], + [ + "▁speziell", + -12.479923248291016 + ], + [ + "▁mentally", + -12.479942321777344 + ], + [ + "▁preservation", + -12.48011589050293 + ], + [ + "▁Promotion", + -12.480156898498535 + ], + [ + "partnered", + -12.480274200439453 + ], + [ + "▁Hinter", + -12.48031997680664 + ], + [ + "▁punishment", + -12.480359077453613 + ], + [ + "▁grease", + -12.480713844299316 + ], + [ + "▁NW", + -12.480714797973633 + ], + [ + "▁curse", + -12.480897903442383 + ], + [ + "ckle", + -12.48101806640625 + ], + [ + "▁Hire", + -12.481043815612793 + ], + [ + "▁Whole", + -12.481088638305664 + ], + [ + "▁basse", + -12.481289863586426 + ], + [ + "▁DNS", + -12.481427192687988 + ], + [ + "flamm", + -12.481560707092285 + ], + [ + "▁scoop", + -12.481574058532715 + ], + [ + "Norm", + -12.481663703918457 + ], + [ + "▁Surgery", + -12.481735229492188 + ], + [ + "▁widget", + -12.481741905212402 + ], + [ + "connected", + -12.481863021850586 + ], + [ + "autorité", + -12.481961250305176 + ], + [ + "▁utilis", + -12.482096672058105 + ], + [ + "▁formă", + -12.482185363769531 + ], + [ + "▁clearing", + -12.482307434082031 + ], + [ + "▁jumătate", + -12.482815742492676 + ], + [ + "größe", + -12.482831954956055 + ], + [ + "▁Tief", + -12.482852935791016 + ], + [ + "épi", + -12.482939720153809 + ], + [ + "zunehmen", + -12.483174324035645 + ], + [ + "▁touchdown", + -12.48318099975586 + ], + [ + "▁scholarships", + -12.483236312866211 + ], + [ + "▁dementia", + -12.483319282531738 + ], + [ + "▁Jeder", + -12.48333740234375 + ], + [ + "▁nightmare", + -12.483379364013672 + ], + [ + "▁Raw", + -12.48342514038086 + ], + [ + "absorbed", + -12.483468055725098 + ], + [ + "lohnt", + -12.483484268188477 + ], + [ + "quent", + -12.483580589294434 + ], + [ + "interest", + -12.483626365661621 + ], + [ + "OSS", + -12.483649253845215 + ], + [ + "▁Leaf", + -12.483667373657227 + ], + [ + "▁timeless", + -12.48381519317627 + ], + [ + "DY", + -12.483865737915039 + ], + [ + "▁Remote", + -12.483907699584961 + ], + [ + "chner", + -12.483938217163086 + ], + [ + "▁Pam", + -12.484014511108398 + ], + [ + "urban", + -12.484060287475586 + ], + [ + "во", + -12.484146118164062 + ], + [ + "▁Kunde", + -12.484166145324707 + ], + [ + "▁Laptop", + -12.484169006347656 + ], + [ + "finder", + -12.484336853027344 + ], + [ + "▁Pole", + -12.484567642211914 + ], + [ + "2.8", + -12.484588623046875 + ], + [ + "finished", + -12.484670639038086 + ], + [ + "▁prophet", + -12.484697341918945 + ], + [ + "mailed", + -12.484758377075195 + ], + [ + "2-0", + -12.4849214553833 + ], + [ + "▁disciples", + -12.484949111938477 + ], + [ + "▁intriguing", + -12.484980583190918 + ], + [ + "IRA", + -12.485033988952637 + ], + [ + "petit", + -12.485077857971191 + ], + [ + "▁Membership", + -12.485097885131836 + ], + [ + "▁provincial", + -12.485177040100098 + ], + [ + "▁Prüfung", + -12.485292434692383 + ], + [ + "-50", + -12.485450744628906 + ], + [ + "▁cryptocurrency", + -12.485522270202637 + ], + [ + "▁journalism", + -12.485536575317383 + ], + [ + "▁Downtown", + -12.485593795776367 + ], + [ + "inserted", + -12.485655784606934 + ], + [ + "▁Direction", + -12.485718727111816 + ], + [ + "lipid", + -12.485732078552246 + ], + [ + "▁Sebastian", + -12.485793113708496 + ], + [ + "fordert", + -12.48591136932373 + ], + [ + "Originally", + -12.485989570617676 + ], + [ + "tipp", + -12.486048698425293 + ], + [ + "verantwortlich", + -12.486064910888672 + ], + [ + "▁wheelchair", + -12.486085891723633 + ], + [ + "▁structura", + -12.48609733581543 + ], + [ + "▁Danny", + 
-12.486138343811035 + ], + [ + "999", + -12.486284255981445 + ], + [ + "▁Schiff", + -12.486380577087402 + ], + [ + "formally", + -12.486408233642578 + ], + [ + "focused", + -12.486428260803223 + ], + [ + "▁Vater", + -12.486478805541992 + ], + [ + "▁Dear", + -12.486599922180176 + ], + [ + "▁reinforce", + -12.486794471740723 + ], + [ + "proprietar", + -12.48690414428711 + ], + [ + "▁Kyle", + -12.487004280090332 + ], + [ + "În", + -12.487015724182129 + ], + [ + "▁servir", + -12.487268447875977 + ], + [ + "length", + -12.48730754852295 + ], + [ + "▁showroom", + -12.48735237121582 + ], + [ + "reli", + -12.487473487854004 + ], + [ + "▁Brü", + -12.487529754638672 + ], + [ + "▁Schle", + -12.487634658813477 + ], + [ + "▁profond", + -12.487773895263672 + ], + [ + "▁Superior", + -12.487826347351074 + ], + [ + "▁lifted", + -12.487844467163086 + ], + [ + "highlighting", + -12.487850189208984 + ], + [ + "▁Connection", + -12.48793888092041 + ], + [ + "▁similarly", + -12.487998962402344 + ], + [ + "▁diferit", + -12.488005638122559 + ], + [ + "▁sweater", + -12.488014221191406 + ], + [ + "État", + -12.48803997039795 + ], + [ + "rooted", + -12.488069534301758 + ], + [ + "▁sleeves", + -12.488236427307129 + ], + [ + "де", + -12.488264083862305 + ], + [ + "▁Laboratory", + -12.488265991210938 + ], + [ + "ündig", + -12.488719940185547 + ], + [ + "▁Viking", + -12.488741874694824 + ], + [ + "▁Origin", + -12.48878002166748 + ], + [ + "▁vibr", + -12.488812446594238 + ], + [ + "199", + -12.488974571228027 + ], + [ + "▁yummy", + -12.489001274108887 + ], + [ + "STAR", + -12.489140510559082 + ], + [ + "▁repro", + -12.489152908325195 + ], + [ + "▁Kirchen", + -12.489229202270508 + ], + [ + "hopper", + -12.48925495147705 + ], + [ + "zza", + -12.489335060119629 + ], + [ + "▁vitesse", + -12.48934555053711 + ], + [ + "▁minimalist", + -12.489412307739258 + ], + [ + "▁Election", + -12.489420890808105 + ], + [ + "draw", + -12.489501953125 + ], + [ + "▁candles", + -12.48959732055664 + ], + [ + "▁Mund", + -12.489615440368652 + ], + [ + "urged", + -12.489901542663574 + ], + [ + "▁cânt", + -12.489917755126953 + ], + [ + "Ultimately", + -12.49002742767334 + ], + [ + "▁Lift", + -12.490124702453613 + ], + [ + "loaded", + -12.490334510803223 + ], + [ + "demand", + -12.490508079528809 + ], + [ + "▁aleg", + -12.490621566772461 + ], + [ + "▁Discovery", + -12.490755081176758 + ], + [ + "▁Vienna", + -12.490960121154785 + ], + [ + "▁Kategorie", + -12.490961074829102 + ], + [ + "▁Cotton", + -12.490962028503418 + ], + [ + "▁$200", + -12.491043090820312 + ], + [ + "▁Drei", + -12.491052627563477 + ], + [ + "▁reicht", + -12.491168975830078 + ], + [ + "speicher", + -12.491231918334961 + ], + [ + "▁Immobilien", + -12.491483688354492 + ], + [ + "gefühl", + -12.491509437561035 + ], + [ + "make", + -12.491525650024414 + ], + [ + "pell", + -12.49155044555664 + ], + [ + "▁dull", + -12.491598129272461 + ], + [ + "▁arbeitet", + -12.491681098937988 + ], + [ + "retaining", + -12.491700172424316 + ], + [ + "losen", + -12.491707801818848 + ], + [ + "match", + -12.491876602172852 + ], + [ + "-60", + -12.491880416870117 + ], + [ + "▁ecological", + -12.492000579833984 + ], + [ + "▁vend", + -12.492051124572754 + ], + [ + "▁grammar", + -12.492061614990234 + ], + [ + "▁1:1", + -12.492225646972656 + ], + [ + "grilled", + -12.492279052734375 + ], + [ + "geordnet", + -12.492321014404297 + ], + [ + "▁Pav", + -12.49236011505127 + ], + [ + "▁Depot", + -12.492368698120117 + ], + [ + "▁Walking", + -12.492372512817383 + ], + [ + "teamed", + -12.492402076721191 + ], + [ + 
"▁torque", + -12.492537498474121 + ], + [ + "▁Venture", + -12.492659568786621 + ], + [ + "▁beginner", + -12.49269962310791 + ], + [ + "▁Monaten", + -12.492712020874023 + ], + [ + "▁Pune", + -12.493054389953613 + ], + [ + "connect", + -12.493075370788574 + ], + [ + "▁textbook", + -12.493132591247559 + ], + [ + "▁unprecedented", + -12.49314022064209 + ], + [ + "▁implied", + -12.493168830871582 + ], + [ + "▁cubic", + -12.493668556213379 + ], + [ + "enthält", + -12.493696212768555 + ], + [ + "▁Brenn", + -12.49388313293457 + ], + [ + "▁Expect", + -12.49394416809082 + ], + [ + "▁lever", + -12.4939603805542 + ], + [ + "veux", + -12.49399185180664 + ], + [ + "▁Claire", + -12.494112968444824 + ], + [ + "Acc", + -12.49432373046875 + ], + [ + "▁Typ", + -12.494478225708008 + ], + [ + "▁smoothie", + -12.494501113891602 + ], + [ + "▁Idaho", + -12.494780540466309 + ], + [ + "▁spati", + -12.494802474975586 + ], + [ + "▁bénéficier", + -12.49488353729248 + ], + [ + "▁Kle", + -12.495161056518555 + ], + [ + "▁serviciilor", + -12.495169639587402 + ], + [ + "▁prohibit", + -12.495267868041992 + ], + [ + "EAD", + -12.495417594909668 + ], + [ + "▁Turner", + -12.495418548583984 + ], + [ + "▁elibera", + -12.49543571472168 + ], + [ + "▁payday", + -12.495464324951172 + ], + [ + "▁prolong", + -12.495466232299805 + ], + [ + "▁sued", + -12.495481491088867 + ], + [ + "▁Devil", + -12.495536804199219 + ], + [ + "▁Skills", + -12.495552062988281 + ], + [ + "▁Marcel", + -12.495553970336914 + ], + [ + "▁silhouette", + -12.495601654052734 + ], + [ + "▁preț", + -12.495742797851562 + ], + [ + "▁Gö", + -12.495747566223145 + ], + [ + "▁Creator", + -12.495774269104004 + ], + [ + "fed", + -12.4959077835083 + ], + [ + "Cap", + -12.495997428894043 + ], + [ + "▁dedicate", + -12.496042251586914 + ], + [ + "0000", + -12.496124267578125 + ], + [ + "▁VAT", + -12.496259689331055 + ], + [ + "▁Firefox", + -12.496443748474121 + ], + [ + "▁therapies", + -12.496477127075195 + ], + [ + "▁screws", + -12.496662139892578 + ], + [ + "▁Province", + -12.496697425842285 + ], + [ + "▁problematic", + -12.496871948242188 + ], + [ + "▁Vid", + -12.496915817260742 + ], + [ + "▁Lost", + -12.496950149536133 + ], + [ + "▁elegance", + -12.497520446777344 + ], + [ + "▁Elegant", + -12.497525215148926 + ], + [ + "ignant", + -12.497573852539062 + ], + [ + "▁darin", + -12.497649192810059 + ], + [ + "▁anonym", + -12.497669219970703 + ], + [ + "▁vegeta", + -12.49767780303955 + ], + [ + "incoming", + -12.497762680053711 + ], + [ + "▁pills", + -12.497846603393555 + ], + [ + "governing", + -12.497893333435059 + ], + [ + "▁Haven", + -12.497920989990234 + ], + [ + "paper", + -12.497947692871094 + ], + [ + "räume", + -12.497979164123535 + ], + [ + "paw", + -12.498099327087402 + ], + [ + "▁spelling", + -12.498283386230469 + ], + [ + "ambele", + -12.498318672180176 + ], + [ + "▁reprezentat", + -12.498371124267578 + ], + [ + "▁mâ", + -12.49853515625 + ], + [ + "wirtschaftliche", + -12.498558044433594 + ], + [ + "▁valabil", + -12.498579025268555 + ], + [ + "▁konkret", + -12.498618125915527 + ], + [ + "▁financier", + -12.498619079589844 + ], + [ + "▁irre", + -12.499135971069336 + ], + [ + "▁Silicon", + -12.499171257019043 + ], + [ + "Viv", + -12.499181747436523 + ], + [ + "▁viruses", + -12.49927043914795 + ], + [ + "▁CNN", + -12.499324798583984 + ], + [ + "▁erleben", + -12.499482154846191 + ], + [ + "gina", + -12.499492645263672 + ], + [ + "punctul", + -12.49951457977295 + ], + [ + "▁Sfânt", + -12.499753952026367 + ], + [ + "▁Manage", + -12.499811172485352 + ], + [ + "▁payable", + 
-12.499984741210938 + ], + [ + "▁practitioner", + -12.500143051147461 + ], + [ + "▁conférence", + -12.50026798248291 + ], + [ + "▁drought", + -12.50027084350586 + ], + [ + "▁devote", + -12.500361442565918 + ], + [ + "wertung", + -12.500420570373535 + ], + [ + "stabil", + -12.5004301071167 + ], + [ + "▁balcon", + -12.500553131103516 + ], + [ + "▁Lebensmittel", + -12.500603675842285 + ], + [ + "COL", + -12.500950813293457 + ], + [ + "▁Domnul", + -12.501093864440918 + ], + [ + "carved", + -12.501359939575195 + ], + [ + "▁preparat", + -12.5014009475708 + ], + [ + "101", + -12.501537322998047 + ], + [ + "▁specimen", + -12.501580238342285 + ], + [ + "urgeon", + -12.501596450805664 + ], + [ + "LIC", + -12.50163459777832 + ], + [ + "Plattform", + -12.501643180847168 + ], + [ + "▁ramas", + -12.501739501953125 + ], + [ + "▁copilului", + -12.501791954040527 + ], + [ + "bacter", + -12.501812934875488 + ], + [ + "körper", + -12.501940727233887 + ], + [ + "▁Kru", + -12.501981735229492 + ], + [ + "▁Employ", + -12.502055168151855 + ], + [ + "office", + -12.502080917358398 + ], + [ + "▁simmer", + -12.502120018005371 + ], + [ + "qualität", + -12.502137184143066 + ], + [ + "▁freshly", + -12.502215385437012 + ], + [ + "▁Nine", + -12.50223159790039 + ], + [ + "▁tonnes", + -12.50223445892334 + ], + [ + "boden", + -12.502236366271973 + ], + [ + "enquête", + -12.50240707397461 + ], + [ + "▁Colour", + -12.502481460571289 + ], + [ + "▁Diagram", + -12.502495765686035 + ], + [ + "▁gewählt", + -12.502516746520996 + ], + [ + "▁viitoare", + -12.502538681030273 + ], + [ + "▁reporters", + -12.502913475036621 + ], + [ + "guer", + -12.502991676330566 + ], + [ + "▁Kombination", + -12.503021240234375 + ], + [ + "▁qualitative", + -12.50302505493164 + ], + [ + "Centrul", + -12.503131866455078 + ], + [ + "avy", + -12.503170013427734 + ], + [ + "▁Eng", + -12.503175735473633 + ], + [ + "▁sufletul", + -12.50327205657959 + ], + [ + "▁germ", + -12.503412246704102 + ], + [ + "▁prevented", + -12.503448486328125 + ], + [ + "appelle", + -12.503533363342285 + ], + [ + "gins", + -12.503556251525879 + ], + [ + "▁Skype", + -12.503585815429688 + ], + [ + "conditioned", + -12.503617286682129 + ], + [ + "▁clutch", + -12.503641128540039 + ], + [ + "environ", + -12.503694534301758 + ], + [ + "3.3", + -12.503774642944336 + ], + [ + "▁webinar", + -12.503866195678711 + ], + [ + "▁forty", + -12.504104614257812 + ], + [ + "▁Medicaid", + -12.504127502441406 + ], + [ + "▁dismissed", + -12.504167556762695 + ], + [ + "▁siblings", + -12.504168510437012 + ], + [ + "▁Jaw", + -12.504196166992188 + ], + [ + "guiding", + -12.504220962524414 + ], + [ + "cigarette", + -12.504374504089355 + ], + [ + "▁Shah", + -12.504681587219238 + ], + [ + "▁Lehrer", + -12.504684448242188 + ], + [ + "▁muscular", + -12.504694938659668 + ], + [ + "spatele", + -12.504796981811523 + ], + [ + "▁réduction", + -12.504836082458496 + ], + [ + "▁fixes", + -12.504851341247559 + ], + [ + "Span", + -12.50511646270752 + ], + [ + "▁Hudson", + -12.505231857299805 + ], + [ + "development", + -12.505250930786133 + ], + [ + "▁excluded", + -12.50525951385498 + ], + [ + "Democrat", + -12.505260467529297 + ], + [ + "▁nominal", + -12.505317687988281 + ], + [ + "purpose", + -12.50540828704834 + ], + [ + "▁bored", + -12.505500793457031 + ], + [ + "espèce", + -12.50550651550293 + ], + [ + "▁(30", + -12.5055570602417 + ], + [ + "Neither", + -12.505608558654785 + ], + [ + "hänge", + -12.505610466003418 + ], + [ + "square", + -12.505728721618652 + ], + [ + "voller", + -12.505736351013184 + ], + [ + 
"▁pertinent", + -12.505783081054688 + ], + [ + "▁Wool", + -12.50595474243164 + ], + [ + "settling", + -12.50607681274414 + ], + [ + "fangen", + -12.506148338317871 + ], + [ + "▁Testing", + -12.506152153015137 + ], + [ + "distin", + -12.506196022033691 + ], + [ + "▁Marken", + -12.506227493286133 + ], + [ + "▁Beta", + -12.506300926208496 + ], + [ + "▁fulfilling", + -12.506339073181152 + ], + [ + "Leider", + -12.506357192993164 + ], + [ + "black", + -12.506389617919922 + ], + [ + "occupe", + -12.50658893585205 + ], + [ + "itățile", + -12.506688117980957 + ], + [ + "Pay", + -12.506887435913086 + ], + [ + "▁bandwidth", + -12.506890296936035 + ], + [ + "▁neighbourhood", + -12.506918907165527 + ], + [ + "▁Gutschein", + -12.506922721862793 + ], + [ + "degree", + -12.507055282592773 + ], + [ + "ivité", + -12.507116317749023 + ], + [ + "4.1", + -12.507169723510742 + ], + [ + "▁tätig", + -12.507170677185059 + ], + [ + "topic", + -12.507242202758789 + ], + [ + "ätz", + -12.507243156433105 + ], + [ + "these", + -12.50733470916748 + ], + [ + "▁propriété", + -12.507438659667969 + ], + [ + "▁innings", + -12.507458686828613 + ], + [ + "▁Prevention", + -12.50754165649414 + ], + [ + "▁Saw", + -12.507585525512695 + ], + [ + "▁opener", + -12.507752418518066 + ], + [ + "entwicklung", + -12.507824897766113 + ], + [ + "▁Johann", + -12.507865905761719 + ], + [ + "▁statistic", + -12.507881164550781 + ], + [ + "oids", + -12.507966995239258 + ], + [ + "▁Delaware", + -12.508000373840332 + ], + [ + "▁Isle", + -12.508001327514648 + ], + [ + "▁accompagn", + -12.508028984069824 + ], + [ + "▁Risiko", + -12.508079528808594 + ], + [ + "▁Conform", + -12.508268356323242 + ], + [ + "zeichnen", + -12.508395195007324 + ], + [ + "▁acuz", + -12.508479118347168 + ], + [ + "▁Mort", + -12.508524894714355 + ], + [ + "Fällen", + -12.50853157043457 + ], + [ + "▁blended", + -12.50871467590332 + ], + [ + "found", + -12.50872802734375 + ], + [ + "▁gestalten", + -12.50874137878418 + ], + [ + "▁Découvrez", + -12.508830070495605 + ], + [ + "▁Wett", + -12.508956909179688 + ], + [ + "▁débat", + -12.508990287780762 + ], + [ + "▁Tire", + -12.509007453918457 + ], + [ + "benz", + -12.509037017822266 + ], + [ + "Yes", + -12.509074211120605 + ], + [ + "▁pierde", + -12.509110450744629 + ], + [ + "▁niciodata", + -12.509121894836426 + ], + [ + "▁precipit", + -12.509145736694336 + ], + [ + "▁lazy", + -12.509334564208984 + ], + [ + "▁creature", + -12.509370803833008 + ], + [ + "Wettbewerb", + -12.509385108947754 + ], + [ + "▁Explo", + -12.509496688842773 + ], + [ + "wolf", + -12.509657859802246 + ], + [ + "▁conséquence", + -12.509662628173828 + ], + [ + "▁jewellery", + -12.509662628173828 + ], + [ + "▁Extension", + -12.509735107421875 + ], + [ + "▁transmitted", + -12.509872436523438 + ], + [ + "▁darker", + -12.509973526000977 + ], + [ + "▁simbol", + -12.510065078735352 + ], + [ + "kim", + -12.510069847106934 + ], + [ + "▁proteja", + -12.510098457336426 + ], + [ + "▁Copper", + -12.510189056396484 + ], + [ + "mitglied", + -12.510218620300293 + ], + [ + "▁explosive", + -12.510222434997559 + ], + [ + "▁Nicolae", + -12.510223388671875 + ], + [ + "▁intricate", + -12.510231971740723 + ], + [ + "lati", + -12.510313034057617 + ], + [ + "Mark", + -12.510334014892578 + ], + [ + "▁Porsche", + -12.510339736938477 + ], + [ + "▁Revenue", + -12.510479927062988 + ], + [ + "4.2", + -12.510613441467285 + ], + [ + "certain", + -12.510836601257324 + ], + [ + "▁Coaching", + -12.510879516601562 + ], + [ + "▁allocated", + -12.510879516601562 + ], + [ + "▁optimiz", + 
-12.511017799377441 + ], + [ + "▁heel", + -12.511205673217773 + ], + [ + "▁indigenous", + -12.511330604553223 + ], + [ + "▁vineri", + -12.511396408081055 + ], + [ + "▁Inspector", + -12.51145076751709 + ], + [ + "▁colleague", + -12.5115327835083 + ], + [ + "ANG", + -12.511649131774902 + ], + [ + "éducation", + -12.511887550354004 + ], + [ + "▁Geschenk", + -12.51188850402832 + ], + [ + "channel", + -12.511899948120117 + ], + [ + "▁trapped", + -12.511954307556152 + ], + [ + "BF", + -12.511974334716797 + ], + [ + "▁firing", + -12.512086868286133 + ], + [ + "▁chlor", + -12.512103080749512 + ], + [ + "▁Carlos", + -12.512115478515625 + ], + [ + "▁proxy", + -12.512128829956055 + ], + [ + "▁pinch", + -12.512167930603027 + ], + [ + "▁Pete", + -12.512201309204102 + ], + [ + "phospho", + -12.512458801269531 + ], + [ + "▁waiver", + -12.51246452331543 + ], + [ + "▁Croatia", + -12.512480735778809 + ], + [ + "▁behave", + -12.51258373260498 + ], + [ + "▁frig", + -12.512676239013672 + ], + [ + "▁Vorteil", + -12.51279067993164 + ], + [ + "▁wichtiger", + -12.512837409973145 + ], + [ + "........", + -12.512929916381836 + ], + [ + "▁flick", + -12.513007164001465 + ], + [ + "▁Stanford", + -12.51306438446045 + ], + [ + "öse", + -12.513096809387207 + ], + [ + "▁Fernseh", + -12.513099670410156 + ], + [ + "▁vélo", + -12.51322078704834 + ], + [ + "reisen", + -12.513304710388184 + ], + [ + "residing", + -12.513504981994629 + ], + [ + "▁Taste", + -12.513580322265625 + ], + [ + "▁disappeared", + -12.513630867004395 + ], + [ + "▁Hood", + -12.513776779174805 + ], + [ + "▁fabriqu", + -12.514046669006348 + ], + [ + "▁Jake", + -12.514470100402832 + ], + [ + "Lastly", + -12.51462173461914 + ], + [ + "▁furnace", + -12.514673233032227 + ], + [ + "▁Ottawa", + -12.51473331451416 + ], + [ + "▁dictate", + -12.514742851257324 + ], + [ + "zece", + -12.514817237854004 + ], + [ + "protect", + -12.514932632446289 + ], + [ + "FU", + -12.51495361328125 + ], + [ + "Stack", + -12.514954566955566 + ], + [ + "▁teilweise", + -12.515018463134766 + ], + [ + "▁Publisher", + -12.51506233215332 + ], + [ + "▁lutte", + -12.515159606933594 + ], + [ + "202", + -12.515178680419922 + ], + [ + "psy", + -12.515190124511719 + ], + [ + "▁wünschen", + -12.515238761901855 + ], + [ + "▁pathways", + -12.515356063842773 + ], + [ + "ivitate", + -12.515559196472168 + ], + [ + "▁continuă", + -12.515658378601074 + ], + [ + "ziemlich", + -12.515791893005371 + ], + [ + "verted", + -12.515812873840332 + ], + [ + "▁sequel", + -12.515839576721191 + ], + [ + "tinct", + -12.51599407196045 + ], + [ + "vette", + -12.516020774841309 + ], + [ + "▁exceeding", + -12.516032218933105 + ], + [ + "▁Yorkshire", + -12.51607608795166 + ], + [ + "▁cleanse", + -12.51613998413086 + ], + [ + "Sadly", + -12.516159057617188 + ], + [ + "▁präsentiert", + -12.516164779663086 + ], + [ + "angled", + -12.516311645507812 + ], + [ + "tude", + -12.516339302062988 + ], + [ + "chain", + -12.516371726989746 + ], + [ + "▁Oakland", + -12.51639175415039 + ], + [ + "xia", + -12.516514778137207 + ], + [ + "▁foremost", + -12.51653003692627 + ], + [ + "▁incomplete", + -12.516786575317383 + ], + [ + "▁restriction", + -12.516905784606934 + ], + [ + "▁whatsoever", + -12.516908645629883 + ], + [ + "▁shipment", + -12.517017364501953 + ], + [ + "**", + -12.517059326171875 + ], + [ + "Aici", + -12.517110824584961 + ], + [ + "PART", + -12.517247200012207 + ], + [ + "▁grams", + -12.517251014709473 + ], + [ + "▁Folk", + -12.517457008361816 + ], + [ + "▁encryption", + -12.517467498779297 + ], + [ + "▁Alfred", + 
-12.517748832702637 + ], + [ + "▁Veränderung", + -12.517749786376953 + ], + [ + "▁privately", + -12.517817497253418 + ], + [ + "£", + -12.517909049987793 + ], + [ + "▁Sonne", + -12.51799201965332 + ], + [ + "kow", + -12.518117904663086 + ], + [ + "▁CBS", + -12.518172264099121 + ], + [ + "▁Feuer", + -12.518198013305664 + ], + [ + "▁crushed", + -12.518230438232422 + ], + [ + "▁cazare", + -12.518270492553711 + ], + [ + "▁beraten", + -12.518401145935059 + ], + [ + "envoi", + -12.518423080444336 + ], + [ + "▁genannt", + -12.51843547821045 + ], + [ + "▁Lok", + -12.518472671508789 + ], + [ + "nox", + -12.518569946289062 + ], + [ + "wishing", + -12.518759727478027 + ], + [ + "▁freak", + -12.518759727478027 + ], + [ + "rasi", + -12.51879596710205 + ], + [ + "▁calculations", + -12.518888473510742 + ], + [ + "▁sprechen", + -12.51890754699707 + ], + [ + "5:00", + -12.519062042236328 + ], + [ + "▁Gam", + -12.519074440002441 + ], + [ + "▁invasion", + -12.519159317016602 + ], + [ + "ZA", + -12.519230842590332 + ], + [ + "aiming", + -12.519327163696289 + ], + [ + "▁näher", + -12.519404411315918 + ], + [ + "▁Maßnahmen", + -12.519433975219727 + ], + [ + "▁măsură", + -12.519490242004395 + ], + [ + "▁Bestellung", + -12.519610404968262 + ], + [ + "▁gown", + -12.519665718078613 + ], + [ + "▁oblige", + -12.519747734069824 + ], + [ + "länder", + -12.51977825164795 + ], + [ + "posi", + -12.519853591918945 + ], + [ + "▁Earn", + -12.51988410949707 + ], + [ + "▁dubl", + -12.51999282836914 + ], + [ + "▁sticky", + -12.520100593566895 + ], + [ + "▁litter", + -12.520181655883789 + ], + [ + "▁Salz", + -12.520257949829102 + ], + [ + "▁Matter", + -12.520272254943848 + ], + [ + "▁Driving", + -12.520275115966797 + ], + [ + "▁pursu", + -12.520285606384277 + ], + [ + "ographer", + -12.520390510559082 + ], + [ + "▁touring", + -12.520400047302246 + ], + [ + "opter", + -12.520444869995117 + ], + [ + "▁fierce", + -12.520475387573242 + ], + [ + "▁Audit", + -12.520480155944824 + ], + [ + "▁imperi", + -12.520755767822266 + ], + [ + "▁positiv", + -12.520780563354492 + ], + [ + "règles", + -12.520849227905273 + ], + [ + "▁bouton", + -12.520990371704102 + ], + [ + "▁victorie", + -12.520990371704102 + ], + [ + "▁manuel", + -12.521015167236328 + ], + [ + "▁await", + -12.52103042602539 + ], + [ + "▁transformer", + -12.521041870117188 + ], + [ + "▁cupboard", + -12.52108383178711 + ], + [ + "▁Hag", + -12.521117210388184 + ], + [ + "naj", + -12.521214485168457 + ], + [ + "▁annoncé", + -12.52139663696289 + ], + [ + "▁scolaire", + -12.521401405334473 + ], + [ + "▁étape", + -12.521482467651367 + ], + [ + "▁pirate", + -12.521761894226074 + ], + [ + "▁Rated", + -12.521794319152832 + ], + [ + "LOT", + -12.521846771240234 + ], + [ + "▁natura", + -12.521944046020508 + ], + [ + "oga", + -12.522336959838867 + ], + [ + "Read", + -12.522388458251953 + ], + [ + "idio", + -12.522444725036621 + ], + [ + "▁recession", + -12.522698402404785 + ], + [ + "veţi", + -12.522761344909668 + ], + [ + "▁blossom", + -12.523082733154297 + ], + [ + "▁lunar", + -12.523141860961914 + ], + [ + "▁inhibit", + -12.52316951751709 + ], + [ + "gemein", + -12.523219108581543 + ], + [ + "▁Historic", + -12.523262023925781 + ], + [ + "▁HTTP", + -12.523370742797852 + ], + [ + "misiune", + -12.5234956741333 + ], + [ + "▁Manda", + -12.523601531982422 + ], + [ + "▁Hurricane", + -12.523643493652344 + ], + [ + "Strat", + -12.523646354675293 + ], + [ + "▁populaire", + -12.523756980895996 + ], + [ + "▁useless", + -12.523762702941895 + ], + [ + "▁Leipzig", + -12.523924827575684 + ], + [ + 
"▁Krankheit", + -12.52392578125 + ], + [ + "▁Bonne", + -12.52397346496582 + ], + [ + "▁tissu", + -12.52399730682373 + ], + [ + "▁Baum", + -12.523998260498047 + ], + [ + "▁BUT", + -12.524152755737305 + ], + [ + "▁Mondial", + -12.52423095703125 + ], + [ + "▁triangle", + -12.524242401123047 + ], + [ + "▁Tesla", + -12.524250984191895 + ], + [ + "▁pământ", + -12.52430534362793 + ], + [ + "▁aminte", + -12.524726867675781 + ], + [ + "▁vehicul", + -12.524770736694336 + ], + [ + "▁cerut", + -12.52482795715332 + ], + [ + "▁respiratory", + -12.524836540222168 + ], + [ + "▁rayon", + -12.524993896484375 + ], + [ + "▁gestaltet", + -12.525067329406738 + ], + [ + "310", + -12.525139808654785 + ], + [ + "pfl", + -12.525239944458008 + ], + [ + "▁shrimp", + -12.525337219238281 + ], + [ + "▁reconnu", + -12.525409698486328 + ], + [ + "ologique", + -12.525476455688477 + ], + [ + "▁unity", + -12.525674819946289 + ], + [ + "Speicher", + -12.52569580078125 + ], + [ + "▁Movement", + -12.525794982910156 + ], + [ + "ddling", + -12.52581787109375 + ], + [ + "OE", + -12.525818824768066 + ], + [ + "▁Resolution", + -12.525863647460938 + ], + [ + "esteem", + -12.525898933410645 + ], + [ + "▁Teen", + -12.526288986206055 + ], + [ + "▁believing", + -12.526463508605957 + ], + [ + "▁Tipps", + -12.526481628417969 + ], + [ + "jpg", + -12.526494026184082 + ], + [ + "▁obs", + -12.526519775390625 + ], + [ + "SHA", + -12.526702880859375 + ], + [ + "▁quietly", + -12.526907920837402 + ], + [ + "setting", + -12.52712345123291 + ], + [ + "▁elevator", + -12.527185440063477 + ], + [ + "phor", + -12.527194023132324 + ], + [ + "Just", + -12.52725887298584 + ], + [ + "▁legatura", + -12.52739143371582 + ], + [ + "elected", + -12.527414321899414 + ], + [ + "▁disclosed", + -12.527419090270996 + ], + [ + "quarter", + -12.52743148803711 + ], + [ + "zzy", + -12.527461051940918 + ], + [ + "▁gata", + -12.527491569519043 + ], + [ + "SAN", + -12.527532577514648 + ], + [ + "▁Cathedral", + -12.527592658996582 + ], + [ + "192", + -12.527656555175781 + ], + [ + "▁RBI", + -12.527726173400879 + ], + [ + "▁Seller", + -12.527798652648926 + ], + [ + "▁urine", + -12.527807235717773 + ], + [ + "▁Hardware", + -12.527966499328613 + ], + [ + "▁steadi", + -12.527993202209473 + ], + [ + "percussion", + -12.528158187866211 + ], + [ + "▁francez", + -12.528172492980957 + ], + [ + "▁rude", + -12.528202056884766 + ], + [ + "bod", + -12.528223037719727 + ], + [ + "cession", + -12.528249740600586 + ], + [ + "▁HTC", + -12.528372764587402 + ], + [ + "HB", + -12.528576850891113 + ], + [ + "▁descent", + -12.528644561767578 + ], + [ + "▁Painting", + -12.528681755065918 + ], + [ + "119", + -12.528684616088867 + ], + [ + "sagen", + -12.52877426147461 + ], + [ + "▁salvation", + -12.52880573272705 + ], + [ + "arro", + -12.528814315795898 + ], + [ + "0.3", + -12.52886962890625 + ], + [ + "▁Duck", + -12.52890396118164 + ], + [ + "Mit", + -12.529052734375 + ], + [ + "да", + -12.52927017211914 + ], + [ + "▁Diesel", + -12.529322624206543 + ], + [ + "▁Medal", + -12.529413223266602 + ], + [ + "▁interim", + -12.529439926147461 + ], + [ + "▁montagne", + -12.529439926147461 + ], + [ + "▁Pixel", + -12.529631614685059 + ], + [ + "LINE", + -12.529806137084961 + ], + [ + "▁dureri", + -12.529938697814941 + ], + [ + "▁Bengal", + -12.529990196228027 + ], + [ + "Legea", + -12.530080795288086 + ], + [ + "▁Strecke", + -12.530094146728516 + ], + [ + "▁schneller", + -12.53012752532959 + ], + [ + "▁Karten", + -12.5301513671875 + ], + [ + "cion", + -12.530241966247559 + ], + [ + "▁Coco", + 
-12.53037166595459 + ], + [ + "troisième", + -12.53052806854248 + ], + [ + "401", + -12.530616760253906 + ], + [ + "▁sandwiches", + -12.530704498291016 + ], + [ + "▁folosind", + -12.530920028686523 + ], + [ + "▁Folgen", + -12.530953407287598 + ], + [ + "▁triumph", + -12.530991554260254 + ], + [ + "▁Hintergrund", + -12.530996322631836 + ], + [ + "▁revelation", + -12.531084060668945 + ], + [ + "ôme", + -12.531222343444824 + ], + [ + "▁Nex", + -12.531245231628418 + ], + [ + "jährigen", + -12.531295776367188 + ], + [ + "▁militant", + -12.531296730041504 + ], + [ + "▁fabricant", + -12.531671524047852 + ], + [ + "iano", + -12.531713485717773 + ], + [ + "▁formulation", + -12.53188705444336 + ], + [ + "integrating", + -12.532050132751465 + ], + [ + "▁Items", + -12.532142639160156 + ], + [ + "▁contractual", + -12.532320976257324 + ], + [ + "AIDS", + -12.532424926757812 + ], + [ + "▁pitcher", + -12.532610893249512 + ], + [ + "▁Snap", + -12.532623291015625 + ], + [ + "▁systematic", + -12.532663345336914 + ], + [ + "▁referendum", + -12.532694816589355 + ], + [ + "gau", + -12.53281021118164 + ], + [ + "administration", + -12.532917022705078 + ], + [ + "▁speci", + -12.532981872558594 + ], + [ + "ieni", + -12.532998085021973 + ], + [ + "prox", + -12.533186912536621 + ], + [ + "▁bouquet", + -12.533241271972656 + ], + [ + "▁sinnvoll", + -12.533270835876465 + ], + [ + "▁Fleisch", + -12.533309936523438 + ], + [ + "ktuell", + -12.533381462097168 + ], + [ + "▁mushrooms", + -12.533408164978027 + ], + [ + "▁Straf", + -12.533470153808594 + ], + [ + "▁cresc", + -12.533491134643555 + ], + [ + "TEM", + -12.533502578735352 + ], + [ + "▁vindec", + -12.53352165222168 + ], + [ + "▁Drama", + -12.533540725708008 + ], + [ + "chief", + -12.533550262451172 + ], + [ + "▁müsst", + -12.533614158630371 + ], + [ + "▁Warner", + -12.533662796020508 + ], + [ + "118", + -12.533761024475098 + ], + [ + "▁saptamana", + -12.533831596374512 + ], + [ + "▁animaux", + -12.53412914276123 + ], + [ + "▁Directory", + -12.534146308898926 + ], + [ + "▁entgegen", + -12.53415584564209 + ], + [ + "▁deduction", + -12.534156799316406 + ], + [ + "▁Strategic", + -12.53426456451416 + ], + [ + "▁rats", + -12.534419059753418 + ], + [ + "▁Moses", + -12.534448623657227 + ], + [ + "eko", + -12.534564971923828 + ], + [ + "strict", + -12.534590721130371 + ], + [ + "▁Ashley", + -12.534603118896484 + ], + [ + "mik", + -12.534622192382812 + ], + [ + "▁relocate", + -12.534668922424316 + ], + [ + "▁whip", + -12.534738540649414 + ], + [ + "central", + -12.534750938415527 + ], + [ + "mack", + -12.534892082214355 + ], + [ + "stufe", + -12.534961700439453 + ], + [ + "▁Metropolitan", + -12.5349702835083 + ], + [ + "▁croissance", + -12.534974098205566 + ], + [ + "▁celebrities", + -12.535021781921387 + ], + [ + "▁Geh", + -12.53507137298584 + ], + [ + "▁verifica", + -12.535196304321289 + ], + [ + "▁satisfac", + -12.535211563110352 + ], + [ + "▁Julian", + -12.535271644592285 + ], + [ + "▁remotely", + -12.535432815551758 + ], + [ + "▁Safari", + -12.535542488098145 + ], + [ + "▁Chic", + -12.53557014465332 + ], + [ + "▁clamp", + -12.535818099975586 + ], + [ + "▁Schnee", + -12.535918235778809 + ], + [ + "grown", + -12.536069869995117 + ], + [ + "▁Character", + -12.536110877990723 + ], + [ + "▁charities", + -12.536137580871582 + ], + [ + "Thankfully", + -12.536625862121582 + ], + [ + "▁țară", + -12.53681468963623 + ], + [ + "IZ", + -12.536816596984863 + ], + [ + "Vielleicht", + -12.536999702453613 + ], + [ + "▁Pon", + -12.537108421325684 + ], + [ + "gegen", + -12.53711986541748 + 
], + [ + "chez", + -12.537185668945312 + ], + [ + "Black", + -12.537544250488281 + ], + [ + "▁alimentare", + -12.537555694580078 + ], + [ + "▁verloren", + -12.537562370300293 + ], + [ + "▁predictions", + -12.537657737731934 + ], + [ + "Founded", + -12.53795337677002 + ], + [ + "▁femeie", + -12.538022994995117 + ], + [ + "wahrscheinlich", + -12.538107872009277 + ], + [ + "▁squeeze", + -12.53819465637207 + ], + [ + "▁verfügbar", + -12.538259506225586 + ], + [ + "▁hygiene", + -12.538393020629883 + ], + [ + "voire", + -12.538667678833008 + ], + [ + "▁birou", + -12.538901329040527 + ], + [ + "▁initiate", + -12.538921356201172 + ], + [ + "▁Patriot", + -12.539009094238281 + ], + [ + "▁Income", + -12.539159774780273 + ], + [ + "▁marry", + -12.539310455322266 + ], + [ + "lokal", + -12.539336204528809 + ], + [ + "logic", + -12.53940486907959 + ], + [ + "▁Abstract", + -12.53966236114502 + ], + [ + "▁grundsätzlich", + -12.539822578430176 + ], + [ + "▁tariff", + -12.539886474609375 + ], + [ + "▁definitiv", + -12.539892196655273 + ], + [ + "paz", + -12.53989315032959 + ], + [ + "Result", + -12.539921760559082 + ], + [ + "1:30", + -12.54005241394043 + ], + [ + "▁Latest", + -12.540075302124023 + ], + [ + "▁Dauer", + -12.540155410766602 + ], + [ + "Med", + -12.540275573730469 + ], + [ + "gewicht", + -12.540348052978516 + ], + [ + "▁Gaza", + -12.540430068969727 + ], + [ + "▁Newton", + -12.540769577026367 + ], + [ + "Dokument", + -12.540897369384766 + ], + [ + "formular", + -12.540945053100586 + ], + [ + "ILE", + -12.540964126586914 + ], + [ + "▁surse", + -12.541040420532227 + ], + [ + "MH", + -12.54116153717041 + ], + [ + "▁Arctic", + -12.541255950927734 + ], + [ + "▁ISBN", + -12.541274070739746 + ], + [ + "▁quarterback", + -12.541315078735352 + ], + [ + "▁absurd", + -12.541555404663086 + ], + [ + "▁Zusammenhang", + -12.541561126708984 + ], + [ + "▁Module", + -12.54156494140625 + ], + [ + "mented", + -12.541667938232422 + ], + [ + "worthy", + -12.541797637939453 + ], + [ + "▁célèbre", + -12.541828155517578 + ], + [ + "▁maritime", + -12.541836738586426 + ], + [ + "▁Reed", + -12.541938781738281 + ], + [ + "▁threaten", + -12.542037010192871 + ], + [ + "▁Satz", + -12.542095184326172 + ], + [ + "▁sticking", + -12.542203903198242 + ], + [ + "▁transcript", + -12.542372703552246 + ], + [ + "▁Morgen", + -12.542425155639648 + ], + [ + "▁Förder", + -12.542435646057129 + ], + [ + "▁Gottes", + -12.542572021484375 + ], + [ + "▁Coordinator", + -12.542648315429688 + ], + [ + "LOG", + -12.54265022277832 + ], + [ + "EAN", + -12.542677879333496 + ], + [ + "▁préparation", + -12.54273509979248 + ], + [ + "▁Brass", + -12.542799949645996 + ], + [ + "Așa", + -12.542853355407715 + ], + [ + "▁Utiliz", + -12.54294490814209 + ], + [ + "framed", + -12.542973518371582 + ], + [ + "▁asphalt", + -12.543050765991211 + ], + [ + "116", + -12.543061256408691 + ], + [ + "▁historically", + -12.54310417175293 + ], + [ + "▁doamn", + -12.543176651000977 + ], + [ + "Air", + -12.543293952941895 + ], + [ + "▁economist", + -12.543838500976562 + ], + [ + "fresh", + -12.54384994506836 + ], + [ + "engine", + -12.543906211853027 + ], + [ + "▁Rücken", + -12.543919563293457 + ], + [ + "▁worthwhile", + -12.544124603271484 + ], + [ + "▁Therapie", + -12.544140815734863 + ], + [ + "▁Joshua", + -12.544151306152344 + ], + [ + "sicherheit", + -12.544175148010254 + ], + [ + "▁scena", + -12.544254302978516 + ], + [ + "ifiant", + -12.54433822631836 + ], + [ + "/20", + -12.54442024230957 + ], + [ + "fehl", + -12.544469833374023 + ], + [ + "karten", + 
-12.544515609741211 + ], + [ + "501", + -12.544656753540039 + ], + [ + "▁vide", + -12.544673919677734 + ], + [ + "▁miliarde", + -12.544699668884277 + ], + [ + "▁trillion", + -12.54470157623291 + ], + [ + "oudre", + -12.544761657714844 + ], + [ + "nderung", + -12.544803619384766 + ], + [ + "▁inquiries", + -12.544992446899414 + ], + [ + "▁echipe", + -12.545034408569336 + ], + [ + "▁investiga", + -12.545040130615234 + ], + [ + "▁detailing", + -12.545042991638184 + ], + [ + "VIS", + -12.545086860656738 + ], + [ + "▁geographical", + -12.545157432556152 + ], + [ + "▁authentication", + -12.54519271850586 + ], + [ + "▁Schwa", + -12.545201301574707 + ], + [ + "▁Scri", + -12.545230865478516 + ], + [ + "▁discourage", + -12.54527473449707 + ], + [ + "Pass", + -12.54529094696045 + ], + [ + "▁scattered", + -12.54529857635498 + ], + [ + "▁langsam", + -12.545300483703613 + ], + [ + "telles", + -12.545380592346191 + ], + [ + "▁ramane", + -12.5454740524292 + ], + [ + "▁inhibitor", + -12.545486450195312 + ], + [ + "▁Habit", + -12.54556941986084 + ], + [ + "▁10:00", + -12.545577049255371 + ], + [ + "▁rezultat", + -12.545595169067383 + ], + [ + "äck", + -12.545943260192871 + ], + [ + ",000.", + -12.545979499816895 + ], + [ + "▁remedies", + -12.546103477478027 + ], + [ + "▁comportament", + -12.546195983886719 + ], + [ + "namen", + -12.546229362487793 + ], + [ + "▁#3", + -12.546327590942383 + ], + [ + "enstein", + -12.546493530273438 + ], + [ + "▁relevance", + -12.546516418457031 + ], + [ + "▁présentation", + -12.54655933380127 + ], + [ + "MHz", + -12.546648979187012 + ], + [ + "EMA", + -12.546661376953125 + ], + [ + "▁palace", + -12.546709060668945 + ], + [ + "▁vizibil", + -12.546723365783691 + ], + [ + "▁griev", + -12.546820640563965 + ], + [ + "▁severely", + -12.54688549041748 + ], + [ + "expert", + -12.546942710876465 + ], + [ + "▁ravi", + -12.54696273803711 + ], + [ + "▁feasible", + -12.547002792358398 + ], + [ + "▁Wholesale", + -12.547009468078613 + ], + [ + "▁graduat", + -12.547077178955078 + ], + [ + "Kü", + -12.547094345092773 + ], + [ + "▁quotation", + -12.547157287597656 + ], + [ + "/11", + -12.54716968536377 + ], + [ + "lutter", + -12.547415733337402 + ], + [ + "▁dice", + -12.547467231750488 + ], + [ + "modal", + -12.547749519348145 + ], + [ + "ggling", + -12.547819137573242 + ], + [ + "▁considér", + -12.547986030578613 + ], + [ + "▁Insel", + -12.548097610473633 + ], + [ + "▁Database", + -12.5483980178833 + ], + [ + "icism", + -12.548508644104004 + ], + [ + "▁quarterly", + -12.54851245880127 + ], + [ + "▁formule", + -12.548558235168457 + ], + [ + "▁renouvel", + -12.54873275756836 + ], + [ + "▁Treasure", + -12.548737525939941 + ], + [ + "▁1962", + -12.548844337463379 + ], + [ + "▁republic", + -12.549111366271973 + ], + [ + "▁États", + -12.549254417419434 + ], + [ + "▁salut", + -12.549356460571289 + ], + [ + "HK", + -12.54941463470459 + ], + [ + "▁Bali", + -12.549427032470703 + ], + [ + "▁Rechnung", + -12.549447059631348 + ], + [ + "fruit", + -12.54945182800293 + ], + [ + "lays", + -12.549467086791992 + ], + [ + "LAS", + -12.54951000213623 + ], + [ + "inclin", + -12.549708366394043 + ], + [ + "▁Cré", + -12.549813270568848 + ], + [ + "▁compt", + -12.54985237121582 + ], + [ + "țiilor", + -12.550056457519531 + ], + [ + "heft", + -12.550111770629883 + ], + [ + "▁Comisi", + -12.55024242401123 + ], + [ + "▁Nurse", + -12.550516128540039 + ], + [ + "loid", + -12.550540924072266 + ], + [ + "grove", + -12.550761222839355 + ], + [ + "▁Copy", + -12.550867080688477 + ], + [ + "▁Kampf", + -12.550873756408691 + ], + 
[ + "izată", + -12.550945281982422 + ], + [ + "würdig", + -12.551244735717773 + ], + [ + "-2018", + -12.551305770874023 + ], + [ + "ozo", + -12.551350593566895 + ], + [ + "▁integriert", + -12.551397323608398 + ], + [ + "▁réunion", + -12.551448822021484 + ], + [ + "▁mică", + -12.551520347595215 + ], + [ + "▁Chau", + -12.551595687866211 + ], + [ + "▁allegations", + -12.551626205444336 + ], + [ + "▁shaping", + -12.551640510559082 + ], + [ + "▁transcription", + -12.551671981811523 + ], + [ + "▁Monica", + -12.551711082458496 + ], + [ + "▁torture", + -12.551795959472656 + ], + [ + "▁cooperative", + -12.551962852478027 + ], + [ + "▁invité", + -12.551987648010254 + ], + [ + "▁bamboo", + -12.552204132080078 + ], + [ + "▁Thinking", + -12.55232048034668 + ], + [ + "▁gratis", + -12.552392959594727 + ], + [ + "117", + -12.55267333984375 + ], + [ + "renz", + -12.55279541015625 + ], + [ + "▁Fußball", + -12.552823066711426 + ], + [ + "▁Gram", + -12.552873611450195 + ], + [ + "sprung", + -12.55290412902832 + ], + [ + "▁Schluss", + -12.55308723449707 + ], + [ + "▁Diploma", + -12.553345680236816 + ], + [ + "▁apparatus", + -12.553363800048828 + ], + [ + "notably", + -12.553483963012695 + ], + [ + "▁exercit", + -12.553532600402832 + ], + [ + "ământ", + -12.553536415100098 + ], + [ + "▁masses", + -12.553610801696777 + ], + [ + "▁preuve", + -12.553642272949219 + ], + [ + "great", + -12.553754806518555 + ], + [ + "▁Drink", + -12.553792953491211 + ], + [ + "islam", + -12.553828239440918 + ], + [ + "ARM", + -12.553914070129395 + ], + [ + "indre", + -12.554404258728027 + ], + [ + "DW", + -12.554410934448242 + ], + [ + "▁Flowers", + -12.554500579833984 + ], + [ + "▁pill", + -12.554574966430664 + ], + [ + "▁objectifs", + -12.554594039916992 + ], + [ + "▁Bezug", + -12.554659843444824 + ], + [ + "▁assumptions", + -12.55466365814209 + ], + [ + "▁vesti", + -12.554742813110352 + ], + [ + "route", + -12.554783821105957 + ], + [ + "▁Bangkok", + -12.554815292358398 + ], + [ + "▁seamlessly", + -12.55482006072998 + ], + [ + "config", + -12.554882049560547 + ], + [ + "▁username", + -12.554890632629395 + ], + [ + "unsure", + -12.555024147033691 + ], + [ + "▁poser", + -12.555129051208496 + ], + [ + "▁impozit", + -12.555246353149414 + ], + [ + "▁metode", + -12.555333137512207 + ], + [ + "defending", + -12.555347442626953 + ], + [ + "▁Nic", + -12.555431365966797 + ], + [ + "▁Vertrag", + -12.555508613586426 + ], + [ + "▁plăcut", + -12.55552864074707 + ], + [ + "▁Pou", + -12.555675506591797 + ], + [ + "UCH", + -12.555785179138184 + ], + [ + "▁Fein", + -12.555903434753418 + ], + [ + "reading", + -12.555994987487793 + ], + [ + "snip", + -12.55604076385498 + ], + [ + "▁Livre", + -12.556401252746582 + ], + [ + "lander", + -12.556509971618652 + ], + [ + "▁hydraulic", + -12.556559562683105 + ], + [ + "veiled", + -12.556563377380371 + ], + [ + "intr", + -12.556609153747559 + ], + [ + "▁Domnului", + -12.556641578674316 + ], + [ + "▁$0.", + -12.556713104248047 + ], + [ + "▁kilometers", + -12.556753158569336 + ], + [ + "spann", + -12.556870460510254 + ], + [ + "▁credibility", + -12.556892395019531 + ], + [ + "▁eBook", + -12.556953430175781 + ], + [ + "VERY", + -12.556994438171387 + ], + [ + "▁Charm", + -12.557122230529785 + ], + [ + "Evangeli", + -12.557193756103516 + ], + [ + "▁anderer", + -12.557193756103516 + ], + [ + "▁Entry", + -12.557195663452148 + ], + [ + "ffy", + -12.5573148727417 + ], + [ + "▁Exc", + -12.55737018585205 + ], + [ + "▁Omega", + -12.557446479797363 + ], + [ + "▁Funktionen", + -12.557455062866211 + ], + [ + "▁Gay", + 
-12.55752182006836 + ], + [ + "▁acht", + -12.557608604431152 + ], + [ + "colored", + -12.557615280151367 + ], + [ + "itude", + -12.557634353637695 + ], + [ + "▁accompagné", + -12.557645797729492 + ], + [ + "▁unfortunate", + -12.557981491088867 + ], + [ + "▁DIN", + -12.558091163635254 + ], + [ + "▁installment", + -12.558252334594727 + ], + [ + "▁indépendant", + -12.558307647705078 + ], + [ + "These", + -12.558364868164062 + ], + [ + "mitten", + -12.558394432067871 + ], + [ + "thank", + -12.558470726013184 + ], + [ + "▁Trek", + -12.558721542358398 + ], + [ + "üchte", + -12.55874252319336 + ], + [ + "▁cuir", + -12.55875015258789 + ], + [ + "▁turbo", + -12.558802604675293 + ], + [ + "Table", + -12.558847427368164 + ], + [ + "▁Extrem", + -12.558866500854492 + ], + [ + "▁advertisements", + -12.55915355682373 + ], + [ + "▁chaîne", + -12.559206008911133 + ], + [ + "▁corridor", + -12.559473991394043 + ], + [ + "▁râ", + -12.559651374816895 + ], + [ + "▁Opening", + -12.559718132019043 + ], + [ + "Get", + -12.559747695922852 + ], + [ + "▁storytelling", + -12.55976676940918 + ], + [ + "▁severity", + -12.559771537780762 + ], + [ + "4\"", + -12.559956550598145 + ], + [ + "▁parasit", + -12.559967994689941 + ], + [ + "angebot", + -12.56002426147461 + ], + [ + "Data", + -12.56005573272705 + ], + [ + "listen", + -12.560086250305176 + ], + [ + "▁vârstă", + -12.560094833374023 + ], + [ + "▁swallow", + -12.56025505065918 + ], + [ + "TRE", + -12.560321807861328 + ], + [ + "▁daunting", + -12.56035041809082 + ], + [ + "▁Oli", + -12.560481071472168 + ], + [ + "▁definitive", + -12.56066608428955 + ], + [ + "▁rezerva", + -12.560667037963867 + ], + [ + "/15", + -12.560807228088379 + ], + [ + "▁Landschaft", + -12.560887336730957 + ], + [ + "▁Automotive", + -12.560934066772461 + ], + [ + "▁convers", + -12.56113052368164 + ], + [ + "▁thru", + -12.561139106750488 + ], + [ + "▁Township", + -12.561140060424805 + ], + [ + "▁tilt", + -12.56119441986084 + ], + [ + "▁Criminal", + -12.561227798461914 + ], + [ + "riez", + -12.561407089233398 + ], + [ + "▁Parking", + -12.561440467834473 + ], + [ + "▁humanitarian", + -12.561518669128418 + ], + [ + "▁Kilometer", + -12.561529159545898 + ], + [ + "controlled", + -12.56189250946045 + ], + [ + "▁Klick", + -12.561910629272461 + ], + [ + "support", + -12.56199836730957 + ], + [ + "handed", + -12.562005996704102 + ], + [ + "ämtliche", + -12.562104225158691 + ], + [ + "access", + -12.562232971191406 + ], + [ + "▁eleven", + -12.562232971191406 + ], + [ + "▁ferry", + -12.56229305267334 + ], + [ + "zieren", + -12.562620162963867 + ], + [ + "▁Gebrauch", + -12.562688827514648 + ], + [ + "▁vigoare", + -12.562689781188965 + ], + [ + "MON", + -12.562756538391113 + ], + [ + "fox", + -12.562886238098145 + ], + [ + "bestimmten", + -12.562894821166992 + ], + [ + "▁Gur", + -12.563069343566895 + ], + [ + "▁Mannschaft", + -12.563146591186523 + ], + [ + "▁patrol", + -12.563173294067383 + ], + [ + "▁casă", + -12.563376426696777 + ], + [ + "▁Stories", + -12.563380241394043 + ], + [ + "▁robotic", + -12.563425064086914 + ], + [ + "tiri", + -12.563576698303223 + ], + [ + "gewiesen", + -12.5636568069458 + ], + [ + "CV", + -12.563722610473633 + ], + [ + "▁parinti", + -12.563899040222168 + ], + [ + "▁Owen", + -12.563931465148926 + ], + [ + "▁Katie", + -12.564116477966309 + ], + [ + "▁Combine", + -12.56422233581543 + ], + [ + "enfalls", + -12.56442928314209 + ], + [ + "▁financière", + -12.564447402954102 + ], + [ + "▁parliament", + -12.564549446105957 + ], + [ + "▁Weekend", + -12.564616203308105 + ], + [ + 
"▁Sonic", + -12.564757347106934 + ], + [ + "▁fixture", + -12.56479263305664 + ], + [ + "majorité", + -12.56497573852539 + ], + [ + "▁gravel", + -12.565028190612793 + ], + [ + "realizate", + -12.565109252929688 + ], + [ + "examining", + -12.565113067626953 + ], + [ + "▁grim", + -12.5653657913208 + ], + [ + "▁stabili", + -12.565458297729492 + ], + [ + "▁Wochenende", + -12.56551456451416 + ], + [ + "▁Hebrew", + -12.565597534179688 + ], + [ + "▁Harrison", + -12.565799713134766 + ], + [ + "▁boundary", + -12.565858840942383 + ], + [ + "40,000", + -12.565902709960938 + ], + [ + "▁Ambassador", + -12.566208839416504 + ], + [ + "▁scoate", + -12.566229820251465 + ], + [ + "ffin", + -12.56623363494873 + ], + [ + "▁crème", + -12.566269874572754 + ], + [ + "▁obiecte", + -12.566378593444824 + ], + [ + "enţa", + -12.566763877868652 + ], + [ + "▁subsidiary", + -12.566797256469727 + ], + [ + "▁Franco", + -12.56688404083252 + ], + [ + "▁visuel", + -12.567042350769043 + ], + [ + "▁uitat", + -12.56708812713623 + ], + [ + "▁revisit", + -12.567122459411621 + ], + [ + "▁Camping", + -12.567150115966797 + ], + [ + "▁Divine", + -12.567304611206055 + ], + [ + "4-6", + -12.567323684692383 + ], + [ + "▁Brandon", + -12.567378997802734 + ], + [ + "ма", + -12.567450523376465 + ], + [ + "sofern", + -12.56745433807373 + ], + [ + "ntweder", + -12.56748104095459 + ], + [ + "▁Shoot", + -12.567618370056152 + ], + [ + "étais", + -12.56771183013916 + ], + [ + "SPEC", + -12.567930221557617 + ], + [ + "▁dreapta", + -12.567973136901855 + ], + [ + "▁repaired", + -12.568055152893066 + ], + [ + "pyr", + -12.568136215209961 + ], + [ + "▁warranties", + -12.568175315856934 + ], + [ + "▁représent", + -12.568263053894043 + ], + [ + "ADE", + -12.568293571472168 + ], + [ + "▁selective", + -12.56836223602295 + ], + [ + "▁Banking", + -12.568441390991211 + ], + [ + "▁ergonomic", + -12.568562507629395 + ], + [ + "...”", + -12.568602561950684 + ], + [ + "▁willingness", + -12.56867790222168 + ], + [ + "isser", + -12.568784713745117 + ], + [ + "▁confection", + -12.568961143493652 + ], + [ + "admi", + -12.569009780883789 + ], + [ + "▁Freizeit", + -12.569023132324219 + ], + [ + "▁illuminate", + -12.569151878356934 + ], + [ + "▁Repeat", + -12.569170951843262 + ], + [ + "▁Zeitpunkt", + -12.56933879852295 + ], + [ + "claimed", + -12.569439888000488 + ], + [ + "▁erhältlich", + -12.569480895996094 + ], + [ + "▁paysage", + -12.569537162780762 + ], + [ + "▁Atom", + -12.569890022277832 + ], + [ + "▁Graf", + -12.570086479187012 + ], + [ + "▁firmware", + -12.570093154907227 + ], + [ + "▁Swift", + -12.570180892944336 + ], + [ + "▁cercetare", + -12.57018756866455 + ], + [ + "▁internațional", + -12.570330619812012 + ], + [ + "▁zombie", + -12.570330619812012 + ], + [ + "▁Spread", + -12.57050609588623 + ], + [ + "ECO", + -12.57056999206543 + ], + [ + "▁Gestaltung", + -12.570758819580078 + ], + [ + "rast", + -12.570858001708984 + ], + [ + "▁perfume", + -12.5709228515625 + ], + [ + "▁roulette", + -12.570924758911133 + ], + [ + "▁distill", + -12.57096004486084 + ], + [ + "▁Produkten", + -12.570992469787598 + ], + [ + "225", + -12.571310043334961 + ], + [ + "facing", + -12.571371078491211 + ], + [ + "▁paradigm", + -12.571514129638672 + ], + [ + "▁Rah", + -12.571532249450684 + ], + [ + "▁Renault", + -12.571846961975098 + ], + [ + "willig", + -12.571864128112793 + ], + [ + "▁Vet", + -12.571890830993652 + ], + [ + "▁reprezenta", + -12.572126388549805 + ], + [ + "stoß", + -12.572185516357422 + ], + [ + "▁Weiß", + -12.5722074508667 + ], + [ + "▁Solo", + -12.572210311889648 + 
], + [ + "▁Jin", + -12.572646141052246 + ], + [ + "▁Brussels", + -12.572693824768066 + ], + [ + "▁Tournament", + -12.572693824768066 + ], + [ + "▁proced", + -12.572710037231445 + ], + [ + "▁Rabbi", + -12.572835922241211 + ], + [ + "▁gameplay", + -12.572851181030273 + ], + [ + "▁ATM", + -12.572901725769043 + ], + [ + "▁firearm", + -12.572906494140625 + ], + [ + "revealing", + -12.573003768920898 + ], + [ + "schütz", + -12.57310676574707 + ], + [ + "▁Absolutely", + -12.573288917541504 + ], + [ + "▁interference", + -12.573433876037598 + ], + [ + "▁Employment", + -12.573558807373047 + ], + [ + "▁chord", + -12.57356071472168 + ], + [ + "▁oportun", + -12.573585510253906 + ], + [ + "▁frontier", + -12.573770523071289 + ], + [ + "▁Lunch", + -12.573891639709473 + ], + [ + "bread", + -12.57397174835205 + ], + [ + "▁rendered", + -12.573976516723633 + ], + [ + "5.1", + -12.573984146118164 + ], + [ + "▁motif", + -12.574066162109375 + ], + [ + "▁Schlag", + -12.574227333068848 + ], + [ + "113", + -12.574264526367188 + ], + [ + "▁Deux", + -12.574288368225098 + ], + [ + "▁surplus", + -12.574309349060059 + ], + [ + "ALS", + -12.574417114257812 + ], + [ + "▁abortion", + -12.574472427368164 + ], + [ + "▁airplane", + -12.574475288391113 + ], + [ + "▁migrants", + -12.574501991271973 + ], + [ + "kli", + -12.574539184570312 + ], + [ + "▁crochet", + -12.57454776763916 + ], + [ + "fahrer", + -12.574671745300293 + ], + [ + "▁reconstruction", + -12.57471752166748 + ], + [ + "▁difer", + -12.574752807617188 + ], + [ + "▁Conserv", + -12.57478141784668 + ], + [ + "▁NSW", + -12.57479476928711 + ], + [ + "▁regim", + -12.574844360351562 + ], + [ + "▁Except", + -12.574904441833496 + ], + [ + "▁trage", + -12.574978828430176 + ], + [ + "▁Consiliul", + -12.575058937072754 + ], + [ + "▁Bedarf", + -12.575064659118652 + ], + [ + "▁additive", + -12.5750732421875 + ], + [ + "know", + -12.5751371383667 + ], + [ + "▁sauna", + -12.57517147064209 + ], + [ + "▁mortality", + -12.575201034545898 + ], + [ + "kräftig", + -12.575358390808105 + ], + [ + "▁Own", + -12.575445175170898 + ], + [ + "nzo", + -12.575519561767578 + ], + [ + "▁villes", + -12.575543403625488 + ], + [ + "▁recette", + -12.575749397277832 + ], + [ + "▁attacking", + -12.575799942016602 + ], + [ + "beruf", + -12.57608699798584 + ], + [ + "▁integrat", + -12.57612419128418 + ], + [ + "realizarea", + -12.576201438903809 + ], + [ + "▁exemption", + -12.57628345489502 + ], + [ + "GW", + -12.576285362243652 + ], + [ + "▁Nano", + -12.576395034790039 + ], + [ + "SCH", + -12.576440811157227 + ], + [ + "▁honesty", + -12.576457023620605 + ], + [ + "▁Arriv", + -12.576515197753906 + ], + [ + "▁gland", + -12.576542854309082 + ], + [ + "▁proactive", + -12.576746940612793 + ], + [ + "▁agile", + -12.576837539672852 + ], + [ + "▁kernel", + -12.576844215393066 + ], + [ + "▁nurture", + -12.576860427856445 + ], + [ + "▁Patent", + -12.576963424682617 + ], + [ + "▁excursi", + -12.577189445495605 + ], + [ + "pulsion", + -12.577326774597168 + ], + [ + "stellte", + -12.577351570129395 + ], + [ + "ständige", + -12.577421188354492 + ], + [ + "▁Rebecca", + -12.577436447143555 + ], + [ + "▁Securities", + -12.577436447143555 + ], + [ + "mètre", + -12.577446937561035 + ], + [ + "LOW", + -12.577469825744629 + ], + [ + "▁consilier", + -12.577537536621094 + ], + [ + "▁Architekt", + -12.577733993530273 + ], + [ + "▁china", + -12.57777214050293 + ], + [ + "älfte", + -12.577778816223145 + ], + [ + "▁Combin", + -12.577795028686523 + ], + [ + "480", + -12.577999114990234 + ], + [ + "liv", + -12.578021049499512 + ], 
+ [ + "▁peur", + -12.578067779541016 + ], + [ + "keep", + -12.57822322845459 + ], + [ + "▁Verhalten", + -12.578324317932129 + ], + [ + "▁peek", + -12.578446388244629 + ], + [ + "▁dient", + -12.578550338745117 + ], + [ + "▁prevazut", + -12.578625679016113 + ], + [ + "Emmanuel", + -12.57862663269043 + ], + [ + "▁incidence", + -12.57862663269043 + ], + [ + "▁Framework", + -12.578715324401855 + ], + [ + "dass", + -12.578816413879395 + ], + [ + "artiste", + -12.578874588012695 + ], + [ + "▁Accept", + -12.578971862792969 + ], + [ + "▁plunge", + -12.579073905944824 + ], + [ + "chauff", + -12.579118728637695 + ], + [ + "▁guilt", + -12.579156875610352 + ], + [ + "▁senator", + -12.57945442199707 + ], + [ + "▁disable", + -12.579776763916016 + ], + [ + "▁partout", + -12.579901695251465 + ], + [ + "JC", + -12.580045700073242 + ], + [ + "▁Highly", + -12.580150604248047 + ], + [ + "▁beneficii", + -12.58021068572998 + ], + [ + "fibro", + -12.580347061157227 + ], + [ + "interpreted", + -12.580550193786621 + ], + [ + "▁genauso", + -12.58056354522705 + ], + [ + "▁basil", + -12.580601692199707 + ], + [ + "▁Angst", + -12.580697059631348 + ], + [ + "rzte", + -12.580933570861816 + ], + [ + "Master", + -12.58112907409668 + ], + [ + "▁french", + -12.581324577331543 + ], + [ + "▁Duration", + -12.581343650817871 + ], + [ + "HM", + -12.581402778625488 + ], + [ + "▁Bert", + -12.581518173217773 + ], + [ + "▁1963", + -12.581534385681152 + ], + [ + "▁warrior", + -12.581604957580566 + ], + [ + "2007", + -12.581696510314941 + ], + [ + "▁recycle", + -12.581722259521484 + ], + [ + "▁fertiliz", + -12.581808090209961 + ], + [ + "▁hatch", + -12.581809997558594 + ], + [ + "ISH", + -12.581811904907227 + ], + [ + "luft", + -12.582321166992188 + ], + [ + "▁crying", + -12.582452774047852 + ], + [ + "▁activist", + -12.5824613571167 + ], + [ + "schränkt", + -12.582500457763672 + ], + [ + "▁diff", + -12.582500457763672 + ], + [ + "▁Demand", + -12.58262825012207 + ], + [ + "▁transported", + -12.582669258117676 + ], + [ + "▁Remodel", + -12.582686424255371 + ], + [ + "▁Etats", + -12.582704544067383 + ], + [ + "ANI", + -12.582777976989746 + ], + [ + "▁spéciale", + -12.582804679870605 + ], + [ + "▁Konzert", + -12.582805633544922 + ], + [ + "▁Bedürfnisse", + -12.58281135559082 + ], + [ + "▁overlooked", + -12.582864761352539 + ], + [ + "▁cutter", + -12.582974433898926 + ], + [ + "klär", + -12.58311939239502 + ], + [ + "▁Materialien", + -12.583135604858398 + ], + [ + "▁gewisse", + -12.583388328552246 + ], + [ + "bull", + -12.583499908447266 + ], + [ + "Good", + -12.583513259887695 + ], + [ + "Gig", + -12.583616256713867 + ], + [ + "Logic", + -12.583736419677734 + ], + [ + "▁Schlaf", + -12.583970069885254 + ], + [ + "▁Yankee", + -12.583996772766113 + ], + [ + "▁Batman", + -12.584020614624023 + ], + [ + "▁funcție", + -12.584166526794434 + ], + [ + "▁partenariat", + -12.584294319152832 + ], + [ + "▁Antrag", + -12.584348678588867 + ], + [ + "▁Pill", + -12.584519386291504 + ], + [ + "▁tram", + -12.584637641906738 + ], + [ + "▁Minor", + -12.58465576171875 + ], + [ + "pertaining", + -12.584678649902344 + ], + [ + "▁apropiere", + -12.584843635559082 + ], + [ + "▁Barack", + -12.584965705871582 + ], + [ + "schön", + -12.585174560546875 + ], + [ + "▁Sandy", + -12.585182189941406 + ], + [ + "kilometre", + -12.585192680358887 + ], + [ + "▁diy", + -12.585234642028809 + ], + [ + "▁1966", + -12.585453987121582 + ], + [ + "gelassen", + -12.585485458374023 + ], + [ + "▁Trial", + -12.585592269897461 + ], + [ + "▁Bauer", + -12.585603713989258 + ], + [ + 
"▁assumption", + -12.585648536682129 + ], + [ + "birth", + -12.585668563842773 + ], + [ + "rechnen", + -12.585861206054688 + ], + [ + "▁meci", + -12.585867881774902 + ], + [ + "▁gloss", + -12.585906982421875 + ], + [ + "▁sewer", + -12.58593463897705 + ], + [ + "▁Stimme", + -12.585955619812012 + ], + [ + "▁Fortune", + -12.585967063903809 + ], + [ + "▁Lösungen", + -12.586007118225098 + ], + [ + "▁impresi", + -12.586074829101562 + ], + [ + "schlaf", + -12.586089134216309 + ], + [ + "prüfung", + -12.586097717285156 + ], + [ + "▁instalat", + -12.586198806762695 + ], + [ + "▁picturesque", + -12.586233139038086 + ], + [ + "vait", + -12.586240768432617 + ], + [ + "8.1", + -12.58629035949707 + ], + [ + "▁călători", + -12.586392402648926 + ], + [ + "▁dix", + -12.586400032043457 + ], + [ + "▁furnished", + -12.586411476135254 + ], + [ + "▁dolari", + -12.586445808410645 + ], + [ + "▁regener", + -12.586562156677246 + ], + [ + "▁astazi", + -12.586621284484863 + ], + [ + "▁Sprach", + -12.586750030517578 + ], + [ + "delà", + -12.586846351623535 + ], + [ + "avec", + -12.58694076538086 + ], + [ + "▁Buddhist", + -12.586990356445312 + ], + [ + "▁alphabet", + -12.586990356445312 + ], + [ + "▁berichtet", + -12.587201118469238 + ], + [ + "ideally", + -12.587209701538086 + ], + [ + "▁annuel", + -12.587421417236328 + ], + [ + "▁laughing", + -12.587532997131348 + ], + [ + "▁Zustand", + -12.587639808654785 + ], + [ + "cini", + -12.587692260742188 + ], + [ + "solid", + -12.587724685668945 + ], + [ + "▁Broker", + -12.587868690490723 + ], + [ + "▁developmental", + -12.5879545211792 + ], + [ + "▁Summary", + -12.588191032409668 + ], + [ + "▁Trinity", + -12.58819580078125 + ], + [ + "▁sucre", + -12.58821964263916 + ], + [ + "▁sandal", + -12.588231086730957 + ], + [ + "PEN", + -12.588274955749512 + ], + [ + "gewinn", + -12.588486671447754 + ], + [ + "olé", + -12.588555335998535 + ], + [ + "matric", + -12.58865737915039 + ], + [ + "xton", + -12.588695526123047 + ], + [ + "werten", + -12.588740348815918 + ], + [ + "▁Dust", + -12.588765144348145 + ], + [ + "▁Journey", + -12.588791847229004 + ], + [ + "▁Rush", + -12.588793754577637 + ], + [ + "▁NCAA", + -12.588839530944824 + ], + [ + "▁allgemeine", + -12.588926315307617 + ], + [ + "▁Universe", + -12.589007377624512 + ], + [ + "▁connais", + -12.589099884033203 + ], + [ + "▁quantité", + -12.58912467956543 + ], + [ + "▁Kab", + -12.589150428771973 + ], + [ + "▁purse", + -12.589150428771973 + ], + [ + "Health", + -12.589210510253906 + ], + [ + "▁apărut", + -12.589288711547852 + ], + [ + "▁bypass", + -12.589313507080078 + ], + [ + "pronounced", + -12.58936595916748 + ], + [ + "▁magnitude", + -12.589393615722656 + ], + [ + "▁Walmart", + -12.589394569396973 + ], + [ + "ède", + -12.589409828186035 + ], + [ + "▁serum", + -12.589590072631836 + ], + [ + "▁baseline", + -12.589765548706055 + ], + [ + "STER", + -12.589932441711426 + ], + [ + "▁ONLY", + -12.590052604675293 + ], + [ + "▁individuell", + -12.590086936950684 + ], + [ + "▁Ghi", + -12.590139389038086 + ], + [ + "▁Ruby", + -12.59020709991455 + ], + [ + "▁Chal", + -12.590241432189941 + ], + [ + "▁Vier", + -12.590261459350586 + ], + [ + "5.0", + -12.5903902053833 + ], + [ + "▁fog", + -12.590519905090332 + ], + [ + "esel", + -12.590557098388672 + ], + [ + "▁Python", + -12.590598106384277 + ], + [ + "▁urmează", + -12.590608596801758 + ], + [ + "▁trustworthy", + -12.590639114379883 + ], + [ + "hört", + -12.590729713439941 + ], + [ + "▁tâche", + -12.59078311920166 + ], + [ + "Patri", + -12.590799331665039 + ], + [ + "▁grind", + 
-12.590928077697754 + ], + [ + "▁Raven", + -12.590934753417969 + ], + [ + "▁poursuiv", + -12.590951919555664 + ], + [ + "▁simpli", + -12.591140747070312 + ], + [ + "▁echo", + -12.591165542602539 + ], + [ + "▁Attention", + -12.591313362121582 + ], + [ + "Against", + -12.591402053833008 + ], + [ + "GET", + -12.59148120880127 + ], + [ + "▁turistic", + -12.591535568237305 + ], + [ + "▁tenure", + -12.59158992767334 + ], + [ + "▁alimentaire", + -12.591651916503906 + ], + [ + "Who", + -12.59172248840332 + ], + [ + "▁ändern", + -12.591729164123535 + ], + [ + "▁rebound", + -12.591778755187988 + ], + [ + "grenze", + -12.591849327087402 + ], + [ + "▁Fame", + -12.592093467712402 + ], + [ + "▁Kick", + -12.592215538024902 + ], + [ + "▁Detail", + -12.59228801727295 + ], + [ + "▁Push", + -12.592308044433594 + ], + [ + "production", + -12.592430114746094 + ], + [ + "▁Candidates", + -12.59244441986084 + ], + [ + "▁reușit", + -12.592484474182129 + ], + [ + "istischen", + -12.592525482177734 + ], + [ + "lassung", + -12.592649459838867 + ], + [ + "▁Hann", + -12.592713356018066 + ], + [ + "espère", + -12.592965126037598 + ], + [ + "▁vergessen", + -12.593008041381836 + ], + [ + "▁smiling", + -12.593010902404785 + ], + [ + "▁devotion", + -12.593016624450684 + ], + [ + "▁pastry", + -12.593071937561035 + ], + [ + "Add", + -12.593390464782715 + ], + [ + "▁authorization", + -12.593494415283203 + ], + [ + "▁Suisse", + -12.593568801879883 + ], + [ + "▁Berkeley", + -12.593611717224121 + ], + [ + "▁Guild", + -12.593660354614258 + ], + [ + "▁choir", + -12.593748092651367 + ], + [ + "learning", + -12.593802452087402 + ], + [ + "▁Tanz", + -12.593894004821777 + ], + [ + "mardi", + -12.594076156616211 + ], + [ + "▁rezultatele", + -12.594191551208496 + ], + [ + "▁earrings", + -12.594218254089355 + ], + [ + "▁turbine", + -12.594223976135254 + ], + [ + "▁jeudi", + -12.594284057617188 + ], + [ + "terapie", + -12.594576835632324 + ], + [ + "regain", + -12.59461498260498 + ], + [ + "SET", + -12.594643592834473 + ], + [ + "▁Hände", + -12.594681739807129 + ], + [ + "▁Globe", + -12.594683647155762 + ], + [ + "frag", + -12.594775199890137 + ], + [ + "▁Treasury", + -12.594820976257324 + ], + [ + "▁hazardous", + -12.594820976257324 + ], + [ + "▁Fahrt", + -12.594928741455078 + ], + [ + "▁fulfilled", + -12.594966888427734 + ], + [ + "▁manga", + -12.594987869262695 + ], + [ + "▁composé", + -12.595067977905273 + ], + [ + "▁ABS", + -12.595132827758789 + ], + [ + "▁preced", + -12.595197677612305 + ], + [ + "▁beauté", + -12.595233917236328 + ], + [ + "▁interessant", + -12.59526252746582 + ], + [ + "▁lieber", + -12.595324516296387 + ], + [ + "▁Kö", + -12.595378875732422 + ], + [ + "EMS", + -12.595410346984863 + ], + [ + "FER", + -12.595413208007812 + ], + [ + "▁eure", + -12.595427513122559 + ], + [ + "▁plumber", + -12.595427513122559 + ], + [ + "Love", + -12.595463752746582 + ], + [ + "▁Marcus", + -12.595635414123535 + ], + [ + "▁registry", + -12.595637321472168 + ], + [ + "▁uncle", + -12.595696449279785 + ], + [ + "▁neuf", + -12.595728874206543 + ], + [ + "▁Fläche", + -12.59575080871582 + ], + [ + "▁restaur", + -12.595815658569336 + ], + [ + "▁noticeable", + -12.595833778381348 + ], + [ + "▁riches", + -12.595871925354004 + ], + [ + "occupy", + -12.596031188964844 + ], + [ + "▁hurricane", + -12.596031188964844 + ], + [ + "▁gespeichert", + -12.596033096313477 + ], + [ + "▁Bordeaux", + -12.596039772033691 + ], + [ + "▁Maj", + -12.59637451171875 + ], + [ + "Applied", + -12.596439361572266 + ], + [ + "▁compter", + -12.596575736999512 + ], + [ + 
"impact", + -12.59663200378418 + ], + [ + "▁Improve", + -12.596758842468262 + ], + [ + "▁Calif", + -12.596832275390625 + ], + [ + "▁desfășur", + -12.596939086914062 + ], + [ + "▁packaged", + -12.597001075744629 + ], + [ + "180", + -12.59703540802002 + ], + [ + "devenu", + -12.597042083740234 + ], + [ + "▁Battery", + -12.597243309020996 + ], + [ + "▁objection", + -12.597254753112793 + ], + [ + "▁anual", + -12.597305297851562 + ], + [ + "▁Landscape", + -12.59731674194336 + ], + [ + "IQ", + -12.597403526306152 + ], + [ + "grès", + -12.597586631774902 + ], + [ + "▁witnesses", + -12.597750663757324 + ], + [ + "enţial", + -12.597764015197754 + ], + [ + "▁plateau", + -12.597779273986816 + ], + [ + "▁bilete", + -12.59783935546875 + ], + [ + "▁Bronze", + -12.59786605834961 + ], + [ + "▁Kiss", + -12.597946166992188 + ], + [ + "▁Serge", + -12.598093032836914 + ], + [ + "atomic", + -12.598145484924316 + ], + [ + "▁renovated", + -12.59817886352539 + ], + [ + "player", + -12.598212242126465 + ], + [ + "▁dirig", + -12.598291397094727 + ], + [ + "▁Îm", + -12.598296165466309 + ], + [ + "▁plimb", + -12.59843635559082 + ], + [ + "▁ambassador", + -12.598455429077148 + ], + [ + "▁apropiat", + -12.598455429077148 + ], + [ + "▁adaug", + -12.598602294921875 + ], + [ + "ogenic", + -12.59872055053711 + ], + [ + "kämpfe", + -12.598779678344727 + ], + [ + "▁Hillary", + -12.598907470703125 + ], + [ + "yak", + -12.598942756652832 + ], + [ + "General", + -12.59925365447998 + ], + [ + "▁Zugang", + -12.599400520324707 + ], + [ + "▁fertil", + -12.599457740783691 + ], + [ + "incat", + -12.599536895751953 + ], + [ + "assessing", + -12.599587440490723 + ], + [ + "▁Cincinnati", + -12.59967041015625 + ], + [ + "▁convincing", + -12.599685668945312 + ], + [ + "sadly", + -12.59974479675293 + ], + [ + "kunde", + -12.599801063537598 + ], + [ + "ambul", + -12.599913597106934 + ], + [ + "▁familii", + -12.599974632263184 + ], + [ + "juri", + -12.60007095336914 + ], + [ + "ionen", + -12.600102424621582 + ], + [ + "▁Wirtschaft", + -12.600130081176758 + ], + [ + "contract", + -12.600135803222656 + ], + [ + "punem", + -12.600151062011719 + ], + [ + "handlung", + -12.600394248962402 + ], + [ + "▁fournir", + -12.600455284118652 + ], + [ + "▁Ambi", + -12.600663185119629 + ], + [ + "▁Isaac", + -12.600663185119629 + ], + [ + "▁praying", + -12.6007719039917 + ], + [ + "▁Italien", + -12.600848197937012 + ], + [ + "233", + -12.600850105285645 + ], + [ + "spawn", + -12.600913047790527 + ], + [ + "▁legii", + -12.60092544555664 + ], + [ + "▁zuvor", + -12.601018905639648 + ], + [ + "▁comune", + -12.601030349731445 + ], + [ + "official", + -12.601165771484375 + ], + [ + "144", + -12.601290702819824 + ], + [ + "izeaza", + -12.601329803466797 + ], + [ + "▁Keller", + -12.601372718811035 + ], + [ + "ORE", + -12.601378440856934 + ], + [ + "122", + -12.601485252380371 + ], + [ + "incurred", + -12.60150146484375 + ], + [ + "CHA", + -12.601579666137695 + ], + [ + "▁Herzen", + -12.601590156555176 + ], + [ + "▁reasoning", + -12.6016263961792 + ], + [ + "affaire", + -12.601849555969238 + ], + [ + "ooth", + -12.601890563964844 + ], + [ + "155", + -12.601998329162598 + ], + [ + "▁invented", + -12.602113723754883 + ], + [ + "▁Comun", + -12.602140426635742 + ], + [ + "zähl", + -12.602179527282715 + ], + [ + "geliefert", + -12.602212905883789 + ], + [ + "explorer", + -12.602213859558105 + ], + [ + "nect", + -12.602326393127441 + ], + [ + "▁mercredi", + -12.602408409118652 + ], + [ + "▁volonté", + -12.602408409118652 + ], + [ + "easy", + -12.602453231811523 + ], + [ + 
"▁feat", + -12.602490425109863 + ], + [ + "rented", + -12.602580070495605 + ], + [ + "▁converter", + -12.602592468261719 + ], + [ + "Verhältnis", + -12.602713584899902 + ], + [ + "▁Iceland", + -12.602792739868164 + ], + [ + "▁pretul", + -12.602933883666992 + ], + [ + "▁Vorstellung", + -12.602960586547852 + ], + [ + "▁hydrogen", + -12.603096008300781 + ], + [ + "▁pouvai", + -12.603097915649414 + ], + [ + "▁dawn", + -12.603153228759766 + ], + [ + "▁Georg", + -12.603269577026367 + ], + [ + "▁cautious", + -12.603367805480957 + ], + [ + "▁Pattern", + -12.603464126586914 + ], + [ + "▁Ox", + -12.603602409362793 + ], + [ + "▁decizie", + -12.603676795959473 + ], + [ + "REC", + -12.603889465332031 + ], + [ + "▁Mortgage", + -12.60393238067627 + ], + [ + "attributed", + -12.603973388671875 + ], + [ + "floor", + -12.603992462158203 + ], + [ + "▁Wichtig", + -12.604207992553711 + ], + [ + "enseignant", + -12.604265213012695 + ], + [ + "▁civilization", + -12.604302406311035 + ], + [ + "▁dispozitie", + -12.60450553894043 + ], + [ + "▁geographic", + -12.604543685913086 + ], + [ + "▁Kun", + -12.604607582092285 + ], + [ + "LIN", + -12.604679107666016 + ], + [ + "▁auzit", + -12.604707717895508 + ], + [ + "except", + -12.604761123657227 + ], + [ + "▁superbe", + -12.604904174804688 + ], + [ + "▁installé", + -12.605000495910645 + ], + [ + "▁Peninsula", + -12.605154037475586 + ], + [ + "▁norme", + -12.605164527893066 + ], + [ + "elul", + -12.60517406463623 + ], + [ + "▁Experten", + -12.605256080627441 + ], + [ + "expression", + -12.605295181274414 + ], + [ + "Christ", + -12.605320930480957 + ], + [ + "▁Fuel", + -12.605369567871094 + ], + [ + "▁muffin", + -12.605485916137695 + ], + [ + "▁lecteur", + -12.605521202087402 + ], + [ + "▁gifted", + -12.605589866638184 + ], + [ + "▁Japon", + -12.605602264404297 + ], + [ + "▁SSD", + -12.605644226074219 + ], + [ + "▁Calgary", + -12.605765342712402 + ], + [ + "▁hooked", + -12.605876922607422 + ], + [ + "▁Joan", + -12.605896949768066 + ], + [ + "▁tangible", + -12.606083869934082 + ], + [ + "FW", + -12.606225967407227 + ], + [ + "olli", + -12.6062593460083 + ], + [ + "▁Platinum", + -12.606376647949219 + ], + [ + "▁miniature", + -12.606392860412598 + ], + [ + "▁lump", + -12.606608390808105 + ], + [ + "ologische", + -12.60689926147461 + ], + [ + "▁Istanbul", + -12.606987953186035 + ], + [ + "▁Compar", + -12.607060432434082 + ], + [ + "tropic", + -12.607256889343262 + ], + [ + "KING", + -12.607279777526855 + ], + [ + "Präsident", + -12.607297897338867 + ], + [ + "▁fotografii", + -12.607303619384766 + ], + [ + "hoped", + -12.607451438903809 + ], + [ + "▁pâte", + -12.607601165771484 + ], + [ + "▁mercy", + -12.60760498046875 + ], + [ + "▁quiz", + -12.607619285583496 + ], + [ + "demonstrating", + -12.607678413391113 + ], + [ + "▁douce", + -12.607832908630371 + ], + [ + "▁Vest", + -12.607841491699219 + ], + [ + "▁Harvey", + -12.6082181930542 + ], + [ + "▁breit", + -12.608227729797363 + ], + [ + "▁Bereits", + -12.608291625976562 + ], + [ + "▁breakthrough", + -12.608316421508789 + ], + [ + "▁masterpiece", + -12.608320236206055 + ], + [ + "▁Chester", + -12.60838794708252 + ], + [ + "▁indiqué", + -12.608451843261719 + ], + [ + "hook", + -12.60857105255127 + ], + [ + "statutory", + -12.608596801757812 + ], + [ + "▁Direkt", + -12.608617782592773 + ], + [ + "▁specs", + -12.608708381652832 + ], + [ + "Drive", + -12.608725547790527 + ], + [ + "▁survivors", + -12.608826637268066 + ], + [ + "▁jackpot", + -12.608840942382812 + ], + [ + "▁garder", + -12.608872413635254 + ], + [ + "▁Geburtstag", + 
-12.60887336730957 + ], + [ + "145", + -12.608963966369629 + ], + [ + "▁Clay", + -12.609028816223145 + ], + [ + "▁WHO", + -12.60906982421875 + ], + [ + "▁Ellen", + -12.609393119812012 + ], + [ + "▁bonheur", + -12.609440803527832 + ], + [ + "▁hazards", + -12.609440803527832 + ], + [ + "▁Kaiser", + -12.609488487243652 + ], + [ + "▁tightly", + -12.609506607055664 + ], + [ + "Universitatea", + -12.609529495239258 + ], + [ + "▁rinse", + -12.609533309936523 + ], + [ + "▁passant", + -12.609640121459961 + ], + [ + "▁sânge", + -12.609832763671875 + ], + [ + "▁peuple", + -12.60983657836914 + ], + [ + "jungen", + -12.609975814819336 + ], + [ + "▁inappropriate", + -12.610054969787598 + ], + [ + "▁mitigate", + -12.610066413879395 + ], + [ + "MID", + -12.610221862792969 + ], + [ + "▁telecom", + -12.610297203063965 + ], + [ + "▁plaj", + -12.610316276550293 + ], + [ + "▁presupune", + -12.610361099243164 + ], + [ + "acco", + -12.61038875579834 + ], + [ + "expressing", + -12.610654830932617 + ], + [ + "▁Symphony", + -12.61066722869873 + ], + [ + "temperatur", + -12.610710144042969 + ], + [ + "▁activităţi", + -12.610800743103027 + ], + [ + "▁amended", + -12.610847473144531 + ], + [ + "▁rehab", + -12.610909461975098 + ], + [ + "▁sportiv", + -12.611004829406738 + ], + [ + "hotel", + -12.611031532287598 + ], + [ + "branche", + -12.61103630065918 + ], + [ + "▁Noch", + -12.611079216003418 + ], + [ + "▁1961", + -12.611238479614258 + ], + [ + "release", + -12.611359596252441 + ], + [ + "blaze", + -12.611381530761719 + ], + [ + "Adv", + -12.61139965057373 + ], + [ + "Line", + -12.611671447753906 + ], + [ + "▁financiare", + -12.61184310913086 + ], + [ + "▁chauffage", + -12.611919403076172 + ], + [ + "мо", + -12.61192512512207 + ], + [ + "schuhe", + -12.612035751342773 + ], + [ + "blé", + -12.612040519714355 + ], + [ + "▁Echo", + -12.612468719482422 + ], + [ + "▁remarks", + -12.61253547668457 + ], + [ + "scriu", + -12.612629890441895 + ], + [ + "Vir", + -12.612701416015625 + ], + [ + "War", + -12.61271858215332 + ], + [ + "atifs", + -12.613006591796875 + ], + [ + "RING", + -12.613082885742188 + ], + [ + "▁Instruction", + -12.613150596618652 + ], + [ + "▁verlassen", + -12.613155364990234 + ], + [ + "▁ergänz", + -12.613234519958496 + ], + [ + "▁Emil", + -12.613248825073242 + ], + [ + "▁empire", + -12.613263130187988 + ], + [ + "▁Einkauf", + -12.613306999206543 + ], + [ + "utigen", + -12.613329887390137 + ], + [ + "▁audition", + -12.613390922546387 + ], + [ + "travelled", + -12.61347484588623 + ], + [ + "ло", + -12.613579750061035 + ], + [ + "▁infinite", + -12.613720893859863 + ], + [ + "▁Lieblings", + -12.613749504089355 + ], + [ + "▁vân", + -12.613754272460938 + ], + [ + "▁spinning", + -12.613778114318848 + ], + [ + "converting", + -12.614031791687012 + ], + [ + "▁uncertain", + -12.61415958404541 + ], + [ + "restul", + -12.614168167114258 + ], + [ + "▁colourful", + -12.61420726776123 + ], + [ + "▁accountant", + -12.614338874816895 + ], + [ + "bourg", + -12.614532470703125 + ], + [ + "▁structuri", + -12.614538192749023 + ], + [ + "▁Booking", + -12.61465835571289 + ], + [ + "intéresse", + -12.614683151245117 + ], + [ + "▁coordinated", + -12.614753723144531 + ], + [ + "▁precaution", + -12.61497688293457 + ], + [ + "▁Cheese", + -12.615015983581543 + ], + [ + "▁surfing", + -12.615192413330078 + ], + [ + "▁souffr", + -12.61524486541748 + ], + [ + "▁Menu", + -12.615447998046875 + ], + [ + "▁arthritis", + -12.615593910217285 + ], + [ + "▁headphones", + -12.615601539611816 + ], + [ + "▁upgrading", + -12.615602493286133 + ], + [ 
+ "▁apparel", + -12.615653038024902 + ], + [ + "▁Haushalt", + -12.61572551727295 + ], + [ + "▁Personally", + -12.615815162658691 + ], + [ + "▁insane", + -12.615950584411621 + ], + [ + "▁fonduri", + -12.616083145141602 + ], + [ + "▁entier", + -12.616239547729492 + ], + [ + "▁Herbst", + -12.616264343261719 + ], + [ + "▁cyclist", + -12.616331100463867 + ], + [ + "▁filmmaker", + -12.616741180419922 + ], + [ + "▁Portuguese", + -12.616829872131348 + ], + [ + "▁nominee", + -12.616851806640625 + ], + [ + "▁Yang", + -12.616857528686523 + ], + [ + "▁slate", + -12.616943359375 + ], + [ + "▁entièrement", + -12.616974830627441 + ], + [ + "▁Umgang", + -12.617049217224121 + ], + [ + "shifted", + -12.617135047912598 + ], + [ + "▁défaut", + -12.617138862609863 + ], + [ + "heiz", + -12.617246627807617 + ], + [ + "▁Seal", + -12.617379188537598 + ], + [ + "▁servicing", + -12.617451667785645 + ], + [ + "marketing", + -12.617562294006348 + ], + [ + "▁demandé", + -12.617755889892578 + ], + [ + "TING", + -12.617841720581055 + ], + [ + "▁modifier", + -12.617907524108887 + ], + [ + "lysis", + -12.617966651916504 + ], + [ + "▁suplimentare", + -12.618117332458496 + ], + [ + "OTHER", + -12.618359565734863 + ], + [ + "Graph", + -12.618379592895508 + ], + [ + "▁coincide", + -12.618448257446289 + ], + [ + "governed", + -12.618598937988281 + ], + [ + "▁locking", + -12.618638038635254 + ], + [ + "▁Properties", + -12.618685722351074 + ], + [ + "▁Panama", + -12.61876392364502 + ], + [ + "▁Coupe", + -12.618846893310547 + ], + [ + "songwriter", + -12.618978500366211 + ], + [ + "exhibited", + -12.618988990783691 + ], + [ + "▁semnificativ", + -12.618995666503906 + ], + [ + "▁purchaser", + -12.619004249572754 + ], + [ + "▁puff", + -12.619097709655762 + ], + [ + "Back", + -12.619105339050293 + ], + [ + "fragt", + -12.61919116973877 + ], + [ + "▁deputy", + -12.619362831115723 + ], + [ + "▁revien", + -12.619556427001953 + ], + [ + "▁Christine", + -12.619558334350586 + ], + [ + "▁Cities", + -12.619573593139648 + ], + [ + "▁Charakter", + -12.61961555480957 + ], + [ + "atteindre", + -12.619625091552734 + ], + [ + "▁fou", + -12.619635581970215 + ], + [ + "▁obligatoire", + -12.619643211364746 + ], + [ + "INA", + -12.619791030883789 + ], + [ + "etc", + -12.6198148727417 + ], + [ + "▁newborn", + -12.620091438293457 + ], + [ + "▁explicitly", + -12.620116233825684 + ], + [ + "simplest", + -12.620203018188477 + ], + [ + "▁plateforme", + -12.62023639678955 + ], + [ + "ordinate", + -12.620291709899902 + ], + [ + "displaying", + -12.620346069335938 + ], + [ + "▁messy", + -12.620464324951172 + ], + [ + "gespielt", + -12.620466232299805 + ], + [ + "▁electron", + -12.62061882019043 + ], + [ + "▁Dreh", + -12.620796203613281 + ], + [ + "▁ambient", + -12.620976448059082 + ], + [ + "340", + -12.620979309082031 + ], + [ + "▁directive", + -12.62109375 + ], + [ + "▁Vall", + -12.621152877807617 + ], + [ + "ookie", + -12.621206283569336 + ], + [ + "▁wasted", + -12.621304512023926 + ], + [ + "CIS", + -12.621367454528809 + ], + [ + "lude", + -12.621378898620605 + ], + [ + "rach", + -12.621472358703613 + ], + [ + "▁gasest", + -12.62150764465332 + ], + [ + "▁miros", + -12.62150764465332 + ], + [ + "transforming", + -12.621536254882812 + ], + [ + "▁Milwaukee", + -12.621787071228027 + ], + [ + "▁uncommon", + -12.621789932250977 + ], + [ + "▁tableau", + -12.621841430664062 + ], + [ + "geräte", + -12.621952056884766 + ], + [ + "ophil", + -12.622139930725098 + ], + [ + "▁Jeep", + -12.62220287322998 + ], + [ + "▁wreck", + -12.622422218322754 + ], + [ + "LAND", + 
-12.622434616088867 + ], + [ + "attach", + -12.622566223144531 + ], + [ + "▁Panther", + -12.622634887695312 + ], + [ + "9:30", + -12.622777938842773 + ], + [ + "▁induce", + -12.622974395751953 + ], + [ + "▁privest", + -12.623006820678711 + ], + [ + "Ident", + -12.623047828674316 + ], + [ + "▁illnesses", + -12.623076438903809 + ], + [ + "▁inhabitants", + -12.623138427734375 + ], + [ + "▁fehlen", + -12.623357772827148 + ], + [ + "obtenu", + -12.623391151428223 + ], + [ + "▁gegründet", + -12.623655319213867 + ], + [ + "ARA", + -12.623711585998535 + ], + [ + "3-2", + -12.623835563659668 + ], + [ + "▁milliards", + -12.623968124389648 + ], + [ + "▁Bü", + -12.624001502990723 + ], + [ + "▁angegeben", + -12.624102592468262 + ], + [ + "TUR", + -12.624143600463867 + ], + [ + "▁arab", + -12.624166488647461 + ], + [ + "▁Scientist", + -12.624275207519531 + ], + [ + "▁minut", + -12.624394416809082 + ], + [ + "▁beast", + -12.624481201171875 + ], + [ + "▁accidentally", + -12.624573707580566 + ], + [ + "WN", + -12.624579429626465 + ], + [ + "▁Ralph", + -12.624588966369629 + ], + [ + "hängt", + -12.62462329864502 + ], + [ + "▁Erik", + -12.624639511108398 + ], + [ + "▁différent", + -12.624711990356445 + ], + [ + "▁conformitate", + -12.624842643737793 + ], + [ + "thriving", + -12.624900817871094 + ], + [ + "▁Piece", + -12.625123023986816 + ], + [ + "plasm", + -12.625152587890625 + ], + [ + "▁erwarten", + -12.62520980834961 + ], + [ + "owski", + -12.62523365020752 + ], + [ + "prayed", + -12.625293731689453 + ], + [ + "three", + -12.625542640686035 + ], + [ + "▁soundtrack", + -12.625651359558105 + ], + [ + "guru", + -12.625709533691406 + ], + [ + "▁cracked", + -12.625710487365723 + ], + [ + "▁adh", + -12.625823020935059 + ], + [ + "▁maître", + -12.625834465026855 + ], + [ + "▁Oberfläche", + -12.62585735321045 + ], + [ + "▁crab", + -12.625886917114258 + ], + [ + "▁Foster", + -12.625944137573242 + ], + [ + "▁gemütlich", + -12.626145362854004 + ], + [ + "SIC", + -12.626226425170898 + ], + [ + "ième", + -12.626298904418945 + ], + [ + "▁Few", + -12.626330375671387 + ], + [ + "gérer", + -12.626360893249512 + ], + [ + "2006", + -12.626456260681152 + ], + [ + "cool", + -12.626498222351074 + ], + [ + "▁dispune", + -12.626523971557617 + ], + [ + "recevoir", + -12.626577377319336 + ], + [ + "▁Bak", + -12.626585960388184 + ], + [ + "▁steer", + -12.62659740447998 + ], + [ + "ICS", + -12.626733779907227 + ], + [ + "▁Brett", + -12.626733779907227 + ], + [ + "▁downside", + -12.626751899719238 + ], + [ + "▁residency", + -12.62678050994873 + ], + [ + "important", + -12.626991271972656 + ], + [ + "ubb", + -12.627073287963867 + ], + [ + "mony", + -12.627259254455566 + ], + [ + "▁leasing", + -12.627341270446777 + ], + [ + "▁Gir", + -12.62735366821289 + ], + [ + "▁Biology", + -12.627364158630371 + ], + [ + "▁Colin", + -12.627463340759277 + ], + [ + "▁complicat", + -12.627775192260742 + ], + [ + "▁regroup", + -12.627899169921875 + ], + [ + "SPA", + -12.627950668334961 + ], + [ + "▁Veranstaltungen", + -12.627986907958984 + ], + [ + "convicted", + -12.628019332885742 + ], + [ + "▁Wonderful", + -12.628636360168457 + ], + [ + "züge", + -12.628799438476562 + ], + [ + "yton", + -12.628813743591309 + ], + [ + "EMENT", + -12.628887176513672 + ], + [ + "▁bent", + -12.62893009185791 + ], + [ + "heben", + -12.629231452941895 + ], + [ + "▁Sustainable", + -12.62926959991455 + ], + [ + "▁Newcastle", + -12.629276275634766 + ], + [ + "mother", + -12.629507064819336 + ], + [ + "▁eighth", + -12.629572868347168 + ], + [ + "▁atmosfer", + 
-12.629582405090332 + ], + [ + "expériment", + -12.629584312438965 + ], + [ + "▁Interest", + -12.629608154296875 + ], + [ + "▁successes", + -12.62964153289795 + ], + [ + "▁preschool", + -12.629802703857422 + ], + [ + "▁Funeral", + -12.629900932312012 + ], + [ + "blast", + -12.630083084106445 + ], + [ + "▁dimensiuni", + -12.630125999450684 + ], + [ + "▁Dow", + -12.630167007446289 + ], + [ + "▁pulp", + -12.63022518157959 + ], + [ + "▁Heather", + -12.630356788635254 + ], + [ + "▁erstellen", + -12.63044261932373 + ], + [ + "locating", + -12.630470275878906 + ], + [ + "direct", + -12.630475997924805 + ], + [ + "▁tractor", + -12.630494117736816 + ], + [ + "growing", + -12.630576133728027 + ], + [ + "▁inventor", + -12.630587577819824 + ], + [ + "ASA", + -12.63060188293457 + ], + [ + "insta", + -12.630732536315918 + ], + [ + "yana", + -12.63082504272461 + ], + [ + "▁squash", + -12.630839347839355 + ], + [ + "▁Basketball", + -12.630853652954102 + ], + [ + "AMA", + -12.631041526794434 + ], + [ + "insel", + -12.631093978881836 + ], + [ + "▁Fisch", + -12.631138801574707 + ], + [ + "▁metaphor", + -12.631221771240234 + ], + [ + "TES", + -12.631304740905762 + ], + [ + "▁conduce", + -12.631308555603027 + ], + [ + "stehende", + -12.631370544433594 + ], + [ + "▁FAQ", + -12.631475448608398 + ], + [ + "▁bezeichnet", + -12.631658554077148 + ], + [ + "wendung", + -12.631706237792969 + ], + [ + "▁Commonwealth", + -12.631776809692383 + ], + [ + "▁bait", + -12.631793975830078 + ], + [ + "▁Umsetzung", + -12.631834030151367 + ], + [ + "▁Equi", + -12.632063865661621 + ], + [ + "▁validity", + -12.632109642028809 + ], + [ + "Off", + -12.63222599029541 + ], + [ + "▁produsul", + -12.632314682006836 + ], + [ + "▁sensory", + -12.632363319396973 + ], + [ + "▁Imperial", + -12.632501602172852 + ], + [ + "▁Dick", + -12.632542610168457 + ], + [ + "kampf", + -12.632596969604492 + ], + [ + "▁Arzt", + -12.63267993927002 + ], + [ + "▁Reason", + -12.63267993927002 + ], + [ + "ITS", + -12.63270092010498 + ], + [ + "URL", + -12.632720947265625 + ], + [ + "demonstrates", + -12.632725715637207 + ], + [ + "▁dépend", + -12.632753372192383 + ], + [ + "NAS", + -12.632970809936523 + ], + [ + "▁funcți", + -12.633031845092773 + ], + [ + "▁vulnerability", + -12.633085250854492 + ], + [ + "2.7", + -12.633143424987793 + ], + [ + "layered", + -12.633152961730957 + ], + [ + "escence", + -12.633206367492676 + ], + [ + "▁République", + -12.633346557617188 + ], + [ + "▁Lust", + -12.633377075195312 + ], + [ + "▁sute", + -12.633381843566895 + ], + [ + "▁autonomous", + -12.633661270141602 + ], + [ + "Biserica", + -12.633662223815918 + ], + [ + "▁Chuck", + -12.633749961853027 + ], + [ + "▁protéger", + -12.6339750289917 + ], + [ + "rrell", + -12.634061813354492 + ], + [ + "▁Schaden", + -12.634062767028809 + ], + [ + "prennent", + -12.634100914001465 + ], + [ + "maß", + -12.6343412399292 + ], + [ + "OV", + -12.634453773498535 + ], + [ + "▁Wake", + -12.63450813293457 + ], + [ + "produire", + -12.634635925292969 + ], + [ + "▁Elder", + -12.634749412536621 + ], + [ + "Max", + -12.634839057922363 + ], + [ + "▁Chemistry", + -12.634918212890625 + ], + [ + "▁gourmet", + -12.634918212890625 + ], + [ + "erri", + -12.634967803955078 + ], + [ + "ени", + -12.635085105895996 + ], + [ + "▁Gru", + -12.635147094726562 + ], + [ + "▁vorbit", + -12.635408401489258 + ], + [ + "▁precede", + -12.635455131530762 + ], + [ + "▁randomly", + -12.635489463806152 + ], + [ + "▁efecte", + -12.63563060760498 + ], + [ + "▁calatori", + -12.635668754577637 + ], + [ + "▁Poor", + 
-12.635765075683594 + ], + [ + "List", + -12.635781288146973 + ], + [ + "▁regula", + -12.635964393615723 + ], + [ + "▁organisé", + -12.636028289794922 + ], + [ + "Div", + -12.636076927185059 + ], + [ + "▁volunteering", + -12.636423110961914 + ], + [ + "▁horr", + -12.636449813842773 + ], + [ + "9.99", + -12.636487007141113 + ], + [ + "▁UPS", + -12.636513710021973 + ], + [ + "▁englez", + -12.63652229309082 + ], + [ + "▁Eden", + -12.636523246765137 + ], + [ + "GG", + -12.63659954071045 + ], + [ + "▁typing", + -12.63664722442627 + ], + [ + "Likewise", + -12.636700630187988 + ], + [ + "▁stabilize", + -12.636737823486328 + ], + [ + "physio", + -12.636747360229492 + ], + [ + "ми", + -12.636785507202148 + ], + [ + "▁protagonist", + -12.636808395385742 + ], + [ + "▁velvet", + -12.636812210083008 + ], + [ + "schrank", + -12.636861801147461 + ], + [ + "▁Allah", + -12.63693618774414 + ], + [ + "▁forefront", + -12.636968612670898 + ], + [ + "▁salaries", + -12.637001037597656 + ], + [ + "▁prediction", + -12.637041091918945 + ], + [ + "▁Advent", + -12.637182235717773 + ], + [ + "politik", + -12.637280464172363 + ], + [ + "▁Heimat", + -12.637350082397461 + ], + [ + "ducted", + -12.637380599975586 + ], + [ + "ASH", + -12.637386322021484 + ], + [ + "▁Mold", + -12.637773513793945 + ], + [ + "▁publi", + -12.63784122467041 + ], + [ + "▁Vil", + -12.637892723083496 + ], + [ + "▁stu", + -12.637925148010254 + ], + [ + "INTE", + -12.638032913208008 + ], + [ + "▁fave", + -12.638151168823242 + ], + [ + "▁grounded", + -12.638175010681152 + ], + [ + "▁Anything", + -12.638184547424316 + ], + [ + "vik", + -12.638481140136719 + ], + [ + "Bank", + -12.63853645324707 + ], + [ + "deserved", + -12.638550758361816 + ], + [ + "machen", + -12.63874626159668 + ], + [ + "▁rugged", + -12.638751029968262 + ], + [ + "▁Nest", + -12.638901710510254 + ], + [ + "▁profund", + -12.639043807983398 + ], + [ + "▁quantum", + -12.639067649841309 + ], + [ + "▁funcționa", + -12.639118194580078 + ], + [ + "klu", + -12.639158248901367 + ], + [ + "▁consulter", + -12.63917350769043 + ], + [ + "MED", + -12.639286994934082 + ], + [ + "▁câştig", + -12.639334678649902 + ], + [ + "▁săptămâni", + -12.639334678649902 + ], + [ + "questioned", + -12.639517784118652 + ], + [ + "▁Trop", + -12.639530181884766 + ], + [ + "▁convo", + -12.639533042907715 + ], + [ + "▁sparkling", + -12.639533996582031 + ], + [ + "▁specialise", + -12.639566421508789 + ], + [ + "▁pancake", + -12.639726638793945 + ], + [ + "habitude", + -12.639727592468262 + ], + [ + "phal", + -12.640009880065918 + ], + [ + "▁Roche", + -12.640158653259277 + ], + [ + "▁personalities", + -12.640250205993652 + ], + [ + "▁Venice", + -12.640308380126953 + ], + [ + "▁comerciale", + -12.640379905700684 + ], + [ + "▁wounded", + -12.64075756072998 + ], + [ + "▁oraş", + -12.640864372253418 + ], + [ + "▁Pepper", + -12.641044616699219 + ], + [ + "▁Tourist", + -12.641094207763672 + ], + [ + "▁Mull", + -12.64116382598877 + ], + [ + "▁dignity", + -12.641234397888184 + ], + [ + "▁Fixed", + -12.641291618347168 + ], + [ + "çant", + -12.64130687713623 + ], + [ + "▁spectator", + -12.641402244567871 + ], + [ + "▁somn", + -12.641685485839844 + ], + [ + "▁ständig", + -12.641820907592773 + ], + [ + "▁resilience", + -12.641866683959961 + ], + [ + "▁Malta", + -12.642251014709473 + ], + [ + "▁problemele", + -12.642253875732422 + ], + [ + "▁Martha", + -12.642254829406738 + ], + [ + "▁extern", + -12.642267227172852 + ], + [ + "embre", + -12.642379760742188 + ], + [ + "▁médical", + -12.642526626586914 + ], + [ + "fordern", + 
-12.64256477355957 + ], + [ + "nji", + -12.642592430114746 + ], + [ + "▁aboard", + -12.642740249633789 + ], + [ + "▁sidewalk", + -12.642759323120117 + ], + [ + "WIN", + -12.642775535583496 + ], + [ + "▁Bobby", + -12.642842292785645 + ], + [ + "▁umfangreiche", + -12.642876625061035 + ], + [ + "leid", + -12.64292049407959 + ], + [ + "▁compens", + -12.642967224121094 + ], + [ + "▁juge", + -12.64299488067627 + ], + [ + "gerufen", + -12.64311408996582 + ], + [ + "▁médicament", + -12.643135070800781 + ], + [ + "▁1918", + -12.643155097961426 + ], + [ + "▁blanche", + -12.643163681030273 + ], + [ + "▁pleasing", + -12.643220901489258 + ], + [ + "▁propria", + -12.643471717834473 + ], + [ + "ergebnisse", + -12.643503189086914 + ], + [ + "▁retrouv", + -12.643571853637695 + ], + [ + "urteil", + -12.643592834472656 + ], + [ + "▁Draft", + -12.64361572265625 + ], + [ + "▁concluzi", + -12.643671035766602 + ], + [ + "centralized", + -12.643789291381836 + ], + [ + "▁Hannah", + -12.64382266998291 + ], + [ + "grija", + -12.64392375946045 + ], + [ + "▁Exercise", + -12.643972396850586 + ], + [ + "RAL", + -12.644001960754395 + ], + [ + "creme", + -12.64408016204834 + ], + [ + "High", + -12.644126892089844 + ], + [ + "clude", + -12.644131660461426 + ], + [ + "Considering", + -12.644208908081055 + ], + [ + "▁Guarantee", + -12.644404411315918 + ], + [ + "▁cuptor", + -12.644436836242676 + ], + [ + "ivität", + -12.64468002319336 + ], + [ + "▁Southwest", + -12.644882202148438 + ], + [ + "▁vivant", + -12.644890785217285 + ], + [ + "Your", + -12.64498519897461 + ], + [ + "▁Stunde", + -12.645003318786621 + ], + [ + "▁Ethernet", + -12.645040512084961 + ], + [ + "angebote", + -12.645078659057617 + ], + [ + "▁Sage", + -12.645271301269531 + ], + [ + "▁Boeing", + -12.645295143127441 + ], + [ + "▁$300", + -12.645381927490234 + ], + [ + "2-4", + -12.64546012878418 + ], + [ + "▁nécessit", + -12.645516395568848 + ], + [ + "▁ferment", + -12.645599365234375 + ], + [ + "▁Anmeldung", + -12.64567756652832 + ], + [ + "▁exhausted", + -12.645758628845215 + ], + [ + "▁Schloss", + -12.645772933959961 + ], + [ + "▁Replacement", + -12.645859718322754 + ], + [ + "▁Aussi", + -12.645933151245117 + ], + [ + "jection", + -12.646127700805664 + ], + [ + "978", + -12.64615535736084 + ], + [ + "▁siège", + -12.646258354187012 + ], + [ + "crest", + -12.646310806274414 + ], + [ + "▁jumatate", + -12.646312713623047 + ], + [ + "effizient", + -12.646317481994629 + ], + [ + "▁colaborare", + -12.6464262008667 + ], + [ + "HQ", + -12.646615028381348 + ], + [ + "130", + -12.646695137023926 + ], + [ + "culaire", + -12.646907806396484 + ], + [ + "▁Jamaica", + -12.646952629089355 + ], + [ + "▁cardboard", + -12.64731216430664 + ], + [ + "▁technische", + -12.64731502532959 + ], + [ + "▁cereri", + -12.647507667541504 + ], + [ + "▁contradict", + -12.647570610046387 + ], + [ + "▁irrigation", + -12.647586822509766 + ], + [ + "Nume", + -12.64765739440918 + ], + [ + "▁Bier", + -12.647714614868164 + ], + [ + "▁livrare", + -12.647903442382812 + ], + [ + "▁reservoir", + -12.647906303405762 + ], + [ + "vâr", + -12.648130416870117 + ], + [ + "▁galben", + -12.648213386535645 + ], + [ + "▁Geneva", + -12.648303985595703 + ], + [ + "▁lightning", + -12.648418426513672 + ], + [ + "wished", + -12.64842414855957 + ], + [ + "▁Blind", + -12.648481369018555 + ], + [ + "Interested", + -12.648499488830566 + ], + [ + "▁Primări", + -12.648627281188965 + ], + [ + "anthropo", + -12.648954391479492 + ], + [ + "▁Transaction", + -12.648961067199707 + ], + [ + "▁marcat", + -12.648971557617188 + ], + 
[ + "▁gelegen", + -12.649077415466309 + ], + [ + "▁contemporain", + -12.649182319641113 + ], + [ + "▁politică", + -12.649182319641113 + ], + [ + "▁1948", + -12.64928150177002 + ], + [ + "▁Mik", + -12.649287223815918 + ], + [ + "▁preţ", + -12.649310111999512 + ], + [ + "moor", + -12.649312973022461 + ], + [ + "ANN", + -12.649432182312012 + ], + [ + "▁constructive", + -12.649454116821289 + ], + [ + "konzept", + -12.649502754211426 + ], + [ + "▁entendu", + -12.649511337280273 + ], + [ + "▁Genesis", + -12.649541854858398 + ], + [ + "arzt", + -12.649581909179688 + ], + [ + "▁Allgemein", + -12.64970874786377 + ], + [ + "▁Derby", + -12.649725914001465 + ], + [ + "Class", + -12.649762153625488 + ], + [ + "▁$12", + -12.649770736694336 + ], + [ + "▁Tube", + -12.6498441696167 + ], + [ + "▁Contribu", + -12.649847030639648 + ], + [ + "▁HAVE", + -12.649860382080078 + ], + [ + "▁oxide", + -12.64986515045166 + ], + [ + "▁producator", + -12.649941444396973 + ], + [ + "▁Bench", + -12.650132179260254 + ], + [ + "▁comprehend", + -12.650139808654785 + ], + [ + "▁Damen", + -12.650494575500488 + ], + [ + "▁Garant", + -12.65056037902832 + ], + [ + "▁disappointing", + -12.650614738464355 + ], + [ + "▁réalisée", + -12.650693893432617 + ], + [ + "▁comportement", + -12.65072250366211 + ], + [ + "▁clash", + -12.650753021240234 + ], + [ + "▁curry", + -12.65076732635498 + ], + [ + "▁Lebanon", + -12.65078067779541 + ], + [ + "▁Romaniei", + -12.650784492492676 + ], + [ + "▁reprise", + -12.650840759277344 + ], + [ + "▁perceive", + -12.65095329284668 + ], + [ + "▁weaknesses", + -12.65101146697998 + ], + [ + "▁aminti", + -12.651057243347168 + ], + [ + "▁Concern", + -12.651103973388672 + ], + [ + "shadow", + -12.651310920715332 + ], + [ + "▁basin", + -12.651311874389648 + ], + [ + "moral", + -12.652063369750977 + ], + [ + "▁Hughes", + -12.652101516723633 + ], + [ + "Psych", + -12.652266502380371 + ], + [ + "▁Lieferung", + -12.65227222442627 + ], + [ + "▁serrurier", + -12.652379035949707 + ], + [ + "ussi", + -12.652386665344238 + ], + [ + "▁timpului", + -12.6524658203125 + ], + [ + "üm", + -12.652629852294922 + ], + [ + "▁Vladimir", + -12.652701377868652 + ], + [ + "▁Jag", + -12.65279483795166 + ], + [ + "▁verific", + -12.652849197387695 + ], + [ + "▁Pru", + -12.652894020080566 + ], + [ + "▁Laut", + -12.653285026550293 + ], + [ + "ITA", + -12.653287887573242 + ], + [ + "usually", + -12.653294563293457 + ], + [ + "▁carrière", + -12.65341854095459 + ], + [ + "▁extracted", + -12.653663635253906 + ], + [ + "kultur", + -12.653679847717285 + ], + [ + "öpfe", + -12.653932571411133 + ], + [ + "▁rejection", + -12.654016494750977 + ], + [ + "▁Hydr", + -12.654062271118164 + ], + [ + "▁informaţii", + -12.654098510742188 + ], + [ + "▁tolerate", + -12.654122352600098 + ], + [ + "▁cinéma", + -12.654302597045898 + ], + [ + "traumatic", + -12.654305458068848 + ], + [ + "produkt", + -12.654450416564941 + ], + [ + "▁Contest", + -12.654560089111328 + ], + [ + "lotte", + -12.654570579528809 + ], + [ + "▁Pension", + -12.65461254119873 + ], + [ + "▁Advertising", + -12.654623985290527 + ], + [ + "▁payout", + -12.654772758483887 + ], + [ + "▁Amanda", + -12.65481185913086 + ], + [ + "Elect", + -12.65485668182373 + ], + [ + "▁interiorul", + -12.654996871948242 + ], + [ + "stay", + -12.655348777770996 + ], + [ + "▁feminine", + -12.655352592468262 + ], + [ + "▁întâmplă", + -12.655437469482422 + ], + [ + "▁insult", + -12.65562915802002 + ], + [ + "▁chocolat", + -12.65567398071289 + ], + [ + "▁noroc", + -12.655750274658203 + ], + [ + "▁centr", + 
-12.655781745910645 + ], + [ + "▁Bühne", + -12.655858039855957 + ], + [ + "mighty", + -12.6558837890625 + ], + [ + "▁Buddha", + -12.655908584594727 + ], + [ + "▁parental", + -12.655997276306152 + ], + [ + "storm", + -12.656451225280762 + ], + [ + "recurring", + -12.6565523147583 + ], + [ + "▁luxe", + -12.656588554382324 + ], + [ + "niște", + -12.656728744506836 + ], + [ + "cuit", + -12.656839370727539 + ], + [ + "▁ausgewählt", + -12.656880378723145 + ], + [ + "▁dumb", + -12.657047271728516 + ], + [ + "IPS", + -12.657127380371094 + ], + [ + "▁Thir", + -12.65717887878418 + ], + [ + "Definitely", + -12.657195091247559 + ], + [ + "▁hilarious", + -12.657195091247559 + ], + [ + "▁rainbow", + -12.657231330871582 + ], + [ + "▁Bravo", + -12.657251358032227 + ], + [ + "▁entstanden", + -12.657259941101074 + ], + [ + "itorul", + -12.657269477844238 + ], + [ + "▁prosperity", + -12.657299041748047 + ], + [ + "▁Bord", + -12.657336235046387 + ], + [ + "▁familiei", + -12.657363891601562 + ], + [ + "▁scade", + -12.657425880432129 + ], + [ + "wöhn", + -12.657426834106445 + ], + [ + "▁ingrediente", + -12.65743637084961 + ], + [ + "RAD", + -12.657441139221191 + ], + [ + "▁tăi", + -12.657472610473633 + ], + [ + "bours", + -12.65747356414795 + ], + [ + "ATI", + -12.657540321350098 + ], + [ + "▁Blake", + -12.65761661529541 + ], + [ + "▁Implement", + -12.657712936401367 + ], + [ + "▁Beziehung", + -12.657838821411133 + ], + [ + "finanz", + -12.657953262329102 + ], + [ + "intestin", + -12.658513069152832 + ], + [ + "ließen", + -12.658535957336426 + ], + [ + "▁récent", + -12.658594131469727 + ], + [ + "▁laminate", + -12.658692359924316 + ], + [ + "▁Hör", + -12.65876579284668 + ], + [ + "▁personnalisé", + -12.658804893493652 + ], + [ + "edel", + -12.65890121459961 + ], + [ + "▁advertisement", + -12.658902168273926 + ], + [ + "▁pinterest", + -12.658921241760254 + ], + [ + "185", + -12.659058570861816 + ], + [ + "identité", + -12.65938949584961 + ], + [ + "▁Brick", + -12.659408569335938 + ], + [ + "Glu", + -12.65941047668457 + ], + [ + "▁attendant", + -12.659571647644043 + ], + [ + "▁Flip", + -12.659614562988281 + ], + [ + "attracting", + -12.659662246704102 + ], + [ + "functional", + -12.659703254699707 + ], + [ + "conceived", + -12.659772872924805 + ], + [ + "▁summarize", + -12.659773826599121 + ], + [ + "adjusting", + -12.659809112548828 + ], + [ + "CAL", + -12.660041809082031 + ], + [ + "▁Operating", + -12.660076141357422 + ], + [ + "zzi", + -12.66008472442627 + ], + [ + "▁Rover", + -12.6603364944458 + ], + [ + "▁versuchen", + -12.6603364944458 + ], + [ + "▁articulate", + -12.660600662231445 + ], + [ + "▁privé", + -12.660614013671875 + ], + [ + "▁consequent", + -12.660663604736328 + ], + [ + "EAT", + -12.660690307617188 + ], + [ + "▁Marsh", + -12.660696983337402 + ], + [ + "▁teenage", + -12.660717964172363 + ], + [ + "▁Renaissance", + -12.660740852355957 + ], + [ + "▁furnizor", + -12.660883903503418 + ], + [ + "▁Desert", + -12.660894393920898 + ], + [ + "unicipiului", + -12.66104793548584 + ], + [ + "▁ulterior", + -12.661065101623535 + ], + [ + "▁Ebene", + -12.661280632019043 + ], + [ + "▁monkey", + -12.661351203918457 + ], + [ + "▁enclosed", + -12.661389350891113 + ], + [ + "▁profitability", + -12.66139030456543 + ], + [ + "▁Evolution", + -12.661628723144531 + ], + [ + "▁adica", + -12.661670684814453 + ], + [ + "▁Structure", + -12.661709785461426 + ], + [ + "▁primer", + -12.661761283874512 + ], + [ + "▁asigură", + -12.662001609802246 + ], + [ + "▁Manuel", + -12.662220001220703 + ], + [ + "polita", + 
-12.662267684936523 + ], + [ + "▁Portable", + -12.662286758422852 + ], + [ + "fecți", + -12.662413597106934 + ], + [ + "▁obscure", + -12.662424087524414 + ], + [ + "▁Atlas", + -12.662436485290527 + ], + [ + "fährt", + -12.662679672241211 + ], + [ + "▁clinician", + -12.662837982177734 + ], + [ + "fuhr", + -12.66310977935791 + ], + [ + "▁matériaux", + -12.663113594055176 + ], + [ + "écrire", + -12.663142204284668 + ], + [ + "▁suspicious", + -12.6632080078125 + ], + [ + "pore", + -12.663263320922852 + ], + [ + "▁outdated", + -12.663304328918457 + ], + [ + "▁Mädchen", + -12.663328170776367 + ], + [ + "rcis", + -12.663420677185059 + ], + [ + "nicht", + -12.663463592529297 + ], + [ + "holding", + -12.663561820983887 + ], + [ + "▁heavier", + -12.66366195678711 + ], + [ + "ezimal", + -12.663960456848145 + ], + [ + "▁silicone", + -12.66397476196289 + ], + [ + "punerea", + -12.664108276367188 + ], + [ + "▁begeistert", + -12.664237976074219 + ], + [ + "2004", + -12.664283752441406 + ], + [ + "▁predecessor", + -12.664299011230469 + ], + [ + "▁overlap", + -12.664369583129883 + ], + [ + "▁digging", + -12.664376258850098 + ], + [ + "▁Upgrade", + -12.664407730102539 + ], + [ + "▁interesat", + -12.664543151855469 + ], + [ + "▁spinach", + -12.66456127166748 + ], + [ + "▁politice", + -12.664626121520996 + ], + [ + "activity", + -12.664831161499023 + ], + [ + "▁Rating", + -12.66484546661377 + ], + [ + "▁serrure", + -12.664846420288086 + ], + [ + "▁tânăr", + -12.664959907531738 + ], + [ + "▁WHAT", + -12.664970397949219 + ], + [ + "▁railroad", + -12.664989471435547 + ], + [ + "▁avid", + -12.665081024169922 + ], + [ + "▁Sophie", + -12.665084838867188 + ], + [ + "preferably", + -12.665173530578613 + ], + [ + "▁Fourth", + -12.665431022644043 + ], + [ + "kommenden", + -12.665452003479004 + ], + [ + "QUI", + -12.665478706359863 + ], + [ + "lohn", + -12.665505409240723 + ], + [ + "▁promis", + -12.665611267089844 + ], + [ + "▁shrub", + -12.665621757507324 + ], + [ + "nummer", + -12.66579818725586 + ], + [ + "▁dinosaur", + -12.665922164916992 + ], + [ + "▁Lucky", + -12.665937423706055 + ], + [ + "relates", + -12.666038513183594 + ], + [ + "▁FROM", + -12.666049003601074 + ], + [ + "▁racism", + -12.66610336303711 + ], + [ + "physical", + -12.66611385345459 + ], + [ + "alcoholic", + -12.666119575500488 + ], + [ + "▁reef", + -12.666126251220703 + ], + [ + "▁centru", + -12.66618824005127 + ], + [ + "université", + -12.66622257232666 + ], + [ + "▁visage", + -12.666232109069824 + ], + [ + "ităţile", + -12.666253089904785 + ], + [ + "▁Gent", + -12.666345596313477 + ], + [ + "zugeben", + -12.66643238067627 + ], + [ + "▁paradise", + -12.66646957397461 + ], + [ + "fuel", + -12.666505813598633 + ], + [ + "ografie", + -12.666568756103516 + ], + [ + "▁TIP", + -12.666730880737305 + ], + [ + "schreibung", + -12.66683292388916 + ], + [ + "▁bark", + -12.666840553283691 + ], + [ + "accéder", + -12.666895866394043 + ], + [ + "▁contamination", + -12.666937828063965 + ], + [ + "▁swelling", + -12.666950225830078 + ], + [ + "▁optimistic", + -12.666974067687988 + ], + [ + "▁differential", + -12.667015075683594 + ], + [ + "▁Arad", + -12.667030334472656 + ], + [ + "toxins", + -12.667075157165527 + ], + [ + "▁übernehmen", + -12.667091369628906 + ], + [ + "▁anime", + -12.667143821716309 + ], + [ + "actuel", + -12.667462348937988 + ], + [ + "▁bientôt", + -12.667525291442871 + ], + [ + "▁Patio", + -12.66761302947998 + ], + [ + "▁baisse", + -12.667630195617676 + ], + [ + "▁sprint", + -12.66773796081543 + ], + [ + "▁bilden", + -12.66811466217041 + ], 
+ [ + "VAL", + -12.668132781982422 + ], + [ + "▁réflexion", + -12.668220520019531 + ], + [ + "hopping", + -12.668242454528809 + ], + [ + "genesis", + -12.66834545135498 + ], + [ + "achtet", + -12.668435096740723 + ], + [ + "▁chinois", + -12.668525695800781 + ], + [ + "▁dezvoltat", + -12.668795585632324 + ], + [ + "arguably", + -12.66884708404541 + ], + [ + "▁Protocol", + -12.66884708404541 + ], + [ + "▁Sterling", + -12.668862342834473 + ], + [ + "▁Cave", + -12.668975830078125 + ], + [ + "▁Condo", + -12.66921615600586 + ], + [ + "▁erhöht", + -12.669235229492188 + ], + [ + "typische", + -12.669416427612305 + ], + [ + "merged", + -12.669439315795898 + ], + [ + "▁accumulation", + -12.669560432434082 + ], + [ + "sicherlich", + -12.669569969177246 + ], + [ + "kW", + -12.669620513916016 + ], + [ + "▁schriftlich", + -12.669757843017578 + ], + [ + "▁Vorteile", + -12.669918060302734 + ], + [ + "▁Northeast", + -12.669922828674316 + ], + [ + "frunt", + -12.669941902160645 + ], + [ + "istik", + -12.670003890991211 + ], + [ + "erster", + -12.670035362243652 + ], + [ + "▁Assistance", + -12.670150756835938 + ], + [ + "▁Fantastic", + -12.670150756835938 + ], + [ + "▁bărbat", + -12.670150756835938 + ], + [ + "▁Grinding", + -12.670151710510254 + ], + [ + "▁diffusion", + -12.670161247253418 + ], + [ + "▁vreun", + -12.670331954956055 + ], + [ + "▁Butler", + -12.670342445373535 + ], + [ + "▁Cherry", + -12.670352935791016 + ], + [ + "▁visualization", + -12.670540809631348 + ], + [ + "Paket", + -12.670572280883789 + ], + [ + "blin", + -12.670619010925293 + ], + [ + "▁cadou", + -12.670705795288086 + ], + [ + "▁Celtic", + -12.670754432678223 + ], + [ + "alegerea", + -12.670894622802734 + ], + [ + "▁Dorf", + -12.671035766601562 + ], + [ + "▁Noir", + -12.671185493469238 + ], + [ + "payment", + -12.67126750946045 + ], + [ + "▁Caroline", + -12.671334266662598 + ], + [ + "▁Berry", + -12.671359062194824 + ], + [ + "▁professeur", + -12.67147445678711 + ], + [ + "▁gratuitement", + -12.671503067016602 + ], + [ + "Suntem", + -12.671523094177246 + ], + [ + "IAN", + -12.671738624572754 + ], + [ + "▁fingerprint", + -12.671780586242676 + ], + [ + "▁controversy", + -12.671781539916992 + ], + [ + "▁fled", + -12.671875 + ], + [ + "▁Pokémon", + -12.67210865020752 + ], + [ + "excluding", + -12.67211627960205 + ], + [ + "▁friction", + -12.672161102294922 + ], + [ + "therapie", + -12.67225456237793 + ], + [ + "/7", + -12.672398567199707 + ], + [ + "▁designation", + -12.672442436218262 + ], + [ + "▁Belgia", + -12.672704696655273 + ], + [ + "▁cursuri", + -12.672836303710938 + ], + [ + "model", + -12.672840118408203 + ], + [ + "super", + -12.672987937927246 + ], + [ + "▁réduit", + -12.673028945922852 + ], + [ + "▁implicit", + -12.673177719116211 + ], + [ + "athlon", + -12.673227310180664 + ], + [ + "anniversaire", + -12.673416137695312 + ], + [ + "▁teaspoon", + -12.673416137695312 + ], + [ + "▁corrosion", + -12.673418998718262 + ], + [ + "▁überzeugt", + -12.673418998718262 + ], + [ + "▁flawless", + -12.673421859741211 + ], + [ + "▁vegetation", + -12.673477172851562 + ], + [ + "▁iarna", + -12.673507690429688 + ], + [ + "▁psychologist", + -12.673591613769531 + ], + [ + "hora", + -12.673625946044922 + ], + [ + "gab", + -12.67387580871582 + ], + [ + "▁soothing", + -12.674084663391113 + ], + [ + "▁stew", + -12.674141883850098 + ], + [ + "▁wager", + -12.674172401428223 + ], + [ + "▁tinere", + -12.674322128295898 + ], + [ + "▁baut", + -12.674323081970215 + ], + [ + "ecunoscut", + -12.674352645874023 + ], + [ + "gearbeitet", + -12.674422264099121 
+ ], + [ + "▁functi", + -12.674480438232422 + ], + [ + "▁dürfte", + -12.674724578857422 + ], + [ + "▁média", + -12.674724578857422 + ], + [ + "▁campanie", + -12.67475700378418 + ], + [ + "▁Distribu", + -12.674817085266113 + ], + [ + "▁mentoring", + -12.674959182739258 + ], + [ + "▁criz", + -12.675020217895508 + ], + [ + "findest", + -12.675056457519531 + ], + [ + "▁Vasile", + -12.675058364868164 + ], + [ + "▁compassionate", + -12.675115585327148 + ], + [ + "▁Tudor", + -12.675140380859375 + ], + [ + "▁flare", + -12.675260543823242 + ], + [ + "intreaga", + -12.675283432006836 + ], + [ + "gaz", + -12.6753511428833 + ], + [ + "▁porcelain", + -12.675379753112793 + ], + [ + "▁expedition", + -12.675520896911621 + ], + [ + "▁Azure", + -12.67553997039795 + ], + [ + "räumen", + -12.675549507141113 + ], + [ + "eiro", + -12.675567626953125 + ], + [ + "variante", + -12.675804138183594 + ], + [ + "▁Lucy", + -12.675825119018555 + ], + [ + "ôle", + -12.675909996032715 + ], + [ + "▁revenir", + -12.67602252960205 + ], + [ + "▁stained", + -12.676040649414062 + ], + [ + "▁falsch", + -12.676166534423828 + ], + [ + "▁incorpor", + -12.676166534423828 + ], + [ + "merkt", + -12.676187515258789 + ], + [ + "▁achten", + -12.6762056350708 + ], + [ + "▁hello", + -12.676290512084961 + ], + [ + "selben", + -12.676422119140625 + ], + [ + "ifty", + -12.676525115966797 + ], + [ + "▁Feier", + -12.67653751373291 + ], + [ + "1.000", + -12.676557540893555 + ], + [ + "▁Patch", + -12.676583290100098 + ], + [ + "peptid", + -12.676846504211426 + ], + [ + "▁recovering", + -12.676898956298828 + ], + [ + "Symptom", + -12.677020072937012 + ], + [ + "▁Auckland", + -12.677020072937012 + ], + [ + "▁retrieve", + -12.677328109741211 + ], + [ + "▁800-", + -12.67733097076416 + ], + [ + "schlagen", + -12.677473068237305 + ], + [ + "▁lourd", + -12.677562713623047 + ], + [ + "▁Purple", + -12.67760181427002 + ], + [ + "▁mittels", + -12.677776336669922 + ], + [ + "▁Düsseldorf", + -12.67800521850586 + ], + [ + "▁getaway", + -12.67803955078125 + ], + [ + "▁Cedar", + -12.678061485290527 + ], + [ + "▁Function", + -12.678241729736328 + ], + [ + "▁bizarre", + -12.67833423614502 + ], + [ + "4.3", + -12.67849063873291 + ], + [ + "▁fundraiser", + -12.67866325378418 + ], + [ + "geared", + -12.678780555725098 + ], + [ + "▁privée", + -12.678781509399414 + ], + [ + "▁Bonjour", + -12.67894458770752 + ], + [ + "Gar", + -12.67895793914795 + ], + [ + "▁Lloyd", + -12.678991317749023 + ], + [ + "▁Reinigung", + -12.6790132522583 + ], + [ + "▁Geno", + -12.679155349731445 + ], + [ + "▁Teilnahme", + -12.67919635772705 + ], + [ + "pian", + -12.679362297058105 + ], + [ + "sammelt", + -12.679368019104004 + ], + [ + "Pad", + -12.679755210876465 + ], + [ + "▁Troy", + -12.67976188659668 + ], + [ + "HG", + -12.679943084716797 + ], + [ + "▁klein", + -12.679962158203125 + ], + [ + "▁lettuce", + -12.679978370666504 + ], + [ + "▁patrimoine", + -12.679978370666504 + ], + [ + "▁cooker", + -12.680055618286133 + ], + [ + "▁accesibil", + -12.680137634277344 + ], + [ + "▁Spray", + -12.680201530456543 + ], + [ + "▁negotiation", + -12.68047046661377 + ], + [ + "▁jewel", + -12.680480003356934 + ], + [ + "▁dynamique", + -12.68063735961914 + ], + [ + "▁plastique", + -12.68067741394043 + ], + [ + "▁Limo", + -12.680682182312012 + ], + [ + "▁Funk", + -12.68069076538086 + ], + [ + "▁omului", + -12.680702209472656 + ], + [ + "title", + -12.680768013000488 + ], + [ + "curved", + -12.68082046508789 + ], + [ + "▁Lemon", + -12.680851936340332 + ], + [ + "förder", + -12.680891990661621 + ], + [ + 
"▁bewusst", + -12.681112289428711 + ], + [ + "inevitably", + -12.681296348571777 + ], + [ + "▁derivative", + -12.681297302246094 + ], + [ + "2:30", + -12.681300163269043 + ], + [ + "komfort", + -12.681305885314941 + ], + [ + "original", + -12.681480407714844 + ], + [ + "sanct", + -12.681540489196777 + ], + [ + "▁matte", + -12.6815767288208 + ], + [ + "empêche", + -12.681628227233887 + ], + [ + "▁jucător", + -12.681634902954102 + ], + [ + "▁attentive", + -12.681640625 + ], + [ + "▁recunoscut", + -12.681674003601074 + ], + [ + "▁Brush", + -12.68167495727539 + ], + [ + "▁consommateur", + -12.68183422088623 + ], + [ + "érence", + -12.682063102722168 + ], + [ + "typical", + -12.682084083557129 + ], + [ + "strategie", + -12.682205200195312 + ], + [ + "Effekt", + -12.682290077209473 + ], + [ + "▁Alcohol", + -12.682292938232422 + ], + [ + "oji", + -12.682333946228027 + ], + [ + "▁ruler", + -12.682357788085938 + ], + [ + "▁Norwegian", + -12.682615280151367 + ], + [ + "▁PlayStation", + -12.682615280151367 + ], + [ + "▁Hook", + -12.682747840881348 + ], + [ + "▁viewpoint", + -12.682759284973145 + ], + [ + "THER", + -12.682841300964355 + ], + [ + "420", + -12.682888984680176 + ], + [ + "Consequently", + -12.68294620513916 + ], + [ + "▁entschieden", + -12.68294620513916 + ], + [ + "▁Trag", + -12.68295669555664 + ], + [ + "▁Dawn", + -12.683003425598145 + ], + [ + "▁fuss", + -12.68301773071289 + ], + [ + "*****", + -12.683040618896484 + ], + [ + "▁Bullet", + -12.683140754699707 + ], + [ + "CAM", + -12.683155059814453 + ], + [ + "▁wonderfully", + -12.683201789855957 + ], + [ + "▁parlamentar", + -12.683263778686523 + ], + [ + "▁geometric", + -12.683307647705078 + ], + [ + "talement", + -12.683321952819824 + ], + [ + "/2018", + -12.683577537536621 + ], + [ + "▁oversight", + -12.684036254882812 + ], + [ + "kindly", + -12.684080123901367 + ], + [ + "therm", + -12.684305191040039 + ], + [ + "▁treaba", + -12.6846342086792 + ], + [ + "▁Trim", + -12.68471908569336 + ], + [ + "▁intelege", + -12.684842109680176 + ], + [ + "cino", + -12.685032844543457 + ], + [ + "▁straw", + -12.68508529663086 + ], + [ + "Tru", + -12.685251235961914 + ], + [ + "▁Television", + -12.68530559539795 + ], + [ + "Trader", + -12.68538761138916 + ], + [ + "▁Passion", + -12.685394287109375 + ], + [ + "rescu", + -12.685622215270996 + ], + [ + "Nicol", + -12.685635566711426 + ], + [ + "luj", + -12.685805320739746 + ], + [ + "▁mijloace", + -12.685921669006348 + ], + [ + "▁Removal", + -12.685922622680664 + ], + [ + "▁1944", + -12.686034202575684 + ], + [ + "▁shortcut", + -12.686159133911133 + ], + [ + "▁Fett", + -12.686258316040039 + ], + [ + "largement", + -12.686371803283691 + ], + [ + "▁altern", + -12.686446189880371 + ], + [ + "▁cleansing", + -12.686562538146973 + ], + [ + "▁Qatar", + -12.686692237854004 + ], + [ + "▁Ceci", + -12.686826705932617 + ], + [ + "▁weave", + -12.686848640441895 + ], + [ + "schmerz", + -12.686878204345703 + ], + [ + "▁dots", + -12.686888694763184 + ], + [ + "Télécharger", + -12.68691635131836 + ], + [ + "▁Conduct", + -12.686944007873535 + ], + [ + "bekannten", + -12.687325477600098 + ], + [ + "▁lungime", + -12.687344551086426 + ], + [ + "▁Ferrari", + -12.687390327453613 + ], + [ + "▁totusi", + -12.687605857849121 + ], + [ + "▁Anniversary", + -12.687911033630371 + ], + [ + "▁wilderness", + -12.687911987304688 + ], + [ + "▁Christoph", + -12.687939643859863 + ], + [ + "▁Nikon", + -12.688112258911133 + ], + [ + "▁Digi", + -12.68818473815918 + ], + [ + "▁Blumen", + -12.688190460205078 + ], + [ + "▁altul", + 
-12.688249588012695 + ], + [ + "▁Parish", + -12.688321113586426 + ], + [ + "czy", + -12.688393592834473 + ], + [ + "▁temper", + -12.688401222229004 + ], + [ + "▁Powder", + -12.688576698303223 + ], + [ + "▁Arnold", + -12.688577651977539 + ], + [ + "capacitatea", + -12.688687324523926 + ], + [ + "nderungen", + -12.688787460327148 + ], + [ + "▁utilization", + -12.688859939575195 + ], + [ + "99%", + -12.688942909240723 + ], + [ + "▁Fear", + -12.689099311828613 + ], + [ + "JE", + -12.689165115356445 + ], + [ + "▁Simpson", + -12.689239501953125 + ], + [ + "▁Podcast", + -12.68924617767334 + ], + [ + "▁Cardinal", + -12.689290046691895 + ], + [ + "▁Distribution", + -12.689315795898438 + ], + [ + "▁Drawing", + -12.689373970031738 + ], + [ + "▁tint", + -12.689412117004395 + ], + [ + "▁hran", + -12.68945598602295 + ], + [ + "▁Slide", + -12.68960189819336 + ], + [ + "▁Vertrauen", + -12.689654350280762 + ], + [ + "cloth", + -12.68971061706543 + ], + [ + "▁redirect", + -12.689728736877441 + ], + [ + "126", + -12.689842224121094 + ], + [ + "▁constituie", + -12.68985652923584 + ], + [ + "Mai", + -12.690070152282715 + ], + [ + "▁idol", + -12.690088272094727 + ], + [ + "▁tehnice", + -12.690163612365723 + ], + [ + "dip", + -12.690393447875977 + ], + [ + "▁soldier", + -12.690400123596191 + ], + [ + "▁Ordin", + -12.690409660339355 + ], + [ + "wobe", + -12.69050407409668 + ], + [ + "▁Brent", + -12.69058895111084 + ], + [ + "▁Sudan", + -12.690597534179688 + ], + [ + "6000", + -12.690619468688965 + ], + [ + "turism", + -12.690689086914062 + ], + [ + "▁Rocky", + -12.690744400024414 + ], + [ + "naming", + -12.69092082977295 + ], + [ + "▁entrepreneurial", + -12.690925598144531 + ], + [ + "hearted", + -12.690962791442871 + ], + [ + "ayne", + -12.69097900390625 + ], + [ + "▁hover", + -12.691081047058105 + ], + [ + "▁skull", + -12.691279411315918 + ], + [ + "▁tribal", + -12.691407203674316 + ], + [ + "▁crafting", + -12.691543579101562 + ], + [ + "bewertungen", + -12.691569328308105 + ], + [ + "▁decizii", + -12.691625595092773 + ], + [ + "obwohl", + -12.691655158996582 + ], + [ + "▁compromised", + -12.691875457763672 + ], + [ + "▁quelqu", + -12.69195556640625 + ], + [ + "▁Hilton", + -12.692075729370117 + ], + [ + "▁maturity", + -12.692095756530762 + ], + [ + "gelesen", + -12.692100524902344 + ], + [ + "▁harbor", + -12.69210433959961 + ], + [ + "▁maple", + -12.692326545715332 + ], + [ + "▁développ", + -12.6924409866333 + ], + [ + "▁Nobody", + -12.692517280578613 + ], + [ + "équipement", + -12.69255542755127 + ], + [ + "121", + -12.69274616241455 + ], + [ + "140", + -12.692827224731445 + ], + [ + "▁artistes", + -12.692914962768555 + ], + [ + "▁depune", + -12.692941665649414 + ], + [ + "▁erase", + -12.693129539489746 + ], + [ + "▁erzählt", + -12.693197250366211 + ], + [ + "▁Hyundai", + -12.69323444366455 + ], + [ + "▁impairment", + -12.69323444366455 + ], + [ + "▁conving", + -12.693279266357422 + ], + [ + "chasing", + -12.693426132202148 + ], + [ + "▁Claus", + -12.693438529968262 + ], + [ + "▁adaptée", + -12.693687438964844 + ], + [ + "▁Raz", + -12.693740844726562 + ], + [ + "rugs", + -12.693796157836914 + ], + [ + "▁urme", + -12.69387435913086 + ], + [ + "Nonetheless", + -12.693902015686035 + ], + [ + "▁Cemetery", + -12.693902969360352 + ], + [ + "umps", + -12.693906784057617 + ], + [ + "ACA", + -12.694003105163574 + ], + [ + "▁perioade", + -12.694235801696777 + ], + [ + "▁slogan", + -12.694263458251953 + ], + [ + "▁downward", + -12.694441795349121 + ], + [ + "eidig", + -12.694446563720703 + ], + [ + "RAC", + 
-12.69444751739502 + ], + [ + "▁inaugur", + -12.694496154785156 + ], + [ + "се", + -12.694588661193848 + ], + [ + "▁înțeleg", + -12.694608688354492 + ], + [ + "▁hopeful", + -12.694635391235352 + ], + [ + "▁customization", + -12.6946439743042 + ], + [ + "▁prisoners", + -12.694708824157715 + ], + [ + "▁Rau", + -12.695270538330078 + ], + [ + "▁Pitt", + -12.695389747619629 + ], + [ + "ături", + -12.695542335510254 + ], + [ + "▁metabolic", + -12.695842742919922 + ], + [ + "▁Zach", + -12.695868492126465 + ], + [ + "▁umfassende", + -12.695914268493652 + ], + [ + "▁révél", + -12.695950508117676 + ], + [ + "131", + -12.696052551269531 + ], + [ + "ismului", + -12.696062088012695 + ], + [ + "▁Sac", + -12.696076393127441 + ], + [ + "efficacité", + -12.69624137878418 + ], + [ + "cruci", + -12.69625473022461 + ], + [ + "bisschen", + -12.69632339477539 + ], + [ + "▁Oster", + -12.696324348449707 + ], + [ + "lowered", + -12.6964693069458 + ], + [ + "▁Ausland", + -12.69674015045166 + ], + [ + "▁Pub", + -12.696794509887695 + ], + [ + "▁Marseille", + -12.696925163269043 + ], + [ + "▁Charter", + -12.696959495544434 + ], + [ + "howcasing", + -12.697010040283203 + ], + [ + "risti", + -12.6971435546875 + ], + [ + "▁thermostat", + -12.697151184082031 + ], + [ + "▁Clin", + -12.697233200073242 + ], + [ + "▁entsteht", + -12.697246551513672 + ], + [ + "Choosing", + -12.697248458862305 + ], + [ + "▁Schmerz", + -12.697284698486328 + ], + [ + "▁Till", + -12.697307586669922 + ], + [ + "▁Polo", + -12.697399139404297 + ], + [ + "▁proceduri", + -12.697402000427246 + ], + [ + "▁Believe", + -12.697444915771484 + ], + [ + "▁playful", + -12.697514533996582 + ], + [ + "▁verändert", + -12.697588920593262 + ], + [ + "▁pairing", + -12.697654724121094 + ], + [ + "MAG", + -12.69784927368164 + ], + [ + "leiste", + -12.69788932800293 + ], + [ + "▁testimonial", + -12.697916030883789 + ], + [ + "▁Economy", + -12.697916984558105 + ], + [ + "▁Wechsel", + -12.697918891906738 + ], + [ + "wirkung", + -12.69801139831543 + ], + [ + "▁exceeded", + -12.698030471801758 + ], + [ + "South", + -12.698067665100098 + ], + [ + "create", + -12.698221206665039 + ], + [ + "▁davantage", + -12.698270797729492 + ], + [ + "Log", + -12.69831657409668 + ], + [ + "▁irregular", + -12.698587417602539 + ], + [ + "VB", + -12.698691368103027 + ], + [ + "▁Rö", + -12.698741912841797 + ], + [ + "▁intreb", + -12.698881149291992 + ], + [ + "▁penser", + -12.698920249938965 + ], + [ + "▁déclaré", + -12.698923110961914 + ], + [ + "▁Tommy", + -12.699026107788086 + ], + [ + "2,500", + -12.699163436889648 + ], + [ + "▁Uganda", + -12.699260711669922 + ], + [ + "contacting", + -12.699445724487305 + ], + [ + "▁apreciat", + -12.699485778808594 + ], + [ + "▁beginnen", + -12.6995210647583 + ], + [ + "▁Gain", + -12.699580192565918 + ], + [ + "Office", + -12.69969654083252 + ], + [ + "ermittlung", + -12.699710845947266 + ], + [ + "▁Admission", + -12.699727058410645 + ], + [ + "▁Earl", + -12.6997652053833 + ], + [ + "▁Aviation", + -12.699833869934082 + ], + [ + "▁apologize", + -12.699929237365723 + ], + [ + "▁enclosure", + -12.699929237365723 + ], + [ + "▁Lack", + -12.69998836517334 + ], + [ + "wife", + -12.699995994567871 + ], + [ + "▁rotating", + -12.700016975402832 + ], + [ + "▁hergestellt", + -12.700020790100098 + ], + [ + "▁repository", + -12.70002269744873 + ], + [ + "TK", + -12.700149536132812 + ], + [ + "▁lectur", + -12.700190544128418 + ], + [ + "▁reflex", + -12.700286865234375 + ], + [ + "▁Harmon", + -12.700401306152344 + ], + [ + "▁vrem", + -12.700479507446289 + ], + [ + 
"▁Strange", + -12.70055103302002 + ], + [ + "▁champagne", + -12.700615882873535 + ], + [ + "▁oscil", + -12.700647354125977 + ], + [ + "sensitive", + -12.700677871704102 + ], + [ + "▁Sheriff", + -12.700841903686523 + ], + [ + "PRES", + -12.700956344604492 + ], + [ + "▁vow", + -12.70123291015625 + ], + [ + "▁dioxide", + -12.701276779174805 + ], + [ + "ен", + -12.701374053955078 + ], + [ + "▁corpului", + -12.701376914978027 + ], + [ + "▁prevăzut", + -12.70160961151123 + ], + [ + "India", + -12.701827049255371 + ], + [ + "hausse", + -12.70189094543457 + ], + [ + "▁clienți", + -12.701957702636719 + ], + [ + "▁entour", + -12.70202350616455 + ], + [ + "▁Sharp", + -12.70209789276123 + ], + [ + "▁teatru", + -12.702285766601562 + ], + [ + "▁Grow", + -12.702327728271484 + ], + [ + "▁caravan", + -12.70234203338623 + ], + [ + "▁sieben", + -12.702420234680176 + ], + [ + "▁cunosc", + -12.702502250671387 + ], + [ + "Bereichen", + -12.702527046203613 + ], + [ + "▁Benutzer", + -12.702619552612305 + ], + [ + "▁Ethiopia", + -12.702619552612305 + ], + [ + "▁Physics", + -12.702619552612305 + ], + [ + "preserving", + -12.70263385772705 + ], + [ + "ал", + -12.702712059020996 + ], + [ + "▁aerial", + -12.70272159576416 + ], + [ + "▁nouvel", + -12.702741622924805 + ], + [ + "▁stamped", + -12.702954292297363 + ], + [ + "▁inaugural", + -12.702970504760742 + ], + [ + "▁medicinal", + -12.702999114990234 + ], + [ + "Quite", + -12.703028678894043 + ], + [ + "accumulated", + -12.703165054321289 + ], + [ + "register", + -12.703271865844727 + ], + [ + "▁Falcon", + -12.70327377319336 + ], + [ + "▁boiling", + -12.703301429748535 + ], + [ + "▁advertised", + -12.703339576721191 + ], + [ + "collect", + -12.703362464904785 + ], + [ + "albeit", + -12.703418731689453 + ], + [ + "▁Organis", + -12.703473091125488 + ], + [ + "luate", + -12.703536033630371 + ], + [ + "▁préféré", + -12.70369815826416 + ], + [ + "▁frumoasa", + -12.703968048095703 + ], + [ + "▁truc", + -12.704092979431152 + ], + [ + "▁Fä", + -12.704154968261719 + ], + [ + "▁dome", + -12.704180717468262 + ], + [ + "Mobile", + -12.704191207885742 + ], + [ + "▁redeem", + -12.704198837280273 + ], + [ + "IONS", + -12.70422077178955 + ], + [ + "▁țări", + -12.704235076904297 + ], + [ + "▁singular", + -12.704385757446289 + ], + [ + "▁livestock", + -12.704425811767578 + ], + [ + "▁démont", + -12.704427719116211 + ], + [ + "clés", + -12.704527854919434 + ], + [ + "music", + -12.704561233520508 + ], + [ + "▁explicat", + -12.704602241516113 + ], + [ + "▁Fellowship", + -12.704703330993652 + ], + [ + "▁electrode", + -12.704760551452637 + ], + [ + "129", + -12.704977035522461 + ], + [ + "▁Rescue", + -12.704983711242676 + ], + [ + "▁Rocket", + -12.705159187316895 + ], + [ + "OSE", + -12.705301284790039 + ], + [ + "▁Sacramento", + -12.705317497253418 + ], + [ + "▁Haiti", + -12.705357551574707 + ], + [ + "▁Erwachsene", + -12.705390930175781 + ], + [ + "▁Terminal", + -12.70541000366211 + ], + [ + "URI", + -12.705453872680664 + ], + [ + "▁Rural", + -12.70549201965332 + ], + [ + "▁achizitiona", + -12.70552921295166 + ], + [ + "▁identifiable", + -12.705655097961426 + ], + [ + "▁gekauft", + -12.705659866333008 + ], + [ + "▁improper", + -12.705673217773438 + ], + [ + "lashes", + -12.705751419067383 + ], + [ + "vorbim", + -12.705751419067383 + ], + [ + "▁hinder", + -12.705862045288086 + ], + [ + "▁Grenz", + -12.705878257751465 + ], + [ + "Nav", + -12.705955505371094 + ], + [ + "alimentation", + -12.705972671508789 + ], + [ + "▁Cottage", + -12.7059965133667 + ], + [ + "▁nötig", + 
-12.706197738647461 + ], + [ + "▁cuprinde", + -12.70622444152832 + ], + [ + "session", + -12.706256866455078 + ], + [ + "▁Separat", + -12.70634651184082 + ], + [ + "▁besuchen", + -12.706672668457031 + ], + [ + "▁noodles", + -12.706684112548828 + ], + [ + "▁ballet", + -12.706696510314941 + ], + [ + "WG", + -12.706731796264648 + ], + [ + "▁Duty", + -12.706871032714844 + ], + [ + "▁porc", + -12.706944465637207 + ], + [ + "▁booster", + -12.70698356628418 + ], + [ + "galerie", + -12.707056045532227 + ], + [ + "▁Lance", + -12.707119941711426 + ], + [ + "▁déplac", + -12.707178115844727 + ], + [ + "▁rugby", + -12.707240104675293 + ], + [ + "▁upholstery", + -12.707345962524414 + ], + [ + "▁bustl", + -12.70736312866211 + ], + [ + "▁Dealer", + -12.70740032196045 + ], + [ + "▁genome", + -12.707414627075195 + ], + [ + "▁citizenship", + -12.707466125488281 + ], + [ + "rora", + -12.707515716552734 + ], + [ + "ARK", + -12.707776069641113 + ], + [ + "▁Semi", + -12.707820892333984 + ], + [ + "▁Improvement", + -12.707892417907715 + ], + [ + "▁negru", + -12.708142280578613 + ], + [ + "▁Bruxelles", + -12.70836067199707 + ], + [ + "flüge", + -12.70837688446045 + ], + [ + "▁Technique", + -12.708392143249512 + ], + [ + "▁Obst", + -12.708413124084473 + ], + [ + "2020", + -12.708560943603516 + ], + [ + "▁gek", + -12.708593368530273 + ], + [ + "▁drepturi", + -12.708600997924805 + ], + [ + "▁Logan", + -12.708605766296387 + ], + [ + "gelöst", + -12.70863151550293 + ], + [ + "▁grandparents", + -12.708702087402344 + ], + [ + "phin", + -12.708950996398926 + ], + [ + "▁dwell", + -12.709037780761719 + ], + [ + "▁Nobel", + -12.709151268005371 + ], + [ + "dial", + -12.70927906036377 + ], + [ + "▁spontan", + -12.709344863891602 + ], + [ + "advancing", + -12.70937728881836 + ], + [ + "starring", + -12.70947551727295 + ], + [ + "▁astea", + -12.709498405456543 + ], + [ + "igueur", + -12.709638595581055 + ], + [ + "▁Ancient", + -12.709700584411621 + ], + [ + "filter", + -12.70971965789795 + ], + [ + "Doar", + -12.709758758544922 + ], + [ + "▁Workers", + -12.709759712219238 + ], + [ + "Certainly", + -12.709906578063965 + ], + [ + "▁commencé", + -12.709914207458496 + ], + [ + "▁zipper", + -12.710001945495605 + ], + [ + "▁Selection", + -12.710070610046387 + ], + [ + "▁succ", + -12.710280418395996 + ], + [ + "headed", + -12.710345268249512 + ], + [ + "RIA", + -12.710350036621094 + ], + [ + "▁papa", + -12.710366249084473 + ], + [ + "▁profesionale", + -12.710394859313965 + ], + [ + "▁Zeichen", + -12.710402488708496 + ], + [ + "▁artisans", + -12.710489273071289 + ], + [ + "▁Geist", + -12.710585594177246 + ], + [ + "practic", + -12.710741996765137 + ], + [ + "▁ministrul", + -12.71076488494873 + ], + [ + "viens", + -12.710912704467773 + ], + [ + "prezintă", + -12.710919380187988 + ], + [ + "Integrated", + -12.710981369018555 + ], + [ + "▁rooftop", + -12.710989952087402 + ], + [ + "▁successor", + -12.710991859436035 + ], + [ + "OTO", + -12.711012840270996 + ], + [ + "liés", + -12.711027145385742 + ], + [ + "▁Diver", + -12.71121597290039 + ], + [ + "Specifically", + -12.711297988891602 + ], + [ + "▁calibr", + -12.711301803588867 + ], + [ + "KK", + -12.711341857910156 + ], + [ + "▁défense", + -12.711414337158203 + ], + [ + "▁english", + -12.711414337158203 + ], + [ + "verbrauch", + -12.711418151855469 + ], + [ + "▁attire", + -12.711433410644531 + ], + [ + "▁Recipe", + -12.711441040039062 + ], + [ + "équilibre", + -12.711457252502441 + ], + [ + "accumul", + -12.71157169342041 + ], + [ + "▁financement", + -12.71169662475586 + ], + [ + "rij", + 
-12.711962699890137 + ], + [ + "▁prince", + -12.711999893188477 + ], + [ + "▁préparer", + -12.7120361328125 + ], + [ + "surviving", + -12.71211051940918 + ], + [ + "operation", + -12.712233543395996 + ], + [ + "▁judet", + -12.71242904663086 + ], + [ + "▁Verantwortung", + -12.712433815002441 + ], + [ + "▁Vinyl", + -12.712536811828613 + ], + [ + "DEN", + -12.712584495544434 + ], + [ + "▁Tail", + -12.712589263916016 + ], + [ + "yearly", + -12.712590217590332 + ], + [ + "▁comisi", + -12.712613105773926 + ], + [ + "lava", + -12.71261978149414 + ], + [ + "▁succession", + -12.71264934539795 + ], + [ + "▁Whisk", + -12.713030815124512 + ], + [ + "▁precizat", + -12.713096618652344 + ], + [ + "▁unmittelbar", + -12.713117599487305 + ], + [ + "ICH", + -12.713139533996582 + ], + [ + "▁atteint", + -12.713199615478516 + ], + [ + "▁hometown", + -12.713268280029297 + ], + [ + "▁Zip", + -12.71328353881836 + ], + [ + "▁Weekly", + -12.71336841583252 + ], + [ + "▁crashes", + -12.713401794433594 + ], + [ + "▁Turbo", + -12.713421821594238 + ], + [ + "▁susține", + -12.713468551635742 + ], + [ + "▁Venus", + -12.713587760925293 + ], + [ + "▁finalement", + -12.713595390319824 + ], + [ + "rewarded", + -12.713693618774414 + ], + [ + "▁principau", + -12.713899612426758 + ], + [ + "▁régional", + -12.713979721069336 + ], + [ + "▁1958", + -12.714178085327148 + ], + [ + "▁Musical", + -12.714189529418945 + ], + [ + "▁stylist", + -12.714251518249512 + ], + [ + "cetate", + -12.714282035827637 + ], + [ + "gorge", + -12.71433162689209 + ], + [ + "▁espresso", + -12.714493751525879 + ], + [ + "überall", + -12.714576721191406 + ], + [ + "▁NHL", + -12.714593887329102 + ], + [ + "▁Dock", + -12.71472454071045 + ], + [ + "▁mosquito", + -12.71481704711914 + ], + [ + "▁forthcoming", + -12.714852333068848 + ], + [ + "▁Visitors", + -12.714881896972656 + ], + [ + "kro", + -12.714882850646973 + ], + [ + "_______", + -12.715048789978027 + ], + [ + "▁STEM", + -12.715105056762695 + ], + [ + "9.5", + -12.715141296386719 + ], + [ + "accompagne", + -12.715177536010742 + ], + [ + "▁Trick", + -12.715202331542969 + ], + [ + "▁endorsement", + -12.715400695800781 + ], + [ + "▁amplifier", + -12.715498924255371 + ], + [ + "▁malicious", + -12.715499877929688 + ], + [ + "▁roam", + -12.71552848815918 + ], + [ + "▁kennt", + -12.715635299682617 + ], + [ + "Connor", + -12.715690612792969 + ], + [ + "▁dysfunction", + -12.715828895568848 + ], + [ + "▁zuverlässig", + -12.715840339660645 + ], + [ + "▁corpul", + -12.71595573425293 + ], + [ + "▁boule", + -12.715967178344727 + ], + [ + "otti", + -12.715991973876953 + ], + [ + "440", + -12.716050148010254 + ], + [ + "▁mimic", + -12.716056823730469 + ], + [ + "farben", + -12.716129302978516 + ], + [ + "▁Wagner", + -12.716214179992676 + ], + [ + "Kom", + -12.7162504196167 + ], + [ + "▁miteinander", + -12.716269493103027 + ], + [ + "▁String", + -12.716296195983887 + ], + [ + "▁Ellis", + -12.716313362121582 + ], + [ + "▁Perth", + -12.716337203979492 + ], + [ + "▁temperatura", + -12.716381072998047 + ], + [ + "umbling", + -12.716397285461426 + ], + [ + "▁Medizin", + -12.716554641723633 + ], + [ + "▁KY", + -12.71660327911377 + ], + [ + "apei", + -12.716642379760742 + ], + [ + "counter", + -12.716647148132324 + ], + [ + "strich", + -12.71665096282959 + ], + [ + "▁Între", + -12.716652870178223 + ], + [ + "▁Cliff", + -12.716785430908203 + ], + [ + "▁foreclosure", + -12.716864585876465 + ], + [ + "................", + -12.716878890991211 + ], + [ + "Clearly", + -12.717028617858887 + ], + [ + "AJ", + -12.717057228088379 + ], + [ 
+ "ndro", + -12.717180252075195 + ], + [ + "▁Arsenal", + -12.717206001281738 + ], + [ + "▁Recherche", + -12.717216491699219 + ], + [ + "Guests", + -12.717225074768066 + ], + [ + "▁besucht", + -12.717242240905762 + ], + [ + "wissen", + -12.717266082763672 + ], + [ + "fekt", + -12.717414855957031 + ], + [ + "hottest", + -12.717414855957031 + ], + [ + "▁Tomorrow", + -12.717547416687012 + ], + [ + "▁Signature", + -12.717557907104492 + ], + [ + "127", + -12.717583656311035 + ], + [ + "▁competence", + -12.71766471862793 + ], + [ + "Einige", + -12.717686653137207 + ], + [ + "patented", + -12.71782112121582 + ], + [ + "▁Exhibition", + -12.717889785766602 + ], + [ + "▁verbessern", + -12.717889785766602 + ], + [ + "▁Garcia", + -12.718043327331543 + ], + [ + "▁inquire", + -12.718278884887695 + ], + [ + "coping", + -12.718353271484375 + ], + [ + "▁linguri", + -12.71842098236084 + ], + [ + "▁trivia", + -12.718433380126953 + ], + [ + "▁începutul", + -12.718489646911621 + ], + [ + "▁parteneriat", + -12.7186279296875 + ], + [ + "tagen", + -12.718636512756348 + ], + [ + "▁engagé", + -12.718916893005371 + ], + [ + "▁chalk", + -12.718944549560547 + ], + [ + "▁fashionable", + -12.719416618347168 + ], + [ + "0.8", + -12.719635009765625 + ], + [ + "▁sticker", + -12.719751358032227 + ], + [ + "▁desperately", + -12.719765663146973 + ], + [ + "höhe", + -12.719903945922852 + ], + [ + "▁fericire", + -12.71994400024414 + ], + [ + "évaluation", + -12.719948768615723 + ], + [ + "▁Divide", + -12.719959259033203 + ], + [ + "▁indulge", + -12.719979286193848 + ], + [ + "fett", + -12.720014572143555 + ], + [ + "▁communal", + -12.72017765045166 + ], + [ + "▁mindful", + -12.720187187194824 + ], + [ + "dauert", + -12.720192909240723 + ], + [ + "▁veille", + -12.720263481140137 + ], + [ + "▁vér", + -12.720330238342285 + ], + [ + "▁Baseball", + -12.720373153686523 + ], + [ + "▁succeeded", + -12.720418930053711 + ], + [ + "▁Terrasse", + -12.720420837402344 + ], + [ + "irgend", + -12.720500946044922 + ], + [ + "▁Munich", + -12.720556259155273 + ], + [ + "weisung", + -12.72067642211914 + ], + [ + "metre", + -12.720916748046875 + ], + [ + "▁Raymond", + -12.721015930175781 + ], + [ + "▁chute", + -12.72102165222168 + ], + [ + "▁Accounting", + -12.721075057983398 + ], + [ + "▁pantry", + -12.721122741699219 + ], + [ + "▁underwater", + -12.721181869506836 + ], + [ + "ARI", + -12.721222877502441 + ], + [ + "lowed", + -12.721245765686035 + ], + [ + "numbered", + -12.721430778503418 + ], + [ + "REN", + -12.72148609161377 + ], + [ + "▁industriel", + -12.721489906311035 + ], + [ + "wäh", + -12.721531867980957 + ], + [ + "kenntnis", + -12.721631050109863 + ], + [ + "▁govern", + -12.721635818481445 + ], + [ + "strained", + -12.721661567687988 + ], + [ + "▁rythme", + -12.721689224243164 + ], + [ + "ин", + -12.72169303894043 + ], + [ + "▁burner", + -12.721723556518555 + ], + [ + "▁zählt", + -12.721790313720703 + ], + [ + "▁verte", + -12.721883773803711 + ], + [ + "▁Catalog", + -12.721896171569824 + ], + [ + "▁Bruno", + -12.721988677978516 + ], + [ + "0.7", + -12.721997261047363 + ], + [ + "▁litig", + -12.72207260131836 + ], + [ + "▁greet", + -12.722129821777344 + ], + [ + "▁stool", + -12.722393035888672 + ], + [ + "gression", + -12.722457885742188 + ], + [ + "▁Klassen", + -12.722491264343262 + ], + [ + "▁neon", + -12.722661018371582 + ], + [ + "▁Tall", + -12.722734451293945 + ], + [ + "▁satin", + -12.722895622253418 + ], + [ + "▁Bend", + -12.722915649414062 + ], + [ + "▁soluţi", + -12.723077774047852 + ], + [ + "▁styl", + -12.723196983337402 + ], 
+ [ + "▁Siri", + -12.723358154296875 + ], + [ + "▁Sanders", + -12.723464012145996 + ], + [ + "▁spike", + -12.723499298095703 + ], + [ + "pinion", + -12.723854064941406 + ], + [ + "▁purta", + -12.724122047424316 + ], + [ + "CARE", + -12.724224090576172 + ], + [ + "▁creştere", + -12.724311828613281 + ], + [ + "▁fry", + -12.724374771118164 + ], + [ + "▁Schweizer", + -12.724400520324707 + ], + [ + "durchschnittlich", + -12.724411010742188 + ], + [ + "celaşi", + -12.724446296691895 + ], + [ + "▁deceased", + -12.724474906921387 + ], + [ + "▁Nerv", + -12.724668502807617 + ], + [ + "2-2", + -12.7247314453125 + ], + [ + "▁Stahl", + -12.724753379821777 + ], + [ + "▁workload", + -12.724834442138672 + ], + [ + "erhielt", + -12.724984169006348 + ], + [ + "▁hypothesis", + -12.725103378295898 + ], + [ + "bib", + -12.725110054016113 + ], + [ + "▁ţară", + -12.725116729736328 + ], + [ + "vaut", + -12.725122451782227 + ], + [ + "prehensi", + -12.725184440612793 + ], + [ + "▁Offering", + -12.725188255310059 + ], + [ + "▁dislike", + -12.725252151489258 + ], + [ + "▁firewall", + -12.725252151489258 + ], + [ + "mania", + -12.725255966186523 + ], + [ + "195", + -12.725278854370117 + ], + [ + "▁Champ", + -12.725324630737305 + ], + [ + "▁philosophical", + -12.725343704223633 + ], + [ + "länge", + -12.72553539276123 + ], + [ + "advisable", + -12.725785255432129 + ], + [ + "negotiating", + -12.725785255432129 + ], + [ + "Providing", + -12.725791931152344 + ], + [ + "▁1959", + -12.725801467895508 + ], + [ + "▁spyware", + -12.725831031799316 + ], + [ + "sharing", + -12.725837707519531 + ], + [ + "▁prévoi", + -12.725905418395996 + ], + [ + "▁jaune", + -12.7260103225708 + ], + [ + "schoss", + -12.726028442382812 + ], + [ + "▁obține", + -12.726129531860352 + ], + [ + "▁attraktiv", + -12.726489067077637 + ], + [ + "gemeinschaft", + -12.7265043258667 + ], + [ + "BV", + -12.726505279541016 + ], + [ + "Top", + -12.726617813110352 + ], + [ + "▁Sharon", + -12.726625442504883 + ], + [ + "bok", + -12.726675033569336 + ], + [ + "▁résist", + -12.726811408996582 + ], + [ + "Napoca", + -12.726822853088379 + ], + [ + "▁Uncategorized", + -12.726898193359375 + ], + [ + "▁trustee", + -12.726936340332031 + ], + [ + "▁remise", + -12.727025985717773 + ], + [ + "▁aştept", + -12.727165222167969 + ], + [ + "▁allergic", + -12.727206230163574 + ], + [ + "èvre", + -12.727211952209473 + ], + [ + "LAR", + -12.72734546661377 + ], + [ + "1.9", + -12.727497100830078 + ], + [ + "▁outbreak", + -12.727520942687988 + ], + [ + "▁trocken", + -12.727568626403809 + ], + [ + "▁laughter", + -12.727724075317383 + ], + [ + "▁Attend", + -12.727785110473633 + ], + [ + "jung", + -12.727822303771973 + ], + [ + "racking", + -12.727934837341309 + ], + [ + "ORS", + -12.728178024291992 + ], + [ + "▁rasp", + -12.728527069091797 + ], + [ + "VF", + -12.728551864624023 + ], + [ + "▁Tamil", + -12.72860050201416 + ], + [ + "124", + -12.728602409362793 + ], + [ + "▁Fiber", + -12.728714942932129 + ], + [ + "▁launches", + -12.728755950927734 + ], + [ + "Post", + -12.728777885437012 + ], + [ + "▁bucks", + -12.729072570800781 + ], + [ + "▁Nicholas", + -12.72923755645752 + ], + [ + "▁cărți", + -12.729255676269531 + ], + [ + "emper", + -12.729681968688965 + ], + [ + "Point", + -12.729689598083496 + ], + [ + "fraction", + -12.729753494262695 + ], + [ + "▁BIG", + -12.729804992675781 + ], + [ + "▁lancer", + -12.729829788208008 + ], + [ + "EVER", + -12.72997760772705 + ], + [ + "trend", + -12.73000431060791 + ], + [ + "▁remerci", + -12.730076789855957 + ], + [ + "▁prevalent", + 
-12.730168342590332 + ], + [ + "370", + -12.730290412902832 + ], + [ + "▁bestellen", + -12.730327606201172 + ], + [ + "Buying", + -12.730341911315918 + ], + [ + "▁Aufbau", + -12.730416297912598 + ], + [ + "▁opini", + -12.730416297912598 + ], + [ + "▁regiune", + -12.730663299560547 + ], + [ + "▁martial", + -12.73069953918457 + ], + [ + "LK", + -12.730754852294922 + ], + [ + "▁Feuerwehr", + -12.730974197387695 + ], + [ + "screened", + -12.73099422454834 + ], + [ + "Blue", + -12.73120403289795 + ], + [ + "▁analize", + -12.731237411499023 + ], + [ + "▁lure", + -12.731247901916504 + ], + [ + "▁internally", + -12.731283187866211 + ], + [ + "father", + -12.731322288513184 + ], + [ + "▁diplomatic", + -12.731343269348145 + ], + [ + "▁Activity", + -12.731464385986328 + ], + [ + "▁cliqu", + -12.73156452178955 + ], + [ + "▁adequately", + -12.731809616088867 + ], + [ + "▁Elena", + -12.73183822631836 + ], + [ + "▁Citizens", + -12.732102394104004 + ], + [ + "▁Länge", + -12.732295989990234 + ], + [ + "▁respectful", + -12.732300758361816 + ], + [ + "▁zuständig", + -12.73248291015625 + ], + [ + "▁réception", + -12.732584953308105 + ], + [ + "▁headset", + -12.732686996459961 + ], + [ + "▁awhile", + -12.732705116271973 + ], + [ + "▁speculation", + -12.732707977294922 + ], + [ + "▁WhatsApp", + -12.732714653015137 + ], + [ + "▁tulbur", + -12.732731819152832 + ], + [ + "▁voluntar", + -12.732758522033691 + ], + [ + "▁Studium", + -12.73277473449707 + ], + [ + "▁protector", + -12.732833862304688 + ], + [ + "▁Wrap", + -12.732840538024902 + ], + [ + "staat", + -12.732951164245605 + ], + [ + "▁judgement", + -12.733396530151367 + ], + [ + "unauthorized", + -12.733397483825684 + ], + [ + "Rank", + -12.733487129211426 + ], + [ + "pră", + -12.733503341674805 + ], + [ + "▁Paw", + -12.733627319335938 + ], + [ + "▁relev", + -12.733664512634277 + ], + [ + "▁arbor", + -12.733830451965332 + ], + [ + "stretches", + -12.733885765075684 + ], + [ + "nook", + -12.733906745910645 + ], + [ + "▁Tunis", + -12.733907699584961 + ], + [ + "▁shocking", + -12.734036445617676 + ], + [ + "▁oppress", + -12.73414421081543 + ], + [ + "10.1", + -12.7341890335083 + ], + [ + "▁ERP", + -12.734310150146484 + ], + [ + "wolle", + -12.7343168258667 + ], + [ + "▁Catch", + -12.734352111816406 + ], + [ + "Plus", + -12.734368324279785 + ], + [ + "Market", + -12.734445571899414 + ], + [ + "scribed", + -12.734536170959473 + ], + [ + "▁décoration", + -12.734594345092773 + ], + [ + "▁chanson", + -12.734607696533203 + ], + [ + "▁Midwest", + -12.734763145446777 + ], + [ + "▁Spencer", + -12.734795570373535 + ], + [ + "▁societate", + -12.734807968139648 + ], + [ + "curated", + -12.735087394714355 + ], + [ + "▁canopy", + -12.735135078430176 + ], + [ + "ат", + -12.735142707824707 + ], + [ + "Sig", + -12.73514461517334 + ], + [ + "▁witch", + -12.735153198242188 + ], + [ + "envoyer", + -12.735175132751465 + ], + [ + "▁$1,000", + -12.735230445861816 + ], + [ + "▁peripheral", + -12.735482215881348 + ], + [ + "nnouncing", + -12.735509872436523 + ], + [ + "perfect", + -12.73559284210205 + ], + [ + "▁warten", + -12.735748291015625 + ], + [ + "ELI", + -12.735822677612305 + ], + [ + "▁recap", + -12.735912322998047 + ], + [ + "dün", + -12.735978126525879 + ], + [ + "▁Spre", + -12.736029624938965 + ], + [ + "2005", + -12.736153602600098 + ], + [ + "▁réparation", + -12.73617935180664 + ], + [ + "▁extraordinar", + -12.736196517944336 + ], + [ + "existence", + -12.736337661743164 + ], + [ + "oanele", + -12.736467361450195 + ], + [ + "▁reprezentant", + -12.736474990844727 + ], + [ + 
"▁attacker", + -12.736490249633789 + ], + [ + "▁Berliner", + -12.73657512664795 + ], + [ + "experience", + -12.736649513244629 + ], + [ + "▁Monde", + -12.736800193786621 + ], + [ + "intervention", + -12.736956596374512 + ], + [ + "▁Einstellung", + -12.736977577209473 + ], + [ + "▁Valentin", + -12.737011909484863 + ], + [ + "▁zonă", + -12.737200736999512 + ], + [ + "occupant", + -12.737223625183105 + ], + [ + "▁mobilis", + -12.737260818481445 + ], + [ + "metall", + -12.737261772155762 + ], + [ + "evangeli", + -12.73729133605957 + ], + [ + "Adding", + -12.737326622009277 + ], + [ + "▁Roland", + -12.73735237121582 + ], + [ + "ENCE", + -12.737462043762207 + ], + [ + "▁Insul", + -12.737478256225586 + ], + [ + "tellement", + -12.737497329711914 + ], + [ + "▁Blogger", + -12.737499237060547 + ], + [ + "▁prote", + -12.737504005432129 + ], + [ + "▁Minimum", + -12.737574577331543 + ], + [ + "▁termic", + -12.737624168395996 + ], + [ + "▁Sachen", + -12.737859725952148 + ], + [ + "▁Maschinen", + -12.737863540649414 + ], + [ + "▁Dragnea", + -12.737926483154297 + ], + [ + "▁overtime", + -12.737967491149902 + ], + [ + "calorie", + -12.737968444824219 + ], + [ + "▁jene", + -12.73814868927002 + ], + [ + "▁Satan", + -12.738153457641602 + ], + [ + "▁currencies", + -12.73827075958252 + ], + [ + "▁echipamente", + -12.738329887390137 + ], + [ + "▁forgiveness", + -12.73843765258789 + ], + [ + "▁Pause", + -12.738479614257812 + ], + [ + "▁Witt", + -12.738529205322266 + ], + [ + "STOR", + -12.738632202148438 + ], + [ + "▁actuelle", + -12.738703727722168 + ], + [ + "▁Ard", + -12.738853454589844 + ], + [ + "▁Constitu", + -12.738880157470703 + ], + [ + "ghan", + -12.7388916015625 + ], + [ + "Make", + -12.738906860351562 + ], + [ + "▁garne", + -12.738947868347168 + ], + [ + "▁Hitler", + -12.738956451416016 + ], + [ + "▁rubbish", + -12.738973617553711 + ], + [ + "6.0", + -12.739025115966797 + ], + [ + "▁Giving", + -12.739177703857422 + ], + [ + "▁persever", + -12.73937702178955 + ], + [ + "wirk", + -12.7394380569458 + ], + [ + "liegenden", + -12.739455223083496 + ], + [ + "▁morceau", + -12.73946762084961 + ], + [ + "atty", + -12.73961067199707 + ], + [ + "▁Quebec", + -12.739669799804688 + ], + [ + "harmonie", + -12.739705085754395 + ], + [ + "Nummer", + -12.739721298217773 + ], + [ + "▁splendid", + -12.739747047424316 + ], + [ + "▁halfway", + -12.739808082580566 + ], + [ + "▁periodically", + -12.740071296691895 + ], + [ + "▁Ländern", + -12.740077018737793 + ], + [ + "▁AAA", + -12.740083694458008 + ], + [ + "▁Frost", + -12.740198135375977 + ], + [ + "▁heroin", + -12.740289688110352 + ], + [ + "▁bucurie", + -12.7403564453125 + ], + [ + "▁Pradesh", + -12.74036693572998 + ], + [ + "zusetzen", + -12.740405082702637 + ], + [ + "raising", + -12.740425109863281 + ], + [ + "▁furniz", + -12.740567207336426 + ], + [ + "▁convi", + -12.740575790405273 + ], + [ + "pictured", + -12.740911483764648 + ], + [ + "▁inadequate", + -12.741065979003906 + ], + [ + "▁aprobat", + -12.741069793701172 + ], + [ + "▁exercising", + -12.741083145141602 + ], + [ + "▁faisai", + -12.741138458251953 + ], + [ + "▁prosecution", + -12.741231918334961 + ], + [ + "380", + -12.741402626037598 + ], + [ + "▁Potential", + -12.74145793914795 + ], + [ + "▁Magi", + -12.741523742675781 + ], + [ + "From", + -12.741752624511719 + ], + [ + "batterie", + -12.74181079864502 + ], + [ + "▁poisson", + -12.74185562133789 + ], + [ + "▁Probe", + -12.741950988769531 + ], + [ + "▁pastel", + -12.741998672485352 + ], + [ + "▁tracked", + -12.742410659790039 + ], + [ + "▁advertisers", + 
-12.74251937866211 + ], + [ + "adevar", + -12.742537498474121 + ], + [ + "ит", + -12.742776870727539 + ], + [ + "▁Herren", + -12.742815971374512 + ], + [ + "EAM", + -12.742820739746094 + ], + [ + "▁scooter", + -12.742822647094727 + ], + [ + "requesting", + -12.742841720581055 + ], + [ + "dynamis", + -12.742949485778809 + ], + [ + "▁dahin", + -12.742961883544922 + ], + [ + "▁tweak", + -12.743061065673828 + ], + [ + "▁hail", + -12.743101119995117 + ], + [ + "▁întotdeauna", + -12.743160247802734 + ], + [ + "▁Publikum", + -12.743167877197266 + ], + [ + "▁panoramic", + -12.743167877197266 + ], + [ + "▁PRE", + -12.74331283569336 + ], + [ + "▁thrill", + -12.743361473083496 + ], + [ + "Open", + -12.743366241455078 + ], + [ + "▁Layer", + -12.74345588684082 + ], + [ + "▁Bosch", + -12.743459701538086 + ], + [ + "hull", + -12.743511199951172 + ], + [ + "▁născut", + -12.743518829345703 + ], + [ + "tausch", + -12.743559837341309 + ], + [ + "▁autoturism", + -12.743577003479004 + ], + [ + "▁crank", + -12.743701934814453 + ], + [ + "CLE", + -12.743735313415527 + ], + [ + "▁Frederick", + -12.74386978149414 + ], + [ + "mog", + -12.743887901306152 + ], + [ + "behalten", + -12.74396800994873 + ], + [ + "▁aunt", + -12.744050979614258 + ], + [ + "▁Triple", + -12.744141578674316 + ], + [ + "▁Ark", + -12.744242668151855 + ], + [ + "AUD", + -12.744440078735352 + ], + [ + "▁Candy", + -12.744505882263184 + ], + [ + "tama", + -12.744515419006348 + ], + [ + "▁Evaluation", + -12.744571685791016 + ], + [ + "▁Memphis", + -12.744571685791016 + ], + [ + "▁stellar", + -12.74457836151123 + ], + [ + "▁fabricat", + -12.744632720947266 + ], + [ + "▁terminat", + -12.744868278503418 + ], + [ + "▁domnul", + -12.744913101196289 + ], + [ + "▁keynote", + -12.744925498962402 + ], + [ + "▁dentistry", + -12.744951248168945 + ], + [ + "rift", + -12.745052337646484 + ], + [ + "▁bilan", + -12.745119094848633 + ], + [ + "2.6", + -12.745125770568848 + ], + [ + "undergoing", + -12.745210647583008 + ], + [ + "▁pseudo", + -12.745274543762207 + ], + [ + "▁maşin", + -12.745280265808105 + ], + [ + "▁munte", + -12.74555492401123 + ], + [ + "▁VW", + -12.745932579040527 + ], + [ + "▁Rab", + -12.74593448638916 + ], + [ + "▁sustine", + -12.745972633361816 + ], + [ + "▁Bedingungen", + -12.745977401733398 + ], + [ + "▁învăţ", + -12.745980262756348 + ], + [ + "▁pyramid", + -12.745983123779297 + ], + [ + "HEN", + -12.746020317077637 + ], + [ + "▁citrus", + -12.746058464050293 + ], + [ + "Code", + -12.746064186096191 + ], + [ + "▁Beginning", + -12.746164321899414 + ], + [ + "▁discourse", + -12.746249198913574 + ], + [ + "▁miercuri", + -12.746329307556152 + ], + [ + "▁producător", + -12.74637508392334 + ], + [ + "▁analys", + -12.746397972106934 + ], + [ + "▁Evan", + -12.7467041015625 + ], + [ + "138", + -12.746987342834473 + ], + [ + "▁târziu", + -12.74703311920166 + ], + [ + "▁relocation", + -12.747052192687988 + ], + [ + "decizia", + -12.74708080291748 + ], + [ + "tollen", + -12.74714183807373 + ], + [ + "TRO", + -12.747180938720703 + ], + [ + "▁runway", + -12.74719524383545 + ], + [ + "illet", + -12.747270584106445 + ], + [ + "▁serveur", + -12.747387886047363 + ], + [ + "bezogen", + -12.747427940368652 + ], + [ + "▁believers", + -12.747668266296387 + ], + [ + "determined", + -12.747711181640625 + ], + [ + "▁reinforced", + -12.74791431427002 + ], + [ + "▁wedge", + -12.748006820678711 + ], + [ + "methyl", + -12.74807357788086 + ], + [ + "MES", + -12.748188018798828 + ], + [ + "vpn", + -12.748374938964844 + ], + [ + "▁consta", + -12.74837875366211 + ], + [ + 
"▁vizitat", + -12.748420715332031 + ], + [ + "modul", + -12.748455047607422 + ], + [ + "▁routing", + -12.748528480529785 + ], + [ + "tempted", + -12.748540878295898 + ], + [ + "URS", + -12.748785018920898 + ], + [ + "apprentissage", + -12.748795509338379 + ], + [ + "▁Hungary", + -12.748796463012695 + ], + [ + "Previously", + -12.74880313873291 + ], + [ + "▁translator", + -12.748804092407227 + ], + [ + "▁resonate", + -12.748830795288086 + ], + [ + "201", + -12.748851776123047 + ], + [ + "3-0", + -12.749029159545898 + ], + [ + "▁reunion", + -12.749090194702148 + ], + [ + "▁palate", + -12.749096870422363 + ], + [ + "0.4", + -12.749171257019043 + ], + [ + "reheat", + -12.74924373626709 + ], + [ + "Roo", + -12.749261856079102 + ], + [ + "200,000", + -12.74940013885498 + ], + [ + "Bro", + -12.749431610107422 + ], + [ + "▁estimation", + -12.749468803405762 + ], + [ + "schneiden", + -12.749499320983887 + ], + [ + "▁Inspired", + -12.749506950378418 + ], + [ + "▁lottery", + -12.749539375305176 + ], + [ + "▁Friedrich", + -12.749887466430664 + ], + [ + "FIT", + -12.749913215637207 + ], + [ + "0.6", + -12.7499418258667 + ], + [ + "▁dagegen", + -12.74997615814209 + ], + [ + "▁Reb", + -12.750115394592285 + ], + [ + "▁Eigenschaften", + -12.75020694732666 + ], + [ + "▁molding", + -12.750361442565918 + ], + [ + "▁Harper", + -12.750548362731934 + ], + [ + "verwaltung", + -12.75055980682373 + ], + [ + "▁Schlüssel", + -12.75055980682373 + ], + [ + "▁desfasura", + -12.75055980682373 + ], + [ + "▁rencontrer", + -12.75055980682373 + ], + [ + "▁negoci", + -12.750581741333008 + ], + [ + "▁Leading", + -12.750615119934082 + ], + [ + "▁necesita", + -12.750652313232422 + ], + [ + "▁biking", + -12.750683784484863 + ], + [ + "▁jointly", + -12.75069808959961 + ], + [ + "▁crush", + -12.750702857971191 + ], + [ + "Vol", + -12.750768661499023 + ], + [ + "▁ebay", + -12.750836372375488 + ], + [ + "▁Shri", + -12.750991821289062 + ], + [ + "▁AMD", + -12.751029968261719 + ], + [ + "FG", + -12.751032829284668 + ], + [ + "Argentin", + -12.75120735168457 + ], + [ + "▁incercat", + -12.751431465148926 + ], + [ + "▁tidy", + -12.751628875732422 + ], + [ + "▁provoqu", + -12.751635551452637 + ], + [ + "▁Written", + -12.751649856567383 + ], + [ + "▁Kooperation", + -12.751666069030762 + ], + [ + "▁scripture", + -12.751952171325684 + ], + [ + "▁Pflicht", + -12.751974105834961 + ], + [ + "ficial", + -12.752013206481934 + ], + [ + "vremea", + -12.752013206481934 + ], + [ + "▁Growing", + -12.752115249633789 + ], + [ + "▁redesign", + -12.752119064331055 + ], + [ + "▁obstacle", + -12.752214431762695 + ], + [ + "▁rugam", + -12.752235412597656 + ], + [ + "▁SPD", + -12.752243995666504 + ], + [ + "165", + -12.752270698547363 + ], + [ + "fiz", + -12.752284049987793 + ], + [ + "▁startet", + -12.752326011657715 + ], + [ + "▁Principle", + -12.752327919006348 + ], + [ + "▁abdominal", + -12.752327919006348 + ], + [ + "▁podium", + -12.752528190612793 + ], + [ + "duty", + -12.752616882324219 + ], + [ + "bonne", + -12.752679824829102 + ], + [ + "▁Serbia", + -12.752687454223633 + ], + [ + "▁brunch", + -12.752839088439941 + ], + [ + "▁Personne", + -12.752975463867188 + ], + [ + "▁Idea", + -12.753034591674805 + ], + [ + "forementioned", + -12.753036499023438 + ], + [ + "▁chassis", + -12.753037452697754 + ], + [ + "gebühr", + -12.753050804138184 + ], + [ + "ucun", + -12.753061294555664 + ], + [ + "▁Maz", + -12.7531156539917 + ], + [ + "1-4", + -12.75318431854248 + ], + [ + "kleid", + -12.753273963928223 + ], + [ + "▁Volvo", + -12.753337860107422 + ], + [ + 
"brechen", + -12.753378868103027 + ], + [ + "▁homepage", + -12.753472328186035 + ], + [ + "fuz", + -12.753509521484375 + ], + [ + "▁abgeschlossen", + -12.753595352172852 + ], + [ + "▁gelungen", + -12.753658294677734 + ], + [ + "▁booklet", + -12.753711700439453 + ], + [ + "▁Ukrainian", + -12.753745079040527 + ], + [ + "▁Melissa", + -12.753746032714844 + ], + [ + "CENT", + -12.75379467010498 + ], + [ + "▁intégré", + -12.753806114196777 + ], + [ + "weighing", + -12.753827095031738 + ], + [ + "▁crumbl", + -12.753894805908203 + ], + [ + "▁bunk", + -12.754167556762695 + ], + [ + "krieg", + -12.754207611083984 + ], + [ + "▁freshman", + -12.754307746887207 + ], + [ + "alaya", + -12.754339218139648 + ], + [ + "Avem", + -12.754353523254395 + ], + [ + "▁Kne", + -12.754423141479492 + ], + [ + "▁upstairs", + -12.75448226928711 + ], + [ + "AIL", + -12.754508972167969 + ], + [ + "țul", + -12.75478744506836 + ], + [ + "▁Lecture", + -12.754817962646484 + ], + [ + "▁entdecken", + -12.754843711853027 + ], + [ + "▁GMT", + -12.754912376403809 + ], + [ + "▁Leitung", + -12.754937171936035 + ], + [ + "▁inclined", + -12.755170822143555 + ], + [ + "▁skillet", + -12.75555419921875 + ], + [ + "FN", + -12.755742073059082 + ], + [ + "▁Perform", + -12.755821228027344 + ], + [ + "shift", + -12.75583267211914 + ], + [ + "recognizing", + -12.755873680114746 + ], + [ + "▁concise", + -12.755873680114746 + ], + [ + "▁obsessed", + -12.755873680114746 + ], + [ + "▁removable", + -12.755873680114746 + ], + [ + "▁Relax", + -12.755888938903809 + ], + [ + "delegates", + -12.75605583190918 + ], + [ + "▁expedi", + -12.756074905395508 + ], + [ + "▁Schä", + -12.756138801574707 + ], + [ + "iete", + -12.756211280822754 + ], + [ + "▁reciproc", + -12.756229400634766 + ], + [ + "▁neutr", + -12.75625228881836 + ], + [ + "lactic", + -12.756314277648926 + ], + [ + "▁Nah", + -12.756328582763672 + ], + [ + "scene", + -12.7565279006958 + ], + [ + "▁Helm", + -12.756563186645508 + ], + [ + "▁Bewerbung", + -12.756671905517578 + ], + [ + "▁Cassi", + -12.75667953491211 + ], + [ + "▁Gelegenheit", + -12.756939888000488 + ], + [ + "▁reflective", + -12.757140159606934 + ], + [ + "▁încredere", + -12.757149696350098 + ], + [ + "▁cigarettes", + -12.75717544555664 + ], + [ + "▁Zusätzlich", + -12.757295608520508 + ], + [ + "▁intercept", + -12.75731372833252 + ], + [ + "▁Finn", + -12.757468223571777 + ], + [ + "▁ignor", + -12.757661819458008 + ], + [ + "gian", + -12.75766372680664 + ], + [ + "BRA", + -12.757740020751953 + ], + [ + "leader", + -12.757957458496094 + ], + [ + "nius", + -12.757981300354004 + ], + [ + "▁skies", + -12.757987022399902 + ], + [ + "▁nunta", + -12.758023262023926 + ], + [ + "▁grec", + -12.758041381835938 + ], + [ + "arranging", + -12.75816822052002 + ], + [ + "wartet", + -12.758231163024902 + ], + [ + "▁kostet", + -12.758377075195312 + ], + [ + "▁Entre", + -12.758541107177734 + ], + [ + "Mag", + -12.758575439453125 + ], + [ + "▁radiator", + -12.758598327636719 + ], + [ + "übrigens", + -12.758689880371094 + ], + [ + "Internet", + -12.758706092834473 + ], + [ + "▁connexion", + -12.758718490600586 + ], + [ + "▁prolonged", + -12.758854866027832 + ], + [ + "▁capabil", + -12.75914192199707 + ], + [ + "▁feeder", + -12.759217262268066 + ], + [ + "Initially", + -12.759223937988281 + ], + [ + "Green", + -12.75926685333252 + ], + [ + "▁passiert", + -12.759272575378418 + ], + [ + "▁courtyard", + -12.759299278259277 + ], + [ + "▁judeţ", + -12.759320259094238 + ], + [ + "▁Coalition", + -12.759431838989258 + ], + [ + "▁atmospheric", + 
-12.759431838989258 + ], + [ + "▁velocity", + -12.759431838989258 + ], + [ + "▁Frühstück", + -12.759432792663574 + ], + [ + "vacancies", + -12.759438514709473 + ], + [ + "unified", + -12.759538650512695 + ], + [ + "▁Ahmed", + -12.759538650512695 + ], + [ + "poured", + -12.759550094604492 + ], + [ + "▁Mikro", + -12.75959587097168 + ], + [ + "▁Klar", + -12.759661674499512 + ], + [ + "kommt", + -12.759681701660156 + ], + [ + "seated", + -12.759744644165039 + ], + [ + "musik", + -12.75976848602295 + ], + [ + "▁stimulation", + -12.759841918945312 + ], + [ + "▁solicitat", + -12.759880065917969 + ], + [ + "▁politically", + -12.760165214538574 + ], + [ + "restoring", + -12.760322570800781 + ], + [ + "▁Rag", + -12.760435104370117 + ], + [ + "▁officielle", + -12.760468482971191 + ], + [ + "▁Annie", + -12.760479927062988 + ], + [ + "▁tourne", + -12.760634422302246 + ], + [ + "▁Joel", + -12.760642051696777 + ], + [ + "blieben", + -12.760666847229004 + ], + [ + "▁repayment", + -12.760736465454102 + ], + [ + "▁Strategi", + -12.760781288146973 + ], + [ + "▁prietenii", + -12.760804176330566 + ], + [ + "▁Montgomery", + -12.760858535766602 + ], + [ + "▁résidence", + -12.760858535766602 + ], + [ + "▁sunglasses", + -12.760858535766602 + ], + [ + "▁1956", + -12.760882377624512 + ], + [ + "MEN", + -12.76093578338623 + ], + [ + "pouvant", + -12.760997772216797 + ], + [ + "375", + -12.761061668395996 + ], + [ + "directed", + -12.761173248291016 + ], + [ + "▁grinder", + -12.76120662689209 + ], + [ + "rträge", + -12.761279106140137 + ], + [ + "▁nickel", + -12.761299133300781 + ], + [ + "▁Maintain", + -12.761313438415527 + ], + [ + "▁Holmes", + -12.761392593383789 + ], + [ + "▁obtinut", + -12.76157283782959 + ], + [ + "▁walnut", + -12.761585235595703 + ], + [ + "▁consultancy", + -12.761640548706055 + ], + [ + "cooled", + -12.761651039123535 + ], + [ + "▁Brig", + -12.761711120605469 + ], + [ + "▁Produc", + -12.761873245239258 + ], + [ + "street", + -12.76187515258789 + ], + [ + "▁Einfach", + -12.761897087097168 + ], + [ + "North", + -12.762149810791016 + ], + [ + "▁PET", + -12.76220989227295 + ], + [ + "▁Président", + -12.762288093566895 + ], + [ + "▁produsului", + -12.762457847595215 + ], + [ + "literatur", + -12.762483596801758 + ], + [ + "133", + -12.762561798095703 + ], + [ + "▁recours", + -12.762591361999512 + ], + [ + "▁verpflichtet", + -12.76264476776123 + ], + [ + "▁Wur", + -12.762733459472656 + ], + [ + "▁psiholog", + -12.762796401977539 + ], + [ + "Veg", + -12.762871742248535 + ], + [ + "▁hype", + -12.762930870056152 + ], + [ + "augmenter", + -12.762974739074707 + ], + [ + "▁Welsh", + -12.763012886047363 + ], + [ + "mounted", + -12.763158798217773 + ], + [ + "▁Wann", + -12.763425827026367 + ], + [ + "▁gezeigt", + -12.763620376586914 + ], + [ + "▁memo", + -12.763631820678711 + ], + [ + "veterinary", + -12.763717651367188 + ], + [ + "▁Olympia", + -12.763717651367188 + ], + [ + "▁handsome", + -12.763871192932129 + ], + [ + "yama", + -12.763911247253418 + ], + [ + "studio", + -12.763912200927734 + ], + [ + "sozial", + -12.764020919799805 + ], + [ + "▁reap", + -12.764104843139648 + ], + [ + "▁didactic", + -12.764111518859863 + ], + [ + "▁Cookie", + -12.764126777648926 + ], + [ + "▁cooper", + -12.764230728149414 + ], + [ + "▁discern", + -12.76441478729248 + ], + [ + "▁Ubuntu", + -12.764433860778809 + ], + [ + "domain", + -12.76443862915039 + ], + [ + "▁plasa", + -12.764460563659668 + ], + [ + "hong", + -12.764585494995117 + ], + [ + "▁Freiheit", + -12.764662742614746 + ], + [ + "▁Gateway", + -12.764678001403809 + 
], + [ + "▁poke", + -12.764796257019043 + ], + [ + "▁niedrig", + -12.76484203338623 + ], + [ + "▁corrected", + -12.764899253845215 + ], + [ + "▁predator", + -12.76490306854248 + ], + [ + "QA", + -12.76507568359375 + ], + [ + "Physio", + -12.765101432800293 + ], + [ + "MAS", + -12.765108108520508 + ], + [ + "▁sanctuary", + -12.765151023864746 + ], + [ + "▁aferent", + -12.76523494720459 + ], + [ + "▁perdre", + -12.765268325805664 + ], + [ + "▁recherch", + -12.765397071838379 + ], + [ + "ready", + -12.76559829711914 + ], + [ + "without", + -12.76560115814209 + ], + [ + "▁locuitori", + -12.765628814697266 + ], + [ + "▁Memo", + -12.765636444091797 + ], + [ + "▁Laden", + -12.765646934509277 + ], + [ + "danken", + -12.76577377319336 + ], + [ + "▁CNC", + -12.765861511230469 + ], + [ + "▁jealous", + -12.765881538391113 + ], + [ + "▁Background", + -12.765951156616211 + ], + [ + "▁Marx", + -12.765999794006348 + ], + [ + "▁Heli", + -12.766039848327637 + ], + [ + "▁osteo", + -12.766057968139648 + ], + [ + "▁rassembl", + -12.766162872314453 + ], + [ + "▁altceva", + -12.766226768493652 + ], + [ + "▁beschäftigt", + -12.766226768493652 + ], + [ + "▁accru", + -12.766266822814941 + ], + [ + "üft", + -12.766273498535156 + ], + [ + "▁sprout", + -12.766288757324219 + ], + [ + "endorf", + -12.76647663116455 + ], + [ + "▁specialitate", + -12.766483306884766 + ], + [ + "éanmoins", + -12.766586303710938 + ], + [ + "▁poign", + -12.766663551330566 + ], + [ + "▁mânca", + -12.766668319702148 + ], + [ + "▁stretched", + -12.766752243041992 + ], + [ + "fensiv", + -12.76677131652832 + ], + [ + "▁Auction", + -12.76683235168457 + ], + [ + "hints", + -12.766944885253906 + ], + [ + "▁typo", + -12.766983032226562 + ], + [ + "▁Rare", + -12.767003059387207 + ], + [ + "▁interruption", + -12.767043113708496 + ], + [ + "▁Mean", + -12.76709270477295 + ], + [ + "privileged", + -12.767108917236328 + ], + [ + "▁purtat", + -12.767129898071289 + ], + [ + "studie", + -12.767229080200195 + ], + [ + "offres", + -12.767248153686523 + ], + [ + "▁flap", + -12.76729679107666 + ], + [ + "▁rhetoric", + -12.767304420471191 + ], + [ + "▁snapshot", + -12.767325401306152 + ], + [ + "▁Conservative", + -12.767367362976074 + ], + [ + "▁taie", + -12.767416954040527 + ], + [ + "Game", + -12.767499923706055 + ], + [ + "▁naissance", + -12.767663955688477 + ], + [ + "Prof", + -12.767704963684082 + ], + [ + "qualified", + -12.767745971679688 + ], + [ + "▁suppression", + -12.767749786376953 + ], + [ + "▁răspunde", + -12.767765045166016 + ], + [ + "▁1/3", + -12.767803192138672 + ], + [ + "▁lieben", + -12.767858505249023 + ], + [ + "ù", + -12.767898559570312 + ], + [ + "america", + -12.767955780029297 + ], + [ + "▁Mum", + -12.768182754516602 + ], + [ + "▁Researchers", + -12.76827335357666 + ], + [ + "quip", + -12.768308639526367 + ], + [ + "▁fenomen", + -12.768383026123047 + ], + [ + "stools", + -12.768387794494629 + ], + [ + "▁commodity", + -12.768742561340332 + ], + [ + "▁rejuvenat", + -12.768745422363281 + ], + [ + "▁ausgezeichnet", + -12.76876449584961 + ], + [ + "▁păcate", + -12.768784523010254 + ], + [ + "3.6", + -12.76882553100586 + ], + [ + "zwei", + -12.768904685974121 + ], + [ + "accounted", + -12.768982887268066 + ], + [ + "▁Cycle", + -12.76900863647461 + ], + [ + "politischen", + -12.769031524658203 + ], + [ + "Normally", + -12.76904010772705 + ], + [ + "▁transcend", + -12.769158363342285 + ], + [ + "▁Classes", + -12.769268989562988 + ], + [ + "▁vene", + -12.769363403320312 + ], + [ + "protein", + -12.76942253112793 + ], + [ + "formulaire", + 
-12.76944351196289 + ], + [ + "▁endurance", + -12.769463539123535 + ], + [ + "▁Census", + -12.769464492797852 + ], + [ + "▁census", + -12.7694673538208 + ], + [ + "▁conțin", + -12.76952838897705 + ], + [ + "▁multinational", + -12.769563674926758 + ], + [ + "▁consomm", + -12.769572257995605 + ], + [ + "▁Porter", + -12.769762992858887 + ], + [ + "▁marvel", + -12.769777297973633 + ], + [ + "▁probable", + -12.769824028015137 + ], + [ + "dependable", + -12.770044326782227 + ], + [ + "▁crore", + -12.77015495300293 + ], + [ + "▁6:30", + -12.770224571228027 + ], + [ + "▁Bradley", + -12.77032470703125 + ], + [ + "molecule", + -12.770400047302246 + ], + [ + "inclusiv", + -12.770516395568848 + ], + [ + "▁privilégi", + -12.770543098449707 + ], + [ + "▁cerere", + -12.770611763000488 + ], + [ + "ouille", + -12.770696640014648 + ], + [ + "▁âgé", + -12.770787239074707 + ], + [ + "▁ghid", + -12.770801544189453 + ], + [ + "▁Controller", + -12.77082347869873 + ], + [ + "▁incredere", + -12.770988464355469 + ], + [ + "▁hostel", + -12.771015167236328 + ], + [ + "wissenschaft", + -12.771121978759766 + ], + [ + "▁cooperate", + -12.771183967590332 + ], + [ + "ки", + -12.771202087402344 + ], + [ + "▁Küchen", + -12.771384239196777 + ], + [ + "▁BIO", + -12.771406173706055 + ], + [ + "▁deliveries", + -12.771458625793457 + ], + [ + "▁urmări", + -12.771553993225098 + ], + [ + "▁überzeugen", + -12.771631240844727 + ], + [ + "Roofing", + -12.771703720092773 + ], + [ + "▁Adel", + -12.771737098693848 + ], + [ + "▁navy", + -12.77181339263916 + ], + [ + "▁cider", + -12.772101402282715 + ], + [ + "▁dulce", + -12.772109985351562 + ], + [ + "▁inspirat", + -12.772163391113281 + ], + [ + "allez", + -12.772164344787598 + ], + [ + "HH", + -12.77221965789795 + ], + [ + "▁Danish", + -12.7722749710083 + ], + [ + "CDC", + -12.7722806930542 + ], + [ + "▁Milch", + -12.772303581237793 + ], + [ + "▁Hockey", + -12.772346496582031 + ], + [ + "▁Smooth", + -12.772347450256348 + ], + [ + "▁FIFA", + -12.772361755371094 + ], + [ + "▁Devon", + -12.772364616394043 + ], + [ + "chung", + -12.772379875183105 + ], + [ + "▁villain", + -12.772420883178711 + ], + [ + "▁musée", + -12.772441864013672 + ], + [ + "tiennent", + -12.772557258605957 + ], + [ + "chou", + -12.772732734680176 + ], + [ + "kopf", + -12.772809982299805 + ], + [ + "printed", + -12.77281379699707 + ], + [ + "▁Depression", + -12.773076057434082 + ], + [ + "▁opioid", + -12.773082733154297 + ], + [ + "nomie", + -12.773098945617676 + ], + [ + "▁footwear", + -12.773211479187012 + ], + [ + "▁Cause", + -12.773260116577148 + ], + [ + "SEL", + -12.773515701293945 + ], + [ + "▁Roller", + -12.773523330688477 + ], + [ + "▁einzigartige", + -12.773589134216309 + ], + [ + "desea", + -12.773597717285156 + ], + [ + "▁nasty", + -12.773792266845703 + ], + [ + "formulated", + -12.773877143859863 + ], + [ + "breaker", + -12.773958206176758 + ], + [ + "▁goodies", + -12.773961067199707 + ], + [ + "▁sandy", + -12.774189949035645 + ], + [ + "method", + -12.77425479888916 + ], + [ + "▁Maple", + -12.774308204650879 + ], + [ + "gefragt", + -12.774435997009277 + ], + [ + "▁decreasing", + -12.774515151977539 + ], + [ + "ceşti", + -12.774555206298828 + ], + [ + "▁DUI", + -12.774563789367676 + ], + [ + "▁pierdere", + -12.774574279785156 + ], + [ + "▁brushes", + -12.77466869354248 + ], + [ + "▁Fully", + -12.774712562561035 + ], + [ + "filtered", + -12.774789810180664 + ], + [ + "ruins", + -12.774988174438477 + ], + [ + "Save", + -12.775114059448242 + ], + [ + "sweeping", + -12.7752046585083 + ], + [ + "PCR", + 
-12.775334358215332 + ], + [ + "▁folded", + -12.775337219238281 + ], + [ + "▁urca", + -12.775444030761719 + ], + [ + "▁clic", + -12.775484085083008 + ], + [ + "▁spécialiste", + -12.775614738464355 + ], + [ + "▁durfte", + -12.775686264038086 + ], + [ + "tuși", + -12.775871276855469 + ], + [ + "▁diligent", + -12.77596378326416 + ], + [ + "▁verdict", + -12.775972366333008 + ], + [ + "▁chaise", + -12.776039123535156 + ], + [ + "▁cleanup", + -12.776068687438965 + ], + [ + "▁Guitar", + -12.776076316833496 + ], + [ + "▁Dip", + -12.776142120361328 + ], + [ + "vru", + -12.776260375976562 + ], + [ + "▁cogn", + -12.776373863220215 + ], + [ + "something", + -12.776529312133789 + ], + [ + "hidr", + -12.776535034179688 + ], + [ + "ENG", + -12.776607513427734 + ], + [ + "Paul", + -12.776679039001465 + ], + [ + "▁reboot", + -12.776687622070312 + ], + [ + "savvy", + -12.776688575744629 + ], + [ + "▁Macron", + -12.776710510253906 + ], + [ + "▁Kino", + -12.77682876586914 + ], + [ + "232", + -12.776832580566406 + ], + [ + "▁gravit", + -12.776861190795898 + ], + [ + "ANC", + -12.776883125305176 + ], + [ + "▁petrecut", + -12.776944160461426 + ], + [ + "▁signage", + -12.776959419250488 + ], + [ + "odia", + -12.776987075805664 + ], + [ + "▁GRA", + -12.77712631225586 + ], + [ + "▁alegeril", + -12.777129173278809 + ], + [ + "leger", + -12.77717399597168 + ], + [ + "▁medicamente", + -12.777174949645996 + ], + [ + "pentru", + -12.777249336242676 + ], + [ + "▁collectif", + -12.777251243591309 + ], + [ + "▁Sohn", + -12.777298927307129 + ], + [ + "205", + -12.777313232421875 + ], + [ + "▁Reach", + -12.77733039855957 + ], + [ + "RAM", + -12.777400970458984 + ], + [ + "3.4", + -12.777405738830566 + ], + [ + "▁bleach", + -12.777409553527832 + ], + [ + "▁diligence", + -12.777414321899414 + ], + [ + "▁MORE", + -12.777440071105957 + ], + [ + "▁Critical", + -12.777471542358398 + ], + [ + "▁singură", + -12.77767276763916 + ], + [ + "▁adversar", + -12.777791023254395 + ], + [ + "▁Buzz", + -12.7778902053833 + ], + [ + "▁demeure", + -12.778063774108887 + ], + [ + "▁nephew", + -12.778141021728516 + ], + [ + "▁Boom", + -12.77817440032959 + ], + [ + "▁shining", + -12.77819538116455 + ], + [ + "▁sponge", + -12.778206825256348 + ], + [ + "liest", + -12.77841854095459 + ], + [ + "rseits", + -12.778690338134766 + ], + [ + "▁capita", + -12.778823852539062 + ], + [ + "esthesia", + -12.778867721557617 + ], + [ + "500,000", + -12.77895736694336 + ], + [ + "▁Pressure", + -12.77898120880127 + ], + [ + "ifikation", + -12.779021263122559 + ], + [ + "▁acceleration", + -12.779181480407715 + ], + [ + "▁Pfarr", + -12.779282569885254 + ], + [ + "▁imobil", + -12.779304504394531 + ], + [ + "▁pericol", + -12.779326438903809 + ], + [ + "▁flock", + -12.779454231262207 + ], + [ + "▁Scholar", + -12.77962875366211 + ], + [ + "▁Fusion", + -12.779630661010742 + ], + [ + "▁revolve", + -12.779637336730957 + ], + [ + "Plugin", + -12.779664993286133 + ], + [ + "▁Ruf", + -12.779691696166992 + ], + [ + "▁tehnici", + -12.780024528503418 + ], + [ + "voice", + -12.78005313873291 + ], + [ + "▁anomal", + -12.780203819274902 + ], + [ + "▁gefallen", + -12.780252456665039 + ], + [ + "▁Wyoming", + -12.780322074890137 + ], + [ + "▁9:00", + -12.780354499816895 + ], + [ + "packed", + -12.780461311340332 + ], + [ + "▁Zimbabwe", + -12.780686378479004 + ], + [ + "▁glücklich", + -12.780766487121582 + ], + [ + "ethanol", + -12.78077220916748 + ], + [ + "▁effektiv", + -12.780936241149902 + ], + [ + "▁saptamani", + -12.781049728393555 + ], + [ + "▁umfasst", + -12.781052589416504 + ], 
+ [ + "▁Werbung", + -12.781103134155273 + ], + [ + "▁undermine", + -12.781164169311523 + ], + [ + "▁Lego", + -12.781322479248047 + ], + [ + "▁Rac", + -12.781323432922363 + ], + [ + "educating", + -12.781441688537598 + ], + [ + "leiten", + -12.781451225280762 + ], + [ + "derma", + -12.781518936157227 + ], + [ + "hängen", + -12.781597137451172 + ], + [ + "Lumin", + -12.781846046447754 + ], + [ + "▁PNL", + -12.781913757324219 + ], + [ + "▁volcano", + -12.782064437866211 + ], + [ + "▁Anfrage", + -12.782066345214844 + ], + [ + "▁resp", + -12.782124519348145 + ], + [ + "leigh", + -12.78217601776123 + ], + [ + "▁addict", + -12.782176971435547 + ], + [ + "WORK", + -12.782312393188477 + ], + [ + "▁FY", + -12.782322883605957 + ], + [ + "▁maneuver", + -12.782513618469238 + ], + [ + "flächen", + -12.782525062561035 + ], + [ + "zweck", + -12.782527923583984 + ], + [ + "tolerant", + -12.782609939575195 + ], + [ + "Davidson", + -12.78272533416748 + ], + [ + "▁meteor", + -12.782849311828613 + ], + [ + "▁Stephanie", + -12.78291130065918 + ], + [ + "▁plafon", + -12.783126831054688 + ], + [ + "technischen", + -12.78316879272461 + ], + [ + "unused", + -12.783193588256836 + ], + [ + "▁voulai", + -12.783228874206543 + ], + [ + "▁fehlt", + -12.783447265625 + ], + [ + "möglichen", + -12.783955574035645 + ], + [ + "▁Twenty", + -12.783968925476074 + ], + [ + "composing", + -12.783979415893555 + ], + [ + "▁rebate", + -12.78400707244873 + ], + [ + "Italie", + -12.784036636352539 + ], + [ + "▁goodbye", + -12.784058570861816 + ], + [ + "wild", + -12.784061431884766 + ], + [ + "▁lancé", + -12.784077644348145 + ], + [ + "▁wunderschöne", + -12.784083366394043 + ], + [ + "▁Frontier", + -12.784139633178711 + ], + [ + "▁murit", + -12.784313201904297 + ], + [ + "▁scump", + -12.78464412689209 + ], + [ + "OVER", + -12.784682273864746 + ], + [ + "▁meme", + -12.784709930419922 + ], + [ + "Super", + -12.784733772277832 + ], + [ + "▁Crack", + -12.784849166870117 + ], + [ + "rennen", + -12.784907341003418 + ], + [ + "▁interessiert", + -12.784941673278809 + ], + [ + "▁relaţi", + -12.784942626953125 + ], + [ + "▁factories", + -12.784975051879883 + ], + [ + "▁[...]", + -12.785066604614258 + ], + [ + "▁vizite", + -12.785075187683105 + ], + [ + "▁erfolgen", + -12.785199165344238 + ], + [ + "▁Hosting", + -12.785244941711426 + ], + [ + "▁localitate", + -12.78528118133545 + ], + [ + "▁chasse", + -12.785415649414062 + ], + [ + "▁Meadow", + -12.785465240478516 + ], + [ + "▁expansive", + -12.785513877868652 + ], + [ + "hov", + -12.785874366760254 + ], + [ + "Phil", + -12.785978317260742 + ], + [ + "illian", + -12.786107063293457 + ], + [ + "▁manipulate", + -12.786107063293457 + ], + [ + "informationen", + -12.786130905151367 + ], + [ + "▁profesionist", + -12.786162376403809 + ], + [ + "risen", + -12.786252975463867 + ], + [ + "frem", + -12.786300659179688 + ], + [ + "Act", + -12.78640079498291 + ], + [ + "supervised", + -12.786491394042969 + ], + [ + "▁capul", + -12.786506652832031 + ], + [ + "▁Craiova", + -12.786528587341309 + ], + [ + "▁victoire", + -12.786528587341309 + ], + [ + "▁guitarist", + -12.786680221557617 + ], + [ + "▁identific", + -12.786684036254883 + ], + [ + "democrat", + -12.786864280700684 + ], + [ + "Authentic", + -12.786894798278809 + ], + [ + "▁Autumn", + -12.786894798278809 + ], + [ + "▁bodi", + -12.787014961242676 + ], + [ + "April", + -12.787044525146484 + ], + [ + "▁Burger", + -12.787049293518066 + ], + [ + "▁BEST", + -12.787490844726562 + ], + [ + "▁torrent", + -12.78749942779541 + ], + [ + "UV", + -12.787567138671875 
+ ], + [ + "▁renal", + -12.787676811218262 + ], + [ + "founded", + -12.787693977355957 + ], + [ + "203", + -12.787956237792969 + ], + [ + "▁Flooring", + -12.78799057006836 + ], + [ + "▁kilogram", + -12.787994384765625 + ], + [ + "▁garantiert", + -12.788139343261719 + ], + [ + "▁fulfil", + -12.788204193115234 + ], + [ + "303", + -12.788330078125 + ], + [ + "▁schafft", + -12.788363456726074 + ], + [ + "▁butterfly", + -12.788365364074707 + ], + [ + "▁Stuart", + -12.788382530212402 + ], + [ + "▁Versuch", + -12.788392066955566 + ], + [ + "▁liking", + -12.788412094116211 + ], + [ + "▁chercher", + -12.788508415222168 + ], + [ + "▁wrapping", + -12.788527488708496 + ], + [ + "schrieb", + -12.788652420043945 + ], + [ + "▁abuz", + -12.788718223571777 + ], + [ + "▁maîtrise", + -12.788772583007812 + ], + [ + "EQ", + -12.788887977600098 + ], + [ + "▁Erinnerung", + -12.789095878601074 + ], + [ + "▁bridal", + -12.78909969329834 + ], + [ + "Rock", + -12.789118766784668 + ], + [ + "▁copied", + -12.789193153381348 + ], + [ + "Met", + -12.789206504821777 + ], + [ + "▁incep", + -12.789233207702637 + ], + [ + "▁sinus", + -12.789336204528809 + ], + [ + "▁Felix", + -12.789831161499023 + ], + [ + "▁Deluxe", + -12.789837837219238 + ], + [ + "▁GPU", + -12.789848327636719 + ], + [ + "Sie", + -12.790164947509766 + ], + [ + "lowering", + -12.790262222290039 + ], + [ + "▁Trotz", + -12.790282249450684 + ], + [ + "333", + -12.790417671203613 + ], + [ + "withstand", + -12.79055118560791 + ], + [ + "▁Aufenthalt", + -12.790566444396973 + ], + [ + "▁unhealthy", + -12.790567398071289 + ], + [ + "▁urbain", + -12.790573120117188 + ], + [ + "▁LOL", + -12.790702819824219 + ], + [ + "▁Ballet", + -12.79074478149414 + ], + [ + "▁Decoration", + -12.79083251953125 + ], + [ + "weist", + -12.790839195251465 + ], + [ + "▁Residence", + -12.790932655334473 + ], + [ + "▁Leeds", + -12.791055679321289 + ], + [ + "▁Genau", + -12.791084289550781 + ], + [ + "Imagin", + -12.791136741638184 + ], + [ + "▁suspicion", + -12.791300773620605 + ], + [ + "▁pêche", + -12.791301727294922 + ], + [ + "▁Soccer", + -12.791306495666504 + ], + [ + "▁protectie", + -12.791553497314453 + ], + [ + "ATS", + -12.791796684265137 + ], + [ + "stocked", + -12.791838645935059 + ], + [ + "▁gymnas", + -12.79184627532959 + ], + [ + "ASP", + -12.792027473449707 + ], + [ + "▁Independence", + -12.792037010192871 + ], + [ + "▁Wizard", + -12.792037963867188 + ], + [ + "▁nitrogen", + -12.79204273223877 + ], + [ + "amerikanische", + -12.7920503616333 + ], + [ + "▁Indianapolis", + -12.79205322265625 + ], + [ + "catches", + -12.792131423950195 + ], + [ + "stria", + -12.792275428771973 + ], + [ + "schätze", + -12.79235553741455 + ], + [ + "▁Räume", + -12.792387962341309 + ], + [ + "▁Interesting", + -12.792403221130371 + ], + [ + "bürger", + -12.79240608215332 + ], + [ + "sweet", + -12.792410850524902 + ], + [ + "Identify", + -12.792632102966309 + ], + [ + "EEN", + -12.792651176452637 + ], + [ + "▁£3", + -12.792654991149902 + ], + [ + "interacting", + -12.7926664352417 + ], + [ + "NYSE", + -12.792762756347656 + ], + [ + "▁Dynamics", + -12.79277515411377 + ], + [ + "▁modificări", + -12.792777061462402 + ], + [ + "▁Kumar", + -12.792936325073242 + ], + [ + "chette", + -12.79313850402832 + ], + [ + "▁presiune", + -12.79316234588623 + ], + [ + "arni", + -12.793164253234863 + ], + [ + "▁vielfältig", + -12.793221473693848 + ], + [ + "KC", + -12.793259620666504 + ], + [ + "▁Cuisine", + -12.793513298034668 + ], + [ + "▁australia", + -12.793885231018066 + ], + [ + "▁încet", + -12.794026374816895 + 
], + [ + "▁caracteristic", + -12.794257164001465 + ], + [ + "▁cookbook", + -12.794501304626465 + ], + [ + "▁douleur", + -12.79453182220459 + ], + [ + "AVI", + -12.794593811035156 + ], + [ + "artikel", + -12.794740676879883 + ], + [ + "feta", + -12.79493522644043 + ], + [ + "▁fréquent", + -12.794987678527832 + ], + [ + "▁Prophet", + -12.795051574707031 + ], + [ + "▁dépense", + -12.795202255249023 + ], + [ + "▁Smile", + -12.795235633850098 + ], + [ + "▁lawmakers", + -12.79525375366211 + ], + [ + "▁Kollegen", + -12.795391082763672 + ], + [ + "▁Pir", + -12.79555606842041 + ], + [ + "serez", + -12.79561710357666 + ], + [ + "▁consumator", + -12.795656204223633 + ], + [ + "▁playlist", + -12.795730590820312 + ], + [ + "▁envisage", + -12.795733451843262 + ], + [ + "swept", + -12.795780181884766 + ], + [ + "▁Grim", + -12.795825004577637 + ], + [ + "▁widow", + -12.795836448669434 + ], + [ + "authorised", + -12.795886039733887 + ], + [ + "▁(...)", + -12.796035766601562 + ], + [ + "▁photographic", + -12.796060562133789 + ], + [ + "▁libertate", + -12.796173095703125 + ], + [ + "▁principalement", + -12.796201705932617 + ], + [ + "umming", + -12.796260833740234 + ], + [ + "▁Montréal", + -12.796465873718262 + ], + [ + "▁compilation", + -12.796468734741211 + ], + [ + "▁erlaubt", + -12.79647159576416 + ], + [ + "▁biblical", + -12.796518325805664 + ], + [ + "volume", + -12.796561241149902 + ], + [ + "5-7", + -12.796809196472168 + ], + [ + "▁Versch", + -12.79689884185791 + ], + [ + "▁Shark", + -12.796957015991211 + ], + [ + "ologne", + -12.796969413757324 + ], + [ + "4.4", + -12.797086715698242 + ], + [ + "decken", + -12.797112464904785 + ], + [ + "▁frequencies", + -12.797205924987793 + ], + [ + "▁inferior", + -12.79720687866211 + ], + [ + "visible", + -12.797321319580078 + ], + [ + "▁educator", + -12.797394752502441 + ], + [ + "▁soziale", + -12.797420501708984 + ], + [ + "▁billet", + -12.797523498535156 + ], + [ + "folosirea", + -12.797574996948242 + ], + [ + "▁aufgenommen", + -12.797590255737305 + ], + [ + "▁Thread", + -12.797649383544922 + ], + [ + "registering", + -12.797694206237793 + ], + [ + "▁Loop", + -12.797747611999512 + ], + [ + "innovation", + -12.79783821105957 + ], + [ + "▁elimination", + -12.797857284545898 + ], + [ + "136", + -12.797883987426758 + ], + [ + "▁fluctu", + -12.797892570495605 + ], + [ + "▁Mercury", + -12.79794692993164 + ], + [ + "▁bouche", + -12.797955513000488 + ], + [ + "▁hurdle", + -12.7979736328125 + ], + [ + "▁Bennett", + -12.798040390014648 + ], + [ + "STI", + -12.79818344116211 + ], + [ + "▁théâtre", + -12.798316955566406 + ], + [ + "▁confortable", + -12.798359870910645 + ], + [ + "▁Automobil", + -12.79838752746582 + ], + [ + "▁Donna", + -12.798399925231934 + ], + [ + "▁foyer", + -12.79841136932373 + ], + [ + "▁hollow", + -12.798465728759766 + ], + [ + "▁règlement", + -12.79861068725586 + ], + [ + "effi", + -12.798616409301758 + ], + [ + "▁sediment", + -12.79869270324707 + ], + [ + "▁Mä", + -12.798774719238281 + ], + [ + "▁faint", + -12.798833847045898 + ], + [ + "feti", + -12.79890251159668 + ], + [ + "▁Concord", + -12.798959732055664 + ], + [ + "▁Ladies", + -12.798990249633789 + ], + [ + "▁pregatit", + -12.799052238464355 + ], + [ + "▁Ensemble", + -12.79905891418457 + ], + [ + "▁Ingredient", + -12.79905891418457 + ], + [ + "▁Respond", + -12.79914379119873 + ], + [ + "▁impaired", + -12.799356460571289 + ], + [ + "▁Feedback", + -12.799430847167969 + ], + [ + "▁ultrasound", + -12.799461364746094 + ], + [ + "▁Guvernului", + -12.799617767333984 + ], + [ + "▁Unterricht", + 
-12.799654006958008 + ], + [ + "▁prosecut", + -12.799662590026855 + ], + [ + "spend", + -12.799732208251953 + ], + [ + "▁capitol", + -12.799800872802734 + ], + [ + "USD", + -12.799822807312012 + ], + [ + "observing", + -12.799947738647461 + ], + [ + "▁effortlessly", + -12.800045013427734 + ], + [ + "▁Setting", + -12.80010986328125 + ], + [ + "▁spontaneous", + -12.80020809173584 + ], + [ + "▁LEGO", + -12.800238609313965 + ], + [ + "initiative", + -12.800299644470215 + ], + [ + "▁Sak", + -12.800299644470215 + ], + [ + "Interestingly", + -12.800326347351074 + ], + [ + "▁Yale", + -12.800352096557617 + ], + [ + "▁größer", + -12.80038070678711 + ], + [ + "RIC", + -12.800406455993652 + ], + [ + "▁distracted", + -12.800436973571777 + ], + [ + "drafted", + -12.800484657287598 + ], + [ + "▁Brenda", + -12.800522804260254 + ], + [ + "monopol", + -12.800551414489746 + ], + [ + "städt", + -12.800580024719238 + ], + [ + "▁altar", + -12.80058765411377 + ], + [ + "▁Hannover", + -12.800596237182617 + ], + [ + "▁Spiritual", + -12.800702095031738 + ], + [ + "▁thriller", + -12.800747871398926 + ], + [ + "▁Schneider", + -12.800760269165039 + ], + [ + "▁accumulate", + -12.800817489624023 + ], + [ + "▁mediului", + -12.800822257995605 + ], + [ + "▁Mathematics", + -12.800914764404297 + ], + [ + "▁paradox", + -12.800986289978027 + ], + [ + "▁Sham", + -12.801230430603027 + ], + [ + "▁SITE", + -12.801375389099121 + ], + [ + "▁echipei", + -12.801508903503418 + ], + [ + "▁staircase", + -12.801660537719727 + ], + [ + "▁întrebări", + -12.801705360412598 + ], + [ + "Commerce", + -12.802020072937012 + ], + [ + "▁selfie", + -12.802353858947754 + ], + [ + "▁Pocket", + -12.802404403686523 + ], + [ + "▁niemand", + -12.80263614654541 + ], + [ + "Tool", + -12.802678108215332 + ], + [ + "igma", + -12.802695274353027 + ], + [ + "utilisant", + -12.802915573120117 + ], + [ + "▁negatively", + -12.80295181274414 + ], + [ + "Secondly", + -12.802955627441406 + ], + [ + "▁ROI", + -12.8030366897583 + ], + [ + "Arch", + -12.803121566772461 + ], + [ + "▁continuity", + -12.80318546295166 + ], + [ + "▁Prayer", + -12.803235054016113 + ], + [ + "inverse", + -12.803241729736328 + ], + [ + "▁Himmel", + -12.803336143493652 + ], + [ + "prinz", + -12.803478240966797 + ], + [ + "wichtigen", + -12.803496360778809 + ], + [ + "étage", + -12.803522109985352 + ], + [ + "summe", + -12.8036527633667 + ], + [ + "▁Zeitung", + -12.80366039276123 + ], + [ + "▁realization", + -12.803897857666016 + ], + [ + "▁influent", + -12.804291725158691 + ], + [ + "▁Valid", + -12.804357528686523 + ], + [ + "▁publicity", + -12.804439544677734 + ], + [ + "▁vertreten", + -12.804447174072266 + ], + [ + "▁Shoes", + -12.804609298706055 + ], + [ + "▁Diabetes", + -12.80463695526123 + ], + [ + "▁anticipation", + -12.804670333862305 + ], + [ + "▁Blank", + -12.8047456741333 + ], + [ + "asked", + -12.804899215698242 + ], + [ + "Power", + -12.804938316345215 + ], + [ + "arrelage", + -12.805140495300293 + ], + [ + "▁appraisal", + -12.80538272857666 + ], + [ + "▁harassment", + -12.805542945861816 + ], + [ + "Anzeige", + -12.805682182312012 + ], + [ + "liners", + -12.80584716796875 + ], + [ + "Firstly", + -12.805851936340332 + ], + [ + "transferring", + -12.805951118469238 + ], + [ + "▁Diane", + -12.806012153625488 + ], + [ + "▁1/2\"", + -12.80606746673584 + ], + [ + "▁adrenal", + -12.806131362915039 + ], + [ + "▁Prague", + -12.806208610534668 + ], + [ + "insertion", + -12.80635929107666 + ], + [ + "▁Fahrer", + -12.806465148925781 + ], + [ + "▁divin", + -12.806585311889648 + ], + [ + 
"▁douche", + -12.80673885345459 + ], + [ + "▁meticulous", + -12.806879043579102 + ], + [ + "▁IEEE", + -12.806981086730957 + ], + [ + "▁Rabatt", + -12.807259559631348 + ], + [ + "Runner", + -12.807342529296875 + ], + [ + "▁Leder", + -12.807429313659668 + ], + [ + "project", + -12.80745792388916 + ], + [ + "▁Split", + -12.807562828063965 + ], + [ + "Gold", + -12.807600021362305 + ], + [ + "5.00", + -12.807629585266113 + ], + [ + "iola", + -12.807655334472656 + ], + [ + "standardized", + -12.807890892028809 + ], + [ + "ordination", + -12.807984352111816 + ], + [ + "▁Egal", + -12.808158874511719 + ], + [ + "▁ruhig", + -12.808241844177246 + ], + [ + "▁judiciar", + -12.80837345123291 + ], + [ + "▁Nowadays", + -12.808374404907227 + ], + [ + "▁whistle", + -12.808374404907227 + ], + [ + "▁superhero", + -12.808379173278809 + ], + [ + "▁PowerPoint", + -12.808408737182617 + ], + [ + "flop", + -12.808420181274414 + ], + [ + "olph", + -12.808460235595703 + ], + [ + "▁pallet", + -12.808916091918945 + ], + [ + "posons", + -12.809005737304688 + ], + [ + "▁Listing", + -12.809032440185547 + ], + [ + "Tag", + -12.809075355529785 + ], + [ + "introductory", + -12.809122085571289 + ], + [ + "▁Profil", + -12.809123992919922 + ], + [ + "symmetric", + -12.809126853942871 + ], + [ + "▁aisle", + -12.809138298034668 + ], + [ + "▁ajouté", + -12.809147834777832 + ], + [ + "opathy", + -12.809149742126465 + ], + [ + "prezentate", + -12.809155464172363 + ], + [ + "▁hurry", + -12.809165000915527 + ], + [ + "Auth", + -12.809310913085938 + ], + [ + "▁Homepage", + -12.809435844421387 + ], + [ + "ashes", + -12.809489250183105 + ], + [ + "▁inklusive", + -12.809496879577637 + ], + [ + "populated", + -12.809502601623535 + ], + [ + "▁nein", + -12.809554100036621 + ], + [ + "▁syndicat", + -12.809690475463867 + ], + [ + "▁développé", + -12.809842109680176 + ], + [ + "▁Domestic", + -12.809877395629883 + ], + [ + "essay", + -12.809967994689941 + ], + [ + "Atelier", + -12.809980392456055 + ], + [ + "▁proceeding", + -12.810006141662598 + ], + [ + "▁SAS", + -12.810038566589355 + ], + [ + "task", + -12.810063362121582 + ], + [ + "▁blackjack", + -12.810114860534668 + ], + [ + "Key", + -12.810186386108398 + ], + [ + "thérapie", + -12.810247421264648 + ], + [ + "▁Cohen", + -12.810397148132324 + ], + [ + "Direct", + -12.810510635375977 + ], + [ + "▁Estimat", + -12.810517311096191 + ], + [ + "élève", + -12.810616493225098 + ], + [ + "cind", + -12.810640335083008 + ], + [ + "▁prezenț", + -12.810701370239258 + ], + [ + "▁notorious", + -12.810725212097168 + ], + [ + "climbed", + -12.810816764831543 + ], + [ + "▁flexibil", + -12.810830116271973 + ], + [ + "▁entlang", + -12.810855865478516 + ], + [ + "longed", + -12.81103515625 + ], + [ + "▁elbow", + -12.811078071594238 + ], + [ + "BH", + -12.811296463012695 + ], + [ + "▁Radu", + -12.811376571655273 + ], + [ + "▁lonely", + -12.811378479003906 + ], + [ + "ALA", + -12.811405181884766 + ], + [ + "Variante", + -12.811639785766602 + ], + [ + "▁Influen", + -12.81169319152832 + ], + [ + "▁Budapest", + -12.811747550964355 + ], + [ + "▁Gemüse", + -12.811747550964355 + ], + [ + "▁continental", + -12.811750411987305 + ], + [ + "ippo", + -12.811771392822266 + ], + [ + "▁Affordable", + -12.81212329864502 + ], + [ + "▁niece", + -12.812187194824219 + ], + [ + "oscopic", + -12.812190055847168 + ], + [ + "▁Grid", + -12.81222152709961 + ], + [ + "sliced", + -12.812270164489746 + ], + [ + "▁voici", + -12.812294006347656 + ], + [ + "aveam", + -12.812471389770508 + ], + [ + "▁Lars", + -12.812612533569336 + ], + [ + 
"APA", + -12.812657356262207 + ], + [ + "▁particulière", + -12.812858581542969 + ], + [ + "sorb", + -12.8128662109375 + ], + [ + "▁1955", + -12.812887191772461 + ], + [ + "▁solutii", + -12.812942504882812 + ], + [ + "loch", + -12.812960624694824 + ], + [ + "▁summon", + -12.813212394714355 + ], + [ + "wurf", + -12.813271522521973 + ], + [ + "▁protecți", + -12.813288688659668 + ], + [ + "2001", + -12.813499450683594 + ], + [ + "▁sophomore", + -12.813627243041992 + ], + [ + "▁Schwerpunkt", + -12.813628196716309 + ], + [ + "▁diplomat", + -12.813687324523926 + ], + [ + "▁artistique", + -12.813726425170898 + ], + [ + "▁accueille", + -12.813739776611328 + ], + [ + "Disp", + -12.813746452331543 + ], + [ + "inherited", + -12.813764572143555 + ], + [ + "▁COMP", + -12.813889503479004 + ], + [ + "▁envoyé", + -12.814046859741211 + ], + [ + "▁tuning", + -12.814056396484375 + ], + [ + "▁entspricht", + -12.814062118530273 + ], + [ + "▁exerc", + -12.81406307220459 + ], + [ + "▁accessoires", + -12.8140869140625 + ], + [ + "▁Automat", + -12.814348220825195 + ], + [ + "importance", + -12.814408302307129 + ], + [ + "▁travellers", + -12.814432144165039 + ], + [ + "seiten", + -12.814474105834961 + ], + [ + "▁slider", + -12.814481735229492 + ], + [ + "effect", + -12.814591407775879 + ], + [ + "▁siding", + -12.814669609069824 + ], + [ + "▁Crit", + -12.814780235290527 + ], + [ + "▁sportif", + -12.814827919006348 + ], + [ + "▁Accessories", + -12.81513500213623 + ], + [ + "▁Anteil", + -12.815184593200684 + ], + [ + "▁limbi", + -12.81519603729248 + ], + [ + "▁vendre", + -12.815269470214844 + ], + [ + "borg", + -12.815435409545898 + ], + [ + "▁Deposit", + -12.815508842468262 + ], + [ + "▁Hö", + -12.815717697143555 + ], + [ + "employé", + -12.8157320022583 + ], + [ + "▁Bangalore", + -12.815887451171875 + ], + [ + "▁itinerary", + -12.815888404846191 + ], + [ + "▁Deliver", + -12.816008567810059 + ], + [ + "dik", + -12.816024780273438 + ], + [ + "▁advent", + -12.816100120544434 + ], + [ + "▁Turk", + -12.81614875793457 + ], + [ + "▁Nico", + -12.816154479980469 + ], + [ + "organizarea", + -12.816161155700684 + ], + [ + "▁remport", + -12.816166877746582 + ], + [ + "▁tribunal", + -12.816266059875488 + ], + [ + "▁Rusia", + -12.8162841796875 + ], + [ + "glazed", + -12.816339492797852 + ], + [ + "▁destiné", + -12.816502571105957 + ], + [ + "304", + -12.816533088684082 + ], + [ + "album", + -12.816650390625 + ], + [ + "▁junction", + -12.81665325164795 + ], + [ + "▁Fleet", + -12.816664695739746 + ], + [ + "venant", + -12.81667423248291 + ], + [ + "▁buddy", + -12.816694259643555 + ], + [ + "▁neglected", + -12.816694259643555 + ], + [ + "▁Mask", + -12.816783905029297 + ], + [ + "▁testament", + -12.816844940185547 + ], + [ + "▁Basil", + -12.81690788269043 + ], + [ + "masă", + -12.816922187805176 + ], + [ + "▁racist", + -12.81692886352539 + ], + [ + "640", + -12.816990852355957 + ], + [ + "▁Standing", + -12.817028045654297 + ], + [ + "▁MUST", + -12.817266464233398 + ], + [ + "situation", + -12.817327499389648 + ], + [ + "▁informiert", + -12.817337036132812 + ], + [ + "ABA", + -12.817353248596191 + ], + [ + "▁Timothy", + -12.817397117614746 + ], + [ + "▁generosity", + -12.817397117614746 + ], + [ + "▁erscheint", + -12.817402839660645 + ], + [ + "▁verarbeitet", + -12.81740665435791 + ], + [ + "▁burial", + -12.817444801330566 + ], + [ + "▁limestone", + -12.817458152770996 + ], + [ + "▁1953", + -12.817480087280273 + ], + [ + "▁Lucr", + -12.817506790161133 + ], + [ + "small", + -12.817633628845215 + ], + [ + "aveau", + -12.81763744354248 + 
], + [ + "versiune", + -12.81773567199707 + ], + [ + "▁inkl", + -12.81775951385498 + ], + [ + "▁Minneapolis", + -12.81777572631836 + ], + [ + "Spiel", + -12.81781005859375 + ], + [ + "▁encode", + -12.817895889282227 + ], + [ + "▁beforehand", + -12.818021774291992 + ], + [ + "▁Vital", + -12.818086624145508 + ], + [ + "▁socialist", + -12.818228721618652 + ], + [ + "inho", + -12.81824779510498 + ], + [ + "▁chapel", + -12.81825065612793 + ], + [ + "▁Monitoring", + -12.81838607788086 + ], + [ + "▁quotidienne", + -12.818404197692871 + ], + [ + "cloud", + -12.818506240844727 + ], + [ + "▁desfăşur", + -12.818531036376953 + ], + [ + "▁1952", + -12.818638801574707 + ], + [ + "▁Rü", + -12.818690299987793 + ], + [ + "▁Sigma", + -12.818804740905762 + ], + [ + "134", + -12.818835258483887 + ], + [ + "Sullivan", + -12.818909645080566 + ], + [ + "▁Bevölkerung", + -12.818909645080566 + ], + [ + "▁sufficiently", + -12.818953514099121 + ], + [ + "Check", + -12.818992614746094 + ], + [ + "rnie", + -12.8190336227417 + ], + [ + "contamin", + -12.819132804870605 + ], + [ + "▁gewonnen", + -12.81928825378418 + ], + [ + "▁bugetul", + -12.819376945495605 + ], + [ + "▁mustard", + -12.819414138793945 + ], + [ + "132", + -12.819478988647461 + ], + [ + "0.9", + -12.819535255432129 + ], + [ + "▁tratat", + -12.81957721710205 + ], + [ + "▁dilemma", + -12.819666862487793 + ], + [ + "▁versatility", + -12.819666862487793 + ], + [ + "▁clutter", + -12.819670677185059 + ], + [ + "▁Musk", + -12.81973934173584 + ], + [ + "▁Beide", + -12.819750785827637 + ], + [ + "hurst", + -12.819758415222168 + ], + [ + "atsu", + -12.819767951965332 + ], + [ + "absence", + -12.819784164428711 + ], + [ + "rebounds", + -12.819881439208984 + ], + [ + "6.1", + -12.820029258728027 + ], + [ + "Dia", + -12.820046424865723 + ], + [ + "▁siguranță", + -12.820060729980469 + ], + [ + "▁Blade", + -12.820072174072266 + ], + [ + "▁disrupt", + -12.820074081420898 + ], + [ + "▁visiteurs", + -12.820169448852539 + ], + [ + "tested", + -12.820282936096191 + ], + [ + "▁Lup", + -12.820353507995605 + ], + [ + "▁Rouge", + -12.820371627807617 + ], + [ + "▁asbestos", + -12.82042407989502 + ], + [ + "▁moisturize", + -12.820427894592285 + ], + [ + "▁acknowledg", + -12.82045841217041 + ], + [ + "▁procent", + -12.820467948913574 + ], + [ + "▁swear", + -12.82050895690918 + ], + [ + "▁911", + -12.820647239685059 + ], + [ + "präsent", + -12.820724487304688 + ], + [ + "▁cohort", + -12.82072639465332 + ], + [ + "▁intimid", + -12.820830345153809 + ], + [ + "JS", + -12.820849418640137 + ], + [ + "îm", + -12.82096004486084 + ], + [ + "▁Kunststoff", + -12.820963859558105 + ], + [ + "rison", + -12.820972442626953 + ], + [ + "▁praf", + -12.82097339630127 + ], + [ + "▁convient", + -12.821019172668457 + ], + [ + "▁partenaire", + -12.821088790893555 + ], + [ + "▁Verantwortlich", + -12.821182250976562 + ], + [ + "▁semiconductor", + -12.821182250976562 + ], + [ + "▁kürz", + -12.821187019348145 + ], + [ + "▁Bottom", + -12.821187973022461 + ], + [ + "▁tratamentul", + -12.82127571105957 + ], + [ + "Source", + -12.821331024169922 + ], + [ + "authored", + -12.82172679901123 + ], + [ + "robo", + -12.821867942810059 + ], + [ + "▁turf", + -12.82194709777832 + ], + [ + "▁liebe", + -12.821971893310547 + ], + [ + "▁Fotografi", + -12.821995735168457 + ], + [ + "Big", + -12.822064399719238 + ], + [ + "▁fireworks", + -12.822081565856934 + ], + [ + "▁presă", + -12.822135925292969 + ], + [ + "▁conceal", + -12.822269439697266 + ], + [ + "▁originated", + -12.82227897644043 + ], + [ + "▁biciclet", + 
-12.822319984436035 + ], + [ + "acești", + -12.822577476501465 + ], + [ + "▁mortar", + -12.822585105895996 + ], + [ + "▁Wunder", + -12.822626113891602 + ], + [ + "ionist", + -12.822696685791016 + ], + [ + "KM", + -12.822871208190918 + ], + [ + "▁Marion", + -12.822918891906738 + ], + [ + "produkte", + -12.822933197021484 + ], + [ + "▁Sprint", + -12.822999000549316 + ], + [ + "▁Nachde", + -12.8230619430542 + ], + [ + "▁verfüge", + -12.823100090026855 + ], + [ + "Marea", + -12.823177337646484 + ], + [ + "▁compressor", + -12.823253631591797 + ], + [ + "Arm", + -12.823290824890137 + ], + [ + "Auf", + -12.823311805725098 + ], + [ + "▁Polyester", + -12.823461532592773 + ], + [ + "▁Sheffield", + -12.823461532592773 + ], + [ + "illiard", + -12.823494911193848 + ], + [ + "▁misleading", + -12.82353401184082 + ], + [ + "multi", + -12.823749542236328 + ], + [ + "ripped", + -12.82381820678711 + ], + [ + "▁Cosmetic", + -12.82383918762207 + ], + [ + "▁Regal", + -12.823890686035156 + ], + [ + "▁authenticity", + -12.82414436340332 + ], + [ + "▁customizable", + -12.824219703674316 + ], + [ + "▁bathtub", + -12.824275016784668 + ], + [ + "▁Average", + -12.824292182922363 + ], + [ + "▁Muster", + -12.824522018432617 + ], + [ + "290", + -12.824529647827148 + ], + [ + "▁Ersatz", + -12.824570655822754 + ], + [ + "▁Might", + -12.824588775634766 + ], + [ + "published", + -12.82461929321289 + ], + [ + "▁Interpret", + -12.824640274047852 + ], + [ + "▁încep", + -12.82480239868164 + ], + [ + "▁proto", + -12.824851036071777 + ], + [ + "▁disque", + -12.824889183044434 + ], + [ + "▁Palestine", + -12.824980735778809 + ], + [ + "Over", + -12.824981689453125 + ], + [ + "▁verbessert", + -12.824983596801758 + ], + [ + "▁liefern", + -12.825017929077148 + ], + [ + "▁Handlung", + -12.825095176696777 + ], + [ + "▁Handels", + -12.825150489807129 + ], + [ + "▁eater", + -12.825201988220215 + ], + [ + "▁$40", + -12.825251579284668 + ], + [ + "illard", + -12.825334548950195 + ], + [ + "▁apariti", + -12.825413703918457 + ], + [ + "▁gag", + -12.825422286987305 + ], + [ + "▁chimic", + -12.825541496276855 + ], + [ + "▁Guru", + -12.825594902038574 + ], + [ + "▁Toilet", + -12.82571792602539 + ], + [ + "▁Tochter", + -12.825748443603516 + ], + [ + "▁Aurora", + -12.82579231262207 + ], + [ + "contro", + -12.825922966003418 + ], + [ + "▁GOP", + -12.825995445251465 + ], + [ + "Provence", + -12.826130867004395 + ], + [ + "▁Frieden", + -12.82614803314209 + ], + [ + "ăci", + -12.826216697692871 + ], + [ + "portée", + -12.826268196105957 + ], + [ + "▁upright", + -12.826300621032715 + ], + [ + "▁Physician", + -12.82650375366211 + ], + [ + "▁juridique", + -12.82650375366211 + ], + [ + "▁territorial", + -12.82650375366211 + ], + [ + "▁kindergarten", + -12.826505661010742 + ], + [ + "aéroport", + -12.826510429382324 + ], + [ + "▁whisper", + -12.826513290405273 + ], + [ + "▁capacities", + -12.826562881469727 + ], + [ + "dichte", + -12.826641082763672 + ], + [ + "▁Grenzen", + -12.826822280883789 + ], + [ + "▁Riv", + -12.82710075378418 + ], + [ + "épreuve", + -12.827266693115234 + ], + [ + "▁Scheme", + -12.827290534973145 + ], + [ + "mesures", + -12.827330589294434 + ], + [ + "▁Einfluss", + -12.827333450317383 + ], + [ + "appui", + -12.827713966369629 + ], + [ + "▁apuc", + -12.827827453613281 + ], + [ + "▁radiat", + -12.82794189453125 + ], + [ + "▁allergy", + -12.828035354614258 + ], + [ + "▁spear", + -12.828038215637207 + ], + [ + "▁Luxembourg", + -12.828086853027344 + ], + [ + "▁Registered", + -12.828115463256836 + ], + [ + "▁Shape", + -12.828198432922363 + 
], + [ + "genie", + -12.828328132629395 + ], + [ + "nsonsten", + -12.828385353088379 + ], + [ + "▁Symposium", + -12.828412055969238 + ], + [ + "forderung", + -12.828474998474121 + ], + [ + "▁personalizat", + -12.82866096496582 + ], + [ + "▁ştiu", + -12.82875919342041 + ], + [ + "blatt", + -12.828804016113281 + ], + [ + "▁geometry", + -12.828807830810547 + ], + [ + "▁8:30", + -12.828831672668457 + ], + [ + "▁Fahrrad", + -12.828861236572266 + ], + [ + "After", + -12.828927040100098 + ], + [ + "▁ventilat", + -12.829072952270508 + ], + [ + "▁nylon", + -12.829190254211426 + ], + [ + "▁verkauft", + -12.829304695129395 + ], + [ + "öß", + -12.829345703125 + ], + [ + "▁Kath", + -12.829523086547852 + ], + [ + "▁Nuclear", + -12.829558372497559 + ], + [ + "▁Verizon", + -12.829560279846191 + ], + [ + "▁spokesperson", + -12.829560279846191 + ], + [ + "▁vietii", + -12.829560279846191 + ], + [ + "▁prescri", + -12.829629898071289 + ], + [ + "ру", + -12.829666137695312 + ], + [ + "6.2", + -12.829801559448242 + ], + [ + "▁spațiu", + -12.830018997192383 + ], + [ + "▁solvent", + -12.83006763458252 + ], + [ + ",000,000", + -12.830142974853516 + ], + [ + "reuen", + -12.830185890197754 + ], + [ + "plast", + -12.830245018005371 + ], + [ + "▁Activities", + -12.830334663391113 + ], + [ + "▁domni", + -12.83056926727295 + ], + [ + "▁trophy", + -12.830572128295898 + ], + [ + "▁saddle", + -12.830657958984375 + ], + [ + "▁renovat", + -12.830708503723145 + ], + [ + "▁bumper", + -12.830717086791992 + ], + [ + "▁penny", + -12.830741882324219 + ], + [ + "omato", + -12.830743789672852 + ], + [ + "AQ", + -12.83083438873291 + ], + [ + "kunst", + -12.830843925476074 + ], + [ + "hydrat", + -12.830860137939453 + ], + [ + "minder", + -12.830931663513184 + ], + [ + "trecerea", + -12.830949783325195 + ], + [ + "brush", + -12.831185340881348 + ], + [ + "TEC", + -12.83121395111084 + ], + [ + "Please", + -12.831253051757812 + ], + [ + "hydrated", + -12.831483840942383 + ], + [ + "ICAL", + -12.831636428833008 + ], + [ + "trauen", + -12.831639289855957 + ], + [ + "9,000", + -12.83175277709961 + ], + [ + "▁2030", + -12.831830024719238 + ], + [ + "▁Chennai", + -12.831854820251465 + ], + [ + "▁empirical", + -12.831854820251465 + ], + [ + "▁Subscribe", + -12.83206844329834 + ], + [ + "▁vorgestellt", + -12.832120895385742 + ], + [ + "▁Springfield", + -12.832159996032715 + ], + [ + "▁continuu", + -12.832311630249023 + ], + [ + "208", + -12.832351684570312 + ], + [ + "▁Bearing", + -12.83240795135498 + ], + [ + "2003", + -12.832572937011719 + ], + [ + "cheta", + -12.832608222961426 + ], + [ + "▁empathy", + -12.832623481750488 + ], + [ + "▁Alert", + -12.832817077636719 + ], + [ + "▁recreate", + -12.832879066467285 + ], + [ + "PJ", + -12.833159446716309 + ], + [ + "Name", + -12.83323860168457 + ], + [ + "▁Mouse", + -12.833405494689941 + ], + [ + "▁disturbing", + -12.833443641662598 + ], + [ + "▁leichter", + -12.83344841003418 + ], + [ + "▁cruel", + -12.833507537841797 + ], + [ + "▁detective", + -12.833531379699707 + ], + [ + "▁reimbursement", + -12.833626747131348 + ], + [ + "▁Gemeinschaft", + -12.833772659301758 + ], + [ + "▁adolescents", + -12.833772659301758 + ], + [ + "▁Reality", + -12.833954811096191 + ], + [ + "▁Stockholm", + -12.83415699005127 + ], + [ + "▁Gründen", + -12.834304809570312 + ], + [ + "▁Reflect", + -12.83432388305664 + ], + [ + "▁Palmer", + -12.834336280822754 + ], + [ + "▁treac", + -12.8343505859375 + ], + [ + "▁tentative", + -12.834497451782227 + ], + [ + "▁surrender", + -12.834677696228027 + ], + [ + "▁broadly", + 
-12.834734916687012 + ], + [ + "▁județ", + -12.834814071655273 + ], + [ + "▁Thu", + -12.834845542907715 + ], + [ + "wärts", + -12.834961891174316 + ], + [ + "▁crește", + -12.835074424743652 + ], + [ + "▁déplacement", + -12.835208892822266 + ], + [ + "blanc", + -12.835268020629883 + ], + [ + "▁£5", + -12.835308074951172 + ], + [ + "▁confidentiality", + -12.835320472717285 + ], + [ + "veraging", + -12.835444450378418 + ], + [ + "unité", + -12.835609436035156 + ], + [ + "clar", + -12.83564567565918 + ], + [ + "rigg", + -12.835693359375 + ], + [ + "honneur", + -12.835694313049316 + ], + [ + "▁adventurous", + -12.835694313049316 + ], + [ + "▁Nutzen", + -12.835758209228516 + ], + [ + "▁Kabel", + -12.835800170898438 + ], + [ + "empowering", + -12.836040496826172 + ], + [ + "verhalten", + -12.836042404174805 + ], + [ + "▁prevail", + -12.8361234664917 + ], + [ + "mashed", + -12.836138725280762 + ], + [ + "▁1947", + -12.83616828918457 + ], + [ + "function", + -12.836292266845703 + ], + [ + "niveaux", + -12.83633041381836 + ], + [ + "▁territories", + -12.836463928222656 + ], + [ + "▁Permanent", + -12.836465835571289 + ], + [ + "▁christmas", + -12.836471557617188 + ], + [ + "arguing", + -12.836490631103516 + ], + [ + "zukünftig", + -12.836654663085938 + ], + [ + "▁Eindruck", + -12.836817741394043 + ], + [ + "personalised", + -12.836854934692383 + ], + [ + "▁vecin", + -12.837211608886719 + ], + [ + "▁Affiliate", + -12.837234497070312 + ], + [ + "▁Silk", + -12.837249755859375 + ], + [ + "▁Tub", + -12.837440490722656 + ], + [ + "▁remont", + -12.837493896484375 + ], + [ + "▁sauber", + -12.837530136108398 + ], + [ + "gehörig", + -12.837562561035156 + ], + [ + "Maritime", + -12.83771800994873 + ], + [ + "▁Bö", + -12.837973594665527 + ], + [ + "▁1957", + -12.83800220489502 + ], + [ + "▁unparalleled", + -12.838005065917969 + ], + [ + "▁fulfillment", + -12.838042259216309 + ], + [ + "▁collage", + -12.838179588317871 + ], + [ + "fenders", + -12.838248252868652 + ], + [ + "▁neige", + -12.838275909423828 + ], + [ + "▁gamers", + -12.838325500488281 + ], + [ + "tefan", + -12.838339805603027 + ], + [ + "▁wifi", + -12.838349342346191 + ], + [ + "▁leisten", + -12.83835506439209 + ], + [ + "▁Verbesserung", + -12.838390350341797 + ], + [ + "▁composant", + -12.838400840759277 + ], + [ + "▁LORD", + -12.8384370803833 + ], + [ + "arrive", + -12.838472366333008 + ], + [ + "▁conquer", + -12.838562965393066 + ], + [ + "▁lentil", + -12.838767051696777 + ], + [ + "▁Sprech", + -12.838995933532715 + ], + [ + "▁substitution", + -12.839015007019043 + ], + [ + ".05.", + -12.839020729064941 + ], + [ + "FORM", + -12.839144706726074 + ], + [ + "cădere", + -12.839154243469238 + ], + [ + "▁canyon", + -12.839430809020996 + ], + [ + "▁capacitate", + -12.839442253112793 + ], + [ + "▁menace", + -12.839461326599121 + ], + [ + "▁Antique", + -12.839519500732422 + ], + [ + "▁dizaine", + -12.839550971984863 + ], + [ + "▁Saturn", + -12.839578628540039 + ], + [ + "▁gastro", + -12.83962631225586 + ], + [ + "▁Vand", + -12.839641571044922 + ], + [ + "▁africa", + -12.839682579040527 + ], + [ + "▁hackers", + -12.839702606201172 + ], + [ + "▁Bailey", + -12.839736938476562 + ], + [ + "ouette", + -12.839822769165039 + ], + [ + "hoch", + -12.839885711669922 + ], + [ + "étudiant", + -12.839973449707031 + ], + [ + "▁1600", + -12.840004920959473 + ], + [ + "utiliz", + -12.840167999267578 + ], + [ + "reinigung", + -12.840263366699219 + ], + [ + "▁mileage", + -12.84029483795166 + ], + [ + "▁consacré", + -12.840309143066406 + ], + [ + "▁Norfolk", + 
-12.840327262878418 + ], + [ + "stacked", + -12.840659141540527 + ], + [ + "anbieter", + -12.840731620788574 + ], + [ + "▁gewünschte", + -12.84073543548584 + ], + [ + "▁silicon", + -12.840761184692383 + ], + [ + "Ensuite", + -12.840794563293457 + ], + [ + "▁vendu", + -12.840850830078125 + ], + [ + "▁viteza", + -12.840851783752441 + ], + [ + "▁evaluare", + -12.840913772583008 + ], + [ + "▁contient", + -12.841036796569824 + ], + [ + "▁Viagra", + -12.841100692749023 + ], + [ + "▁circumstance", + -12.841283798217773 + ], + [ + "walker", + -12.841383934020996 + ], + [ + "▁Aluminium", + -12.84148120880127 + ], + [ + "ço", + -12.841556549072266 + ], + [ + "▁Kli", + -12.841643333435059 + ], + [ + "▁deliberately", + -12.841649055480957 + ], + [ + "▁gamble", + -12.841893196105957 + ], + [ + "▁nourri", + -12.841903686523438 + ], + [ + "▁sealing", + -12.84194278717041 + ], + [ + "▁Atmosphäre", + -12.842255592346191 + ], + [ + "▁erschien", + -12.842260360717773 + ], + [ + "▁brightness", + -12.842340469360352 + ], + [ + "autonomie", + -12.84251594543457 + ], + [ + "▁propel", + -12.842525482177734 + ], + [ + "▁Infrastructure", + -12.842642784118652 + ], + [ + "▁război", + -12.842642784118652 + ], + [ + "▁jelly", + -12.842684745788574 + ], + [ + "scalable", + -12.84280776977539 + ], + [ + "regal", + -12.84296703338623 + ], + [ + "▁sarcini", + -12.843031883239746 + ], + [ + "▁Dienstag", + -12.84304428100586 + ], + [ + "▁Receive", + -12.8430814743042 + ], + [ + "▁mango", + -12.843356132507324 + ], + [ + "▁compétition", + -12.84341812133789 + ], + [ + "▁Monument", + -12.843428611755371 + ], + [ + "▁mast", + -12.844159126281738 + ], + [ + "▁instructed", + -12.84425163269043 + ], + [ + "▁aventur", + -12.844277381896973 + ], + [ + "139", + -12.844298362731934 + ], + [ + "▁Parmi", + -12.84435749053955 + ], + [ + "confined", + -12.844416618347168 + ], + [ + "acious", + -12.844441413879395 + ], + [ + "▁simptome", + -12.844581604003906 + ], + [ + "▁Fischer", + -12.844897270202637 + ], + [ + "störung", + -12.844985008239746 + ], + [ + "▁bilateral", + -12.84504508972168 + ], + [ + "preşedintele", + -12.845274925231934 + ], + [ + "accueillir", + -12.845357894897461 + ], + [ + "▁Schmidt", + -12.845359802246094 + ], + [ + "litis", + -12.845373153686523 + ], + [ + "WL", + -12.8454008102417 + ], + [ + "▁Rise", + -12.845436096191406 + ], + [ + "▁streamline", + -12.845556259155273 + ], + [ + "sozialen", + -12.845585823059082 + ], + [ + "▁Emirates", + -12.845746040344238 + ], + [ + "▁encrypted", + -12.845746040344238 + ], + [ + "▁unfamiliar", + -12.845746040344238 + ], + [ + "established", + -12.84577751159668 + ], + [ + "▁Tätigkeit", + -12.845818519592285 + ], + [ + "▁unaware", + -12.845913887023926 + ], + [ + "2:00", + -12.8460054397583 + ], + [ + "macher", + -12.846013069152832 + ], + [ + "NSA", + -12.8461275100708 + ], + [ + "▁rutier", + -12.846177101135254 + ], + [ + "▁Trent", + -12.846212387084961 + ], + [ + "▁sickness", + -12.846277236938477 + ], + [ + "▁advert", + -12.846417427062988 + ], + [ + "▁Kranken", + -12.846426963806152 + ], + [ + "▁Sandra", + -12.846443176269531 + ], + [ + "▁Recreation", + -12.846449851989746 + ], + [ + "▁Evidence", + -12.846524238586426 + ], + [ + "▁Immigration", + -12.846524238586426 + ], + [ + "▁carriage", + -12.846524238586426 + ], + [ + "▁justified", + -12.84655475616455 + ], + [ + "▁veche", + -12.846579551696777 + ], + [ + "PGA", + -12.846604347229004 + ], + [ + "▁Carmen", + -12.846735000610352 + ], + [ + "▁Faites", + -12.846750259399414 + ], + [ + "▁erfüllt", + -12.84691333770752 + ], 
+ [ + "▁voilà", + -12.846931457519531 + ], + [ + "▁împlin", + -12.846959114074707 + ], + [ + "deposited", + -12.84721565246582 + ], + [ + "▁decisiv", + -12.847241401672363 + ], + [ + "CSA", + -12.847249031066895 + ], + [ + "pathy", + -12.84726619720459 + ], + [ + "▁erweitert", + -12.847302436828613 + ], + [ + "▁liquor", + -12.847302436828613 + ], + [ + "▁resilient", + -12.847302436828613 + ], + [ + "▁walmart", + -12.847302436828613 + ], + [ + "▁fencing", + -12.847308158874512 + ], + [ + "▁dépasse", + -12.84731388092041 + ], + [ + "KT", + -12.847354888916016 + ], + [ + "▁fries", + -12.847368240356445 + ], + [ + "vadă", + -12.847421646118164 + ], + [ + "▁Spania", + -12.847478866577148 + ], + [ + "▁complètement", + -12.847725868225098 + ], + [ + "▁lucrari", + -12.84777545928955 + ], + [ + "▁Lieb", + -12.847908973693848 + ], + [ + "leistungen", + -12.847943305969238 + ], + [ + "198", + -12.847979545593262 + ], + [ + "▁Schnell", + -12.847997665405273 + ], + [ + "▁radius", + -12.84814453125 + ], + [ + "▁beneficiaries", + -12.848151206970215 + ], + [ + "▁northwest", + -12.848174095153809 + ], + [ + "▁#4", + -12.848223686218262 + ], + [ + "▁embryo", + -12.848492622375488 + ], + [ + "▁ditch", + -12.848791122436523 + ], + [ + "▁Seriously", + -12.848859786987305 + ], + [ + "oppel", + -12.848941802978516 + ], + [ + "▁stalk", + -12.849053382873535 + ], + [ + "écriture", + -12.849066734313965 + ], + [ + "512", + -12.84912109375 + ], + [ + "wiesen", + -12.849271774291992 + ], + [ + "▁Consum", + -12.849321365356445 + ], + [ + "▁lună", + -12.849405288696289 + ], + [ + "▁lantern", + -12.849441528320312 + ], + [ + "▁italian", + -12.849629402160645 + ], + [ + "▁achiziți", + -12.849639892578125 + ], + [ + "▁catalyst", + -12.849639892578125 + ], + [ + "▁Arbeitgeber", + -12.849662780761719 + ], + [ + "▁researched", + -12.8496675491333 + ], + [ + "▁drastically", + -12.849679946899414 + ], + [ + "versammlung", + -12.849735260009766 + ], + [ + "410", + -12.849800109863281 + ], + [ + "▁impus", + -12.850153923034668 + ], + [ + "▁interchange", + -12.850173950195312 + ], + [ + "▁pharmacie", + -12.850215911865234 + ], + [ + "Live", + -12.850354194641113 + ], + [ + "dents", + -12.850384712219238 + ], + [ + "▁charcoal", + -12.850419998168945 + ], + [ + "▁odihn", + -12.850420951843262 + ], + [ + "▁pistol", + -12.850444793701172 + ], + [ + "▁complaining", + -12.850576400756836 + ], + [ + "manager", + -12.850578308105469 + ], + [ + "themed", + -12.850578308105469 + ], + [ + "▁Chang", + -12.850650787353516 + ], + [ + "▁rookie", + -12.85070514678955 + ], + [ + "Great", + -12.850706100463867 + ], + [ + "▁smoker", + -12.850733757019043 + ], + [ + "▁Container", + -12.850812911987305 + ], + [ + "▁bancaire", + -12.850852966308594 + ], + [ + "▁Actual", + -12.850966453552246 + ], + [ + "füllen", + -12.850982666015625 + ], + [ + "forum", + -12.850985527038574 + ], + [ + "bleib", + -12.851073265075684 + ], + [ + "▁combi", + -12.851079940795898 + ], + [ + "smoked", + -12.851137161254883 + ], + [ + "difficultés", + -12.851161003112793 + ], + [ + "▁tactical", + -12.851240158081055 + ], + [ + "▁sichtbar", + -12.851483345031738 + ], + [ + "▁dreptate", + -12.851598739624023 + ], + [ + "ERT", + -12.85168743133545 + ], + [ + "▁Pond", + -12.85177993774414 + ], + [ + "▁Holly", + -12.851844787597656 + ], + [ + "erfolg", + -12.8518705368042 + ], + [ + "▁Nordic", + -12.851896286010742 + ], + [ + "évènement", + -12.851983070373535 + ], + [ + "embracing", + -12.851984024047852 + ], + [ + "▁Maximum", + -12.851984024047852 + ], + [ + "▁défend", + 
-12.85205078125 + ], + [ + "▁fruct", + -12.852056503295898 + ], + [ + "▁Conditioning", + -12.852099418640137 + ], + [ + "LG", + -12.852127075195312 + ], + [ + "exigence", + -12.852166175842285 + ], + [ + "amide", + -12.852187156677246 + ], + [ + "▁darunter", + -12.852208137512207 + ], + [ + "▁EVERY", + -12.852420806884766 + ], + [ + "▁comparat", + -12.85244083404541 + ], + [ + "boosting", + -12.852452278137207 + ], + [ + "▁Hawaiian", + -12.852553367614746 + ], + [ + "▁Geburt", + -12.852752685546875 + ], + [ + "deci", + -12.852782249450684 + ], + [ + "▁Apollo", + -12.852803230285645 + ], + [ + "▁schützen", + -12.852821350097656 + ], + [ + "tragere", + -12.852893829345703 + ], + [ + "Online", + -12.852904319763184 + ], + [ + "▁neural", + -12.852913856506348 + ], + [ + "▁lucrez", + -12.853188514709473 + ], + [ + "▁phenomenal", + -12.853253364562988 + ], + [ + "▁Height", + -12.853368759155273 + ], + [ + "coordinating", + -12.853548049926758 + ], + [ + "geschnitten", + -12.853631019592285 + ], + [ + "auront", + -12.853641510009766 + ], + [ + "▁administer", + -12.853644371032715 + ], + [ + "▁contend", + -12.853707313537598 + ], + [ + "▁crispy", + -12.853784561157227 + ], + [ + "chuck", + -12.854011535644531 + ], + [ + "▁Condition", + -12.8540678024292 + ], + [ + "gestaltung", + -12.854324340820312 + ], + [ + "▁Blvd", + -12.854331970214844 + ], + [ + "▁subjective", + -12.854470252990723 + ], + [ + "▁événements", + -12.854708671569824 + ], + [ + "▁Jenny", + -12.855131149291992 + ], + [ + "▁cumpăra", + -12.85519027709961 + ], + [ + "constructing", + -12.855262756347656 + ], + [ + "▁instructional", + -12.85539436340332 + ], + [ + "▁sterling", + -12.855446815490723 + ], + [ + "scrise", + -12.855470657348633 + ], + [ + "▁Boulevard", + -12.855551719665527 + ], + [ + "pipe", + -12.855620384216309 + ], + [ + "▁Pride", + -12.855748176574707 + ], + [ + "▁Kau", + -12.855751991271973 + ], + [ + "▁overhaul", + -12.855924606323242 + ], + [ + "▁Recruitment", + -12.855925559997559 + ], + [ + "▁thrilling", + -12.856218338012695 + ], + [ + "living", + -12.856302261352539 + ], + [ + "▁rămân", + -12.85645866394043 + ], + [ + "▁MOD", + -12.85661792755127 + ], + [ + "▁Newport", + -12.856675148010254 + ], + [ + "▁infectious", + -12.856688499450684 + ], + [ + "6-3", + -12.856860160827637 + ], + [ + "▁Apache", + -12.856976509094238 + ], + [ + "▁dependence", + -12.85698413848877 + ], + [ + "nutzung", + -12.857199668884277 + ], + [ + "praised", + -12.857211112976074 + ], + [ + "▁craving", + -12.857346534729004 + ], + [ + "▁cramp", + -12.857397079467773 + ], + [ + "▁mancare", + -12.857455253601074 + ], + [ + "▁entdeckt", + -12.857474327087402 + ], + [ + "▁Pioneer", + -12.857484817504883 + ], + [ + "▁Adelaide", + -12.857490539550781 + ], + [ + "2.0", + -12.857503890991211 + ], + [ + "168", + -12.857526779174805 + ], + [ + "▁Decorating", + -12.857611656188965 + ], + [ + "▁unpleasant", + -12.857854843139648 + ], + [ + "▁déclaration", + -12.857865333557129 + ], + [ + "▁Grafik", + -12.857908248901367 + ], + [ + "5-2", + -12.857937812805176 + ], + [ + "căci", + -12.857940673828125 + ], + [ + "▁invade", + -12.858171463012695 + ], + [ + "▁internaţional", + -12.858259201049805 + ], + [ + "▁fraudulent", + -12.858281135559082 + ], + [ + "▁crestere", + -12.858441352844238 + ], + [ + "ografic", + -12.858729362487793 + ], + [ + "plină", + -12.859140396118164 + ], + [ + "sunteti", + -12.859150886535645 + ], + [ + "/04", + -12.859176635742188 + ], + [ + "▁admis", + -12.85935115814209 + ], + [ + "▁mediation", + -12.859403610229492 + ], + [ 
+ "ICC", + -12.859424591064453 + ], + [ + "roș", + -12.859660148620605 + ], + [ + "▁Aroma", + -12.8596773147583 + ], + [ + "1:00", + -12.859792709350586 + ], + [ + "gasesc", + -12.859822273254395 + ], + [ + "▁Defence", + -12.859850883483887 + ], + [ + "▁dictionary", + -12.859856605529785 + ], + [ + "▁Batterie", + -12.859865188598633 + ], + [ + "▁gesunde", + -12.85997486114502 + ], + [ + "146", + -12.860099792480469 + ], + [ + "▁mortal", + -12.860129356384277 + ], + [ + "▁Flughafen", + -12.860230445861816 + ], + [ + "hhh", + -12.860284805297852 + ], + [ + "▁novice", + -12.860342025756836 + ], + [ + "▁Develop", + -12.86043930053711 + ], + [ + "▁accidental", + -12.860516548156738 + ], + [ + "Muzeul", + -12.86054515838623 + ], + [ + "▁Jupiter", + -12.86062240600586 + ], + [ + "supposedly", + -12.860662460327148 + ], + [ + "energy", + -12.860758781433105 + ], + [ + "▁montrer", + -12.860764503479004 + ], + [ + "recalled", + -12.860795021057129 + ], + [ + "Press", + -12.860801696777344 + ], + [ + "▁postcard", + -12.86080265045166 + ], + [ + "target", + -12.86081600189209 + ], + [ + "▁vêtements", + -12.860881805419922 + ], + [ + "▁particle", + -12.860888481140137 + ], + [ + "professional", + -12.8608980178833 + ], + [ + "▁1949", + -12.860917091369629 + ], + [ + "yah", + -12.860980033874512 + ], + [ + "▁Spiegel", + -12.861017227172852 + ], + [ + "▁Jeffrey", + -12.861023902893066 + ], + [ + "fahrzeug", + -12.861027717590332 + ], + [ + "▁Plug", + -12.861051559448242 + ], + [ + "▁violin", + -12.861150741577148 + ], + [ + "▁condemn", + -12.861381530761719 + ], + [ + "▁conducere", + -12.861398696899414 + ], + [ + "▁Chevrolet", + -12.861412048339844 + ], + [ + "▁conceput", + -12.861461639404297 + ], + [ + "▁Merri", + -12.861493110656738 + ], + [ + "judging", + -12.861559867858887 + ], + [ + "embraced", + -12.86168098449707 + ], + [ + "▁Compact", + -12.861715316772461 + ], + [ + "▁château", + -12.861807823181152 + ], + [ + "etch", + -12.861945152282715 + ], + [ + "bedroom", + -12.861995697021484 + ], + [ + "People", + -12.862038612365723 + ], + [ + "25,000", + -12.86209774017334 + ], + [ + "ocyte", + -12.862146377563477 + ], + [ + "▁Lenovo", + -12.862205505371094 + ], + [ + "▁Hampton", + -12.862241744995117 + ], + [ + "5.2", + -12.862244606018066 + ], + [ + "▁progres", + -12.862266540527344 + ], + [ + "hoc", + -12.862288475036621 + ], + [ + "▁complementary", + -12.86241340637207 + ], + [ + "turned", + -12.862485885620117 + ], + [ + "mangel", + -12.862508773803711 + ], + [ + "▁Drew", + -12.862592697143555 + ], + [ + "épisode", + -12.86259651184082 + ], + [ + "▁Versorgung", + -12.86259651184082 + ], + [ + "▁ausdrücklich", + -12.86259651184082 + ], + [ + "ciune", + -12.862788200378418 + ], + [ + "▁sfârșit", + -12.862990379333496 + ], + [ + "Agricultural", + -12.862991333007812 + ], + [ + "▁caffeine", + -12.862991333007812 + ], + [ + "▁emergencies", + -12.862991333007812 + ], + [ + "▁unhappy", + -12.862991333007812 + ], + [ + "(7)", + -12.863043785095215 + ], + [ + "▁inlocui", + -12.863059043884277 + ], + [ + "▁Rochester", + -12.863153457641602 + ], + [ + "183", + -12.863155364990234 + ], + [ + "niz", + -12.863285064697266 + ], + [ + "tasche", + -12.863462448120117 + ], + [ + "▁Salle", + -12.86347484588623 + ], + [ + "cît", + -12.863478660583496 + ], + [ + "▁Singer", + -12.863489151000977 + ], + [ + "▁economically", + -12.863506317138672 + ], + [ + "▁ieși", + -12.863525390625 + ], + [ + "▁façade", + -12.86378288269043 + ], + [ + "Ohne", + -12.863801956176758 + ], + [ + "▁edible", + -12.863842964172363 + ], + 
[ + "Rob", + -12.863851547241211 + ], + [ + "▁(2014)", + -12.863859176635742 + ], + [ + "▁Zar", + -12.863919258117676 + ], + [ + "▁obey", + -12.863995552062988 + ], + [ + "Pack", + -12.864087104797363 + ], + [ + "▁Omni", + -12.864198684692383 + ], + [ + "▁Gilbert", + -12.864212036132812 + ], + [ + "▁Vlad", + -12.86429500579834 + ], + [ + "▁pauvre", + -12.864333152770996 + ], + [ + "▁secular", + -12.864383697509766 + ], + [ + "Center", + -12.864415168762207 + ], + [ + "▁Prospect", + -12.864457130432129 + ], + [ + "▁Noah", + -12.86450481414795 + ], + [ + "▁Interactive", + -12.86471176147461 + ], + [ + "▁centaine", + -12.86485767364502 + ], + [ + "▁cerebral", + -12.864971160888672 + ], + [ + "▁Novel", + -12.865013122558594 + ], + [ + "▁Käufer", + -12.865039825439453 + ], + [ + "werfen", + -12.865056991577148 + ], + [ + "▁reluctant", + -12.865143775939941 + ], + [ + "ес", + -12.86520004272461 + ], + [ + "Look", + -12.86521053314209 + ], + [ + "Erkrankung", + -12.86536693572998 + ], + [ + "▁cucumber", + -12.86536693572998 + ], + [ + "/2017", + -12.865399360656738 + ], + [ + "▁flank", + -12.865405082702637 + ], + [ + "opportunité", + -12.865667343139648 + ], + [ + "zugleich", + -12.865766525268555 + ], + [ + "RAT", + -12.865840911865234 + ], + [ + "▁avantages", + -12.865880012512207 + ], + [ + "▁außer", + -12.866008758544922 + ], + [ + "GV", + -12.866090774536133 + ], + [ + "▁Continental", + -12.866159439086914 + ], + [ + "▁affiliation", + -12.866159439086914 + ], + [ + "▁ursprünglich", + -12.86618423461914 + ], + [ + "▁hardship", + -12.866349220275879 + ], + [ + "âme", + -12.86647891998291 + ], + [ + "▁hallway", + -12.866576194763184 + ], + [ + "▁afară", + -12.866578102111816 + ], + [ + "western", + -12.866714477539062 + ], + [ + "▁Jacket", + -12.866802215576172 + ], + [ + "▁culturelle", + -12.866876602172852 + ], + [ + "▁glaci", + -12.866995811462402 + ], + [ + "metoda", + -12.867036819458008 + ], + [ + "▁clerk", + -12.867045402526855 + ], + [ + "▁ordinance", + -12.867185592651367 + ], + [ + "▁Initial", + -12.867197036743164 + ], + [ + "waking", + -12.86722469329834 + ], + [ + "▁Secondary", + -12.867366790771484 + ], + [ + "▁Solomon", + -12.867411613464355 + ], + [ + "glomer", + -12.867488861083984 + ], + [ + "SYS", + -12.867530822753906 + ], + [ + "▁Florin", + -12.867596626281738 + ], + [ + "ffentlich", + -12.867670059204102 + ], + [ + "▁Printer", + -12.867674827575684 + ], + [ + "▁dimineata", + -12.86774730682373 + ], + [ + "▁stripes", + -12.867748260498047 + ], + [ + "plugged", + -12.86776065826416 + ], + [ + "öhl", + -12.867836952209473 + ], + [ + "infused", + -12.867875099182129 + ], + [ + "▁Rubber", + -12.867895126342773 + ], + [ + "paved", + -12.867898941040039 + ], + [ + "▁Devi", + -12.867995262145996 + ], + [ + "▁subway", + -12.8681640625 + ], + [ + "▁gases", + -12.868306159973145 + ], + [ + "▁reguli", + -12.868371963500977 + ], + [ + "▁Rebel", + -12.868413925170898 + ], + [ + "▁destructive", + -12.868546485900879 + ], + [ + "▁oferind", + -12.868664741516113 + ], + [ + "9001", + -12.868876457214355 + ], + [ + "CRA", + -12.868912696838379 + ], + [ + "why", + -12.868932723999023 + ], + [ + "sensul", + -12.869036674499512 + ], + [ + "guter", + -12.869277000427246 + ], + [ + "Empfehlung", + -12.869338035583496 + ], + [ + "▁convertible", + -12.86953353881836 + ], + [ + "▁predominantly", + -12.869637489318848 + ], + [ + "▁Mentor", + -12.869649887084961 + ], + [ + "Practic", + -12.869720458984375 + ], + [ + "▁echipă", + -12.869754791259766 + ], + [ + "onsite", + -12.869853019714355 + ], + [ 
+ "▁zunehmend", + -12.86994743347168 + ], + [ + "▁Harbour", + -12.870016098022461 + ], + [ + "▁pineapple", + -12.870133399963379 + ], + [ + "▁gasoline", + -12.870139122009277 + ], + [ + "▁Jaguar", + -12.870158195495605 + ], + [ + "kno", + -12.870259284973145 + ], + [ + "▁heap", + -12.870448112487793 + ], + [ + "▁fictional", + -12.870481491088867 + ], + [ + "fiinta", + -12.870753288269043 + ], + [ + "▁Amber", + -12.87081241607666 + ], + [ + "▁Exclusive", + -12.870929718017578 + ], + [ + "▁Pharmaceutical", + -12.870929718017578 + ], + [ + "▁unterscheide", + -12.871044158935547 + ], + [ + "▁1942", + -12.871116638183594 + ], + [ + "▁Ceiling", + -12.87115478515625 + ], + [ + "developed", + -12.871228218078613 + ], + [ + "▁consacr", + -12.87132453918457 + ], + [ + "▁Membr", + -12.871411323547363 + ], + [ + "erton", + -12.871447563171387 + ], + [ + "habitation", + -12.871685981750488 + ], + [ + "▁longevity", + -12.871726989746094 + ], + [ + "▁Starbucks", + -12.871728897094727 + ], + [ + "▁poat", + -12.871771812438965 + ], + [ + "▁commissioner", + -12.871794700622559 + ], + [ + "pedia", + -12.871938705444336 + ], + [ + "popped", + -12.872468948364258 + ], + [ + "versorgung", + -12.872525215148926 + ], + [ + "▁Aktivitäten", + -12.872525215148926 + ], + [ + "▁Betreuung", + -12.872525215148926 + ], + [ + "▁afacere", + -12.872968673706055 + ], + [ + "▁Mechanical", + -12.873323440551758 + ], + [ + "▁Leiter", + -12.873346328735352 + ], + [ + "▁scaling", + -12.873427391052246 + ], + [ + "▁Slim", + -12.87350082397461 + ], + [ + "▁temperaturi", + -12.873516082763672 + ], + [ + "ACH", + -12.873558044433594 + ], + [ + "▁jährlich", + -12.873682022094727 + ], + [ + "▁photographie", + -12.873722076416016 + ], + [ + "▁préalable", + -12.873725891113281 + ], + [ + "▁părinți", + -12.87372875213623 + ], + [ + "▁Farmers", + -12.873873710632324 + ], + [ + "▁Printable", + -12.873905181884766 + ], + [ + "Früh", + -12.873908996582031 + ], + [ + "approved", + -12.87398624420166 + ], + [ + "otro", + -12.874094009399414 + ], + [ + "▁veneer", + -12.874099731445312 + ], + [ + "▁Warriors", + -12.874122619628906 + ], + [ + "▁Approach", + -12.874149322509766 + ], + [ + "Share", + -12.874238967895508 + ], + [ + "▁buds", + -12.874252319335938 + ], + [ + "▁Într", + -12.874330520629883 + ], + [ + "glichen", + -12.87452507019043 + ], + [ + "▁anbieten", + -12.87452507019043 + ], + [ + "MET", + -12.874539375305176 + ], + [ + "amélioration", + -12.87468147277832 + ], + [ + "ländische", + -12.87468433380127 + ], + [ + "nsgesamt", + -12.874764442443848 + ], + [ + "einiger", + -12.874822616577148 + ], + [ + "▁Förderung", + -12.874876022338867 + ], + [ + "destroying", + -12.874910354614258 + ], + [ + "▁accreditation", + -12.874922752380371 + ], + [ + "reminiscent", + -12.875094413757324 + ], + [ + "▁retriev", + -12.87528133392334 + ], + [ + "▁Flü", + -12.875306129455566 + ], + [ + "▁Monsieur", + -12.875322341918945 + ], + [ + "German", + -12.87536334991455 + ], + [ + "Orice", + -12.875443458557129 + ], + [ + "künftig", + -12.875523567199707 + ], + [ + "▁vorbi", + -12.875639915466309 + ], + [ + "▁intentionally", + -12.875733375549316 + ], + [ + "▁îngrij", + -12.875743865966797 + ], + [ + "▁laughed", + -12.875850677490234 + ], + [ + "▁Fiction", + -12.875913619995117 + ], + [ + "▁inteligent", + -12.875914573669434 + ], + [ + "▁Translation", + -12.875953674316406 + ], + [ + "greete", + -12.875983238220215 + ], + [ + "▁énergétique", + -12.876123428344727 + ], + [ + "uncovered", + -12.876248359680176 + ], + [ + "▁évidemment", + 
-12.876523971557617 + ], + [ + "▁Vietnamese", + -12.876535415649414 + ], + [ + "▁Libya", + -12.876675605773926 + ], + [ + "▁Trailer", + -12.876734733581543 + ], + [ + "▁Wohl", + -12.876871109008789 + ], + [ + "▁Congo", + -12.87698745727539 + ], + [ + "▁freut", + -12.877002716064453 + ], + [ + "zauber", + -12.877090454101562 + ], + [ + "▁Pân", + -12.877142906188965 + ], + [ + "▁mentine", + -12.877333641052246 + ], + [ + "▁welding", + -12.877335548400879 + ], + [ + "▁Mircea", + -12.8773775100708 + ], + [ + "▁optimism", + -12.877455711364746 + ], + [ + "VEL", + -12.877504348754883 + ], + [ + "oilea", + -12.877540588378906 + ], + [ + "▁thereafter", + -12.877612113952637 + ], + [ + "▁André", + -12.877710342407227 + ], + [ + "forschung", + -12.877799987792969 + ], + [ + "running", + -12.878022193908691 + ], + [ + "▁hostile", + -12.878059387207031 + ], + [ + "Homme", + -12.87811279296875 + ], + [ + "▁Satellite", + -12.878129005432129 + ], + [ + "▁collagen", + -12.87841796875 + ], + [ + "▁concedi", + -12.878518104553223 + ], + [ + "▁produziert", + -12.87852954864502 + ], + [ + "▁virgin", + -12.878540992736816 + ], + [ + "frant", + -12.87857723236084 + ], + [ + "▁teammates", + -12.878744125366211 + ], + [ + "▁faceti", + -12.878802299499512 + ], + [ + "▁Restoration", + -12.87893295288086 + ], + [ + "▁detached", + -12.878935813903809 + ], + [ + "▁Instructor", + -12.878950119018555 + ], + [ + "montag", + -12.879227638244629 + ], + [ + "▁borrowing", + -12.879375457763672 + ], + [ + "▁Retro", + -12.879446983337402 + ], + [ + "▁behandelt", + -12.879536628723145 + ], + [ + "▁Aussage", + -12.879715919494629 + ], + [ + "▁snorkel", + -12.879734992980957 + ], + [ + "▁Proceedings", + -12.879754066467285 + ], + [ + "▁Judy", + -12.879776000976562 + ], + [ + "▁Wendy", + -12.879783630371094 + ], + [ + "artă", + -12.879920959472656 + ], + [ + "▁Vergangenheit", + -12.88013744354248 + ], + [ + "▁Gegner", + -12.880139350891113 + ], + [ + "▁ulcer", + -12.880166053771973 + ], + [ + "wirksam", + -12.880553245544434 + ], + [ + "▁închis", + -12.880560874938965 + ], + [ + "▁emission", + -12.88068962097168 + ], + [ + "ulescu", + -12.880754470825195 + ], + [ + "▁bancar", + -12.880819320678711 + ], + [ + "compromising", + -12.880924224853516 + ], + [ + "▁Priest", + -12.881156921386719 + ], + [ + "▁Progress", + -12.881318092346191 + ], + [ + "▁punish", + -12.88144588470459 + ], + [ + "▁Afin", + -12.881450653076172 + ], + [ + "▁Bog", + -12.881514549255371 + ], + [ + "lunii", + -12.881525039672852 + ], + [ + "▁ressembl", + -12.881570816040039 + ], + [ + "▁Creation", + -12.881644248962402 + ], + [ + "effet", + -12.881668090820312 + ], + [ + "Versicherung", + -12.881671905517578 + ], + [ + "médias", + -12.881672859191895 + ], + [ + "▁Kritik", + -12.881793975830078 + ], + [ + "idia", + -12.881896018981934 + ], + [ + "▁Wasch", + -12.881929397583008 + ], + [ + "UAL", + -12.882059097290039 + ], + [ + "Approximately", + -12.882149696350098 + ], + [ + "izari", + -12.882152557373047 + ], + [ + "▁Dortmund", + -12.882152557373047 + ], + [ + "▁contul", + -12.882343292236328 + ], + [ + "▁Airways", + -12.882408142089844 + ], + [ + "sicherung", + -12.882535934448242 + ], + [ + "échelle", + -12.882560729980469 + ], + [ + "ADD", + -12.882582664489746 + ], + [ + "DIA", + -12.88259506225586 + ], + [ + "kabel", + -12.882621765136719 + ], + [ + "Media", + -12.88268756866455 + ], + [ + "ampli", + -12.882894515991211 + ], + [ + "▁quarry", + -12.88295841217041 + ], + [ + "▁acoper", + -12.883072853088379 + ], + [ + "halter", + -12.883326530456543 + ], + 
[ + "▁solicitor", + -12.883684158325195 + ], + [ + "phosphat", + -12.883763313293457 + ], + [ + "▁drown", + -12.883773803710938 + ], + [ + "congratulat", + -12.884047508239746 + ], + [ + "▁uneven", + -12.884087562561035 + ], + [ + "▁rupe", + -12.884154319763184 + ], + [ + "▁heureux", + -12.88417911529541 + ], + [ + "caractéristiques", + -12.884221076965332 + ], + [ + "60,000", + -12.884283065795898 + ], + [ + "ambigu", + -12.884340286254883 + ], + [ + "224", + -12.884417533874512 + ], + [ + "dov", + -12.88454532623291 + ], + [ + "▁Naturally", + -12.884629249572754 + ], + [ + "▁Ernst", + -12.884634017944336 + ], + [ + "Camp", + -12.884757995605469 + ], + [ + "▁Worldwide", + -12.884909629821777 + ], + [ + "▁antrenament", + -12.885042190551758 + ], + [ + "▁jocul", + -12.88521671295166 + ], + [ + "▁broccoli", + -12.88537883758545 + ], + [ + "▁fascinated", + -12.88537883758545 + ], + [ + "▁Abbey", + -12.885387420654297 + ], + [ + "▁aquarium", + -12.885390281677246 + ], + [ + "HAN", + -12.885458946228027 + ], + [ + "chaffung", + -12.885480880737305 + ], + [ + "137", + -12.885503768920898 + ], + [ + "rumors", + -12.885515213012695 + ], + [ + "reliance", + -12.885557174682617 + ], + [ + "▁vaccination", + -12.8856782913208 + ], + [ + "responsabilitate", + -12.885777473449707 + ], + [ + "▁legislati", + -12.885782241821289 + ], + [ + "ATT", + -12.885826110839844 + ], + [ + "206", + -12.885896682739258 + ], + [ + "▁miere", + -12.885967254638672 + ], + [ + "▁rezultatul", + -12.885988235473633 + ], + [ + "părea", + -12.88599681854248 + ], + [ + "zuführen", + -12.886159896850586 + ], + [ + "▁Kompetenz", + -12.886187553405762 + ], + [ + "▁nickname", + -12.886195182800293 + ], + [ + "pilot", + -12.88620376586914 + ], + [ + "▁ninth", + -12.886252403259277 + ], + [ + "▁Tyr", + -12.886446952819824 + ], + [ + "▁misuse", + -12.886469841003418 + ], + [ + "▁SUP", + -12.886514663696289 + ], + [ + "▁Attack", + -12.88667106628418 + ], + [ + "Smart", + -12.88669490814209 + ], + [ + "▁Philosoph", + -12.886930465698242 + ], + [ + "▁Alege", + -12.886931419372559 + ], + [ + "▁femeile", + -12.886967658996582 + ], + [ + "▁Heating", + -12.88698673248291 + ], + [ + "▁Cricket", + -12.886999130249023 + ], + [ + "▁scholar", + -12.887049674987793 + ], + [ + "Model", + -12.887073516845703 + ], + [ + "▁stimulating", + -12.887182235717773 + ], + [ + "▁industrielle", + -12.887189865112305 + ], + [ + "▁phenomena", + -12.887303352355957 + ], + [ + "▁Nahrung", + -12.887414932250977 + ], + [ + "▁Conditioner", + -12.887433052062988 + ], + [ + "führ", + -12.887489318847656 + ], + [ + "▁révolution", + -12.88757610321045 + ], + [ + "plastic", + -12.887595176696777 + ], + [ + "▁approximate", + -12.887596130371094 + ], + [ + "▁dienen", + -12.887624740600586 + ], + [ + "▁obsession", + -12.887807846069336 + ], + [ + "▁rectangular", + -12.887807846069336 + ], + [ + "Allemagne", + -12.887808799743652 + ], + [ + "▁Tanzania", + -12.887824058532715 + ], + [ + "border", + -12.887884140014648 + ], + [ + "▁crashed", + -12.887958526611328 + ], + [ + "visor", + -12.887974739074707 + ], + [ + "▁autorizat", + -12.888072967529297 + ], + [ + "▁Champagne", + -12.888222694396973 + ], + [ + "längst", + -12.888238906860352 + ], + [ + "▁realities", + -12.888314247131348 + ], + [ + "▁Keyword", + -12.88831615447998 + ], + [ + "▁GUI", + -12.888495445251465 + ], + [ + "▁simplified", + -12.88865852355957 + ], + [ + "▁Rack", + -12.888681411743164 + ], + [ + "▁Zahlen", + -12.888693809509277 + ], + [ + "growth", + -12.888897895812988 + ], + [ + "▁rehearsal", + 
-12.888991355895996 + ], + [ + "▁Epic", + -12.888999938964844 + ], + [ + "▁réussite", + -12.889195442199707 + ], + [ + "▁politician", + -12.889263153076172 + ], + [ + "▁emoți", + -12.889378547668457 + ], + [ + "▁delegation", + -12.889449119567871 + ], + [ + "▁со", + -12.889464378356934 + ], + [ + "oversized", + -12.889477729797363 + ], + [ + "▁Motto", + -12.889481544494629 + ], + [ + "1860", + -12.889788627624512 + ], + [ + "▁defective", + -12.889803886413574 + ], + [ + "brewing", + -12.889852523803711 + ], + [ + "linguistic", + -12.890243530273438 + ], + [ + "▁Hopkins", + -12.890265464782715 + ], + [ + "▁(2012)", + -12.89030933380127 + ], + [ + "crease", + -12.890436172485352 + ], + [ + "▁Versicherungs", + -12.89052677154541 + ], + [ + "▁Noble", + -12.890752792358398 + ], + [ + "▁Bekannt", + -12.890896797180176 + ], + [ + "▁vorstellen", + -12.89095401763916 + ], + [ + "▁suburban", + -12.890970230102539 + ], + [ + "DAC", + -12.890995025634766 + ], + [ + "▁scatter", + -12.89103889465332 + ], + [ + "▁Artificial", + -12.8910551071167 + ], + [ + "▁reactor", + -12.891073226928711 + ], + [ + "▁modelling", + -12.89108943939209 + ], + [ + "▁Holder", + -12.891148567199707 + ], + [ + "athon", + -12.891149520874023 + ], + [ + "147", + -12.891190528869629 + ], + [ + "▁stagn", + -12.891257286071777 + ], + [ + "ARY", + -12.891261100769043 + ], + [ + "Space", + -12.89126968383789 + ], + [ + "▁Gibson", + -12.891718864440918 + ], + [ + "▁Investigator", + -12.89173698425293 + ], + [ + "▁1914", + -12.891818046569824 + ], + [ + "▁Muhammad", + -12.891868591308594 + ], + [ + "▁shove", + -12.892073631286621 + ], + [ + "▁erklären", + -12.892276763916016 + ], + [ + "▁abdomen", + -12.892277717590332 + ], + [ + "▁Mazda", + -12.892349243164062 + ], + [ + "▁hemo", + -12.892364501953125 + ], + [ + "National", + -12.892455101013184 + ], + [ + "starken", + -12.89267635345459 + ], + [ + "▁Cyprus", + -12.892683982849121 + ], + [ + "▁tread", + -12.892721176147461 + ], + [ + "▁sweetness", + -12.892725944519043 + ], + [ + "stunden", + -12.892790794372559 + ], + [ + "▁couverture", + -12.893059730529785 + ], + [ + "▁Successful", + -12.893060684204102 + ], + [ + "▁oublier", + -12.893171310424805 + ], + [ + "▁esential", + -12.893203735351562 + ], + [ + "estival", + -12.89321231842041 + ], + [ + "gnac", + -12.893280029296875 + ], + [ + "▁Basement", + -12.893457412719727 + ], + [ + "presumably", + -12.893497467041016 + ], + [ + "▁mourn", + -12.893561363220215 + ], + [ + "armée", + -12.893677711486816 + ], + [ + "148", + -12.893845558166504 + ], + [ + "▁residue", + -12.894006729125977 + ], + [ + "▁metalic", + -12.89404296875 + ], + [ + "▁Zell", + -12.89425277709961 + ], + [ + "Build", + -12.894280433654785 + ], + [ + "▁prevalence", + -12.894312858581543 + ], + [ + "▁wrestling", + -12.894312858581543 + ], + [ + "▁ascuns", + -12.894325256347656 + ], + [ + "Sacred", + -12.894340515136719 + ], + [ + "Tec", + -12.89438533782959 + ], + [ + "▁Kindergarten", + -12.894389152526855 + ], + [ + "bindung", + -12.894464492797852 + ], + [ + "▁ritm", + -12.894545555114746 + ], + [ + "▁triste", + -12.894651412963867 + ], + [ + "▁introdus", + -12.894758224487305 + ], + [ + "/2016", + -12.894824028015137 + ], + [ + "▁română", + -12.894899368286133 + ], + [ + "▁bibli", + -12.89490032196045 + ], + [ + "▁cigar", + -12.894913673400879 + ], + [ + "Rie", + -12.894990921020508 + ], + [ + "▁intentional", + -12.894999504089355 + ], + [ + "▁cuprins", + -12.895098686218262 + ], + [ + "remarkably", + -12.895129203796387 + ], + [ + "▁printemps", + 
-12.895133972167969 + ], + [ + "▁declining", + -12.895171165466309 + ], + [ + "Magazin", + -12.89552116394043 + ], + [ + "▁săptămână", + -12.895537376403809 + ], + [ + "▁vérifier", + -12.895549774169922 + ], + [ + "▁Speise", + -12.895584106445312 + ], + [ + "▁reteta", + -12.8956298828125 + ], + [ + "heed", + -12.895772933959961 + ], + [ + "▁Compliance", + -12.895946502685547 + ], + [ + "▁embroidery", + -12.895946502685547 + ], + [ + "cried", + -12.896025657653809 + ], + [ + "▁(„", + -12.896282196044922 + ], + [ + "▁heck", + -12.89629077911377 + ], + [ + "▁sadness", + -12.896501541137695 + ], + [ + "▁impulse", + -12.896585464477539 + ], + [ + "ATH", + -12.896740913391113 + ], + [ + "▁lavender", + -12.896773338317871 + ], + [ + "uiesc", + -12.896790504455566 + ], + [ + "▁Disorder", + -12.896876335144043 + ], + [ + "stroke", + -12.896991729736328 + ], + [ + "▁piaţ", + -12.8970365524292 + ], + [ + "ournée", + -12.897049903869629 + ], + [ + "▁Barnes", + -12.8971586227417 + ], + [ + "▁scăzut", + -12.897172927856445 + ], + [ + "▁équipements", + -12.89725112915039 + ], + [ + "OND", + -12.897375106811523 + ], + [ + "▁Compet", + -12.897424697875977 + ], + [ + "▁Bestell", + -12.89748477935791 + ], + [ + "▁immédiatement", + -12.897587776184082 + ], + [ + "aparut", + -12.89759635925293 + ], + [ + "▁rainfall", + -12.897882461547852 + ], + [ + "oreille", + -12.89797306060791 + ], + [ + "▁ministère", + -12.898014068603516 + ], + [ + "iris", + -12.898140907287598 + ], + [ + "dyna", + -12.898279190063477 + ], + [ + "drücken", + -12.898343086242676 + ], + [ + "▁détect", + -12.89834976196289 + ], + [ + "▁fonctionnalité", + -12.89840030670166 + ], + [ + "▁imbalance", + -12.89840030670166 + ], + [ + "▁unpredictable", + -12.89840030670166 + ], + [ + "▁literar", + -12.89846134185791 + ], + [ + "▁Windsor", + -12.898472785949707 + ], + [ + "▁Unlimited", + -12.898481369018555 + ], + [ + "colour", + -12.898674964904785 + ], + [ + "▁Portfolio", + -12.898810386657715 + ], + [ + "149", + -12.898883819580078 + ], + [ + "volution", + -12.898890495300293 + ], + [ + "▁folgende", + -12.899078369140625 + ], + [ + "▁arbitration", + -12.899105072021484 + ], + [ + "kicking", + -12.89913558959961 + ], + [ + "zügig", + -12.89923095703125 + ], + [ + "▁1941", + -12.899311065673828 + ], + [ + "▁Drake", + -12.89955997467041 + ], + [ + "▁ausführlich", + -12.899630546569824 + ], + [ + "▁chaussure", + -12.899630546569824 + ], + [ + "▁intestinal", + -12.89976692199707 + ], + [ + "▁pilgrim", + -12.900040626525879 + ], + [ + "▁Bark", + -12.900142669677734 + ], + [ + "between", + -12.900157928466797 + ], + [ + "disposed", + -12.900175094604492 + ], + [ + "▁Dylan", + -12.900218963623047 + ], + [ + "ств", + -12.900253295898438 + ], + [ + "NOR", + -12.900287628173828 + ], + [ + "traces", + -12.90038776397705 + ], + [ + "▁moindre", + -12.900500297546387 + ], + [ + "▁$10,000", + -12.900552749633789 + ], + [ + "212", + -12.900599479675293 + ], + [ + "wusste", + -12.900659561157227 + ], + [ + "▁predictable", + -12.900671005249023 + ], + [ + "poţi", + -12.900679588317871 + ], + [ + "▁Celsius", + -12.900860786437988 + ], + [ + "gebunden", + -12.90086841583252 + ], + [ + "▁Legacy", + -12.900891304016113 + ], + [ + "movers", + -12.90090274810791 + ], + [ + "▁concret", + -12.90098762512207 + ], + [ + "▁simpla", + -12.901050567626953 + ], + [ + "rechnet", + -12.901103973388672 + ], + [ + "▁certainty", + -12.901144981384277 + ], + [ + "entrepreneurship", + -12.901153564453125 + ], + [ + "kohl", + -12.901289939880371 + ], + [ + "▁curte", + 
-12.901311874389648 + ], + [ + "▁Forbes", + -12.901411056518555 + ], + [ + "▁Zusatz", + -12.901535987854004 + ], + [ + "blending", + -12.90163803100586 + ], + [ + "▁variat", + -12.901642799377441 + ], + [ + "▁galaxy", + -12.90168285369873 + ], + [ + "▁safari", + -12.90168571472168 + ], + [ + "▁municipalities", + -12.9017972946167 + ], + [ + "▁Drept", + -12.90180778503418 + ], + [ + "aufnahme", + -12.902128219604492 + ], + [ + "▁endorse", + -12.902223587036133 + ], + [ + "einrichtung", + -12.902244567871094 + ], + [ + "Sync", + -12.902270317077637 + ], + [ + "abide", + -12.902323722839355 + ], + [ + "brushed", + -12.902350425720215 + ], + [ + "▁actiune", + -12.902410507202148 + ], + [ + "quaint", + -12.902498245239258 + ], + [ + "▁volatility", + -12.902504920959473 + ], + [ + "▁repetitive", + -12.902505874633789 + ], + [ + "▁découvr", + -12.902560234069824 + ], + [ + "Totodat", + -12.902585983276367 + ], + [ + "▁românesc", + -12.902682304382324 + ], + [ + "▁tempting", + -12.902772903442383 + ], + [ + "thesis", + -12.902947425842285 + ], + [ + "secure", + -12.903013229370117 + ], + [ + "delt", + -12.903019905090332 + ], + [ + "▁şef", + -12.903167724609375 + ], + [ + "▁epidemic", + -12.903326988220215 + ], + [ + "▁Appliance", + -12.903327941894531 + ], + [ + "cearcă", + -12.903331756591797 + ], + [ + "▁lodging", + -12.903361320495605 + ], + [ + "▁photographed", + -12.903507232666016 + ], + [ + "geschlagen", + -12.903794288635254 + ], + [ + "▁Methodist", + -12.90380859375 + ], + [ + "▁Transit", + -12.90389347076416 + ], + [ + "▁Länder", + -12.903934478759766 + ], + [ + "villa", + -12.903986930847168 + ], + [ + "▁toilette", + -12.904031753540039 + ], + [ + "anno", + -12.904074668884277 + ], + [ + "▁Aufnahme", + -12.904091835021973 + ], + [ + "▁Coral", + -12.904099464416504 + ], + [ + "pourraient", + -12.904129981994629 + ], + [ + "▁digestion", + -12.904245376586914 + ], + [ + "▁Vacation", + -12.904274940490723 + ], + [ + "▁Rugby", + -12.904275894165039 + ], + [ + "MIC", + -12.904311180114746 + ], + [ + "▁choc", + -12.904417991638184 + ], + [ + "2002", + -12.904492378234863 + ], + [ + "gestion", + -12.904674530029297 + ], + [ + "▁Zoom", + -12.904745101928711 + ], + [ + "essor", + -12.904763221740723 + ], + [ + "weighed", + -12.904793739318848 + ], + [ + "▁dispus", + -12.904987335205078 + ], + [ + "▁redemption", + -12.90502643585205 + ], + [ + "▁plaster", + -12.905071258544922 + ], + [ + "▁Quilt", + -12.90507698059082 + ], + [ + "▁teritoriul", + -12.905088424682617 + ], + [ + "ndern", + -12.905097961425781 + ], + [ + "▁expired", + -12.905105590820312 + ], + [ + "▁Tribunal", + -12.905122756958008 + ], + [ + "occupation", + -12.9052152633667 + ], + [ + "▁woodland", + -12.905248641967773 + ], + [ + "vieux", + -12.905254364013672 + ], + [ + "▁Midland", + -12.905465126037598 + ], + [ + "gât", + -12.90571117401123 + ], + [ + "électricité", + -12.905800819396973 + ], + [ + "▁vanzare", + -12.905811309814453 + ], + [ + "biologi", + -12.905961036682129 + ], + [ + "▁vive", + -12.906060218811035 + ], + [ + "▁Alarm", + -12.906097412109375 + ], + [ + "▁experiență", + -12.9061279296875 + ], + [ + "▁Loch", + -12.906133651733398 + ], + [ + "▁Pedro", + -12.906194686889648 + ], + [ + "▁detergent", + -12.906217575073242 + ], + [ + "language", + -12.906554222106934 + ], + [ + "▁sedan", + -12.906655311584473 + ], + [ + "▁Brady", + -12.906736373901367 + ], + [ + "▁compus", + -12.906976699829102 + ], + [ + "▁landfill", + -12.906982421875 + ], + [ + "giu", + -12.907039642333984 + ], + [ + "beziehung", + -12.9070405960083 
+ ], + [ + "▁picior", + -12.907184600830078 + ], + [ + "ALI", + -12.907235145568848 + ], + [ + "▁Commander", + -12.907256126403809 + ], + [ + "EPS", + -12.907303810119629 + ], + [ + "▁Textil", + -12.907320022583008 + ], + [ + "▁industria", + -12.907339096069336 + ], + [ + "lox", + -12.907365798950195 + ], + [ + "▁eclectic", + -12.907453536987305 + ], + [ + "▁gracious", + -12.907477378845215 + ], + [ + "Uniunea", + -12.907525062561035 + ], + [ + "bps", + -12.90754222869873 + ], + [ + "▁entertained", + -12.907634735107422 + ], + [ + "depinde", + -12.907767295837402 + ], + [ + "▁daylight", + -12.907893180847168 + ], + [ + "▁résistance", + -12.907995223999023 + ], + [ + "ARN", + -12.908194541931152 + ], + [ + "▁unavailable", + -12.908201217651367 + ], + [ + "Curtea", + -12.908390045166016 + ], + [ + "▁pores", + -12.908502578735352 + ], + [ + "▁Tonight", + -12.908649444580078 + ], + [ + "▁datori", + -12.90869426727295 + ], + [ + "▁gezielt", + -12.908703804016113 + ], + [ + "▁rupture", + -12.90875244140625 + ], + [ + "▁disput", + -12.908848762512207 + ], + [ + "▁sonstige", + -12.908895492553711 + ], + [ + "▁Ordnung", + -12.90910816192627 + ], + [ + "▁beschrieben", + -12.909114837646484 + ], + [ + "▁Rainbow", + -12.90911865234375 + ], + [ + "▁Werkzeug", + -12.909136772155762 + ], + [ + "GIN", + -12.909354209899902 + ], + [ + "facilitating", + -12.909490585327148 + ], + [ + "hunt", + -12.90955638885498 + ], + [ + "▁Serving", + -12.909673690795898 + ], + [ + "Writ", + -12.909692764282227 + ], + [ + "requisite", + -12.909798622131348 + ], + [ + "▁Kerry", + -12.90989875793457 + ], + [ + "▁riesig", + -12.909957885742188 + ], + [ + "▁Healing", + -12.91030502319336 + ], + [ + "▁1954", + -12.910365104675293 + ], + [ + "▁mousse", + -12.910428047180176 + ], + [ + "▁Positive", + -12.910764694213867 + ], + [ + "embodie", + -12.910772323608398 + ], + [ + "▁penetrate", + -12.910774230957031 + ], + [ + "endorsed", + -12.910882949829102 + ], + [ + "▁situatia", + -12.910927772521973 + ], + [ + "▁Unity", + -12.911083221435547 + ], + [ + "142", + -12.911102294921875 + ], + [ + "▁farmhouse", + -12.911138534545898 + ], + [ + "▁Handbook", + -12.911368370056152 + ], + [ + "▁symbolic", + -12.911378860473633 + ], + [ + "pristine", + -12.911439895629883 + ], + [ + "moitié", + -12.911595344543457 + ], + [ + "▁Sessions", + -12.912017822265625 + ], + [ + "technisch", + -12.912116050720215 + ], + [ + "▁lesquel", + -12.912148475646973 + ], + [ + "▁electronically", + -12.912208557128906 + ], + [ + "▁modificat", + -12.912240982055664 + ], + [ + "▁adjoin", + -12.912242889404297 + ], + [ + "actualité", + -12.912256240844727 + ], + [ + "vati", + -12.91229248046875 + ], + [ + "VENT", + -12.912299156188965 + ], + [ + "▁salsa", + -12.912333488464355 + ], + [ + "acupunctur", + -12.912424087524414 + ], + [ + "▁Opportunity", + -12.912424087524414 + ], + [ + "▁Inspection", + -12.912425994873047 + ], + [ + "▁vereinbart", + -12.912425994873047 + ], + [ + "▁Residents", + -12.912426948547363 + ], + [ + "▁perennial", + -12.91242790222168 + ], + [ + "CHAN", + -12.912555694580078 + ], + [ + "Search", + -12.912572860717773 + ], + [ + "UTE", + -12.912696838378906 + ], + [ + "▁Lens", + -12.912703514099121 + ], + [ + "▁Banner", + -12.91281509399414 + ], + [ + "aménagement", + -12.912839889526367 + ], + [ + "▁Decision", + -12.91286849975586 + ], + [ + "▁ferr", + -12.912869453430176 + ], + [ + "▁Transformation", + -12.912878036499023 + ], + [ + "▁Stamm", + -12.912955284118652 + ], + [ + "▁Galerie", + -12.913003921508789 + ], + [ + "onny", + 
-12.913126945495605 + ], + [ + "▁caption", + -12.913195610046387 + ], + [ + "▁viitorul", + -12.91323471069336 + ], + [ + "▁professionelle", + -12.913281440734863 + ], + [ + "drepturile", + -12.913294792175293 + ], + [ + "ylon", + -12.913345336914062 + ], + [ + "Société", + -12.913387298583984 + ], + [ + "AIS", + -12.913456916809082 + ], + [ + "March", + -12.91350269317627 + ], + [ + "▁Rav", + -12.91357707977295 + ], + [ + "▁1946", + -12.913691520690918 + ], + [ + "accompagnement", + -12.913713455200195 + ], + [ + "Liviu", + -12.913716316223145 + ], + [ + "▁Appeal", + -12.913826942443848 + ], + [ + "▁sentir", + -12.913952827453613 + ], + [ + "▁Indigenous", + -12.914087295532227 + ], + [ + "▁wizard", + -12.914087295532227 + ], + [ + "▁collateral", + -12.914127349853516 + ], + [ + "▁Proof", + -12.914324760437012 + ], + [ + "▁prze", + -12.914398193359375 + ], + [ + "▁obținut", + -12.91450309753418 + ], + [ + "COP", + -12.914629936218262 + ], + [ + "▁obiect", + -12.914681434631348 + ], + [ + "▁isolate", + -12.914685249328613 + ], + [ + "▁nieder", + -12.914793014526367 + ], + [ + "TECH", + -12.914953231811523 + ], + [ + "▁Sharing", + -12.914998054504395 + ], + [ + "Ideally", + -12.915008544921875 + ], + [ + "▁naked", + -12.915059089660645 + ], + [ + "horaire", + -12.915130615234375 + ], + [ + "▁prelucrare", + -12.915180206298828 + ], + [ + "▁forcément", + -12.915349006652832 + ], + [ + "▁ESPN", + -12.915403366088867 + ], + [ + "▁southwest", + -12.9154634475708 + ], + [ + "▁Timber", + -12.915682792663574 + ], + [ + "kleidung", + -12.915748596191406 + ], + [ + "MJ", + -12.915854454040527 + ], + [ + "Ped", + -12.915889739990234 + ], + [ + "▁lymph", + -12.916181564331055 + ], + [ + "wärme", + -12.916399002075195 + ], + [ + "▁Olivia", + -12.916610717773438 + ], + [ + "Ziua", + -12.916705131530762 + ], + [ + "reihe", + -12.916747093200684 + ], + [ + "▁selfish", + -12.916752815246582 + ], + [ + "▁geography", + -12.916814804077148 + ], + [ + "▁etaj", + -12.916924476623535 + ], + [ + "▁acquis", + -12.91698932647705 + ], + [ + "▁rejoin", + -12.91701602935791 + ], + [ + "7.1", + -12.917097091674805 + ], + [ + "▁paix", + -12.91713809967041 + ], + [ + "tirer", + -12.917284965515137 + ], + [ + "▁clase", + -12.91745662689209 + ], + [ + "▁blink", + -12.917572021484375 + ], + [ + "▁Interface", + -12.917611122131348 + ], + [ + "nado", + -12.917655944824219 + ], + [ + "RIT", + -12.91777515411377 + ], + [ + "ESC", + -12.918120384216309 + ], + [ + "▁carving", + -12.918190002441406 + ], + [ + "▁articolul", + -12.918194770812988 + ], + [ + "▁wreath", + -12.918258666992188 + ], + [ + "▁propaganda", + -12.918266296386719 + ], + [ + "▁Pair", + -12.918267250061035 + ], + [ + "▁pamant", + -12.91831111907959 + ], + [ + "▁venituri", + -12.918357849121094 + ], + [ + "rtz", + -12.91835880279541 + ], + [ + "uddle", + -12.918529510498047 + ], + [ + "uille", + -12.918543815612793 + ], + [ + "▁embed", + -12.918654441833496 + ], + [ + "0.05", + -12.918655395507812 + ], + [ + "▁Brighton", + -12.918718338012695 + ], + [ + "estens", + -12.918742179870605 + ], + [ + "▁occupational", + -12.918862342834473 + ], + [ + "ем", + -12.918890953063965 + ], + [ + "wünsche", + -12.919081687927246 + ], + [ + "▁Poetry", + -12.91909408569336 + ], + [ + "▁visualize", + -12.919109344482422 + ], + [ + "Across", + -12.919121742248535 + ], + [ + "▁essentielle", + -12.919123649597168 + ], + [ + "beratung", + -12.919143676757812 + ], + [ + "▁Guidelines", + -12.91919231414795 + ], + [ + "▁Fehl", + -12.919198036193848 + ], + [ + "▁liberty", + 
-12.91921329498291 + ], + [ + "▁Investigation", + -12.91922378540039 + ], + [ + "▁sunrise", + -12.919266700744629 + ], + [ + "▁12:00", + -12.919541358947754 + ], + [ + "venind", + -12.919583320617676 + ], + [ + "▁lotion", + -12.919655799865723 + ], + [ + "conscious", + -12.91968822479248 + ], + [ + "logists", + -12.91973876953125 + ], + [ + "▁judecător", + -12.919893264770508 + ], + [ + "▁Ecuador", + -12.919928550720215 + ], + [ + "▁ambulance", + -12.91994857788086 + ], + [ + "▁Already", + -12.920026779174805 + ], + [ + "▁eröffnet", + -12.920090675354004 + ], + [ + "▁naval", + -12.92010498046875 + ], + [ + "▁imposibil", + -12.92011547088623 + ], + [ + "▁Merry", + -12.92011833190918 + ], + [ + "▁Duncan", + -12.920272827148438 + ], + [ + "▁léger", + -12.9203519821167 + ], + [ + "▁delta", + -12.920391082763672 + ], + [ + "▁Machinery", + -12.920578002929688 + ], + [ + "▁craftsmanship", + -12.920766830444336 + ], + [ + "▁angezeigt", + -12.9207763671875 + ], + [ + "▁formidable", + -12.9207763671875 + ], + [ + "▁Startup", + -12.920878410339355 + ], + [ + "venus", + -12.920969009399414 + ], + [ + "▁tannin", + -12.921019554138184 + ], + [ + "collaborating", + -12.921128273010254 + ], + [ + "▁abrupt", + -12.921152114868164 + ], + [ + "emergence", + -12.921171188354492 + ], + [ + "Dienstleistungen", + -12.921197891235352 + ], + [ + "▁liefert", + -12.921217918395996 + ], + [ + "engagement", + -12.921222686767578 + ], + [ + "▁maximise", + -12.921304702758789 + ], + [ + "modeled", + -12.9214448928833 + ], + [ + "▁crane", + -12.92148208618164 + ], + [ + "▁effortless", + -12.921540260314941 + ], + [ + "▁Buffet", + -12.92160701751709 + ], + [ + "8000", + -12.921648979187012 + ], + [ + "▁Überblick", + -12.921687126159668 + ], + [ + "micro", + -12.921981811523438 + ], + [ + "▁vergleichen", + -12.92204475402832 + ], + [ + "143", + -12.922080993652344 + ], + [ + "5.6", + -12.922094345092773 + ], + [ + "▁odata", + -12.922131538391113 + ], + [ + "▁interviu", + -12.922162055969238 + ], + [ + "▁poliţi", + -12.922375679016113 + ], + [ + "plated", + -12.922383308410645 + ], + [ + "Roman", + -12.922406196594238 + ], + [ + "▁satisfactory", + -12.922453880310059 + ], + [ + "▁unanimous", + -12.922459602355957 + ], + [ + "▁întâln", + -12.922464370727539 + ], + [ + "nonsense", + -12.922558784484863 + ], + [ + "▁HOW", + -12.922616004943848 + ], + [ + "prezinta", + -12.922639846801758 + ], + [ + "▁măsura", + -12.9226655960083 + ], + [ + "▁Fuji", + -12.92275619506836 + ], + [ + "▁Meaning", + -12.92278003692627 + ], + [ + "aspiring", + -12.922850608825684 + ], + [ + "▁Suceava", + -12.922863006591797 + ], + [ + "arba", + -12.922983169555664 + ], + [ + "pressive", + -12.922988891601562 + ], + [ + "▁creek", + -12.92301082611084 + ], + [ + "trakt", + -12.923023223876953 + ], + [ + "▁fluffy", + -12.923303604125977 + ], + [ + "▁bateau", + -12.923371315002441 + ], + [ + "ме", + -12.923545837402344 + ], + [ + "UNG", + -12.923609733581543 + ], + [ + "motifs", + -12.923907279968262 + ], + [ + "Type", + -12.923958778381348 + ], + [ + "perçu", + -12.924132347106934 + ], + [ + "singurul", + -12.924139022827148 + ], + [ + "▁(2011)", + -12.92418384552002 + ], + [ + "▁hemp", + -12.924263954162598 + ], + [ + "betroffenen", + -12.92431640625 + ], + [ + "▁sermon", + -12.924369812011719 + ], + [ + "AID", + -12.924545288085938 + ], + [ + "3.7", + -12.924627304077148 + ], + [ + "▁heiß", + -12.92463207244873 + ], + [ + "▁bolnav", + -12.924982070922852 + ], + [ + "First", + -12.924995422363281 + ], + [ + "▁interrupt", + -12.925040245056152 + ], + [ 
+ "phag", + -12.925106048583984 + ], + [ + "235", + -12.925201416015625 + ], + [ + "▁discoveries", + -12.925262451171875 + ], + [ + "▁Wellington", + -12.925263404846191 + ], + [ + "▁wechseln", + -12.925298690795898 + ], + [ + "▁strategically", + -12.925379753112793 + ], + [ + "▁iphone", + -12.925440788269043 + ], + [ + "geteilt", + -12.925646781921387 + ], + [ + "generative", + -12.925748825073242 + ], + [ + "▁Monroe", + -12.925806045532227 + ], + [ + "▁Execut", + -12.925863265991211 + ], + [ + "▁knitting", + -12.925931930541992 + ], + [ + "▁Couple", + -12.925939559936523 + ], + [ + "▁Shade", + -12.926020622253418 + ], + [ + "▁Taj", + -12.926060676574707 + ], + [ + "950", + -12.926077842712402 + ], + [ + "boiled", + -12.92609977722168 + ], + [ + "▁mixes", + -12.926130294799805 + ], + [ + "betroffene", + -12.926156044006348 + ], + [ + "▁continuation", + -12.926169395446777 + ], + [ + "▁begleitet", + -12.926226615905762 + ], + [ + "▁numerical", + -12.926281929016113 + ], + [ + "▁(2013)", + -12.92630386352539 + ], + [ + "▁nourish", + -12.926399230957031 + ], + [ + "oricar", + -12.926485061645508 + ], + [ + "focus", + -12.926486015319824 + ], + [ + "▁Crazy", + -12.926651000976562 + ], + [ + "▁ascend", + -12.926671028137207 + ], + [ + "▁vinde", + -12.926855087280273 + ], + [ + "roar", + -12.926874160766602 + ], + [ + "Vac", + -12.926929473876953 + ], + [ + "▁Zuschauer", + -12.927068710327148 + ], + [ + "izeze", + -12.927179336547852 + ], + [ + "▁Mindest", + -12.92721939086914 + ], + [ + "lingual", + -12.927229881286621 + ], + [ + "▁violet", + -12.927264213562012 + ], + [ + "▁Opfer", + -12.927299499511719 + ], + [ + "ARS", + -12.927431106567383 + ], + [ + "4.7", + -12.92744255065918 + ], + [ + "millennial", + -12.927492141723633 + ], + [ + "▁striv", + -12.927639961242676 + ], + [ + "▁bishop", + -12.927680015563965 + ], + [ + "▁Durham", + -12.927708625793457 + ], + [ + "opathic", + -12.927817344665527 + ], + [ + "Where", + -12.927999496459961 + ], + [ + "▁Rider", + -12.928030014038086 + ], + [ + "▁Reid", + -12.928030967712402 + ], + [ + "stumbled", + -12.928156852722168 + ], + [ + "deep", + -12.92827320098877 + ], + [ + "▁11:00", + -12.928340911865234 + ], + [ + "▁Essex", + -12.928380966186523 + ], + [ + "▁Analyst", + -12.928397178649902 + ], + [ + "feel", + -12.928546905517578 + ], + [ + "▁rave", + -12.928601264953613 + ], + [ + "▁Eddie", + -12.928631782531738 + ], + [ + "▁communiqué", + -12.928756713867188 + ], + [ + "[/", + -12.928791046142578 + ], + [ + "▁Tho", + -12.929011344909668 + ], + [ + "ffentlichkeit", + -12.929019927978516 + ], + [ + "instrument", + -12.929126739501953 + ], + [ + "▁metropolitan", + -12.929179191589355 + ], + [ + "▁experienţ", + -12.929181098937988 + ], + [ + "East", + -12.929198265075684 + ], + [ + "Compared", + -12.929434776306152 + ], + [ + "worn", + -12.929484367370605 + ], + [ + "berufliche", + -12.92966365814209 + ], + [ + "▁Umstände", + -12.929710388183594 + ], + [ + "individuellen", + -12.929901123046875 + ], + [ + "siehe", + -12.929912567138672 + ], + [ + "▁sfarsit", + -12.929969787597656 + ], + [ + "▁Strength", + -12.929999351501465 + ], + [ + "▁prejudice", + -12.930024147033691 + ], + [ + "▁shutdown", + -12.930159568786621 + ], + [ + "chatting", + -12.93022346496582 + ], + [ + "▁Gerne", + -12.930227279663086 + ], + [ + "▁Yum", + -12.930305480957031 + ], + [ + "▁coastline", + -12.930387496948242 + ], + [ + "▁headboard", + -12.930623054504395 + ], + [ + "▁politische", + -12.930768966674805 + ], + [ + "Sub", + -12.930838584899902 + ], + [ + "▁Henderson", + 
-12.930870056152344 + ], + [ + "▁astonishing", + -12.930870056152344 + ], + [ + "▁Dresden", + -12.930871963500977 + ], + [ + "▁strawberry", + -12.93088436126709 + ], + [ + "prenez", + -12.930889129638672 + ], + [ + "▁Monaco", + -12.930912971496582 + ], + [ + "▁empowered", + -12.930953025817871 + ], + [ + "fäl", + -12.93109130859375 + ], + [ + "▁creier", + -12.931120872497559 + ], + [ + "▁Equ", + -12.931300163269043 + ], + [ + "▁Selling", + -12.931379318237305 + ], + [ + "▁$35", + -12.931483268737793 + ], + [ + "konto", + -12.931503295898438 + ], + [ + "▁Procedure", + -12.931715965270996 + ], + [ + "▁reduziert", + -12.931715965270996 + ], + [ + "▁royalty", + -12.931740760803223 + ], + [ + "wyn", + -12.931756019592285 + ], + [ + "▁Unfall", + -12.932141304016113 + ], + [ + "NAT", + -12.932161331176758 + ], + [ + "▁grafic", + -12.93251895904541 + ], + [ + "▁Collective", + -12.932563781738281 + ], + [ + "▁Computing", + -12.932564735412598 + ], + [ + "▁Established", + -12.932594299316406 + ], + [ + "▁zest", + -12.932598114013672 + ], + [ + "venez", + -12.932611465454102 + ], + [ + "follow", + -12.9326171875 + ], + [ + "▁Motivation", + -12.932640075683594 + ], + [ + "▁dictator", + -12.932755470275879 + ], + [ + "whichever", + -12.93281078338623 + ], + [ + "▁întâmpl", + -12.93293285369873 + ], + [ + "Flüchtling", + -12.932987213134766 + ], + [ + "EMI", + -12.933015823364258 + ], + [ + "404", + -12.933019638061523 + ], + [ + "ICK", + -12.93302059173584 + ], + [ + "emplacement", + -12.933191299438477 + ], + [ + "complete", + -12.933349609375 + ], + [ + "advising", + -12.933412551879883 + ], + [ + "▁Administrative", + -12.933481216430664 + ], + [ + "▁deviation", + -12.933496475219727 + ], + [ + "▁experienț", + -12.933500289916992 + ], + [ + "lethor", + -12.933996200561523 + ], + [ + "▁compress", + -12.934081077575684 + ], + [ + "rival", + -12.934173583984375 + ], + [ + "reprendre", + -12.934186935424805 + ], + [ + "ugi", + -12.934266090393066 + ], + [ + "▁Invitation", + -12.934267044067383 + ], + [ + "▁retina", + -12.934332847595215 + ], + [ + "▁farther", + -12.934335708618164 + ], + [ + "▁fenêtre", + -12.934799194335938 + ], + [ + "6-7", + -12.934815406799316 + ], + [ + "zhou", + -12.934834480285645 + ], + [ + "▁Piano", + -12.934840202331543 + ], + [ + "▁Congrats", + -12.935114860534668 + ], + [ + "▁Configur", + -12.935131072998047 + ], + [ + "▁superficial", + -12.935179710388184 + ], + [ + "▁melting", + -12.935315132141113 + ], + [ + "▁raspunde", + -12.935626983642578 + ], + [ + "▁drip", + -12.93564224243164 + ], + [ + "östlich", + -12.9358491897583 + ], + [ + "189", + -12.935925483703613 + ], + [ + "▁Ludwig", + -12.935959815979004 + ], + [ + "▁keto", + -12.935985565185547 + ], + [ + "▁Bogdan", + -12.936013221740723 + ], + [ + "▁contracted", + -12.936029434204102 + ], + [ + "▁revive", + -12.936100006103516 + ], + [ + "▁cristal", + -12.936232566833496 + ], + [ + "▁mailbox", + -12.936257362365723 + ], + [ + "președintele", + -12.936559677124023 + ], + [ + "▁seekers", + -12.936627388000488 + ], + [ + "func", + -12.936904907226562 + ], + [ + "▁Markus", + -12.93691349029541 + ], + [ + "Unter", + -12.936923027038574 + ], + [ + "▁übertragen", + -12.937003135681152 + ], + [ + "▁adaptive", + -12.937024116516113 + ], + [ + "caster", + -12.937051773071289 + ], + [ + "▁geek", + -12.937164306640625 + ], + [ + "▁réservation", + -12.937236785888672 + ], + [ + "▁irritation", + -12.937240600585938 + ], + [ + "▁HDMI", + -12.937346458435059 + ], + [ + "Seeing", + -12.937485694885254 + ], + [ + "▁genul", + 
-12.937569618225098 + ], + [ + "▁catastrophe", + -12.937662124633789 + ], + [ + "▁Tweet", + -12.937665939331055 + ], + [ + "TZ", + -12.937729835510254 + ], + [ + "▁credible", + -12.937946319580078 + ], + [ + "▁cobor", + -12.938064575195312 + ], + [ + "▁realizeaz", + -12.938159942626953 + ], + [ + "journal", + -12.938274383544922 + ], + [ + "▁shaking", + -12.938532829284668 + ], + [ + "3-6", + -12.938572883605957 + ], + [ + "▁beneficiaz", + -12.938605308532715 + ], + [ + "▁Frankreich", + -12.938633918762207 + ], + [ + "committing", + -12.9386568069458 + ], + [ + "AMS", + -12.938835144042969 + ], + [ + "▁Feli", + -12.939007759094238 + ], + [ + "▁Producer", + -12.939023971557617 + ], + [ + "▁übrig", + -12.93940544128418 + ], + [ + "gemeinde", + -12.939593315124512 + ], + [ + "should", + -12.939799308776855 + ], + [ + "▁neurons", + -12.939799308776855 + ], + [ + "▁Agenda", + -12.939833641052246 + ], + [ + "▁hashtag", + -12.939896583557129 + ], + [ + "▁confortabil", + -12.939897537231445 + ], + [ + "520", + -12.940008163452148 + ], + [ + "bonded", + -12.940033912658691 + ], + [ + "▁următoare", + -12.940191268920898 + ], + [ + "▁volatile", + -12.940223693847656 + ], + [ + "infamous", + -12.940225601196289 + ], + [ + "seară", + -12.940229415893555 + ], + [ + "▁Sorge", + -12.940346717834473 + ], + [ + "▁Beiträge", + -12.940420150756836 + ], + [ + "▁îndeplin", + -12.940449714660645 + ], + [ + "gespräch", + -12.940649032592773 + ], + [ + "▁joueur", + -12.940701484680176 + ], + [ + "▁outsourcing", + -12.940701484680176 + ], + [ + "▁Guvernul", + -12.940814018249512 + ], + [ + "6-2", + -12.940818786621094 + ], + [ + "▁prioritize", + -12.941068649291992 + ], + [ + "▁duminică", + -12.941076278686523 + ], + [ + "▁resignation", + -12.941076278686523 + ], + [ + "▁Converter", + -12.941079139709473 + ], + [ + "hereby", + -12.941155433654785 + ], + [ + "▁stresses", + -12.941299438476562 + ], + [ + "▁brun", + -12.941415786743164 + ], + [ + "▁elev", + -12.941423416137695 + ], + [ + "▁Skip", + -12.941479682922363 + ], + [ + "540", + -12.941499710083008 + ], + [ + "TURE", + -12.941603660583496 + ], + [ + "▁Lynch", + -12.941635131835938 + ], + [ + "▁preveni", + -12.941643714904785 + ], + [ + "compatible", + -12.941692352294922 + ], + [ + "surveyed", + -12.941702842712402 + ], + [ + "▁Ausnahme", + -12.941713333129883 + ], + [ + "▁medicul", + -12.941812515258789 + ], + [ + "▁subtil", + -12.941865921020508 + ], + [ + "▁Quali", + -12.941890716552734 + ], + [ + "▁techno", + -12.941900253295898 + ], + [ + "presently", + -12.94193172454834 + ], + [ + "▁Müller", + -12.941934585571289 + ], + [ + "DIRECT", + -12.941937446594238 + ], + [ + "schuld", + -12.941944122314453 + ], + [ + "▁Bloomberg", + -12.941994667053223 + ], + [ + "feuer", + -12.942181587219238 + ], + [ + "▁Pharmacy", + -12.942270278930664 + ], + [ + "▁Schnitt", + -12.942301750183105 + ], + [ + "186", + -12.942333221435547 + ], + [ + "peaks", + -12.942355155944824 + ], + [ + "▁Gemeinsam", + -12.94235897064209 + ], + [ + "▁récemment", + -12.94235897064209 + ], + [ + "▁Pascal", + -12.942490577697754 + ], + [ + "filmed", + -12.942523956298828 + ], + [ + "RCA", + -12.942548751831055 + ], + [ + "▁virtuelle", + -12.942622184753418 + ], + [ + "▁dotat", + -12.942630767822266 + ], + [ + "logisch", + -12.942717552185059 + ], + [ + "▁Luck", + -12.943005561828613 + ], + [ + "cosy", + -12.943132400512695 + ], + [ + "▁Awareness", + -12.943216323852539 + ], + [ + "▁gesetzlich", + -12.943263053894043 + ], + [ + "padded", + -12.943306922912598 + ], + [ + "▁Lotus", + 
-12.943395614624023 + ], + [ + "urging", + -12.9434175491333 + ], + [ + "▁mushroom", + -12.943426132202148 + ], + [ + "▁adultes", + -12.943527221679688 + ], + [ + "▁Coca", + -12.943571090698242 + ], + [ + "▁recev", + -12.943586349487305 + ], + [ + "▁mantra", + -12.943610191345215 + ], + [ + "▁practise", + -12.943644523620605 + ], + [ + "▁acceler", + -12.943663597106934 + ], + [ + "bolster", + -12.943756103515625 + ], + [ + "▁compressed", + -12.943818092346191 + ], + [ + "TIN", + -12.943899154663086 + ], + [ + "▁aromatic", + -12.944236755371094 + ], + [ + "geleitet", + -12.944408416748047 + ], + [ + "▁fibr", + -12.944443702697754 + ], + [ + "exécut", + -12.94444751739502 + ], + [ + "▁unconscious", + -12.94456958770752 + ], + [ + "HAR", + -12.944607734680176 + ], + [ + "▁Gregory", + -12.944661140441895 + ], + [ + "▁Manila", + -12.944738388061523 + ], + [ + "ozitate", + -12.944756507873535 + ], + [ + "exemplary", + -12.944803237915039 + ], + [ + "éventuel", + -12.944906234741211 + ], + [ + "▁Craciun", + -12.944930076599121 + ], + [ + "▁tehnologii", + -12.944931030273438 + ], + [ + "▁Despre", + -12.945138931274414 + ], + [ + "▁1917", + -12.945141792297363 + ], + [ + "▁upfront", + -12.945146560668945 + ], + [ + "▁Iulia", + -12.945280075073242 + ], + [ + "▁erwähnt", + -12.945359230041504 + ], + [ + "▁magnesium", + -12.945359230041504 + ], + [ + "▁descriptive", + -12.94536304473877 + ], + [ + "▁consumul", + -12.945364952087402 + ], + [ + "▁10-15", + -12.945423126220703 + ], + [ + "▁erfüllen", + -12.945611953735352 + ], + [ + "gig", + -12.945657730102539 + ], + [ + "430", + -12.945765495300293 + ], + [ + "▁Migration", + -12.945789337158203 + ], + [ + "bră", + -12.94579029083252 + ], + [ + "▁réforme", + -12.945863723754883 + ], + [ + "▁york", + -12.94610595703125 + ], + [ + "dritten", + -12.946109771728516 + ], + [ + "cumva", + -12.946182250976562 + ], + [ + "▁Alumni", + -12.946218490600586 + ], + [ + "▁Ceramic", + -12.946222305297852 + ], + [ + "▁rappelle", + -12.946236610412598 + ], + [ + "▁pianist", + -12.946248054504395 + ], + [ + "twisted", + -12.946306228637695 + ], + [ + "earned", + -12.946432113647461 + ], + [ + "▁Hose", + -12.946514129638672 + ], + [ + "156", + -12.946610450744629 + ], + [ + "▁Salmon", + -12.946687698364258 + ], + [ + "Level", + -12.946913719177246 + ], + [ + "▁swirl", + -12.947052001953125 + ], + [ + "erfahrung", + -12.947061538696289 + ], + [ + "▁liabilities", + -12.947078704833984 + ], + [ + "praxis", + -12.9470853805542 + ], + [ + "IPO", + -12.947089195251465 + ], + [ + "▁screaming", + -12.947092056274414 + ], + [ + "emphasized", + -12.947200775146484 + ], + [ + "DEA", + -12.947260856628418 + ], + [ + "▁dermatolog", + -12.947351455688477 + ], + [ + "▁pacate", + -12.947498321533203 + ], + [ + "▁ansamblu", + -12.947507858276367 + ], + [ + "▁beteiligt", + -12.947509765625 + ], + [ + "▁Needles", + -12.947574615478516 + ], + [ + "▁organisiert", + -12.947607040405273 + ], + [ + "Pacific", + -12.947639465332031 + ], + [ + "actual", + -12.947823524475098 + ], + [ + "prindere", + -12.94801139831543 + ], + [ + "▁Indoor", + -12.948348045349121 + ], + [ + "▁Gewalt", + -12.948431015014648 + ], + [ + "▁rezid", + -12.948507308959961 + ], + [ + "censor", + -12.948522567749023 + ], + [ + "▁unlawful", + -12.94882869720459 + ], + [ + "▁Explain", + -12.948873519897461 + ], + [ + "▁Flame", + -12.948897361755371 + ], + [ + "▁brachte", + -12.948941230773926 + ], + [ + "▁Mustang", + -12.94899845123291 + ], + [ + "ectomy", + -12.949044227600098 + ], + [ + "▁deliberate", + -12.949064254760742 + 
], + [ + "▁sparkle", + -12.949225425720215 + ], + [ + "▁inchis", + -12.94926929473877 + ], + [ + "▁Cristian", + -12.949289321899414 + ], + [ + "▁facture", + -12.949291229248047 + ], + [ + "▁Grundstück", + -12.949292182922363 + ], + [ + "außerhalb", + -12.949300765991211 + ], + [ + "coast", + -12.949321746826172 + ], + [ + "anilor", + -12.949396133422852 + ], + [ + "255", + -12.94952392578125 + ], + [ + "nterdisciplinary", + -12.949576377868652 + ], + [ + "▁Isabel", + -12.949655532836914 + ], + [ + "▁Städte", + -12.949701309204102 + ], + [ + "▁cicl", + -12.949837684631348 + ], + [ + "▁Zeug", + -12.949905395507812 + ], + [ + "▁Muskel", + -12.949951171875 + ], + [ + "▁indirectly", + -12.950051307678223 + ], + [ + "▁Vorbereitung", + -12.950093269348145 + ], + [ + "MMA", + -12.95012378692627 + ], + [ + "▁pudding", + -12.950197219848633 + ], + [ + "rax", + -12.950389862060547 + ], + [ + "▁Stimmung", + -12.95052433013916 + ], + [ + "▁hierarchy", + -12.95052433013916 + ], + [ + "partie", + -12.950597763061523 + ], + [ + "▁elevate", + -12.950685501098633 + ], + [ + "▁Persian", + -12.950690269470215 + ], + [ + "forensic", + -12.95077896118164 + ], + [ + "Become", + -12.950854301452637 + ], + [ + "leicht", + -12.9508695602417 + ], + [ + "▁staging", + -12.950942039489746 + ], + [ + "▁fühlt", + -12.950965881347656 + ], + [ + "fenster", + -12.950979232788086 + ], + [ + "▁unbelievable", + -12.951089859008789 + ], + [ + "„", + -12.951260566711426 + ], + [ + "▁Guatemala", + -12.951387405395508 + ], + [ + "LET", + -12.95141315460205 + ], + [ + "▁buff", + -12.951454162597656 + ], + [ + "▁Primul", + -12.951626777648926 + ], + [ + "▁mainland", + -12.951702117919922 + ], + [ + "campus", + -12.951923370361328 + ], + [ + "▁gefällt", + -12.952075958251953 + ], + [ + "BAN", + -12.952153205871582 + ], + [ + "finish", + -12.952229499816895 + ], + [ + "accustomed", + -12.952251434326172 + ], + [ + "▁Businesses", + -12.95234203338623 + ], + [ + "▁întreb", + -12.95239543914795 + ], + [ + "▁recomandă", + -12.952425956726074 + ], + [ + "▁pellet", + -12.952474594116211 + ], + [ + "▁GST", + -12.952507972717285 + ], + [ + "SEA", + -12.952601432800293 + ], + [ + "▁categorie", + -12.952631950378418 + ], + [ + "▁convainc", + -12.95268440246582 + ], + [ + "▁considéré", + -12.952739715576172 + ], + [ + "rois", + -12.952853202819824 + ], + [ + "▁thrust", + -12.952898979187012 + ], + [ + "ijk", + -12.953001022338867 + ], + [ + "gefüllt", + -12.953118324279785 + ], + [ + "▁situatii", + -12.953327178955078 + ], + [ + "▁Jacksonville", + -12.95337200164795 + ], + [ + "▁bakery", + -12.953473091125488 + ], + [ + "▁Accident", + -12.953554153442383 + ], + [ + "▁urmeaza", + -12.953572273254395 + ], + [ + "▁crib", + -12.953593254089355 + ], + [ + "getroffen", + -12.953707695007324 + ], + [ + "Based", + -12.953877449035645 + ], + [ + "Including", + -12.95398235321045 + ], + [ + "▁Morocco", + -12.95398235321045 + ], + [ + "▁casserole", + -12.95398235321045 + ], + [ + "▁enquiry", + -12.953983306884766 + ], + [ + "▁pahar", + -12.954017639160156 + ], + [ + "▁Unternehmer", + -12.954025268554688 + ], + [ + "électro", + -12.954068183898926 + ], + [ + "Marie", + -12.95413589477539 + ], + [ + "▁Sno", + -12.954153060913086 + ], + [ + "▁prostate", + -12.954168319702148 + ], + [ + "▁Wallace", + -12.95426082611084 + ], + [ + "empre", + -12.954402923583984 + ], + [ + "▁Multumesc", + -12.954415321350098 + ], + [ + "White", + -12.954675674438477 + ], + [ + "brief", + -12.954751014709473 + ], + [ + "▁kitten", + -12.954751014709473 + ], + [ + "füh", + 
-12.954780578613281 + ], + [ + "▁mankind", + -12.954821586608887 + ], + [ + "ENE", + -12.95483112335205 + ], + [ + "▁Ethics", + -12.954848289489746 + ], + [ + "▁Realty", + -12.954946517944336 + ], + [ + "▁Emerg", + -12.954988479614258 + ], + [ + "7-8", + -12.955055236816406 + ], + [ + "museum", + -12.955096244812012 + ], + [ + "BRE", + -12.95518970489502 + ], + [ + "▁kilometri", + -12.955282211303711 + ], + [ + "oyaume", + -12.955286026000977 + ], + [ + "▁Cambodia", + -12.955288887023926 + ], + [ + "▁bruit", + -12.955304145812988 + ], + [ + "▁sépar", + -12.955334663391113 + ], + [ + "mastered", + -12.9554443359375 + ], + [ + "shake", + -12.955608367919922 + ], + [ + "▁liaison", + -12.955718994140625 + ], + [ + "▁Boulder", + -12.955719947814941 + ], + [ + "▁tortilla", + -12.955720901489258 + ], + [ + "▁Fokus", + -12.955731391906738 + ], + [ + "▁Blair", + -12.95573902130127 + ], + [ + "▁disturbance", + -12.955775260925293 + ], + [ + "geladen", + -12.955843925476074 + ], + [ + "▁sunscreen", + -12.955886840820312 + ], + [ + "▁reuș", + -12.955896377563477 + ], + [ + "▁Braun", + -12.956155776977539 + ], + [ + "▁existente", + -12.956157684326172 + ], + [ + "stift", + -12.956242561340332 + ], + [ + "▁preot", + -12.956387519836426 + ], + [ + "▁doved", + -12.956445693969727 + ], + [ + "sexual", + -12.956488609313965 + ], + [ + "meanwhile", + -12.956583976745605 + ], + [ + "▁legislature", + -12.956583976745605 + ], + [ + "▁vermeiden", + -12.956583976745605 + ], + [ + "▁inequality", + -12.95687484741211 + ], + [ + "▁turc", + -12.956881523132324 + ], + [ + "ви", + -12.95698070526123 + ], + [ + "▁Kontrolle", + -12.95702075958252 + ], + [ + "▁Ursache", + -12.95704174041748 + ], + [ + "▁confess", + -12.95704174041748 + ], + [ + "▁poetic", + -12.957109451293945 + ], + [ + "attention", + -12.957236289978027 + ], + [ + "textured", + -12.957386016845703 + ], + [ + "GES", + -12.957586288452148 + ], + [ + "6-4", + -12.957637786865234 + ], + [ + "Ray", + -12.957696914672852 + ], + [ + "chromat", + -12.957745552062988 + ], + [ + "▁insightful", + -12.957775115966797 + ], + [ + "▁Navigation", + -12.957887649536133 + ], + [ + "▁destiny", + -12.957887649536133 + ], + [ + "▁ergeben", + -12.957892417907715 + ], + [ + "▁versteh", + -12.958090782165527 + ], + [ + "301", + -12.958209037780762 + ], + [ + "▁Exterior", + -12.958321571350098 + ], + [ + "église", + -12.958322525024414 + ], + [ + "▁Failure", + -12.958322525024414 + ], + [ + "▁Patricia", + -12.958324432373047 + ], + [ + "▁geschützt", + -12.958328247070312 + ], + [ + "intrarea", + -12.95833969116211 + ], + [ + "▁Forward", + -12.958368301391602 + ], + [ + "▁Portrait", + -12.95844841003418 + ], + [ + "▁enregistré", + -12.958480834960938 + ], + [ + "▁wagon", + -12.958620071411133 + ], + [ + "stealing", + -12.958879470825195 + ], + [ + "▁Numero", + -12.958880424499512 + ], + [ + "▁tradui", + -12.958986282348633 + ], + [ + "▁klassische", + -12.959033966064453 + ], + [ + "▁profitieren", + -12.959043502807617 + ], + [ + "▁laboratories", + -12.95919132232666 + ], + [ + "▁reconnaissance", + -12.95919132232666 + ], + [ + "ку", + -12.959314346313477 + ], + [ + "▁Petersburg", + -12.959359169006348 + ], + [ + "▁fertility", + -12.959421157836914 + ], + [ + "▁Understand", + -12.959516525268555 + ], + [ + "dehors", + -12.959746360778809 + ], + [ + "▁Knox", + -12.959762573242188 + ], + [ + "software", + -12.959797859191895 + ], + [ + "▁Celebration", + -12.959823608398438 + ], + [ + "4.6", + -12.959897994995117 + ], + [ + "quino", + -12.959930419921875 + ], + [ + "▁endeavour", + 
-12.960073471069336 + ], + [ + "▁temptation", + -12.960136413574219 + ], + [ + "▁Registry", + -12.96035385131836 + ], + [ + "IMP", + -12.960502624511719 + ], + [ + "bedingt", + -12.960625648498535 + ], + [ + "▁$60", + -12.960846900939941 + ], + [ + "▁Kriterien", + -12.96093463897705 + ], + [ + "▁strawberries", + -12.960943222045898 + ], + [ + "▁conspiracy", + -12.96094799041748 + ], + [ + "▁pouch", + -12.960976600646973 + ], + [ + "▁Alexandria", + -12.961017608642578 + ], + [ + "▁Mick", + -12.961102485656738 + ], + [ + "extra", + -12.961114883422852 + ], + [ + "▁Operator", + -12.961151123046875 + ], + [ + "enduring", + -12.96132755279541 + ], + [ + "▁smash", + -12.961359024047852 + ], + [ + "Euro", + -12.961360931396484 + ], + [ + "▁Nouvelle", + -12.961370468139648 + ], + [ + "▁Raspberry", + -12.961370468139648 + ], + [ + "▁präsentieren", + -12.961380004882812 + ], + [ + "▁electrician", + -12.961404800415039 + ], + [ + "▁cheerful", + -12.961472511291504 + ], + [ + "▁chargé", + -12.961508750915527 + ], + [ + "▁Diskussion", + -12.961511611938477 + ], + [ + "▁surpass", + -12.961604118347168 + ], + [ + "▁Acces", + -12.961701393127441 + ], + [ + "tausend", + -12.961771011352539 + ], + [ + "▁vigorous", + -12.961808204650879 + ], + [ + "▁tava", + -12.961810111999512 + ], + [ + "CHO", + -12.96193790435791 + ], + [ + "▁1951", + -12.961941719055176 + ], + [ + "▁Umsatz", + -12.962019920349121 + ], + [ + "▁slavery", + -12.962055206298828 + ], + [ + "travel", + -12.962294578552246 + ], + [ + "▁correspondent", + -12.962297439575195 + ], + [ + "▁$150", + -12.962307929992676 + ], + [ + "▁stärker", + -12.962594985961914 + ], + [ + "Alb", + -12.96264362335205 + ], + [ + "▁Lopez", + -12.962682723999023 + ], + [ + "▁longueur", + -12.962767601013184 + ], + [ + "▁successive", + -12.962772369384766 + ], + [ + "▁(2015)", + -12.96278190612793 + ], + [ + "teig", + -12.962790489196777 + ], + [ + "custom", + -12.962944984436035 + ], + [ + "TIM", + -12.963099479675293 + ], + [ + "▁Escape", + -12.963174819946289 + ], + [ + "▁Sekunden", + -12.963349342346191 + ], + [ + "tiré", + -12.963444709777832 + ], + [ + "▁chantier", + -12.963489532470703 + ], + [ + "▁saturated", + -12.963555335998535 + ], + [ + "▁confrontation", + -12.963804244995117 + ], + [ + "▁biography", + -12.963805198669434 + ], + [ + "zuerst", + -12.9639892578125 + ], + [ + "▁rencontré", + -12.963991165161133 + ], + [ + "▁harmless", + -12.96412181854248 + ], + [ + "Branche", + -12.964139938354492 + ], + [ + "▁QR", + -12.964380264282227 + ], + [ + "▁Ereignis", + -12.964430809020996 + ], + [ + "▁verkaufen", + -12.96444320678711 + ], + [ + "0:00", + -12.96451187133789 + ], + [ + "Association", + -12.96469783782959 + ], + [ + "▁Santiago", + -12.964865684509277 + ], + [ + "Control", + -12.964993476867676 + ], + [ + "▁Angriff", + -12.9650297164917 + ], + [ + "lase", + -12.96505069732666 + ], + [ + "▁sfaturi", + -12.965224266052246 + ], + [ + "▁Comprehensive", + -12.965304374694824 + ], + [ + "▁Shepherd", + -12.965304374694824 + ], + [ + "▁exponential", + -12.965304374694824 + ], + [ + "▁penetration", + -12.965304374694824 + ], + [ + "▁comble", + -12.965394973754883 + ], + [ + "ionar", + -12.965557098388672 + ], + [ + "slept", + -12.965563774108887 + ], + [ + "▁Spice", + -12.965633392333984 + ], + [ + "mAh", + -12.965688705444336 + ], + [ + "▁Vertreter", + -12.965747833251953 + ], + [ + "fehler", + -12.965752601623535 + ], + [ + "▁Scroll", + -12.96599292755127 + ], + [ + "▁WARRANT", + -12.966179847717285 + ], + [ + "▁minimise", + -12.966326713562012 + ], + [ + 
"▁Dept", + -12.966474533081055 + ], + [ + "▁urinar", + -12.96661376953125 + ], + [ + "établir", + -12.966619491577148 + ], + [ + "verhältnis", + -12.966713905334473 + ], + [ + "▁glowing", + -12.966979026794434 + ], + [ + "kulturelle", + -12.966984748840332 + ], + [ + "▁Pediatric", + -12.967057228088379 + ], + [ + "▁inconvenience", + -12.967057228088379 + ], + [ + "Antoine", + -12.967121124267578 + ], + [ + "▁Heck", + -12.967164993286133 + ], + [ + "▁couches", + -12.967265129089355 + ], + [ + "▁1938", + -12.967331886291504 + ], + [ + "maybe", + -12.967333793640137 + ], + [ + "ETA", + -12.9673433303833 + ], + [ + "▁solaire", + -12.96748161315918 + ], + [ + "▁Zürich", + -12.967495918273926 + ], + [ + "computer", + -12.967545509338379 + ], + [ + "milk", + -12.96756362915039 + ], + [ + "он", + -12.967585563659668 + ], + [ + "modalitate", + -12.967608451843262 + ], + [ + "spanning", + -12.967655181884766 + ], + [ + "▁Crypto", + -12.96774959564209 + ], + [ + "▁Spotify", + -12.967935562133789 + ], + [ + "mycin", + -12.967944145202637 + ], + [ + "▁similarities", + -12.96811294555664 + ], + [ + "▁eclipse", + -12.968377113342285 + ], + [ + "Map", + -12.968610763549805 + ], + [ + "double", + -12.96861743927002 + ], + [ + "corporate", + -12.968734741210938 + ], + [ + "▁Hindi", + -12.968853950500488 + ], + [ + "battling", + -12.968866348266602 + ], + [ + "▁habituel", + -12.969098091125488 + ], + [ + "▁Transition", + -12.969196319580078 + ], + [ + "▁luptă", + -12.96920394897461 + ], + [ + "▁trainee", + -12.969219207763672 + ], + [ + "LIS", + -12.96922492980957 + ], + [ + "▁Vatican", + -12.969254493713379 + ], + [ + "Archived", + -12.9692964553833 + ], + [ + "Connect", + -12.969305038452148 + ], + [ + "▁prealabil", + -12.969307899475098 + ], + [ + "▁Chambre", + -12.969327926635742 + ], + [ + "stuhl", + -12.969440460205078 + ], + [ + "▁arrivé", + -12.969557762145996 + ], + [ + "▁Urteil", + -12.969575881958008 + ], + [ + "▁scrutiny", + -12.969818115234375 + ], + [ + "▁memoir", + -12.969854354858398 + ], + [ + "▁innovant", + -12.9699068069458 + ], + [ + "▁sublime", + -12.969943046569824 + ], + [ + "children", + -12.970004081726074 + ], + [ + "▁Handwerk", + -12.970056533813477 + ], + [ + "▁campuses", + -12.970268249511719 + ], + [ + "▁durabil", + -12.970502853393555 + ], + [ + "▁immersive", + -12.970632553100586 + ], + [ + "▁Magnet", + -12.970732688903809 + ], + [ + "läufe", + -12.970808029174805 + ], + [ + "▁Techno", + -12.970837593078613 + ], + [ + "MAP", + -12.9710693359375 + ], + [ + "7.2", + -12.971145629882812 + ], + [ + "▁Schwimm", + -12.971181869506836 + ], + [ + "BOOK", + -12.971186637878418 + ], + [ + "188", + -12.971441268920898 + ], + [ + "▁Supervisor", + -12.971498489379883 + ], + [ + "prévue", + -12.971691131591797 + ], + [ + "needed", + -12.971813201904297 + ], + [ + "▁creditors", + -12.971822738647461 + ], + [ + "▁brin", + -12.971837043762207 + ], + [ + "▁Neck", + -12.971900939941406 + ], + [ + "▁Salut", + -12.971988677978516 + ], + [ + "▁despair", + -12.972105979919434 + ], + [ + "▁Sauce", + -12.972261428833008 + ], + [ + "▁Westminster", + -12.972335815429688 + ], + [ + "▁langfristig", + -12.972335815429688 + ], + [ + "▁northeast", + -12.972365379333496 + ], + [ + "▁încercat", + -12.972399711608887 + ], + [ + "▁nausea", + -12.972408294677734 + ], + [ + "▁Paypal", + -12.972440719604492 + ], + [ + "▁Arrow", + -12.972469329833984 + ], + [ + "▁Travis", + -12.972633361816406 + ], + [ + "(2009)", + -12.972713470458984 + ], + [ + "▁Rising", + -12.972719192504883 + ], + [ + "termes", + 
+    -12.973097801208496
+  ],
+  [
+    "Australie",
+    -12.973154067993164
+  ],
+  [
+    "▁scarf",
+    -12.973187446594238
+  ],
+  [
+    "klassischen",
+    -12.97337818145752
+  ],
[... hunk continues: several thousand more added lines of ["piece", log-probability] vocabulary entries in the same format, with scores descending from about -12.973 to -13.103 and pieces spanning English, German, French, and Romanian ...]
+ ], + [ + "▁Raleigh", + -13.102670669555664 + ], + [ + "DOM", + -13.102702140808105 + ], + [ + "▁iesit", + -13.102806091308594 + ], + [ + "▁anuntat", + -13.102810859680176 + ], + [ + "▁automatiquement", + -13.102901458740234 + ], + [ + "▁proliferation", + -13.103130340576172 + ], + [ + "▁Maroc", + -13.103156089782715 + ], + [ + "▁prezenţ", + -13.10323429107666 + ], + [ + "▁Filipino", + -13.103296279907227 + ], + [ + "▁Traian", + -13.103351593017578 + ], + [ + "▁swimmer", + -13.10356616973877 + ], + [ + "▁Slovenia", + -13.103632926940918 + ], + [ + "phobia", + -13.103724479675293 + ], + [ + "curricular", + -13.103734016418457 + ], + [ + "jurnal", + -13.103825569152832 + ], + [ + "▁vorne", + -13.103870391845703 + ], + [ + "▁asuma", + -13.103875160217285 + ], + [ + "defended", + -13.104104995727539 + ], + [ + "▁imminent", + -13.104140281677246 + ], + [ + "favored", + -13.10417366027832 + ], + [ + "▁innovator", + -13.104179382324219 + ], + [ + "▁Salzburg", + -13.104289054870605 + ], + [ + "5.4", + -13.104452133178711 + ], + [ + "Safe", + -13.104597091674805 + ], + [ + "▁inteleg", + -13.104744911193848 + ], + [ + "▁charisma", + -13.104781150817871 + ], + [ + "nature", + -13.104784965515137 + ], + [ + "4.8", + -13.104942321777344 + ], + [ + "argues", + -13.105104446411133 + ], + [ + "▁dimensiune", + -13.105142593383789 + ], + [ + "▁subdivision", + -13.105142593383789 + ], + [ + "▁embarrassing", + -13.105144500732422 + ], + [ + "▁confuse", + -13.105207443237305 + ], + [ + "DIC", + -13.105460166931152 + ], + [ + "rubrique", + -13.10549545288086 + ], + [ + "dépendance", + -13.105598449707031 + ], + [ + "INCLUD", + -13.10565185546875 + ], + [ + "▁Griffin", + -13.10574722290039 + ], + [ + "157", + -13.105751037597656 + ], + [ + "▁revamp", + -13.105839729309082 + ], + [ + "▁umgehen", + -13.10595989227295 + ], + [ + "▁mențin", + -13.106231689453125 + ], + [ + "▁1937", + -13.106695175170898 + ], + [ + "eklagte", + -13.106766700744629 + ], + [ + "▁clientèle", + -13.106801986694336 + ], + [ + "▁campsite", + -13.10708999633789 + ], + [ + "▁florist", + -13.107144355773926 + ], + [ + "▁Ferguson", + -13.107159614562988 + ], + [ + "▁demolition", + -13.107160568237305 + ], + [ + "▁McCain", + -13.107254981994629 + ], + [ + "▁reckon", + -13.10733413696289 + ], + [ + "striped", + -13.107414245605469 + ], + [ + "▁sonore", + -13.107481002807617 + ], + [ + "migrated", + -13.107548713684082 + ], + [ + "▁fluorescent", + -13.107664108276367 + ], + [ + "▁Colegi", + -13.107762336730957 + ], + [ + "ianu", + -13.107860565185547 + ], + [ + "cruising", + -13.107882499694824 + ], + [ + "LINK", + -13.107965469360352 + ], + [ + "▁Cutting", + -13.108001708984375 + ], + [ + "ABILITY", + -13.108168601989746 + ], + [ + "▁Categories", + -13.108168601989746 + ], + [ + "▁erhoben", + -13.108168601989746 + ], + [ + "▁Cocktail", + -13.108169555664062 + ], + [ + "▁Generator", + -13.108177185058594 + ], + [ + "▁gesucht", + -13.108186721801758 + ], + [ + "▁telescope", + -13.10818862915039 + ], + [ + "KET", + -13.108192443847656 + ], + [ + "▁hilfreich", + -13.108192443847656 + ], + [ + "▁beneficiary", + -13.108585357666016 + ], + [ + "▁Winston", + -13.108636856079102 + ], + [ + "Auswirkungen", + -13.108675956726074 + ], + [ + "portrayed", + -13.108705520629883 + ], + [ + "▁Aspekte", + -13.108743667602539 + ], + [ + "ffected", + -13.108901023864746 + ], + [ + "eutic", + -13.108905792236328 + ], + [ + "International", + -13.109021186828613 + ], + [ + "attente", + -13.109078407287598 + ], + [ + "mentioning", + -13.109119415283203 + ], + [ + 
"launch", + -13.109129905700684 + ], + [ + "▁EURO", + -13.109152793884277 + ], + [ + "▁Fraser", + -13.109344482421875 + ], + [ + "▁Johannes", + -13.109408378601074 + ], + [ + "▁felicit", + -13.109477043151855 + ], + [ + "▁plâng", + -13.109522819519043 + ], + [ + "izant", + -13.10971736907959 + ], + [ + "▁reţe", + -13.109846115112305 + ], + [ + "Mech", + -13.109954833984375 + ], + [ + "▁algebra", + -13.110193252563477 + ], + [ + "▁surgeries", + -13.110257148742676 + ], + [ + "▁semifinal", + -13.110262870788574 + ], + [ + "▁intimidating", + -13.110288619995117 + ], + [ + "▁exkl", + -13.110604286193848 + ], + [ + "asigurarea", + -13.110918998718262 + ], + [ + "Tek", + -13.111136436462402 + ], + [ + "▁Einladung", + -13.111205101013184 + ], + [ + "▁similaire", + -13.111205101013184 + ], + [ + "▁bebelus", + -13.111221313476562 + ], + [ + "▁déclin", + -13.111400604248047 + ], + [ + "▁Console", + -13.111495018005371 + ], + [ + "RET", + -13.111573219299316 + ], + [ + "appli", + -13.111586570739746 + ], + [ + "45%", + -13.111663818359375 + ], + [ + "Evenimentul", + -13.111811637878418 + ], + [ + "sincerely", + -13.111812591552734 + ], + [ + "sammlung", + -13.112098693847656 + ], + [ + "Amérique", + -13.112220764160156 + ], + [ + "▁1919", + -13.112326622009277 + ], + [ + "regulation", + -13.112367630004883 + ], + [ + "gebäude", + -13.112726211547852 + ], + [ + "▁Perspektive", + -13.112726211547852 + ], + [ + "Espagne", + -13.112744331359863 + ], + [ + "▁Underground", + -13.11283016204834 + ], + [ + "secret", + -13.112833976745605 + ], + [ + "▁Aussicht", + -13.112874031066895 + ], + [ + "Photo", + -13.112977027893066 + ], + [ + "▁Brust", + -13.113144874572754 + ], + [ + "▁Sustainability", + -13.11323356628418 + ], + [ + "▁clădiri", + -13.11323356628418 + ], + [ + "▁librarian", + -13.11323356628418 + ], + [ + "▁HBO", + -13.113235473632812 + ], + [ + "▁Parallel", + -13.113240242004395 + ], + [ + "▁shimmer", + -13.113283157348633 + ], + [ + "▁schlicht", + -13.113292694091797 + ], + [ + "▁anticipat", + -13.113311767578125 + ], + [ + "▁foolish", + -13.11335563659668 + ], + [ + "▁Ability", + -13.11347484588623 + ], + [ + "▁ceremoni", + -13.11358642578125 + ], + [ + "▁Ablauf", + -13.11359977722168 + ], + [ + "icrobial", + -13.113606452941895 + ], + [ + "▁actiuni", + -13.11362361907959 + ], + [ + "▁Wilhelm", + -13.113761901855469 + ], + [ + "▁nennen", + -13.113775253295898 + ], + [ + "▁botez", + -13.113832473754883 + ], + [ + "Alpes", + -13.113912582397461 + ], + [ + "▁libér", + -13.11392593383789 + ], + [ + "▁sneakers", + -13.114052772521973 + ], + [ + "geschafft", + -13.114252090454102 + ], + [ + "▁downstairs", + -13.114261627197266 + ], + [ + "▁wrench", + -13.114294052124023 + ], + [ + "▁erheblich", + -13.11442756652832 + ], + [ + "▁alimentar", + -13.114710807800293 + ], + [ + "▁suger", + -13.11474323272705 + ], + [ + "analysis", + -13.114883422851562 + ], + [ + "öhn", + -13.114891052246094 + ], + [ + "▁Nantes", + -13.114895820617676 + ], + [ + "▁Arbor", + -13.114899635314941 + ], + [ + "ooze", + -13.115150451660156 + ], + [ + "▁facade", + -13.115229606628418 + ], + [ + "▁MySQL", + -13.115266799926758 + ], + [ + "▁Salvador", + -13.115266799926758 + ], + [ + "▁Schlafzimmer", + -13.115279197692871 + ], + [ + "▁autentic", + -13.115320205688477 + ], + [ + "▁prezint", + -13.115348815917969 + ], + [ + "▁campground", + -13.115397453308105 + ], + [ + "Query", + -13.11540412902832 + ], + [ + "bekannt", + -13.115598678588867 + ], + [ + "arcinia", + -13.115632057189941 + ], + [ + "▁stunt", + -13.115825653076172 + ], 
+ [ + "▁informare", + -13.115830421447754 + ], + [ + "▁interzis", + -13.11584186553955 + ], + [ + "▁Burke", + -13.115995407104492 + ], + [ + "certified", + -13.11601734161377 + ], + [ + "▁clove", + -13.11605167388916 + ], + [ + "java", + -13.116271018981934 + ], + [ + "▁Vielfalt", + -13.116284370422363 + ], + [ + "gebung", + -13.116329193115234 + ], + [ + "▁9/11", + -13.116497993469238 + ], + [ + "▁disruptive", + -13.11650562286377 + ], + [ + "visual", + -13.116693496704102 + ], + [ + "▁anunţat", + -13.11679458618164 + ], + [ + "▁Plätze", + -13.116799354553223 + ], + [ + "▁reduceri", + -13.116920471191406 + ], + [ + "autorisation", + -13.116950035095215 + ], + [ + "▁ligament", + -13.11705207824707 + ], + [ + "▁învăța", + -13.117081642150879 + ], + [ + "läufig", + -13.117303848266602 + ], + [ + "▁Copenhagen", + -13.117303848266602 + ], + [ + "▁commodities", + -13.117303848266602 + ], + [ + "▁eindeutig", + -13.117313385009766 + ], + [ + "▁catheter", + -13.117321014404297 + ], + [ + "erklärung", + -13.117720603942871 + ], + [ + "▁intelectual", + -13.117814064025879 + ], + [ + "▁municipality", + -13.117891311645508 + ], + [ + "▁1936", + -13.11798095703125 + ], + [ + "rruption", + -13.118217468261719 + ], + [ + "▁Lafayette", + -13.118324279785156 + ], + [ + "▁berühmte", + -13.118324279785156 + ], + [ + "▁idylli", + -13.118325233459473 + ], + [ + "▁caldura", + -13.118447303771973 + ], + [ + "▁tablette", + -13.118535995483398 + ], + [ + "▁liquidity", + -13.118728637695312 + ], + [ + "NGOs", + -13.118885040283203 + ], + [ + "▁supliment", + -13.11889934539795 + ], + [ + "contact", + -13.119075775146484 + ], + [ + "lustig", + -13.119219779968262 + ], + [ + "▁watercolor", + -13.119319915771484 + ], + [ + "▁Tiffany", + -13.119344711303711 + ], + [ + "▁Glauben", + -13.119365692138672 + ], + [ + "Immobilie", + -13.119406700134277 + ], + [ + "▁stripped", + -13.119549751281738 + ], + [ + "▁Beatles", + -13.119601249694824 + ], + [ + "ани", + -13.119770050048828 + ], + [ + "▁lifespan", + -13.119986534118652 + ], + [ + "▁profondeur", + -13.120251655578613 + ], + [ + "▁durere", + -13.120329856872559 + ], + [ + "▁Lithuania", + -13.120367050170898 + ], + [ + "▁resurrection", + -13.120367050170898 + ], + [ + "▁suitcase", + -13.120535850524902 + ], + [ + "▁Plumber", + -13.120545387268066 + ], + [ + "criticized", + -13.120595932006836 + ], + [ + "feared", + -13.120756149291992 + ], + [ + "▁Aunt", + -13.120929718017578 + ], + [ + "otwithstanding", + -13.121068000793457 + ], + [ + "verständlich", + -13.12115478515625 + ], + [ + "fiber", + -13.121248245239258 + ], + [ + "headquartered", + -13.121390342712402 + ], + [ + "▁Perspective", + -13.121391296386719 + ], + [ + "▁semantic", + -13.121413230895996 + ], + [ + "VIEW", + -13.121431350708008 + ], + [ + "▁Ersatzteile", + -13.121567726135254 + ], + [ + "▁disgust", + -13.121685981750488 + ], + [ + "rrington", + -13.121834754943848 + ], + [ + "ässe", + -13.121922492980957 + ], + [ + "▁anerkannt", + -13.121956825256348 + ], + [ + "meaning", + -13.12203598022461 + ], + [ + "178", + -13.122039794921875 + ], + [ + "▁grupuri", + -13.1221284866333 + ], + [ + "ciones", + -13.122267723083496 + ], + [ + "▁Mobility", + -13.122414588928223 + ], + [ + "▁unstable", + -13.122422218322754 + ], + [ + "▁FULL", + -13.122456550598145 + ], + [ + "austausch", + -13.122491836547852 + ], + [ + "▁culminat", + -13.122549057006836 + ], + [ + "▁Roast", + -13.122742652893066 + ], + [ + "existant", + -13.122940063476562 + ], + [ + "167", + -13.123008728027344 + ], + [ + "tinerii", + 
-13.123040199279785 + ], + [ + "September", + -13.123115539550781 + ], + [ + "▁haircut", + -13.123274803161621 + ], + [ + "▁Tutorial", + -13.123440742492676 + ], + [ + "▁enquiries", + -13.123440742492676 + ], + [ + "▁livelihood", + -13.123440742492676 + ], + [ + "▁proficiency", + -13.123440742492676 + ], + [ + "▁pavement", + -13.123443603515625 + ], + [ + "▁Reservation", + -13.123445510864258 + ], + [ + "aimerai", + -13.123491287231445 + ], + [ + "▁laboratoire", + -13.123492240905762 + ], + [ + "leihen", + -13.123501777648926 + ], + [ + "ministerium", + -13.123518943786621 + ], + [ + "▁Concentr", + -13.12366008758545 + ], + [ + "▁swipe", + -13.12368106842041 + ], + [ + "extrêmement", + -13.123687744140625 + ], + [ + "cultivated", + -13.123708724975586 + ], + [ + "▁Converse", + -13.123845100402832 + ], + [ + "▁paycheck", + -13.123863220214844 + ], + [ + "olltest", + -13.123995780944824 + ], + [ + "▁Bauch", + -13.124022483825684 + ], + [ + "▁autobuz", + -13.124067306518555 + ], + [ + "attack", + -13.124094009399414 + ], + [ + "While", + -13.124311447143555 + ], + [ + "Retrouvez", + -13.124320983886719 + ], + [ + "▁Dolphin", + -13.124466896057129 + ], + [ + "▁Shelby", + -13.124480247497559 + ], + [ + "▁Diagnostic", + -13.124486923217773 + ], + [ + "▁reconcil", + -13.124558448791504 + ], + [ + "▁Iaşi", + -13.124733924865723 + ], + [ + "▁iubesc", + -13.124979972839355 + ], + [ + "▁Bestseller", + -13.124985694885254 + ], + [ + "▁antrenor", + -13.125035285949707 + ], + [ + "▁Imaging", + -13.125089645385742 + ], + [ + "▁priorité", + -13.125295639038086 + ], + [ + "▁brewery", + -13.125494003295898 + ], + [ + "▁residual", + -13.125494003295898 + ], + [ + "▁intermittent", + -13.125494956970215 + ], + [ + "Kollekt", + -13.125585556030273 + ], + [ + "▁Walsh", + -13.12558650970459 + ], + [ + "▁marvelous", + -13.125653266906738 + ], + [ + "canceled", + -13.125686645507812 + ], + [ + "174", + -13.125761985778809 + ], + [ + "normes", + -13.125837326049805 + ], + [ + "▁Tempo", + -13.125996589660645 + ], + [ + "▁Târgu", + -13.126008987426758 + ], + [ + "877", + -13.126165390014648 + ], + [ + "5-8", + -13.126190185546875 + ], + [ + "960", + -13.126486778259277 + ], + [ + "▁Scandinavia", + -13.1265230178833 + ], + [ + "▁prolific", + -13.126526832580566 + ], + [ + "lasi", + -13.126916885375977 + ], + [ + "glück", + -13.127097129821777 + ], + [ + "▁immersion", + -13.127204895019531 + ], + [ + "RSA", + -13.127323150634766 + ], + [ + "▁Polk", + -13.127340316772461 + ], + [ + "▁transmitter", + -13.12747859954834 + ], + [ + "▁Kleidung", + -13.12755298614502 + ], + [ + "▁Cosmo", + -13.127676963806152 + ], + [ + "▁1935", + -13.127788543701172 + ], + [ + "höhere", + -13.127906799316406 + ], + [ + "▁Tatsache", + -13.128074645996094 + ], + [ + "▁Outlet", + -13.1282377243042 + ], + [ + "▁canalisation", + -13.12824821472168 + ], + [ + "Mbps", + -13.128433227539062 + ], + [ + "▁skeptical", + -13.128582954406738 + ], + [ + "mplification", + -13.128617286682129 + ], + [ + "▁Advice", + -13.128618240356445 + ], + [ + "▁détaillé", + -13.128676414489746 + ], + [ + "660", + -13.128701210021973 + ], + [ + "▁eyebrow", + -13.128722190856934 + ], + [ + "▁HIGH", + -13.128898620605469 + ], + [ + "hnlich", + -13.129073143005371 + ], + [ + "▁depăș", + -13.12910270690918 + ], + [ + "▁procurori", + -13.129140853881836 + ], + [ + "▁refrain", + -13.129212379455566 + ], + [ + "▁geschaffen", + -13.12952995300293 + ], + [ + "justement", + -13.129663467407227 + ], + [ + "exposing", + -13.129700660705566 + ], + [ + "243", + -13.1298828125 + ], + [ 
+ "sectorul", + -13.130104064941406 + ], + [ + "▁courrier", + -13.130180358886719 + ], + [ + "▁carcas", + -13.130199432373047 + ], + [ + "sitter", + -13.13022518157959 + ], + [ + "▁Schreiben", + -13.130335807800293 + ], + [ + "▁malfunction", + -13.130358695983887 + ], + [ + "poartă", + -13.130522727966309 + ], + [ + "raisons", + -13.130565643310547 + ], + [ + "▁HOT", + -13.130650520324707 + ], + [ + "▁refreshed", + -13.130730628967285 + ], + [ + "mânt", + -13.130744934082031 + ], + [ + "▁coefficient", + -13.13097858428955 + ], + [ + "▁instituţii", + -13.131194114685059 + ], + [ + "▁sanguin", + -13.131202697753906 + ], + [ + "▁ceci", + -13.131213188171387 + ], + [ + "▁garçon", + -13.131232261657715 + ], + [ + "deluxe", + -13.131237030029297 + ], + [ + "▁rectif", + -13.131311416625977 + ], + [ + "920", + -13.131364822387695 + ], + [ + "Exista", + -13.131428718566895 + ], + [ + "▁magnif", + -13.131568908691406 + ], + [ + "efficiencies", + -13.131681442260742 + ], + [ + "▁Mitsubishi", + -13.131681442260742 + ], + [ + "▁consortium", + -13.131681442260742 + ], + [ + "▁baggage", + -13.131683349609375 + ], + [ + "▁guild", + -13.131736755371094 + ], + [ + "▁sixty", + -13.13193130493164 + ], + [ + "▁Retreat", + -13.13245677947998 + ], + [ + "batting", + -13.132473945617676 + ], + [ + "470", + -13.132708549499512 + ], + [ + "▁Britanie", + -13.132718086242676 + ], + [ + "displaced", + -13.132734298706055 + ], + [ + "▁spați", + -13.132794380187988 + ], + [ + "▁exceptionnelle", + -13.13281536102295 + ], + [ + "▁authorize", + -13.132906913757324 + ], + [ + "▁prescribe", + -13.133187294006348 + ], + [ + "▁dépannage", + -13.133234024047852 + ], + [ + "▁sexuelle", + -13.133234024047852 + ], + [ + "valid", + -13.133275032043457 + ], + [ + "▁hymn", + -13.133752822875977 + ], + [ + "▁histories", + -13.133757591247559 + ], + [ + "▁oriunde", + -13.133764266967773 + ], + [ + "Pop", + -13.133785247802734 + ], + [ + "▁dispoziţi", + -13.133800506591797 + ], + [ + "ADI", + -13.133819580078125 + ], + [ + "Google", + -13.133830070495605 + ], + [ + "▁Autism", + -13.133918762207031 + ], + [ + "▁aggr", + -13.134354591369629 + ], + [ + "bleed", + -13.134618759155273 + ], + [ + "▁displacement", + -13.13478946685791 + ], + [ + "▁hobbies", + -13.13478946685791 + ], + [ + "▁anatomy", + -13.134799003601074 + ], + [ + "▁Klinik", + -13.134821891784668 + ], + [ + "▁CCTV", + -13.1348237991333 + ], + [ + "readable", + -13.134886741638184 + ], + [ + "ulph", + -13.134982109069824 + ], + [ + "metabol", + -13.135035514831543 + ], + [ + "▁rugăm", + -13.135037422180176 + ], + [ + "▁Scotia", + -13.135087013244629 + ], + [ + "▁Einheit", + -13.135211944580078 + ], + [ + "▁troupe", + -13.13581371307373 + ], + [ + "▁Practitioner", + -13.135828018188477 + ], + [ + "▁oarec", + -13.135909080505371 + ], + [ + "Appel", + -13.135998725891113 + ], + [ + "situația", + -13.136096000671387 + ], + [ + "▁Yemen", + -13.136353492736816 + ], + [ + "piping", + -13.136515617370605 + ], + [ + "blood", + -13.136772155761719 + ], + [ + "engraved", + -13.136866569519043 + ], + [ + "▁Cristina", + -13.136866569519043 + ], + [ + "▁inaccurate", + -13.136866569519043 + ], + [ + "savory", + -13.136878967285156 + ], + [ + "atism", + -13.136919021606445 + ], + [ + "▁dependency", + -13.137007713317871 + ], + [ + "▁assertion", + -13.137015342712402 + ], + [ + "▁intersect", + -13.137201309204102 + ], + [ + "DATA", + -13.137224197387695 + ], + [ + "▁britanic", + -13.1373872756958 + ], + [ + "▁sanitaire", + -13.137393951416016 + ], + [ + "▁PLUS", + -13.137436866760254 + ], + [ 
+ "▁platter", + -13.137730598449707 + ], + [ + "▁reconsider", + -13.137802124023438 + ], + [ + "▁Swim", + -13.13786792755127 + ], + [ + "▁Scene", + -13.137896537780762 + ], + [ + "▁Reynolds", + -13.137907028198242 + ], + [ + "▁gesund", + -13.137922286987305 + ], + [ + "international", + -13.137959480285645 + ], + [ + "government", + -13.13804817199707 + ], + [ + "▁gemstone", + -13.138052940368652 + ], + [ + "▁reproductive", + -13.1381196975708 + ], + [ + "▁expressive", + -13.13820743560791 + ], + [ + "▁tranche", + -13.13842487335205 + ], + [ + "▁Niagara", + -13.138427734375 + ], + [ + "▁Studierende", + -13.138434410095215 + ], + [ + "▁crave", + -13.138607025146484 + ], + [ + "pathetic", + -13.138739585876465 + ], + [ + "▁1916", + -13.138858795166016 + ], + [ + "▁Thousand", + -13.138873100280762 + ], + [ + "uffed", + -13.138893127441406 + ], + [ + "▁Lancaster", + -13.138960838317871 + ], + [ + "▁revenge", + -13.138972282409668 + ], + [ + "▁melody", + -13.1389741897583 + ], + [ + "Suitable", + -13.138991355895996 + ], + [ + "▁beacon", + -13.139082908630371 + ], + [ + "▁MAY", + -13.139205932617188 + ], + [ + "livré", + -13.139216423034668 + ], + [ + "Virus", + -13.139391899108887 + ], + [ + "▁collaborator", + -13.139413833618164 + ], + [ + "produktion", + -13.139480590820312 + ], + [ + "▁iluminat", + -13.139593124389648 + ], + [ + "facets", + -13.13975715637207 + ], + [ + "▁expus", + -13.139784812927246 + ], + [ + "▁baptism", + -13.13999080657959 + ], + [ + "▁urgency", + -13.140016555786133 + ], + [ + "artery", + -13.14030647277832 + ], + [ + "▁eingeladen", + -13.14043140411377 + ], + [ + "▁entfernen", + -13.14051342010498 + ], + [ + "soaking", + -13.140555381774902 + ], + [ + "▁irré", + -13.140557289123535 + ], + [ + "▁purity", + -13.140700340270996 + ], + [ + "▁adăug", + -13.140731811523438 + ], + [ + "historischen", + -13.140777587890625 + ], + [ + "crezi", + -13.140793800354004 + ], + [ + "▁tarziu", + -13.141035079956055 + ], + [ + "▁Mozart", + -13.141040802001953 + ], + [ + "▁trimming", + -13.141056060791016 + ], + [ + "▁violat", + -13.141056060791016 + ], + [ + "▁Vermögen", + -13.14108943939209 + ], + [ + "▁Theorie", + -13.141114234924316 + ], + [ + "scheibe", + -13.14114761352539 + ], + [ + "Partidul", + -13.141324996948242 + ], + [ + "▁childcare", + -13.14133071899414 + ], + [ + "ajele", + -13.141345977783203 + ], + [ + "▁Punjab", + -13.141390800476074 + ], + [ + "6.3", + -13.14156436920166 + ], + [ + "▁recount", + -13.141571044921875 + ], + [ + "▁repel", + -13.141799926757812 + ], + [ + "vantage", + -13.1419095993042 + ], + [ + "6.4", + -13.141953468322754 + ], + [ + "▁comedian", + -13.142087936401367 + ], + [ + "▁snappe", + -13.142256736755371 + ], + [ + "PLE", + -13.142271041870117 + ], + [ + "▁rapper", + -13.142439842224121 + ], + [ + "▁Belfast", + -13.142657279968262 + ], + [ + "▁predictive", + -13.14271068572998 + ], + [ + "dépôt", + -13.1427583694458 + ], + [ + "flavored", + -13.142769813537598 + ], + [ + "chließlich", + -13.14293098449707 + ], + [ + "▁stump", + -13.142955780029297 + ], + [ + "▁lakh", + -13.142963409423828 + ], + [ + "3:30", + -13.143021583557129 + ], + [ + "▁cetățeni", + -13.1431245803833 + ], + [ + "▁Milliarden", + -13.143125534057617 + ], + [ + "Assurance", + -13.143128395080566 + ], + [ + "▁Marketplace", + -13.143329620361328 + ], + [ + "equipped", + -13.143423080444336 + ], + [ + "▁russe", + -13.143462181091309 + ], + [ + "Exactly", + -13.143651008605957 + ], + [ + "▁Venez", + -13.144125938415527 + ], + [ + "▁Pavilion", + -13.144171714782715 + ], + [ + 
"▁incontournable", + -13.144171714782715 + ], + [ + "▁slaughter", + -13.14417839050293 + ], + [ + "asteptam", + -13.144190788269043 + ], + [ + "▁Fighter", + -13.144196510314941 + ], + [ + "▁Landkreis", + -13.144278526306152 + ], + [ + "▁lumini", + -13.144312858581543 + ], + [ + "▁connaît", + -13.144615173339844 + ], + [ + "▁Breite", + -13.144674301147461 + ], + [ + "▁Disability", + -13.144774436950684 + ], + [ + "▁Alfa", + -13.144786834716797 + ], + [ + "▁poise", + -13.144895553588867 + ], + [ + "▁Alpen", + -13.144898414611816 + ], + [ + "betont", + -13.145031929016113 + ], + [ + "159", + -13.145161628723145 + ], + [ + "▁geprägt", + -13.145219802856445 + ], + [ + "▁intrigued", + -13.145219802856445 + ], + [ + "▁sympathy", + -13.145220756530762 + ], + [ + "societal", + -13.145225524902344 + ], + [ + "▁sédui", + -13.145243644714355 + ], + [ + "▁differentiation", + -13.145384788513184 + ], + [ + "▁aprobare", + -13.145744323730469 + ], + [ + "schirm", + -13.14585018157959 + ], + [ + "sagt", + -13.145956039428711 + ], + [ + "7.3", + -13.146101951599121 + ], + [ + "Bib", + -13.146263122558594 + ], + [ + "europäischen", + -13.146268844604492 + ], + [ + "▁Innovative", + -13.146268844604492 + ], + [ + "▁autonome", + -13.146330833435059 + ], + [ + "▁Objective", + -13.146400451660156 + ], + [ + "▁refusal", + -13.146551132202148 + ], + [ + "▁exposé", + -13.146719932556152 + ], + [ + "▁cetăţeni", + -13.146793365478516 + ], + [ + "▁stimmt", + -13.146798133850098 + ], + [ + "acordul", + -13.147162437438965 + ], + [ + "▁hormonal", + -13.147254943847656 + ], + [ + "intermédiaire", + -13.147319793701172 + ], + [ + "▁doubl", + -13.147374153137207 + ], + [ + "▁flute", + -13.147509574890137 + ], + [ + "▁Balkon", + -13.147523880004883 + ], + [ + "▁Florian", + -13.147607803344727 + ], + [ + "737", + -13.147614479064941 + ], + [ + "▁dritte", + -13.147639274597168 + ], + [ + "spitze", + -13.147685050964355 + ], + [ + "donnent", + -13.14778995513916 + ], + [ + "▁Zuhause", + -13.147850036621094 + ], + [ + "▁VIII", + -13.147852897644043 + ], + [ + "familien", + -13.148151397705078 + ], + [ + "▁sécurisé", + -13.148313522338867 + ], + [ + "▁glamour", + -13.148370742797852 + ], + [ + "▁societati", + -13.148370742797852 + ], + [ + "typique", + -13.1483793258667 + ], + [ + "▁addicted", + -13.148421287536621 + ], + [ + "▁Providence", + -13.148500442504883 + ], + [ + "▁Extended", + -13.148506164550781 + ], + [ + "▁Barbie", + -13.148513793945312 + ], + [ + "zustand", + -13.148516654968262 + ], + [ + "▁Sauna", + -13.148638725280762 + ], + [ + "▁propane", + -13.148663520812988 + ], + [ + "europa", + -13.148894309997559 + ], + [ + "glued", + -13.148940086364746 + ], + [ + "▁Mystery", + -13.148941993713379 + ], + [ + "▁travaillé", + -13.149106979370117 + ], + [ + "riol", + -13.149251937866211 + ], + [ + "fleisch", + -13.149288177490234 + ], + [ + "▁Eintritt", + -13.149327278137207 + ], + [ + "▁Syndrome", + -13.149422645568848 + ], + [ + "▁petroleum", + -13.149426460266113 + ], + [ + "▁genial", + -13.149433135986328 + ], + [ + "sponsored", + -13.149436950683594 + ], + [ + "▁Cindy", + -13.149436950683594 + ], + [ + "▁courier", + -13.149600982666016 + ], + [ + "▁Scrap", + -13.149640083312988 + ], + [ + "▁conţin", + -13.149724006652832 + ], + [ + "(2007)", + -13.149764060974121 + ], + [ + "▁gewährleisten", + -13.149949073791504 + ], + [ + "▁proprietor", + -13.15011215209961 + ], + [ + "▁cheque", + -13.15046215057373 + ], + [ + "maternity", + -13.150477409362793 + ], + [ + "▁Gustav", + -13.15048599243164 + ], + [ + "▁arterial", + 
-13.150497436523438 + ], + [ + "▁whiskey", + -13.150510787963867 + ], + [ + "▁concealed", + -13.150525093078613 + ], + [ + "thèque", + -13.150553703308105 + ], + [ + "felony", + -13.150579452514648 + ], + [ + "▁tweeted", + -13.150613784790039 + ], + [ + "OTA", + -13.150619506835938 + ], + [ + "nsel", + -13.150664329528809 + ], + [ + "▁coarse", + -13.150664329528809 + ], + [ + "▁identificat", + -13.150707244873047 + ], + [ + "▁variability", + -13.150716781616211 + ], + [ + "civ", + -13.150843620300293 + ], + [ + "▁drastic", + -13.150956153869629 + ], + [ + "▁hatred", + -13.151090621948242 + ], + [ + "▁Bürgermeister", + -13.151237487792969 + ], + [ + "▁utilizatorilor", + -13.15124225616455 + ], + [ + "OULD", + -13.15137004852295 + ], + [ + "rmaßen", + -13.151383399963379 + ], + [ + "▁windshield", + -13.151530265808105 + ], + [ + "▁Particular", + -13.151531219482422 + ], + [ + "▁Tunnel", + -13.151638984680176 + ], + [ + "▁litri", + -13.15164852142334 + ], + [ + "extrême", + -13.15180492401123 + ], + [ + "▁Schalt", + -13.151944160461426 + ], + [ + "paket", + -13.152159690856934 + ], + [ + "berlin", + -13.152169227600098 + ], + [ + "▁slujb", + -13.152193069458008 + ], + [ + "facilitated", + -13.152206420898438 + ], + [ + "Congressional", + -13.152510643005371 + ], + [ + "▁honeymoon", + -13.152585983276367 + ], + [ + "▁Provision", + -13.152697563171387 + ], + [ + "▁Outfit", + -13.152779579162598 + ], + [ + "udder", + -13.152814865112305 + ], + [ + "▁chandelier", + -13.153002738952637 + ], + [ + "donating", + -13.153132438659668 + ], + [ + "historic", + -13.15333080291748 + ], + [ + "organized", + -13.153508186340332 + ], + [ + "(8)", + -13.15356731414795 + ], + [ + "▁touristique", + -13.153610229492188 + ], + [ + "▁Roosevelt", + -13.153643608093262 + ], + [ + "▁Verständnis", + -13.153643608093262 + ], + [ + "▁prilej", + -13.153655052185059 + ], + [ + "Vanity", + -13.153806686401367 + ], + [ + "chilly", + -13.153964042663574 + ], + [ + "loyer", + -13.154031753540039 + ], + [ + "▁Zhang", + -13.154053688049316 + ], + [ + "▁Nouveau", + -13.154193878173828 + ], + [ + "Soft", + -13.154326438903809 + ], + [ + "▁motherboard", + -13.15441608428955 + ], + [ + "▁Erklärung", + -13.154701232910156 + ], + [ + "▁Tasmania", + -13.154702186584473 + ], + [ + "▁verändern", + -13.154703140258789 + ], + [ + "▁seldom", + -13.154711723327637 + ], + [ + "▁Karriere", + -13.154714584350586 + ], + [ + "▁Mixed", + -13.154902458190918 + ], + [ + "umfang", + -13.154970169067383 + ], + [ + "▁Strategies", + -13.155035972595215 + ], + [ + "CHAR", + -13.155051231384277 + ], + [ + "olitary", + -13.155075073242188 + ], + [ + "▁Persoan", + -13.1550874710083 + ], + [ + "bewegung", + -13.155242919921875 + ], + [ + "▁Ernest", + -13.155367851257324 + ], + [ + "withdrawn", + -13.155855178833008 + ], + [ + "▁stationary", + -13.155881881713867 + ], + [ + "▁bland", + -13.155939102172852 + ], + [ + "▁Replace", + -13.156059265136719 + ], + [ + "▁Londres", + -13.156290054321289 + ], + [ + "▁plural", + -13.156290054321289 + ], + [ + "▁concentrat", + -13.156515121459961 + ], + [ + "Maschine", + -13.156675338745117 + ], + [ + "▁Advocate", + -13.156820297241211 + ], + [ + "▁vermitteln", + -13.156824111938477 + ], + [ + "▁dispenser", + -13.156827926635742 + ], + [ + "▁tedious", + -13.15695858001709 + ], + [ + "▁Straight", + -13.15705394744873 + ], + [ + "▁Corona", + -13.157061576843262 + ], + [ + "▁monumental", + -13.157073020935059 + ], + [ + "▁migrate", + -13.15720272064209 + ], + [ + "▁verlieren", + -13.157366752624512 + ], + [ + "▁Lub", + 
-13.157482147216797 + ], + [ + "▁reinforcement", + -13.157827377319336 + ], + [ + "▁cherish", + -13.157843589782715 + ], + [ + "Veterinary", + -13.157881736755371 + ], + [ + "geschwindigkeit", + -13.157881736755371 + ], + [ + "▁féminin", + -13.157881736755371 + ], + [ + "▁Facilities", + -13.157964706420898 + ], + [ + "▁urmari", + -13.158050537109375 + ], + [ + "▁Vertical", + -13.158098220825195 + ], + [ + "echoe", + -13.158188819885254 + ], + [ + "toured", + -13.158548355102539 + ], + [ + "Served", + -13.158772468566895 + ], + [ + "más", + -13.158853530883789 + ], + [ + "license", + -13.158893585205078 + ], + [ + "misunderstanding", + -13.158944129943848 + ], + [ + "▁glamorous", + -13.158944129943848 + ], + [ + "BJP", + -13.158973693847656 + ], + [ + "▁découvert", + -13.159173965454102 + ], + [ + "schönsten", + -13.159517288208008 + ], + [ + "▁(2018)", + -13.159577369689941 + ], + [ + "▁orasului", + -13.159581184387207 + ], + [ + "328", + -13.159674644470215 + ], + [ + "thighs", + -13.159801483154297 + ], + [ + "éclairage", + -13.160008430480957 + ], + [ + "Oamenii", + -13.160009384155273 + ], + [ + "▁Transmission", + -13.16014575958252 + ], + [ + "▁transpir", + -13.16015911102295 + ], + [ + "▁președinte", + -13.160321235656738 + ], + [ + "finalists", + -13.160327911376953 + ], + [ + "genügend", + -13.160524368286133 + ], + [ + "▁Aufmerksamkeit", + -13.160539627075195 + ], + [ + "▁unglaublich", + -13.160539627075195 + ], + [ + "▁descarc", + -13.160604476928711 + ], + [ + "▁Couch", + -13.160683631896973 + ], + [ + "eaucoup", + -13.160788536071777 + ], + [ + "▁adidas", + -13.161075592041016 + ], + [ + "▁1-800-", + -13.161077499389648 + ], + [ + "▁Communities", + -13.161102294921875 + ], + [ + "▁Einkommen", + -13.161102294921875 + ], + [ + "▁Reagan", + -13.16114330291748 + ], + [ + "▁Stoke", + -13.161260604858398 + ], + [ + "▁Snapchat", + -13.161269187927246 + ], + [ + "éclat", + -13.161272048950195 + ], + [ + "▁auseinander", + -13.161367416381836 + ], + [ + "▁richesse", + -13.16137409210205 + ], + [ + "▁toggle", + -13.161396026611328 + ], + [ + "▁Zutaten", + -13.161606788635254 + ], + [ + "▁député", + -13.16161060333252 + ], + [ + "▁battlefield", + -13.161611557006836 + ], + [ + "▁spirituel", + -13.161611557006836 + ], + [ + "▁Shuttle", + -13.161632537841797 + ], + [ + "▁Aktien", + -13.161665916442871 + ], + [ + "hormon", + -13.161819458007812 + ], + [ + "connection", + -13.16187858581543 + ], + [ + "▁vizitatori", + -13.16191577911377 + ], + [ + "érité", + -13.161971092224121 + ], + [ + "truck", + -13.1619873046875 + ], + [ + "▁yourselves", + -13.162139892578125 + ], + [ + "▁Logistics", + -13.162140846252441 + ], + [ + "coveted", + -13.16215705871582 + ], + [ + "▁şedinţ", + -13.162671089172363 + ], + [ + "▁messenger", + -13.162703514099121 + ], + [ + "▁țar", + -13.162918090820312 + ], + [ + "▁Grau", + -13.163025856018066 + ], + [ + "chirurgie", + -13.163138389587402 + ], + [ + "▁Ressourcen", + -13.16320514678955 + ], + [ + "▁Jésus", + -13.163207054138184 + ], + [ + "▁acțiune", + -13.163208961486816 + ], + [ + "▁Bundesliga", + -13.163249015808105 + ], + [ + "Lizenz", + -13.163379669189453 + ], + [ + "ELLE", + -13.163908958435059 + ], + [ + "vraie", + -13.1639986038208 + ], + [ + "ruined", + -13.164018630981445 + ], + [ + "▁Marble", + -13.164109230041504 + ], + [ + "▁Zambia", + -13.164308547973633 + ], + [ + "▁Finnish", + -13.164366722106934 + ], + [ + "▁trackback", + -13.164488792419434 + ], + [ + "héros", + -13.16451644897461 + ], + [ + "▁réclam", + -13.164534568786621 + ], + [ + "locurile", 
+ -13.164706230163574 + ], + [ + "tägliche", + -13.164753913879395 + ], + [ + "IFF", + -13.164824485778809 + ], + [ + "▁contextual", + -13.164938926696777 + ], + [ + "▁Elvis", + -13.165084838867188 + ], + [ + "▁Batch", + -13.165183067321777 + ], + [ + "▁appris", + -13.16519546508789 + ], + [ + "intensive", + -13.165404319763184 + ], + [ + "▁întâmplat", + -13.16565990447998 + ], + [ + "▁prelucr", + -13.16576099395752 + ], + [ + "flore", + -13.165873527526855 + ], + [ + "▁Alkohol", + -13.165877342224121 + ], + [ + "Konzern", + -13.165895462036133 + ], + [ + "Delete", + -13.166082382202148 + ], + [ + "öck", + -13.16612720489502 + ], + [ + "▁clientii", + -13.16614818572998 + ], + [ + "▁innovate", + -13.166224479675293 + ], + [ + "▁ASAP", + -13.166345596313477 + ], + [ + "crumbs", + -13.166425704956055 + ], + [ + "reusable", + -13.166489601135254 + ], + [ + "▁Beaver", + -13.166507720947266 + ], + [ + "▁rosii", + -13.166643142700195 + ], + [ + "Arr", + -13.166704177856445 + ], + [ + "▁Zubehör", + -13.166948318481445 + ], + [ + "▁stolz", + -13.166952133178711 + ], + [ + "▁$75", + -13.16695499420166 + ], + [ + "▁Frühling", + -13.166967391967773 + ], + [ + "▁disagreement", + -13.166988372802734 + ], + [ + "▁formulate", + -13.167381286621094 + ], + [ + "braking", + -13.167522430419922 + ], + [ + "▁submarine", + -13.167535781860352 + ], + [ + "▁identificare", + -13.167652130126953 + ], + [ + "lansarea", + -13.167659759521484 + ], + [ + "covered", + -13.167753219604492 + ], + [ + "benso", + -13.167859077453613 + ], + [ + "▁situatie", + -13.167989730834961 + ], + [ + "hilf", + -13.1681547164917 + ], + [ + "▁Southampton", + -13.168557167053223 + ], + [ + "▁intéressé", + -13.168557167053223 + ], + [ + "▁congressional", + -13.168572425842285 + ], + [ + "65%", + -13.168595314025879 + ], + [ + "▁Allison", + -13.168627738952637 + ], + [ + "Mainland", + -13.168726921081543 + ], + [ + "▁touchscreen", + -13.16882038116455 + ], + [ + "leitet", + -13.168922424316406 + ], + [ + "mnului", + -13.16958999633789 + ], + [ + "▁engagiert", + -13.169631004333496 + ], + [ + "joacă", + -13.16964340209961 + ], + [ + "▁$5,000", + -13.169652938842773 + ], + [ + "upscale", + -13.1697359085083 + ], + [ + "▁vérité", + -13.16983413696289 + ], + [ + "flüssig", + -13.170167922973633 + ], + [ + "Richtlinie", + -13.170169830322266 + ], + [ + "▁positif", + -13.170169830322266 + ], + [ + "▁diferenta", + -13.170175552368164 + ], + [ + "▁întâi", + -13.170707702636719 + ], + [ + "ethylene", + -13.170791625976562 + ], + [ + "kreuz", + -13.170913696289062 + ], + [ + "Surely", + -13.170990943908691 + ], + [ + "puneti", + -13.171002388000488 + ], + [ + "europe", + -13.171142578125 + ], + [ + "▁comunist", + -13.171271324157715 + ], + [ + "unterricht", + -13.171302795410156 + ], + [ + "▁Füll", + -13.171304702758789 + ], + [ + "▁Aberdeen", + -13.171792030334473 + ], + [ + "▁DSLR", + -13.171792030334473 + ], + [ + "▁functioneaza", + -13.171799659729004 + ], + [ + "▁benches", + -13.171807289123535 + ], + [ + "▁Alpine", + -13.171866416931152 + ], + [ + "phthal", + -13.172003746032715 + ], + [ + "▁counselling", + -13.17219066619873 + ], + [ + "▁erzielen", + -13.172323226928711 + ], + [ + "▁părinţi", + -13.172329902648926 + ], + [ + "▁besitzen", + -13.17236614227295 + ], + [ + "heavenly", + -13.172389030456543 + ], + [ + "▁masque", + -13.17281723022461 + ], + [ + "▁Legislature", + -13.172859191894531 + ], + [ + "▁Recycling", + -13.172861099243164 + ], + [ + "▁Derma", + -13.172883987426758 + ], + [ + "reunite", + -13.172926902770996 + ], + [ + 
"recettes", + -13.17310619354248 + ], + [ + "converge", + -13.173262596130371 + ], + [ + "▁compoziti", + -13.17327880859375 + ], + [ + "▁Nürnberg", + -13.173398971557617 + ], + [ + "760", + -13.173545837402344 + ], + [ + "▁entière", + -13.173674583435059 + ], + [ + "▁parchment", + -13.173944473266602 + ], + [ + "▁Aufwand", + -13.173945426940918 + ], + [ + "▁antivirus", + -13.174087524414062 + ], + [ + "▁remettr", + -13.17409610748291 + ], + [ + "▁NEVER", + -13.174243927001953 + ], + [ + "▁restrictive", + -13.174266815185547 + ], + [ + "▁beurre", + -13.174283027648926 + ], + [ + "▁frigider", + -13.174478530883789 + ], + [ + "acquisition", + -13.174642562866211 + ], + [ + "▁Correct", + -13.174866676330566 + ], + [ + "▁immortal", + -13.175017356872559 + ], + [ + "▁occupancy", + -13.175017356872559 + ], + [ + "▁Tucson", + -13.175019264221191 + ], + [ + "▁Dhabi", + -13.175025939941406 + ], + [ + "obligation", + -13.175033569335938 + ], + [ + "▁warfare", + -13.175037384033203 + ], + [ + "▁syntax", + -13.175045013427734 + ], + [ + "APS", + -13.175106048583984 + ], + [ + "мен", + -13.175209999084473 + ], + [ + "▁diferenț", + -13.175251960754395 + ], + [ + "wordpress", + -13.17549991607666 + ], + [ + "▁Wohnzimmer", + -13.175593376159668 + ], + [ + "oppo", + -13.175736427307129 + ], + [ + "▁miscare", + -13.175762176513672 + ], + [ + "companiilor", + -13.17581558227539 + ], + [ + "▁bezahlt", + -13.17584228515625 + ], + [ + "Sterne", + -13.175864219665527 + ], + [ + "inability", + -13.175898551940918 + ], + [ + "▁Hoffnung", + -13.176156044006348 + ], + [ + "▁românească", + -13.176176071166992 + ], + [ + "document", + -13.176177024841309 + ], + [ + "borrowers", + -13.17625904083252 + ], + [ + "▁rasa", + -13.176301956176758 + ], + [ + "▁bénéfice", + -13.176445960998535 + ], + [ + "▁Panda", + -13.17645263671875 + ], + [ + "▁cărţi", + -13.176730155944824 + ], + [ + "▁Vorgehen", + -13.17690658569336 + ], + [ + "▁afecteaz", + -13.176956176757812 + ], + [ + "▁diagnos", + -13.177050590515137 + ], + [ + "▁Dentistry", + -13.177180290222168 + ], + [ + "▁staggering", + -13.177180290222168 + ], + [ + "präsident", + -13.177181243896484 + ], + [ + "▁vocational", + -13.177239418029785 + ], + [ + "Combined", + -13.177287101745605 + ], + [ + "stère", + -13.177306175231934 + ], + [ + "▁frunze", + -13.177478790283203 + ], + [ + "OLI", + -13.177525520324707 + ], + [ + "▁răc", + -13.177752494812012 + ], + [ + "▁changé", + -13.177754402160645 + ], + [ + "▁reprezentanți", + -13.177757263183594 + ], + [ + "▁ausgeschlossen", + -13.177777290344238 + ], + [ + "Windows", + -13.177891731262207 + ], + [ + "sometimes", + -13.177898406982422 + ], + [ + "▁dargestellt", + -13.178120613098145 + ], + [ + "provoking", + -13.178263664245605 + ], + [ + "terribly", + -13.178264617919922 + ], + [ + "▁speculate", + -13.178274154663086 + ], + [ + "▁complément", + -13.178305625915527 + ], + [ + "▁(2006)", + -13.178306579589844 + ], + [ + "zulegen", + -13.178668022155762 + ], + [ + "▁définitive", + -13.178876876831055 + ], + [ + "considerare", + -13.17911148071289 + ], + [ + "▁Subaru", + -13.179354667663574 + ], + [ + "WAN", + -13.179390907287598 + ], + [ + "guessed", + -13.179417610168457 + ], + [ + "spannung", + -13.179479598999023 + ], + [ + "▁supernatural", + -13.179515838623047 + ], + [ + "▁Interstate", + -13.17957878112793 + ], + [ + "▁redundant", + -13.179891586303711 + ], + [ + "▁HUG", + -13.179893493652344 + ], + [ + "▁restauration", + -13.180006980895996 + ], + [ + "repute", + -13.180011749267578 + ], + [ + "coagul", + 
-13.180028915405273 + ], + [ + "tehnologia", + -13.18043327331543 + ], + [ + "warded", + -13.180444717407227 + ], + [ + "▁lobster", + -13.180469512939453 + ], + [ + "▁Hafen", + -13.180542945861816 + ], + [ + "▁Guess", + -13.18056583404541 + ], + [ + "seraient", + -13.181038856506348 + ], + [ + "▁trench", + -13.181156158447266 + ], + [ + "▁piept", + -13.181283950805664 + ], + [ + "categorized", + -13.181396484375 + ], + [ + "softer", + -13.1815185546875 + ], + [ + "▁feasibility", + -13.181519508361816 + ], + [ + "▁restructuring", + -13.181519508361816 + ], + [ + "▁GOOD", + -13.181537628173828 + ], + [ + "▁inspiré", + -13.181610107421875 + ], + [ + "▁spéci", + -13.18163013458252 + ], + [ + "▁Mattress", + -13.181686401367188 + ], + [ + "▁biologique", + -13.181702613830566 + ], + [ + "▁Crema", + -13.182043075561523 + ], + [ + "▁korrekt", + -13.182063102722168 + ], + [ + "▁imperfect", + -13.182205200195312 + ], + [ + "▁advantageous", + -13.182329177856445 + ], + [ + "9.00", + -13.182390213012695 + ], + [ + "PAL", + -13.182557106018066 + ], + [ + "▁Illustration", + -13.182607650756836 + ], + [ + "▁Katherine", + -13.182607650756836 + ], + [ + "▁cervical", + -13.182607650756836 + ], + [ + "▁hectic", + -13.182611465454102 + ], + [ + "▁Belastung", + -13.182615280151367 + ], + [ + "▁Laguna", + -13.182628631591797 + ], + [ + "▁Burton", + -13.182761192321777 + ], + [ + "nettoyage", + -13.182875633239746 + ], + [ + "Toward", + -13.183072090148926 + ], + [ + "continuare", + -13.183072090148926 + ], + [ + "▁acumulat", + -13.183106422424316 + ], + [ + "▁déposé", + -13.183216094970703 + ], + [ + "▁prestige", + -13.183269500732422 + ], + [ + "▁LNG", + -13.183525085449219 + ], + [ + "▁Dacia", + -13.183662414550781 + ], + [ + "▁concede", + -13.183691024780273 + ], + [ + "▁reconciliation", + -13.183822631835938 + ], + [ + "Sistemul", + -13.183877944946289 + ], + [ + "Speed", + -13.183937072753906 + ], + [ + "▁Implant", + -13.183977127075195 + ], + [ + "▁möchtest", + -13.184020042419434 + ], + [ + "▁Norton", + -13.184064865112305 + ], + [ + "▁cosmic", + -13.184181213378906 + ], + [ + "enregistrement", + -13.184247016906738 + ], + [ + "țării", + -13.18433952331543 + ], + [ + "Veröffentlichung", + -13.184786796569824 + ], + [ + "erlebnis", + -13.184786796569824 + ], + [ + "▁Carpenter", + -13.184786796569824 + ], + [ + "▁INFORMATION", + -13.184786796569824 + ], + [ + "invites", + -13.18481731414795 + ], + [ + "▁gewan", + -13.1849365234375 + ], + [ + "▁réservé", + -13.184986114501953 + ], + [ + "▁aquatic", + -13.184988021850586 + ], + [ + "▁Seoul", + -13.18507194519043 + ], + [ + "▁älter", + -13.185185432434082 + ], + [ + "▁classmates", + -13.185223579406738 + ], + [ + "gelangen", + -13.185253143310547 + ], + [ + "▁Camill", + -13.185285568237305 + ], + [ + "simo", + -13.185291290283203 + ], + [ + "▁dormitor", + -13.185333251953125 + ], + [ + "wahren", + -13.185354232788086 + ], + [ + "▁incremental", + -13.185357093811035 + ], + [ + "▁caci", + -13.185494422912598 + ], + [ + "mittlere", + -13.185752868652344 + ], + [ + "▁condominium", + -13.185877799987793 + ], + [ + "▁rainforest", + -13.185877799987793 + ], + [ + "▁championnat", + -13.185891151428223 + ], + [ + "▁interrupted", + -13.185921669006348 + ], + [ + "▁tactile", + -13.185930252075195 + ], + [ + "▁unconditional", + -13.185945510864258 + ], + [ + "▁reactive", + -13.186041831970215 + ], + [ + "▁Stretch", + -13.1861572265625 + ], + [ + "▁serene", + -13.18624210357666 + ], + [ + "570", + -13.186318397521973 + ], + [ + "igte", + -13.186376571655273 + ], + [ + 
"Louis", + -13.186410903930664 + ], + [ + "▁Mittelpunkt", + -13.186493873596191 + ], + [ + "EEP", + -13.18651294708252 + ], + [ + "▁vault", + -13.186552047729492 + ], + [ + "absolu", + -13.186893463134766 + ], + [ + "▁solidarity", + -13.186971664428711 + ], + [ + "CLICK", + -13.18708324432373 + ], + [ + "▁hustle", + -13.187090873718262 + ], + [ + "▁microscope", + -13.187105178833008 + ], + [ + "▁Recommended", + -13.187111854553223 + ], + [ + "âche", + -13.18716812133789 + ], + [ + "▁flashlight", + -13.187286376953125 + ], + [ + "modificarea", + -13.18754768371582 + ], + [ + "izaţi", + -13.18773078918457 + ], + [ + "planned", + -13.187899589538574 + ], + [ + "Download", + -13.187906265258789 + ], + [ + "▁gourmand", + -13.188064575195312 + ], + [ + "▁subsidiaries", + -13.188064575195312 + ], + [ + "orthodox", + -13.188135147094727 + ], + [ + "▁Auburn", + -13.188323020935059 + ], + [ + "▁exprimat", + -13.188336372375488 + ], + [ + "procédé", + -13.18861198425293 + ], + [ + "▁ressenti", + -13.188648223876953 + ], + [ + "▁stint", + -13.188678741455078 + ], + [ + "Essentially", + -13.189072608947754 + ], + [ + "▁Savior", + -13.189164161682129 + ], + [ + "▁Flood", + -13.189168930053711 + ], + [ + "▁neurological", + -13.189249038696289 + ], + [ + "▁strig", + -13.189340591430664 + ], + [ + "scended", + -13.189421653747559 + ], + [ + "▁Shiva", + -13.189483642578125 + ], + [ + "▁Sketch", + -13.189544677734375 + ], + [ + "▁monarch", + -13.18956184387207 + ], + [ + "▁Preview", + -13.189632415771484 + ], + [ + "▁bewegt", + -13.189811706542969 + ], + [ + "mapped", + -13.189818382263184 + ], + [ + "énorme", + -13.189962387084961 + ], + [ + "▁définition", + -13.189963340759277 + ], + [ + "▁nécessité", + -13.189984321594238 + ], + [ + "▁antren", + -13.190027236938477 + ], + [ + "▁Infant", + -13.190072059631348 + ], + [ + "▁incumbent", + -13.190255165100098 + ], + [ + "▁pavilion", + -13.190255165100098 + ], + [ + "▁Taliban", + -13.19025707244873 + ], + [ + "Easily", + -13.19025993347168 + ], + [ + "▁verteilt", + -13.19030475616455 + ], + [ + "▁Biblical", + -13.190320014953613 + ], + [ + "Christian", + -13.190333366394043 + ], + [ + "județul", + -13.190436363220215 + ], + [ + "Learning", + -13.19046688079834 + ], + [ + "▁Expand", + -13.19054126739502 + ], + [ + "▁Attach", + -13.19056224822998 + ], + [ + "consideră", + -13.190573692321777 + ], + [ + "einsatz", + -13.190574645996094 + ], + [ + "Numai", + -13.190585136413574 + ], + [ + "▁Eintrag", + -13.190597534179688 + ], + [ + "▁üblich", + -13.190607070922852 + ], + [ + "▁cumpără", + -13.19062614440918 + ], + [ + "escaped", + -13.190693855285645 + ], + [ + "▁Ortodox", + -13.190804481506348 + ], + [ + "▁obţinut", + -13.190805435180664 + ], + [ + "ecluded", + -13.191036224365234 + ], + [ + "▁brownie", + -13.191089630126953 + ], + [ + "▁regulament", + -13.191253662109375 + ], + [ + "▁Chaos", + -13.191302299499512 + ], + [ + "▁masiv", + -13.19132137298584 + ], + [ + "▁Gerald", + -13.191376686096191 + ], + [ + "▁Sigur", + -13.191380500793457 + ], + [ + "▁wavelength", + -13.191380500793457 + ], + [ + "▁retiring", + -13.191396713256836 + ], + [ + "▁exactement", + -13.191819190979004 + ], + [ + "ntino", + -13.191823959350586 + ], + [ + "▁Krebs", + -13.19194221496582 + ], + [ + "▁monatlich", + -13.191956520080566 + ], + [ + "▁aranj", + -13.192011833190918 + ], + [ + "▁priveşt", + -13.192099571228027 + ], + [ + "▁mecanic", + -13.192109107971191 + ], + [ + "money", + -13.192233085632324 + ], + [ + "parliamentary", + -13.1922607421875 + ], + [ + "▁probation", + 
-13.192427635192871 + ], + [ + "embroidered", + -13.192451477050781 + ], + [ + "▁amenajat", + -13.192451477050781 + ], + [ + "▁remnant", + -13.192451477050781 + ], + [ + "▁senzati", + -13.192472457885742 + ], + [ + "▁Declaration", + -13.192483901977539 + ], + [ + "farbe", + -13.192506790161133 + ], + [ + "▁skinny", + -13.19260311126709 + ], + [ + "Energi", + -13.192648887634277 + ], + [ + "verhältnisse", + -13.19288158416748 + ], + [ + "Recruit", + -13.192972183227539 + ], + [ + "frying", + -13.193161010742188 + ], + [ + "925", + -13.193294525146484 + ], + [ + "nstruire", + -13.193302154541016 + ], + [ + "toasted", + -13.193424224853516 + ], + [ + "▁nicotine", + -13.193551063537598 + ], + [ + "recessed", + -13.193570137023926 + ], + [ + "▁dialect", + -13.193572044372559 + ], + [ + "▁confisc", + -13.193575859069824 + ], + [ + "▁bubbl", + -13.193643569946289 + ], + [ + "▁Precision", + -13.193682670593262 + ], + [ + "▁sollicit", + -13.193842887878418 + ], + [ + "▁Moral", + -13.193977355957031 + ], + [ + "▁renseignements", + -13.194112777709961 + ], + [ + "UMP", + -13.194116592407227 + ], + [ + "ijn", + -13.194183349609375 + ], + [ + "▁fermeture", + -13.194320678710938 + ], + [ + "▁blueprint", + -13.19462776184082 + ], + [ + "▁groceries", + -13.194652557373047 + ], + [ + "möbel", + -13.194655418395996 + ], + [ + "▁Plenty", + -13.194657325744629 + ], + [ + "▁forfeit", + -13.194719314575195 + ], + [ + "méthodes", + -13.194915771484375 + ], + [ + "paving", + -13.19493293762207 + ], + [ + "outheastern", + -13.194979667663574 + ], + [ + "▁Overview", + -13.19503116607666 + ], + [ + "▁observers", + -13.195171356201172 + ], + [ + "▁Timișoara", + -13.19520378112793 + ], + [ + "noticing", + -13.195332527160645 + ], + [ + "▁Owl", + -13.195381164550781 + ], + [ + "▁1925", + -13.195517539978027 + ], + [ + "▁prüfen", + -13.195755004882812 + ], + [ + "▁Bewohner", + -13.195756912231445 + ], + [ + "▁Latvia", + -13.195770263671875 + ], + [ + "▁Tuscan", + -13.19577407836914 + ], + [ + "▁apprenticeship", + -13.195789337158203 + ], + [ + "▁courteous", + -13.1958646774292 + ], + [ + "adult", + -13.196023941040039 + ], + [ + "Licensed", + -13.196029663085938 + ], + [ + "abused", + -13.196762084960938 + ], + [ + "confidence", + -13.19678020477295 + ], + [ + "▁revolt", + -13.196782112121582 + ], + [ + "conference", + -13.196861267089844 + ], + [ + "genoss", + -13.196914672851562 + ], + [ + "▁răni", + -13.196944236755371 + ], + [ + "▁Intervention", + -13.196949005126953 + ], + [ + "▁primesc", + -13.196969985961914 + ], + [ + "trays", + -13.197041511535645 + ], + [ + "nozzle", + -13.197216033935547 + ], + [ + "▁splitting", + -13.197443962097168 + ], + [ + "▁könne", + -13.197507858276367 + ], + [ + "▁peisaj", + -13.197943687438965 + ], + [ + "▁academia", + -13.197962760925293 + ], + [ + "▁chakra", + -13.197979927062988 + ], + [ + "▁Abdul", + -13.1981201171875 + ], + [ + "▁Beschreibung", + -13.198225021362305 + ], + [ + "Regeln", + -13.19831371307373 + ], + [ + "eezy", + -13.198314666748047 + ], + [ + "▁problématique", + -13.198515892028809 + ], + [ + "▁Ausführung", + -13.198524475097656 + ], + [ + "▁reconnect", + -13.19868278503418 + ], + [ + "▁telefonic", + -13.198966026306152 + ], + [ + "▁Ethereum", + -13.199069023132324 + ], + [ + "▁Winnipeg", + -13.199069023132324 + ], + [ + "▁misconception", + -13.199069023132324 + ], + [ + "▁Verpackung", + -13.199070930480957 + ], + [ + "▁erzeugt", + -13.199097633361816 + ], + [ + "▁Identity", + -13.199104309082031 + ], + [ + "▁dunkle", + -13.199109077453613 + ], + [ + 
"sustaining", + -13.19916820526123 + ], + [ + "▁pereche", + -13.199178695678711 + ], + [ + "▁neîn", + -13.199239730834961 + ], + [ + "directorul", + -13.199291229248047 + ], + [ + "▁élabor", + -13.199584007263184 + ], + [ + "▁Hollow", + -13.19960880279541 + ], + [ + "▁getestet", + -13.199751853942871 + ], + [ + "▁Promote", + -13.199797630310059 + ], + [ + "agriculture", + -13.199920654296875 + ], + [ + "▁deosebir", + -13.199934005737305 + ], + [ + "▁neam", + -13.199999809265137 + ], + [ + "aufbau", + -13.200042724609375 + ], + [ + "▁susținut", + -13.200079917907715 + ], + [ + "fueled", + -13.200119018554688 + ], + [ + "▁impresionant", + -13.200177192687988 + ], + [ + "innate", + -13.20026969909668 + ], + [ + "grenzt", + -13.200340270996094 + ], + [ + "rescued", + -13.200514793395996 + ], + [ + "bestand", + -13.200559616088867 + ], + [ + "▁adjunct", + -13.200729370117188 + ], + [ + "▁Mischung", + -13.200754165649414 + ], + [ + "▁Lease", + -13.201258659362793 + ], + [ + "espagnol", + -13.201284408569336 + ], + [ + "▁Kickstarter", + -13.201284408569336 + ], + [ + "▁buzunar", + -13.201284408569336 + ], + [ + "▁buddies", + -13.20129108428955 + ], + [ + "käufe", + -13.201485633850098 + ], + [ + "cevoir", + -13.201582908630371 + ], + [ + "▁creşte", + -13.201675415039062 + ], + [ + "▁Cluster", + -13.201825141906738 + ], + [ + "▁obișnui", + -13.201838493347168 + ], + [ + "▁cassette", + -13.201889038085938 + ], + [ + "▁optisch", + -13.201947212219238 + ], + [ + "manned", + -13.20200252532959 + ], + [ + "schneid", + -13.202362060546875 + ], + [ + "Württemberg", + -13.202393531799316 + ], + [ + "shredded", + -13.202393531799316 + ], + [ + "▁botanical", + -13.20239543914795 + ], + [ + "characterization", + -13.202445983886719 + ], + [ + "▁Durchführung", + -13.202452659606934 + ], + [ + "▁tireless", + -13.20250129699707 + ], + [ + "lässlich", + -13.20254135131836 + ], + [ + "▁Merchant", + -13.202570915222168 + ], + [ + "joutez", + -13.20259952545166 + ], + [ + "▁amélior", + -13.202676773071289 + ], + [ + "fixed", + -13.202741622924805 + ], + [ + "kho", + -13.202760696411133 + ], + [ + "▁televizor", + -13.202948570251465 + ], + [ + "▁Davies", + -13.202964782714844 + ], + [ + "enceinte", + -13.203118324279785 + ], + [ + "▁Panorama", + -13.20350456237793 + ], + [ + "▁maternal", + -13.203507423400879 + ], + [ + "diversified", + -13.203513145446777 + ], + [ + "▁Jü", + -13.203570365905762 + ], + [ + "▁naz", + -13.203730583190918 + ], + [ + "▁plonge", + -13.2039213180542 + ], + [ + "geschickt", + -13.203944206237793 + ], + [ + "MIS", + -13.204215049743652 + ], + [ + "ragged", + -13.204553604125977 + ], + [ + "▁diarrhea", + -13.20461654663086 + ], + [ + "▁tsunami", + -13.20461654663086 + ], + [ + "▁Nikola", + -13.204625129699707 + ], + [ + "▁festivities", + -13.20464038848877 + ], + [ + "potting", + -13.20479965209961 + ], + [ + "▁telefonisch", + -13.204874038696289 + ], + [ + "TAR", + -13.204971313476562 + ], + [ + "▁schimbări", + -13.205023765563965 + ], + [ + "▁occidental", + -13.205172538757324 + ], + [ + "schloss", + -13.205179214477539 + ], + [ + "Print", + -13.205284118652344 + ], + [ + "▁autoritățil", + -13.205361366271973 + ], + [ + "idos", + -13.20556640625 + ], + [ + "mediocr", + -13.20559310913086 + ], + [ + "▁Decla", + -13.205686569213867 + ], + [ + "▁Elliott", + -13.205729484558105 + ], + [ + "▁pinpoint", + -13.205734252929688 + ], + [ + "▁disciple", + -13.20579719543457 + ], + [ + "▁Cairo", + -13.2058744430542 + ], + [ + "▁15-20", + -13.2059326171875 + ], + [ + "▁limbaj", + -13.20611572265625 + 
], + [ + "▁retenu", + -13.206154823303223 + ], + [ + "▁Blüte", + -13.20628833770752 + ], + [ + "▁MINI", + -13.206467628479004 + ], + [ + "▁lumină", + -13.206567764282227 + ], + [ + "▁flawed", + -13.206846237182617 + ], + [ + "▁Belarus", + -13.207067489624023 + ], + [ + "Totul", + -13.207207679748535 + ], + [ + "hôte", + -13.207273483276367 + ], + [ + "▁verbringen", + -13.207315444946289 + ], + [ + "▁simultaneous", + -13.207344055175781 + ], + [ + "▁competiți", + -13.207402229309082 + ], + [ + "▁lancement", + -13.207413673400879 + ], + [ + "▁proprietati", + -13.207432746887207 + ], + [ + "▁angajator", + -13.207465171813965 + ], + [ + "▁ignorant", + -13.207674026489258 + ], + [ + "▁indicative", + -13.207700729370117 + ], + [ + "▁Bearbeitung", + -13.207961082458496 + ], + [ + "▁Ungaria", + -13.207961082458496 + ], + [ + "▁Sfint", + -13.208015441894531 + ], + [ + "▁Trojan", + -13.20804214477539 + ], + [ + "▁1911", + -13.208100318908691 + ], + [ + "▁reliabl", + -13.2081937789917 + ], + [ + "6-0", + -13.20827865600586 + ], + [ + "obst", + -13.208523750305176 + ], + [ + "▁relève", + -13.208579063415527 + ], + [ + "▁standpoint", + -13.208874702453613 + ], + [ + "ridden", + -13.208918571472168 + ], + [ + "▁Pdf", + -13.209005355834961 + ], + [ + "tatewide", + -13.209051132202148 + ], + [ + "Water", + -13.209062576293945 + ], + [ + "▁Pricing", + -13.209089279174805 + ], + [ + "▁protecţi", + -13.209168434143066 + ], + [ + "November", + -13.209615707397461 + ], + [ + "▁televiziune", + -13.20964241027832 + ], + [ + "Sodium", + -13.209881782531738 + ], + [ + "douceur", + -13.209942817687988 + ], + [ + "▁Flasche", + -13.210183143615723 + ], + [ + "3.9", + -13.210193634033203 + ], + [ + "▁electromagnetic", + -13.210195541381836 + ], + [ + "▁mitochondria", + -13.210195541381836 + ], + [ + "Suddenly", + -13.210199356079102 + ], + [ + "▁Drupal", + -13.210201263427734 + ], + [ + "▁supraveghere", + -13.210211753845215 + ], + [ + "▁cornea", + -13.210288047790527 + ], + [ + "räumt", + -13.210309982299805 + ], + [ + "▁healed", + -13.210410118103027 + ], + [ + "Roc", + -13.210649490356445 + ], + [ + "▁temporar", + -13.210707664489746 + ], + [ + "▁amaze", + -13.210770606994629 + ], + [ + "▁confrunta", + -13.210833549499512 + ], + [ + "Afterward", + -13.210836410522461 + ], + [ + "▁festgelegt", + -13.21084213256836 + ], + [ + "▁Kuchen", + -13.210844993591309 + ], + [ + "▁perpetual", + -13.210858345031738 + ], + [ + "systematically", + -13.211000442504883 + ], + [ + "▁coloan", + -13.211006164550781 + ], + [ + "▁extensi", + -13.211058616638184 + ], + [ + "▁Județean", + -13.211315155029297 + ], + [ + "▁amelior", + -13.211315155029297 + ], + [ + "▁illustrator", + -13.211315155029297 + ], + [ + "▁titanium", + -13.211344718933105 + ], + [ + "SMEs", + -13.211384773254395 + ], + [ + "taxable", + -13.211578369140625 + ], + [ + "▁Borough", + -13.211607933044434 + ], + [ + "verlust", + -13.211772918701172 + ], + [ + "ductive", + -13.21233081817627 + ], + [ + "▁Küste", + -13.212335586547852 + ], + [ + "▁végétal", + -13.212410926818848 + ], + [ + "▁breastfeeding", + -13.212435722351074 + ], + [ + "▁captivating", + -13.212435722351074 + ], + [ + "▁Chevy", + -13.212443351745605 + ], + [ + "▁aerospace", + -13.212469100952148 + ], + [ + "pozitia", + -13.213095664978027 + ], + [ + "Tutor", + -13.213199615478516 + ], + [ + "▁spum", + -13.213312149047852 + ], + [ + "curând", + -13.213419914245605 + ], + [ + "iscus", + -13.213458061218262 + ], + [ + "October", + -13.213495254516602 + ], + [ + "▁Reparatur", + -13.213557243347168 + ], + [ 
+ "▁Servicii", + -13.213574409484863 + ], + [ + "▁Gonz", + -13.21357536315918 + ], + [ + "▁cybersecurity", + -13.21357536315918 + ], + [ + "▁UCLA", + -13.213678359985352 + ], + [ + "rissa", + -13.213835716247559 + ], + [ + "▁Kemp", + -13.213850021362305 + ], + [ + "▁piston", + -13.214046478271484 + ], + [ + "▁révèle", + -13.214118957519531 + ], + [ + "▁posséd", + -13.21412181854248 + ], + [ + "▁versehen", + -13.214129447937012 + ], + [ + "▁scrutin", + -13.214226722717285 + ], + [ + "donnant", + -13.21436882019043 + ], + [ + "▁Geschwindigkeit", + -13.214680671691895 + ], + [ + "▁Panasonic", + -13.214680671691895 + ], + [ + "audio", + -13.214700698852539 + ], + [ + "▁Packaging", + -13.214771270751953 + ], + [ + "phra", + -13.2147798538208 + ], + [ + "▁Letzte", + -13.214954376220703 + ], + [ + "insicht", + -13.215141296386719 + ], + [ + "▁sammeln", + -13.215243339538574 + ], + [ + "▁extins", + -13.215259552001953 + ], + [ + "▁collège", + -13.215266227722168 + ], + [ + "ancies", + -13.215343475341797 + ], + [ + "▁întâlnit", + -13.215350151062012 + ], + [ + "▁Servi", + -13.215392112731934 + ], + [ + "stattet", + -13.215493202209473 + ], + [ + "▁abstraction", + -13.215566635131836 + ], + [ + "▁candidature", + -13.215592384338379 + ], + [ + "ONU", + -13.215676307678223 + ], + [ + "▁raffle", + -13.215826988220215 + ], + [ + "▁Soldier", + -13.215834617614746 + ], + [ + "▁stipulate", + -13.215883255004883 + ], + [ + "▁vizual", + -13.215950012207031 + ], + [ + "lucht", + -13.216007232666016 + ], + [ + "▁circus", + -13.216068267822266 + ], + [ + "▁decree", + -13.216259002685547 + ], + [ + "immeuble", + -13.216367721557617 + ], + [ + "Store", + -13.216426849365234 + ], + [ + "randul", + -13.216622352600098 + ], + [ + "▁narration", + -13.216933250427246 + ], + [ + "implication", + -13.216958045959473 + ], + [ + "▁discontinued", + -13.216971397399902 + ], + [ + "▁Pilates", + -13.216989517211914 + ], + [ + "▁biais", + -13.21701431274414 + ], + [ + "panel", + -13.217325210571289 + ], + [ + "▁mower", + -13.217458724975586 + ], + [ + "▁Castro", + -13.21753978729248 + ], + [ + "pregătire", + -13.217641830444336 + ], + [ + "▁denomination", + -13.218062400817871 + ], + [ + "▁throttle", + -13.21806526184082 + ], + [ + "▁finition", + -13.218086242675781 + ], + [ + "▁clarification", + -13.218286514282227 + ], + [ + "laut", + -13.218366622924805 + ], + [ + "▁wastewater", + -13.2184419631958 + ], + [ + "▁Sanchez", + -13.218770980834961 + ], + [ + "▁Umfeld", + -13.2189359664917 + ], + [ + "▁consili", + -13.218997955322266 + ], + [ + "extrait", + -13.219013214111328 + ], + [ + "ionism", + -13.2190523147583 + ], + [ + "▁Cannabis", + -13.219186782836914 + ], + [ + "▁misconduct", + -13.219186782836914 + ], + [ + "▁shepherd", + -13.219186782836914 + ], + [ + "▁feminist", + -13.21919059753418 + ], + [ + "▁criterii", + -13.219212532043457 + ], + [ + "America", + -13.219219207763672 + ], + [ + "▁Telephone", + -13.219270706176758 + ], + [ + "▁Fritz", + -13.219438552856445 + ], + [ + "▁cheltui", + -13.219794273376465 + ], + [ + "▁Übung", + -13.219857215881348 + ], + [ + "făcută", + -13.22006893157959 + ], + [ + "▁străzi", + -13.220170021057129 + ], + [ + "influencing", + -13.220315933227539 + ], + [ + "▁Democracy", + -13.220321655273438 + ], + [ + "atorium", + -13.220376014709473 + ], + [ + "▁Stufe", + -13.220465660095215 + ], + [ + "▁Cornell", + -13.220660209655762 + ], + [ + "zugehen", + -13.22074031829834 + ], + [ + "▁coton", + -13.220804214477539 + ], + [ + "▁beinhaltet", + -13.220881462097168 + ], + [ + "▁kritisch", + 
-13.220884323120117 + ], + [ + "▁Kalender", + -13.22105884552002 + ], + [ + "▁Teig", + -13.221253395080566 + ], + [ + "cooked", + -13.221264839172363 + ], + [ + "▁diversité", + -13.221390724182129 + ], + [ + "recognizable", + -13.221446990966797 + ], + [ + "▁Dictionary", + -13.221446990966797 + ], + [ + "attribution", + -13.22145938873291 + ], + [ + "▁Teresa", + -13.221471786499023 + ], + [ + "▁Ahmad", + -13.221487998962402 + ], + [ + "HAM", + -13.221627235412598 + ], + [ + "▁floss", + -13.221668243408203 + ], + [ + "génie", + -13.2218599319458 + ], + [ + "▁Espa", + -13.221989631652832 + ], + [ + "hersteller", + -13.221993446350098 + ], + [ + "Musée", + -13.222001075744629 + ], + [ + "▁Crawford", + -13.222579002380371 + ], + [ + "▁Phantom", + -13.222579002380371 + ], + [ + "▁Jenkins", + -13.222640037536621 + ], + [ + "genauer", + -13.222774505615234 + ], + [ + "▁acţiuni", + -13.222885131835938 + ], + [ + "▁meciuri", + -13.22322940826416 + ], + [ + "▁verstärkt", + -13.22326374053955 + ], + [ + "▁troop", + -13.22341251373291 + ], + [ + "räder", + -13.223483085632324 + ], + [ + "Putting", + -13.223536491394043 + ], + [ + "NASDAQ", + -13.223712921142578 + ], + [ + "▁Buddhism", + -13.223712921142578 + ], + [ + "▁Religious", + -13.223712921142578 + ], + [ + "▁accommodating", + -13.223712921142578 + ], + [ + "▁lendemain", + -13.223712921142578 + ], + [ + "▁plywood", + -13.223714828491211 + ], + [ + "▁inflatable", + -13.223724365234375 + ], + [ + "▁sèche", + -13.223731994628906 + ], + [ + "▁fragil", + -13.223845481872559 + ], + [ + "▁Filip", + -13.224115371704102 + ], + [ + "▁Terrace", + -13.224274635314941 + ], + [ + "Biblio", + -13.22432804107666 + ], + [ + "resides", + -13.22448444366455 + ], + [ + "▁varf", + -13.22451114654541 + ], + [ + "Bildern", + -13.224528312683105 + ], + [ + "loß", + -13.224685668945312 + ], + [ + "555", + -13.224702835083008 + ], + [ + "▁astounding", + -13.224847793579102 + ], + [ + "▁brillant", + -13.224857330322266 + ], + [ + "▁Railroad", + -13.224871635437012 + ], + [ + "minimizing", + -13.224907875061035 + ], + [ + "▁Benedict", + -13.225019454956055 + ], + [ + "▁$400", + -13.225068092346191 + ], + [ + "▁schematic", + -13.225217819213867 + ], + [ + "Canada", + -13.225371360778809 + ], + [ + "▁psihic", + -13.225415229797363 + ], + [ + "▁avertiz", + -13.225497245788574 + ], + [ + "▁Breed", + -13.225550651550293 + ], + [ + "▁gradina", + -13.225606918334961 + ], + [ + "▁Liege", + -13.225822448730469 + ], + [ + "▁Retirement", + -13.225983619689941 + ], + [ + "▁pergola", + -13.226005554199219 + ], + [ + "▁Kuwait", + -13.2260103225708 + ], + [ + "▁logistic", + -13.22629451751709 + ], + [ + "▁captive", + -13.22651481628418 + ], + [ + "prepared", + -13.226568222045898 + ], + [ + "▁prononc", + -13.226568222045898 + ], + [ + "Celui", + -13.226676940917969 + ], + [ + "deutschland", + -13.227120399475098 + ], + [ + "▁devreme", + -13.227124214172363 + ], + [ + "▁părți", + -13.227270126342773 + ], + [ + "▁1934", + -13.227517127990723 + ], + [ + "▁ersetzt", + -13.227560997009277 + ], + [ + "▁frightening", + -13.227689743041992 + ], + [ + "▁fiecărui", + -13.227819442749023 + ], + [ + "correct", + -13.22799015045166 + ], + [ + "6.6", + -13.228057861328125 + ], + [ + "▁Manitoba", + -13.228259086608887 + ], + [ + "Chartered", + -13.228416442871094 + ], + [ + "▁părăs", + -13.228543281555176 + ], + [ + "Powered", + -13.228697776794434 + ], + [ + "impede", + -13.22876262664795 + ], + [ + "agonist", + -13.22878646850586 + ], + [ + "▁stratégique", + -13.228829383850098 + ], + [ + 
"▁vigilant", + -13.228830337524414 + ], + [ + "faceted", + -13.228930473327637 + ], + [ + "available", + -13.229308128356934 + ], + [ + "▁Promise", + -13.229388236999512 + ], + [ + "▁humorous", + -13.229446411132812 + ], + [ + "treibt", + -13.229449272155762 + ], + [ + "▁Patrol", + -13.229514122009277 + ], + [ + "huh", + -13.229523658752441 + ], + [ + "ztlich", + -13.229804039001465 + ], + [ + "▁rejet", + -13.2299165725708 + ], + [ + "odeur", + -13.229935646057129 + ], + [ + "usziehbar", + -13.22996997833252 + ], + [ + "▁gespannt", + -13.229972839355469 + ], + [ + "church", + -13.230018615722656 + ], + [ + "▁Popescu", + -13.230109214782715 + ], + [ + "▁einmalig", + -13.230518341064453 + ], + [ + "diluted", + -13.230551719665527 + ], + [ + "lighted", + -13.231070518493652 + ], + [ + "▁stattfinden", + -13.23111343383789 + ], + [ + "▁Reaktion", + -13.231183052062988 + ], + [ + "▁délivr", + -13.23134994506836 + ], + [ + "▁Helfer", + -13.231407165527344 + ], + [ + "Fiind", + -13.23142147064209 + ], + [ + "rmând", + -13.231507301330566 + ], + [ + "▁Beweis", + -13.231671333312988 + ], + [ + "▁Violet", + -13.231733322143555 + ], + [ + "kamera", + -13.231764793395996 + ], + [ + "▁Romney", + -13.231779098510742 + ], + [ + "▁Bradford", + -13.231800079345703 + ], + [ + "stellbar", + -13.231852531433105 + ], + [ + "▁roadmap", + -13.231921195983887 + ], + [ + "▁subconscious", + -13.23204231262207 + ], + [ + "contrasting", + -13.232138633728027 + ], + [ + "mécanisme", + -13.232254981994629 + ], + [ + "kämpft", + -13.232255935668945 + ], + [ + "▁Preston", + -13.232719421386719 + ], + [ + "▁Anliegen", + -13.232802391052246 + ], + [ + "▁necessities", + -13.232827186584473 + ], + [ + "▁detrimental", + -13.232828140258789 + ], + [ + "▁sprawl", + -13.232830047607422 + ], + [ + "▁Erfüllung", + -13.23287582397461 + ], + [ + "▁massacre", + -13.2329683303833 + ], + [ + "▁pietre", + -13.232987403869629 + ], + [ + "▁situații", + -13.233027458190918 + ], + [ + "vêtement", + -13.233080863952637 + ], + [ + "Listed", + -13.233144760131836 + ], + [ + "▁extravagant", + -13.233399391174316 + ], + [ + "▁axle", + -13.233525276184082 + ], + [ + "OTT", + -13.233663558959961 + ], + [ + "wildly", + -13.233744621276855 + ], + [ + "70,000", + -13.233797073364258 + ], + [ + "▁chauffeur", + -13.23384952545166 + ], + [ + "▁Brasov", + -13.233972549438477 + ], + [ + "▁Fähigkeiten", + -13.233972549438477 + ], + [ + "▁staatlich", + -13.234025001525879 + ], + [ + "outlines", + -13.234034538269043 + ], + [ + "▁aufmerksam", + -13.234545707702637 + ], + [ + "▁Relation", + -13.234749794006348 + ], + [ + "▁Stephan", + -13.234947204589844 + ], + [ + "yland", + -13.23494815826416 + ], + [ + "proclaimed", + -13.235086441040039 + ], + [ + "Wallet", + -13.235100746154785 + ], + [ + "verarbeitung", + -13.235118865966797 + ], + [ + "▁überraschen", + -13.235118865966797 + ], + [ + "▁Injury", + -13.235125541687012 + ], + [ + "▁horsepower", + -13.235237121582031 + ], + [ + "▁Tropical", + -13.23523998260498 + ], + [ + "▁wives", + -13.235459327697754 + ], + [ + "adherence", + -13.235677719116211 + ], + [ + "schätzung", + -13.235692977905273 + ], + [ + "▁coherent", + -13.235708236694336 + ], + [ + "parlament", + -13.23574161529541 + ], + [ + "▁stup", + -13.235852241516113 + ], + [ + "▁resonance", + -13.23626708984375 + ], + [ + "▁inheritance", + -13.236355781555176 + ], + [ + "commenced", + -13.23645305633545 + ], + [ + "▁supervise", + -13.236475944519043 + ], + [ + "▁facilitator", + -13.236488342285156 + ], + [ + "fares", + -13.236678123474121 + ], + [ + 
"▁Tibet", + -13.23672866821289 + ], + [ + "communication", + -13.236787796020508 + ], + [ + "yog", + -13.236806869506836 + ], + [ + "▁WLAN", + -13.236842155456543 + ], + [ + "▁Chili", + -13.23685073852539 + ], + [ + "▁Harold", + -13.2369966506958 + ], + [ + "▁Guerre", + -13.237005233764648 + ], + [ + "▁Femme", + -13.237146377563477 + ], + [ + "▁Lisbon", + -13.237231254577637 + ], + [ + "▁mulțumi", + -13.237415313720703 + ], + [ + "▁vorbereitet", + -13.237415313720703 + ], + [ + "▁aperture", + -13.237422943115234 + ], + [ + "▁Universities", + -13.237442016601562 + ], + [ + "▁reckless", + -13.237471580505371 + ], + [ + "▁Botschaft", + -13.237533569335938 + ], + [ + "▁Squad", + -13.238022804260254 + ], + [ + "▁buoy", + -13.238061904907227 + ], + [ + "participarea", + -13.238236427307129 + ], + [ + "stiinta", + -13.238389015197754 + ], + [ + "▁repeal", + -13.238415718078613 + ], + [ + "drilled", + -13.238489151000977 + ], + [ + "▁Conversation", + -13.238567352294922 + ], + [ + "▁subsid", + -13.238615036010742 + ], + [ + "anstalt", + -13.238741874694824 + ], + [ + "faktor", + -13.23874282836914 + ], + [ + "▁swamp", + -13.238790512084961 + ], + [ + "pflichtig", + -13.238921165466309 + ], + [ + "▁camion", + -13.238970756530762 + ], + [ + "▁gouvern", + -13.239032745361328 + ], + [ + "▁archaeological", + -13.239141464233398 + ], + [ + "▁glitch", + -13.239198684692383 + ], + [ + "average", + -13.239294052124023 + ], + [ + "▁coffre", + -13.239481925964355 + ], + [ + "▁Insert", + -13.239513397216797 + ], + [ + "▁colonne", + -13.2395601272583 + ], + [ + "▁Assess", + -13.23962116241455 + ], + [ + "▁batches", + -13.239716529846191 + ], + [ + "▁ammunition", + -13.239717483520508 + ], + [ + "▁scissors", + -13.239717483520508 + ], + [ + "▁Locksmith", + -13.239740371704102 + ], + [ + "▁Bollywood", + -13.239991188049316 + ], + [ + "expédi", + -13.240288734436035 + ], + [ + "▁descendants", + -13.24039363861084 + ], + [ + "▁unwilling", + -13.240506172180176 + ], + [ + "▁Noise", + -13.240649223327637 + ], + [ + "▁Directive", + -13.240660667419434 + ], + [ + "ATOR", + -13.240765571594238 + ], + [ + "▁Rajasthan", + -13.240870475769043 + ], + [ + "▁chaotic", + -13.240888595581055 + ], + [ + "▁NEED", + -13.24093246459961 + ], + [ + "▁părere", + -13.24095344543457 + ], + [ + "▁begonnen", + -13.241448402404785 + ], + [ + "▁Reef", + -13.241504669189453 + ], + [ + "▁vorgesehen", + -13.24161434173584 + ], + [ + "▁allocate", + -13.241826057434082 + ], + [ + "▁exceptionnel", + -13.241936683654785 + ], + [ + "▁gefertigt", + -13.24203872680664 + ], + [ + "fading", + -13.242072105407715 + ], + [ + "▁interpersonal", + -13.242178916931152 + ], + [ + "▁occupie", + -13.242204666137695 + ], + [ + "▁Teatr", + -13.242579460144043 + ], + [ + "▁kilomètres", + -13.242603302001953 + ], + [ + "▁verbinden", + -13.242608070373535 + ], + [ + "▁Frucht", + -13.242643356323242 + ], + [ + "augmented", + -13.242720603942871 + ], + [ + "▁twentieth", + -13.243181228637695 + ], + [ + "▁aggression", + -13.243183135986328 + ], + [ + "▁Miracle", + -13.243184089660645 + ], + [ + "▁peninsula", + -13.243184089660645 + ], + [ + "▁Fernando", + -13.243185043334961 + ], + [ + "▁autorităţil", + -13.243203163146973 + ], + [ + "▁Iisus", + -13.243217468261719 + ], + [ + "▁puck", + -13.243423461914062 + ], + [ + "titel", + -13.243454933166504 + ], + [ + "▁remake", + -13.243562698364258 + ], + [ + "freiheit", + -13.243563652038574 + ], + [ + "▁Belize", + -13.243590354919434 + ], + [ + "▁secundar", + -13.243779182434082 + ], + [ + "▁perpetrat", + 
-13.243786811828613 + ], + [ + "jedenfalls", + -13.243797302246094 + ], + [ + "linked", + -13.243820190429688 + ], + [ + "▁dégag", + -13.243918418884277 + ], + [ + "LAY", + -13.243926048278809 + ], + [ + "behandlung", + -13.244172096252441 + ], + [ + "▁1928", + -13.244193077087402 + ], + [ + "▁Nickel", + -13.244205474853516 + ], + [ + "rophy", + -13.244256973266602 + ], + [ + "▁autonomy", + -13.244338989257812 + ], + [ + "▁Treffen", + -13.244402885437012 + ], + [ + "▁groundbreaking", + -13.24445915222168 + ], + [ + "politisch", + -13.244484901428223 + ], + [ + "▁Vector", + -13.244553565979004 + ], + [ + "oricine", + -13.244684219360352 + ], + [ + "utilisées", + -13.244684219360352 + ], + [ + "plete", + -13.244771003723145 + ], + [ + "droht", + -13.244918823242188 + ], + [ + "▁alternativ", + -13.245104789733887 + ], + [ + "▁Bernie", + -13.245213508605957 + ], + [ + "▁embellish", + -13.245260238647461 + ], + [ + "▁Curriculum", + -13.24549674987793 + ], + [ + "herrscht", + -13.245525360107422 + ], + [ + "escalier", + -13.246126174926758 + ], + [ + "hian", + -13.246333122253418 + ], + [ + "ertaining", + -13.246387481689453 + ], + [ + "hitter", + -13.246430397033691 + ], + [ + "▁kompetente", + -13.24665641784668 + ], + [ + "▁trekking", + -13.246760368347168 + ], + [ + "EACH", + -13.246841430664062 + ], + [ + "▁Bedien", + -13.2470703125 + ], + [ + "starred", + -13.247169494628906 + ], + [ + "▁săptămâna", + -13.247236251831055 + ], + [ + "▁Gratuit", + -13.247239112854004 + ], + [ + "▁Jahrzehnte", + -13.247241020202637 + ], + [ + "ingénieur", + -13.24731731414795 + ], + [ + "▁Huang", + -13.24736213684082 + ], + [ + "Music", + -13.247401237487793 + ], + [ + "misiei", + -13.247544288635254 + ], + [ + "▁masuri", + -13.247733116149902 + ], + [ + "▁Achievement", + -13.247817039489746 + ], + [ + "▁Dorothy", + -13.247817039489746 + ], + [ + "blätter", + -13.247817993164062 + ], + [ + "éloign", + -13.247817993164062 + ], + [ + "▁Anglia", + -13.247990608215332 + ], + [ + "brach", + -13.248013496398926 + ], + [ + "▁Optimization", + -13.248085021972656 + ], + [ + "6.7", + -13.248170852661133 + ], + [ + "winkel", + -13.248210906982422 + ], + [ + "contenan", + -13.248347282409668 + ], + [ + "Astăzi", + -13.248398780822754 + ], + [ + "wiped", + -13.248441696166992 + ], + [ + "granting", + -13.248665809631348 + ], + [ + "▁plăti", + -13.248859405517578 + ], + [ + "▁Compensation", + -13.248979568481445 + ], + [ + "▁Verkäufer", + -13.248979568481445 + ], + [ + "▁angajați", + -13.248980522155762 + ], + [ + "▁diminished", + -13.24902057647705 + ], + [ + "employment", + -13.249250411987305 + ], + [ + "yahoo", + -13.249435424804688 + ], + [ + "▁détrui", + -13.249698638916016 + ], + [ + "▁suffisant", + -13.24982738494873 + ], + [ + "▁Moldovei", + -13.250144004821777 + ], + [ + "▁Pokemon", + -13.250144004821777 + ], + [ + "▁Malcolm", + -13.250144958496094 + ], + [ + "▁mysteries", + -13.250147819519043 + ], + [ + "▁Diversity", + -13.250149726867676 + ], + [ + "▁clinique", + -13.250327110290527 + ], + [ + "landais", + -13.250344276428223 + ], + [ + "▁campanii", + -13.250399589538574 + ], + [ + "▁témoignage", + -13.250439643859863 + ], + [ + "▁paralel", + -13.250467300415039 + ], + [ + "▁travailleurs", + -13.250576972961426 + ], + [ + "▁salvage", + -13.250580787658691 + ], + [ + "▁crayon", + -13.250732421875 + ], + [ + "immédiat", + -13.25085163116455 + ], + [ + "hopped", + -13.250958442687988 + ], + [ + "▁senzor", + -13.25102710723877 + ], + [ + "▁imbunatati", + -13.251073837280273 + ], + [ + "▁capitalize", + 
-13.2511568069458 + ], + [ + "▁Elephant", + -13.25130844116211 + ], + [ + "▁insomnia", + -13.25131607055664 + ], + [ + "▁Ansicht", + -13.251325607299805 + ], + [ + "▁lupte", + -13.251556396484375 + ], + [ + "▁genomic", + -13.251557350158691 + ], + [ + "▁Grape", + -13.251769065856934 + ], + [ + "MONT", + -13.25197982788086 + ], + [ + "métiers", + -13.252004623413086 + ], + [ + "▁Pierce", + -13.252123832702637 + ], + [ + "consulted", + -13.252388954162598 + ], + [ + "▁Responsible", + -13.252474784851074 + ], + [ + "symmetry", + -13.252476692199707 + ], + [ + "▁sulfur", + -13.252487182617188 + ], + [ + "▁înapoi", + -13.252510070800781 + ], + [ + "▁Junction", + -13.252549171447754 + ], + [ + "▁trilogy", + -13.252622604370117 + ], + [ + "▁unkompliziert", + -13.253059387207031 + ], + [ + "▁zugänglich", + -13.253059387207031 + ], + [ + "▁préfèr", + -13.253153800964355 + ], + [ + "oarelor", + -13.253361701965332 + ], + [ + "langage", + -13.253460884094238 + ], + [ + "admired", + -13.253589630126953 + ], + [ + "platform", + -13.253595352172852 + ], + [ + "▁pluralit", + -13.253616333007812 + ], + [ + "▁betrachtet", + -13.253643035888672 + ], + [ + "▁reproduc", + -13.253790855407715 + ], + [ + "exemple", + -13.25385570526123 + ], + [ + "▁conspir", + -13.254347801208496 + ], + [ + "▁pelvi", + -13.25437068939209 + ], + [ + "leased", + -13.254551887512207 + ], + [ + "▁souffle", + -13.254570960998535 + ], + [ + "▁approprié", + -13.254705429077148 + ], + [ + "absorbing", + -13.254817962646484 + ], + [ + "dividing", + -13.254855155944824 + ], + [ + "herently", + -13.255147933959961 + ], + [ + "▁blister", + -13.255179405212402 + ], + [ + "löst", + -13.255182266235352 + ], + [ + "Apotheke", + -13.255398750305176 + ], + [ + "▁Asociaţi", + -13.255424499511719 + ], + [ + "education", + -13.255904197692871 + ], + [ + "▁retract", + -13.255982398986816 + ], + [ + "▁appraise", + -13.255990982055664 + ], + [ + "▁Debbie", + -13.256075859069824 + ], + [ + "▁arhitect", + -13.256193161010742 + ], + [ + "▁Mohamed", + -13.256568908691406 + ], + [ + "▁îndrept", + -13.256568908691406 + ], + [ + "▁exhaustive", + -13.256753921508789 + ], + [ + "▁Notebook", + -13.257004737854004 + ], + [ + "crashing", + -13.257068634033203 + ], + [ + "▁Betreiber", + -13.257155418395996 + ], + [ + "▁présidentielle", + -13.257159233093262 + ], + [ + "▁Träger", + -13.257172584533691 + ], + [ + "▁noteworthy", + -13.257259368896484 + ], + [ + "▁séparé", + -13.257729530334473 + ], + [ + "▁doppelt", + -13.257795333862305 + ], + [ + "tină", + -13.258066177368164 + ], + [ + "Quelques", + -13.258085250854492 + ], + [ + "culoarea", + -13.258100509643555 + ], + [ + "▁ethic", + -13.258166313171387 + ], + [ + "▁cohesive", + -13.258329391479492 + ], + [ + "▁congratulations", + -13.258334159851074 + ], + [ + "▁sovereignty", + -13.25833797454834 + ], + [ + "▁Aplica", + -13.258413314819336 + ], + [ + "▁Covenant", + -13.25851058959961 + ], + [ + "▁multicultural", + -13.258591651916504 + ], + [ + "assemblée", + -13.258955001831055 + ], + [ + "▁petals", + -13.258974075317383 + ], + [ + "erode", + -13.259026527404785 + ], + [ + "▁porumb", + -13.259035110473633 + ], + [ + "▁Barrier", + -13.259050369262695 + ], + [ + "▁WWE", + -13.259085655212402 + ], + [ + "Etwa", + -13.259175300598145 + ], + [ + "▁recunosc", + -13.259271621704102 + ], + [ + "▁turtle", + -13.259415626525879 + ], + [ + "▁vârf", + -13.259444236755371 + ], + [ + "▁Ranking", + -13.259448051452637 + ], + [ + "▁sympathetic", + -13.259514808654785 + ], + [ + "exploded", + -13.2595796585083 + ], + [ + 
"▁influenț", + -13.259591102600098 + ], + [ + "▁Fireplace", + -13.25972843170166 + ], + [ + "▁Nachwuchs", + -13.260090827941895 + ], + [ + "▁empfohlen", + -13.260090827941895 + ], + [ + "Voir", + -13.260661125183105 + ], + [ + "▁Vimeo", + -13.26069164276123 + ], + [ + "▁weaving", + -13.260967254638672 + ], + [ + "beneficiar", + -13.261198043823242 + ], + [ + "▁balade", + -13.261216163635254 + ], + [ + "▁Mercy", + -13.261566162109375 + ], + [ + "3.000", + -13.26181697845459 + ], + [ + "Immediately", + -13.261857032775879 + ], + [ + "▁frosting", + -13.261868476867676 + ], + [ + "▁Fiscal", + -13.261882781982422 + ], + [ + "downloadable", + -13.26188850402832 + ], + [ + "▁Hwy", + -13.261902809143066 + ], + [ + "évoluer", + -13.261951446533203 + ], + [ + "▁vieille", + -13.2620210647583 + ], + [ + "heißen", + -13.262436866760254 + ], + [ + "▁étrangère", + -13.262446403503418 + ], + [ + "▁incapable", + -13.262490272521973 + ], + [ + "volunteered", + -13.262520790100098 + ], + [ + "fortunately", + -13.262564659118652 + ], + [ + "company", + -13.262738227844238 + ], + [ + "denkt", + -13.2627592086792 + ], + [ + "▁citesc", + -13.262818336486816 + ], + [ + "▁intrebare", + -13.262896537780762 + ], + [ + "pleasantly", + -13.262990951538086 + ], + [ + "▁Minecraft", + -13.263079643249512 + ], + [ + "▁Schmuck", + -13.26308536529541 + ], + [ + "▁maghiar", + -13.263099670410156 + ], + [ + "conductive", + -13.263339042663574 + ], + [ + "décrit", + -13.263534545898438 + ], + [ + "provide", + -13.26353931427002 + ], + [ + "▁depăş", + -13.263628959655762 + ], + [ + "ituated", + -13.263657569885254 + ], + [ + "▁trumpet", + -13.264216423034668 + ], + [ + "▁nastere", + -13.2642240524292 + ], + [ + "▁Région", + -13.264245986938477 + ], + [ + "Occupational", + -13.264411926269531 + ], + [ + "▁Grecia", + -13.264415740966797 + ], + [ + "▁Conclusion", + -13.26449203491211 + ], + [ + "▁collaborateurs", + -13.264927864074707 + ], + [ + "▁Alibaba", + -13.265398025512695 + ], + [ + "▁amplasat", + -13.265398979187012 + ], + [ + "▁Plastik", + -13.265992164611816 + ], + [ + "▁stash", + -13.266023635864258 + ], + [ + "▁Bonnie", + -13.266045570373535 + ], + [ + "▁ehrlich", + -13.266156196594238 + ], + [ + "▁contention", + -13.266193389892578 + ], + [ + "▁Oslo", + -13.266263008117676 + ], + [ + "englische", + -13.266319274902344 + ], + [ + "measurable", + -13.266439437866211 + ], + [ + "loppy", + -13.266470909118652 + ], + [ + "▁Refrigerat", + -13.266579627990723 + ], + [ + "▁remboursement", + -13.266580581665039 + ], + [ + "▁societăţi", + -13.266580581665039 + ], + [ + "translates", + -13.266607284545898 + ], + [ + "ichtigkeit", + -13.266685485839844 + ], + [ + "agentur", + -13.266741752624512 + ], + [ + "▁compute", + -13.266800880432129 + ], + [ + "berater", + -13.266921043395996 + ], + [ + "▁Georgetown", + -13.266945838928223 + ], + [ + "wolves", + -13.266951560974121 + ], + [ + "ceased", + -13.266959190368652 + ], + [ + "▁Binary", + -13.267030715942383 + ], + [ + "▁kontrolliert", + -13.267172813415527 + ], + [ + "informer", + -13.267416000366211 + ], + [ + "lehrer", + -13.267578125 + ], + [ + "lieferung", + -13.267709732055664 + ], + [ + "▁definit", + -13.267742156982422 + ], + [ + "chèque", + -13.267765045166016 + ], + [ + "▁clergy", + -13.267765045166016 + ], + [ + "▁ministries", + -13.267767906188965 + ], + [ + "▁plague", + -13.267779350280762 + ], + [ + "▁Jedi", + -13.267805099487305 + ], + [ + "▁Blackjack", + -13.268025398254395 + ], + [ + "▁subsection", + -13.26807689666748 + ], + [ + "▁Sachsen", + -13.268121719360352 
+ ], + [ + "valorile", + -13.268146514892578 + ], + [ + "molded", + -13.26816463470459 + ], + [ + "▁betroffen", + -13.268183708190918 + ], + [ + "▁adecvat", + -13.268229484558105 + ], + [ + "▁collègue", + -13.26835823059082 + ], + [ + "▁chinez", + -13.268392562866211 + ], + [ + "emelle", + -13.268695831298828 + ], + [ + "▁körperliche", + -13.268902778625488 + ], + [ + "▁titan", + -13.26891040802002 + ], + [ + "▁sophistication", + -13.268951416015625 + ], + [ + "▁provoke", + -13.268957138061523 + ], + [ + "▁pensii", + -13.269042015075684 + ], + [ + "▁Tucker", + -13.269377708435059 + ], + [ + "▁motoare", + -13.26943302154541 + ], + [ + "supported", + -13.269536972045898 + ], + [ + "▁Sicil", + -13.269697189331055 + ], + [ + "▁Ausgangs", + -13.26987361907959 + ], + [ + "▁verletzt", + -13.269908905029297 + ], + [ + "Ligue", + -13.269996643066406 + ], + [ + "▁organizatori", + -13.270026206970215 + ], + [ + "▁apprentice", + -13.270099639892578 + ], + [ + "▁Potato", + -13.270183563232422 + ], + [ + "▁Duft", + -13.27039623260498 + ], + [ + "▁medicament", + -13.270566940307617 + ], + [ + "Hôtel", + -13.270740509033203 + ], + [ + "▁Triangle", + -13.270842552185059 + ], + [ + "buted", + -13.271100044250488 + ], + [ + "▁Bentley", + -13.271336555480957 + ], + [ + "următoarele", + -13.271389961242676 + ], + [ + "animate", + -13.271404266357422 + ], + [ + "megapixel", + -13.271404266357422 + ], + [ + "einfachen", + -13.271514892578125 + ], + [ + "▁performanț", + -13.271544456481934 + ], + [ + "lurry", + -13.27184009552002 + ], + [ + "suffisamment", + -13.27192211151123 + ], + [ + "▁Weihnachten", + -13.27192211151123 + ], + [ + "▁Detective", + -13.27194595336914 + ], + [ + "▁lovit", + -13.272049903869629 + ], + [ + "▁blouse", + -13.27213191986084 + ], + [ + "▁hartie", + -13.272163391113281 + ], + [ + "vro", + -13.27225112915039 + ], + [ + "▁disastrous", + -13.272517204284668 + ], + [ + "vermutlich", + -13.2725191116333 + ], + [ + "▁Stafford", + -13.272527694702148 + ], + [ + "ehlt", + -13.272628784179688 + ], + [ + "▁vielseitig", + -13.272643089294434 + ], + [ + "Manifest", + -13.273274421691895 + ], + [ + "homage", + -13.27354907989502 + ], + [ + "menée", + -13.273566246032715 + ], + [ + "▁erläuter", + -13.27370834350586 + ], + [ + "▁volontaire", + -13.273709297180176 + ], + [ + "wrought", + -13.27371597290039 + ], + [ + "▁Naples", + -13.273719787597656 + ], + [ + "recommending", + -13.273759841918945 + ], + [ + "▁thermique", + -13.273774147033691 + ], + [ + "▁subtitle", + -13.273787498474121 + ], + [ + "▁Slam", + -13.273809432983398 + ], + [ + "▁necesitate", + -13.273809432983398 + ], + [ + "trimmed", + -13.274099349975586 + ], + [ + "urmatoarele", + -13.274178504943848 + ], + [ + "▁Sorin", + -13.274245262145996 + ], + [ + "▁compromis", + -13.274300575256348 + ], + [ + "overcoming", + -13.274477005004883 + ], + [ + "▁Samantha", + -13.274901390075684 + ], + [ + "dazzling", + -13.27490234375 + ], + [ + "▁Pearson", + -13.274903297424316 + ], + [ + "▁glazing", + -13.274911880493164 + ], + [ + "Revelation", + -13.274921417236328 + ], + [ + "destinée", + -13.275156021118164 + ], + [ + "öffnet", + -13.27515983581543 + ], + [ + "CERT", + -13.275327682495117 + ], + [ + "▁Sneak", + -13.275503158569336 + ], + [ + "proiectele", + -13.275605201721191 + ], + [ + "▁longitudinal", + -13.27609634399414 + ], + [ + "▁cocaine", + -13.276098251342773 + ], + [ + "▁universitar", + -13.276108741760254 + ], + [ + "▁refreshments", + -13.276166915893555 + ], + [ + "▁instanţ", + -13.276243209838867 + ], + [ + "▁kostenfrei", + 
-13.276397705078125 + ], + [ + "▁comédie", + -13.276451110839844 + ], + [ + "▁Locat", + -13.276725769042969 + ], + [ + "▁Albania", + -13.276732444763184 + ], + [ + "▁mécanique", + -13.276776313781738 + ], + [ + "messung", + -13.27683162689209 + ], + [ + "issus", + -13.277260780334473 + ], + [ + "pinned", + -13.277328491210938 + ], + [ + "▁sanft", + -13.277335166931152 + ], + [ + "▁geprüft", + -13.277435302734375 + ], + [ + "▁procè", + -13.277442932128906 + ], + [ + "▁Üb", + -13.277765274047852 + ], + [ + "5-0", + -13.277802467346191 + ], + [ + "▁Catering", + -13.277957916259766 + ], + [ + "▁prosperous", + -13.27801513671875 + ], + [ + "▁replication", + -13.278098106384277 + ], + [ + "▁obese", + -13.278441429138184 + ], + [ + "clerosis", + -13.278489112854004 + ], + [ + "▁Carnegie", + -13.278489112854004 + ], + [ + "▁Incredible", + -13.278489112854004 + ], + [ + "▁Teppich", + -13.278489112854004 + ], + [ + "▁crunchy", + -13.278489112854004 + ], + [ + "▁vomiting", + -13.278529167175293 + ], + [ + "▁sourire", + -13.278619766235352 + ], + [ + "publish", + -13.278948783874512 + ], + [ + "▁exterioar", + -13.279094696044922 + ], + [ + "▁forehead", + -13.279107093811035 + ], + [ + "▁climatique", + -13.279313087463379 + ], + [ + "▁conservator", + -13.279458999633789 + ], + [ + "▁Russland", + -13.279687881469727 + ], + [ + "▁kombiniert", + -13.279687881469727 + ], + [ + "▁Thrones", + -13.279688835144043 + ], + [ + "▁Griffith", + -13.27968978881836 + ], + [ + "▁fragrant", + -13.279695510864258 + ], + [ + "▁RSVP", + -13.279698371887207 + ], + [ + "klima", + -13.279751777648926 + ], + [ + "▁situație", + -13.279808044433594 + ], + [ + "deschiderea", + -13.280009269714355 + ], + [ + "▁moale", + -13.280033111572266 + ], + [ + "▁Trevor", + -13.280112266540527 + ], + [ + "ménager", + -13.28011417388916 + ], + [ + "deploying", + -13.280428886413574 + ], + [ + "▁Loft", + -13.280500411987305 + ], + [ + "▁Willkommen", + -13.28059196472168 + ], + [ + "▁Bezirks", + -13.280887603759766 + ], + [ + "▁Himself", + -13.280975341796875 + ], + [ + "▁quarant", + -13.28101634979248 + ], + [ + "▁1901", + -13.281079292297363 + ], + [ + "▁tripod", + -13.28136920928955 + ], + [ + "▁récolt", + -13.281553268432617 + ], + [ + "natură", + -13.281631469726562 + ], + [ + "School", + -13.281649589538574 + ], + [ + "contested", + -13.281773567199707 + ], + [ + "bwohl", + -13.281784057617188 + ], + [ + "Darren", + -13.281830787658691 + ], + [ + "medicine", + -13.281903266906738 + ], + [ + "▁Impuls", + -13.282041549682617 + ], + [ + "prevailing", + -13.282057762145996 + ], + [ + "▁orthodontic", + -13.282089233398438 + ], + [ + "▁sequential", + -13.282089233398438 + ], + [ + "▁Kolkata", + -13.28209114074707 + ], + [ + "▁séch", + -13.282100677490234 + ], + [ + "▁diaper", + -13.28212833404541 + ], + [ + "▁simplifie", + -13.282144546508789 + ], + [ + "▁reflux", + -13.282163619995117 + ], + [ + "▁Hypo", + -13.282242774963379 + ], + [ + "imprimer", + -13.282251358032227 + ], + [ + "▁Folosi", + -13.282401084899902 + ], + [ + "Info", + -13.282570838928223 + ], + [ + "▁Investiga", + -13.282801628112793 + ], + [ + "stabilirea", + -13.282845497131348 + ], + [ + "élis", + -13.283149719238281 + ], + [ + "ccessed", + -13.28320026397705 + ], + [ + "▁recyclable", + -13.283293724060059 + ], + [ + "▁forbidden", + -13.283295631408691 + ], + [ + "▁Colonel", + -13.283297538757324 + ], + [ + "▁nisip", + -13.28330135345459 + ], + [ + "▁Fundamental", + -13.283303260803223 + ], + [ + "▁nouveauté", + -13.283308029174805 + ], + [ + "khi", + -13.283357620239258 + 
], + [ + "▁ecology", + -13.28339672088623 + ], + [ + "▁filament", + -13.283540725708008 + ], + [ + "▁relentless", + -13.283559799194336 + ], + [ + "▁Behavior", + -13.283669471740723 + ], + [ + "titulaire", + -13.283900260925293 + ], + [ + "▁administrativ", + -13.28404426574707 + ], + [ + "▁Vorlage", + -13.284209251403809 + ], + [ + "zeigte", + -13.28427791595459 + ], + [ + "▁Bäume", + -13.284497261047363 + ], + [ + "▁Kartoffel", + -13.284497261047363 + ], + [ + "▁Possible", + -13.284500122070312 + ], + [ + "▁perturb", + -13.28466510772705 + ], + [ + "▁Grigor", + -13.284717559814453 + ], + [ + "▁streng", + -13.284759521484375 + ], + [ + "▁vânzare", + -13.285101890563965 + ], + [ + "concentrating", + -13.285698890686035 + ], + [ + "▁rechtzeitig", + -13.2857027053833 + ], + [ + "▁eternity", + -13.28570556640625 + ], + [ + "▁Puzzle", + -13.28575611114502 + ], + [ + "▁malade", + -13.285775184631348 + ], + [ + "▁Metallic", + -13.285776138305664 + ], + [ + "▁Unterhaltung", + -13.285783767700195 + ], + [ + "▁4:00", + -13.285820960998535 + ], + [ + "▁magique", + -13.285908699035645 + ], + [ + "▁cellphone", + -13.285975456237793 + ], + [ + "▁inhibition", + -13.286023139953613 + ], + [ + "▁remplacement", + -13.286025047302246 + ], + [ + "▁WWII", + -13.286089897155762 + ], + [ + "Eff", + -13.286258697509766 + ], + [ + "kontakt", + -13.286832809448242 + ], + [ + "Update", + -13.286869049072266 + ], + [ + "▁Emerald", + -13.286910057067871 + ], + [ + "▁hammock", + -13.286910057067871 + ], + [ + "POWER", + -13.286917686462402 + ], + [ + "automne", + -13.286917686462402 + ], + [ + "▁(2004)", + -13.286961555480957 + ], + [ + "▁participanți", + -13.287012100219727 + ], + [ + "1998)", + -13.287014961242676 + ], + [ + "▁deletion", + -13.287186622619629 + ], + [ + "▁Proiect", + -13.287226676940918 + ], + [ + "IDENT", + -13.287504196166992 + ], + [ + "▁precis", + -13.287623405456543 + ], + [ + "▁limp", + -13.287676811218262 + ], + [ + "▁Pompe", + -13.287686347961426 + ], + [ + "▁ménage", + -13.28780746459961 + ], + [ + "▁Wahrheit", + -13.288119316101074 + ], + [ + "▁Intelligent", + -13.28812026977539 + ], + [ + "▁instability", + -13.2881441116333 + ], + [ + "insurance", + -13.288346290588379 + ], + [ + "▁Nursery", + -13.288352966308594 + ], + [ + "▁synonym", + -13.288427352905273 + ], + [ + "▁ignite", + -13.28848934173584 + ], + [ + "▁Vernon", + -13.28849983215332 + ], + [ + "purchase", + -13.288524627685547 + ], + [ + "▁disponibilité", + -13.288662910461426 + ], + [ + "▁producţi", + -13.28909969329834 + ], + [ + "▁Pentagon", + -13.289329528808594 + ], + [ + "▁illumination", + -13.289329528808594 + ], + [ + "▁obsolete", + -13.289329528808594 + ], + [ + "▁unacceptable", + -13.28933048248291 + ], + [ + "Gleichzeitig", + -13.289938926696777 + ], + [ + "rutsch", + -13.290071487426758 + ], + [ + "viziuni", + -13.290409088134766 + ], + [ + "▁Nicaragua", + -13.29054069519043 + ], + [ + "▁hesitation", + -13.290541648864746 + ], + [ + "▁nascut", + -13.290545463562012 + ], + [ + "▁Warehouse", + -13.29055404663086 + ], + [ + "geboten", + -13.290558815002441 + ], + [ + "▁Lagos", + -13.290844917297363 + ], + [ + "produced", + -13.290874481201172 + ], + [ + "cativa", + -13.291309356689453 + ], + [ + "▁Tracy", + -13.291326522827148 + ], + [ + "Projekt", + -13.291468620300293 + ], + [ + "▁malaria", + -13.291692733764648 + ], + [ + "▁Baldwin", + -13.291755676269531 + ], + [ + "Take", + -13.291791915893555 + ], + [ + "▁fluctuations", + -13.291844367980957 + ], + [ + "▁titular", + -13.29194450378418 + ], + [ + "bmw", + 
-13.291976928710938 + ], + [ + "▁brevet", + -13.29202651977539 + ], + [ + "étapes", + -13.292173385620117 + ], + [ + "wikipedia", + -13.292373657226562 + ], + [ + "▁corporal", + -13.292424201965332 + ], + [ + "▁Schönheit", + -13.2926664352417 + ], + [ + "utilizatorii", + -13.292695999145508 + ], + [ + "INFO", + -13.292807579040527 + ], + [ + "▁formularul", + -13.292900085449219 + ], + [ + "femi", + -13.292959213256836 + ], + [ + "Konferenz", + -13.29296875 + ], + [ + "▁carnival", + -13.29296875 + ], + [ + "▁Kräuter", + -13.292969703674316 + ], + [ + "▁gelernt", + -13.292981147766113 + ], + [ + "▁Sherman", + -13.293017387390137 + ], + [ + "▁persistence", + -13.293289184570312 + ], + [ + "▁Behörden", + -13.293577194213867 + ], + [ + "▁Frühjahr", + -13.293578147888184 + ], + [ + "▁Guvern", + -13.293649673461914 + ], + [ + "interpreting", + -13.293878555297852 + ], + [ + "▁nommé", + -13.294021606445312 + ], + [ + "consult", + -13.294035911560059 + ], + [ + "▁obligaţi", + -13.294184684753418 + ], + [ + "▁Newspaper", + -13.2942476272583 + ], + [ + "(2005)", + -13.294515609741211 + ], + [ + "pumped", + -13.294614791870117 + ], + [ + "▁autoritati", + -13.294634819030762 + ], + [ + "▁aplicatii", + -13.294644355773926 + ], + [ + "▁verhindert", + -13.294794082641602 + ], + [ + "▁évident", + -13.294794082641602 + ], + [ + "▁getrennt", + -13.294795036315918 + ], + [ + "▁Encourage", + -13.295403480529785 + ], + [ + "▁lurk", + -13.295432090759277 + ], + [ + "▁condemned", + -13.295455932617188 + ], + [ + "▁4:30", + -13.295502662658691 + ], + [ + "labelled", + -13.29576587677002 + ], + [ + "ordinea", + -13.295899391174316 + ], + [ + "▁pantofi", + -13.296012878417969 + ], + [ + "Default", + -13.296042442321777 + ], + [ + "▁beruh", + -13.296120643615723 + ], + [ + "/01/", + -13.296268463134766 + ], + [ + "league", + -13.296503067016602 + ], + [ + "▁couvert", + -13.296524047851562 + ], + [ + "▁competencies", + -13.296622276306152 + ], + [ + "▁mozzarella", + -13.296622276306152 + ], + [ + "jihad", + -13.29662799835205 + ], + [ + "▁gossip", + -13.29662799835205 + ], + [ + "▁Omaha", + -13.296628952026367 + ], + [ + "▁coincidence", + -13.296669960021973 + ], + [ + "▁Pinot", + -13.296710968017578 + ], + [ + "dotted", + -13.296789169311523 + ], + [ + "schilder", + -13.297197341918945 + ], + [ + "▁Munte", + -13.297224998474121 + ], + [ + "▁Vermieter", + -13.297232627868652 + ], + [ + "▁britannique", + -13.297232627868652 + ], + [ + "▁comentariu", + -13.297235488891602 + ], + [ + "abonnement", + -13.29725456237793 + ], + [ + "▁inventive", + -13.29727840423584 + ], + [ + "complie", + -13.297279357910156 + ], + [ + "composée", + -13.29734992980957 + ], + [ + "▁glatt", + -13.297684669494629 + ], + [ + "adorned", + -13.297842979431152 + ], + [ + "▁Opportunities", + -13.297842979431152 + ], + [ + "▁equilibrium", + -13.297842979431152 + ], + [ + "▁persuasive", + -13.297842979431152 + ], + [ + "▁achiziţi", + -13.297843933105469 + ], + [ + "▁déterminer", + -13.297843933105469 + ], + [ + "▁fleece", + -13.297857284545898 + ], + [ + "▁ivory", + -13.29786205291748 + ], + [ + "▁Genuss", + -13.297900199890137 + ], + [ + "Thousands", + -13.297930717468262 + ], + [ + "▁izolat", + -13.297965049743652 + ], + [ + "▁symbolize", + -13.298033714294434 + ], + [ + "gâteau", + -13.298051834106445 + ], + [ + "▁relații", + -13.298062324523926 + ], + [ + "▁Classroom", + -13.298144340515137 + ], + [ + "settlers", + -13.298155784606934 + ], + [ + "▁vremuri", + -13.298195838928223 + ], + [ + "▁Serial", + -13.29838752746582 + ], + [ + "▁boite", + 
-13.298399925231934 + ], + [ + "équivalent", + -13.298453330993652 + ], + [ + "▁benutzen", + -13.298454284667969 + ], + [ + "▁Recomand", + -13.298462867736816 + ], + [ + "▁Sinai", + -13.298968315124512 + ], + [ + "▁Advertise", + -13.29906940460205 + ], + [ + "▁Thermal", + -13.299206733703613 + ], + [ + "fiance", + -13.299471855163574 + ], + [ + "▁universitaire", + -13.299683570861816 + ], + [ + "▁rivière", + -13.299793243408203 + ], + [ + "▁reimburse", + -13.299907684326172 + ], + [ + "ţara", + -13.299932479858398 + ], + [ + "tician", + -13.30002498626709 + ], + [ + "intelligence", + -13.300041198730469 + ], + [ + "▁abgestimmt", + -13.300288200378418 + ], + [ + "▁compliqué", + -13.300288200378418 + ], + [ + "▁succulent", + -13.300297737121582 + ], + [ + "opéra", + -13.300395011901855 + ], + [ + "7-9", + -13.300456047058105 + ], + [ + "▁pierderi", + -13.300654411315918 + ], + [ + "extinction", + -13.30090045928955 + ], + [ + "▁Zweifel", + -13.30103874206543 + ], + [ + "ATCH", + -13.30112361907959 + ], + [ + "10,000", + -13.301222801208496 + ], + [ + "▁uninterrupted", + -13.301513671875 + ], + [ + "▁Eigentum", + -13.301517486572266 + ], + [ + "▁Utility", + -13.301517486572266 + ], + [ + "ско", + -13.301529884338379 + ], + [ + "▁tornado", + -13.301544189453125 + ], + [ + "▁Güte", + -13.301727294921875 + ], + [ + "▁pertain", + -13.301923751831055 + ], + [ + "painters", + -13.301993370056152 + ], + [ + "Help", + -13.3021240234375 + ], + [ + "▁străinătate", + -13.30212688446045 + ], + [ + "▁stammen", + -13.302170753479004 + ], + [ + "opposition", + -13.302229881286621 + ], + [ + "▁rhino", + -13.302233695983887 + ], + [ + "intervenir", + -13.302427291870117 + ], + [ + "▁hyperlink", + -13.302441596984863 + ], + [ + "höchst", + -13.302518844604492 + ], + [ + "roach", + -13.302627563476562 + ], + [ + "wSt", + -13.302687644958496 + ], + [ + "▁monastery", + -13.302740097045898 + ], + [ + "▁algae", + -13.302754402160645 + ], + [ + "▁shaving", + -13.302757263183594 + ], + [ + "présentent", + -13.302804946899414 + ], + [ + "Africa", + -13.302860260009766 + ], + [ + "eigener", + -13.303047180175781 + ], + [ + "▁glace", + -13.303153991699219 + ], + [ + "▁discurs", + -13.303179740905762 + ], + [ + "▁autograph", + -13.303204536437988 + ], + [ + "▁Conflict", + -13.303359031677246 + ], + [ + "▁școli", + -13.303411483764648 + ], + [ + "▁excerpt", + -13.303617477416992 + ], + [ + "correlated", + -13.303628921508789 + ], + [ + "empel", + -13.303841590881348 + ], + [ + "cryptocurrencies", + -13.30396842956543 + ], + [ + "▁symposium", + -13.30396842956543 + ], + [ + "▁gewohnt", + -13.303994178771973 + ], + [ + "PTSD", + -13.304070472717285 + ], + [ + "▁harmonic", + -13.304166793823242 + ], + [ + "discarded", + -13.304282188415527 + ], + [ + "▁Flint", + -13.304359436035156 + ], + [ + "Russia", + -13.304422378540039 + ], + [ + "▁ședinț", + -13.304583549499512 + ], + [ + "▁accusations", + -13.304727554321289 + ], + [ + "▁încălc", + -13.304827690124512 + ], + [ + "sendung", + -13.305152893066406 + ], + [ + "▁Chiropractic", + -13.305197715759277 + ], + [ + "▁excepți", + -13.305201530456543 + ], + [ + "▁proclaim", + -13.305201530456543 + ], + [ + "▁Flexible", + -13.305295944213867 + ], + [ + "▁Hüt", + -13.30538272857666 + ], + [ + "▁Baltic", + -13.30539608001709 + ], + [ + "▁inaltime", + -13.30553913116455 + ], + [ + "▁montré", + -13.305868148803711 + ], + [ + "exécution", + -13.305898666381836 + ], + [ + "partei", + -13.305961608886719 + ], + [ + "▁specifie", + -13.306072235107422 + ], + [ + "▁Jackpot", + 
-13.306105613708496 + ], + [ + "▁stumble", + -13.306134223937988 + ], + [ + "▁individuel", + -13.306161880493164 + ], + [ + "▁Veteran", + -13.306217193603516 + ], + [ + "▁Supplies", + -13.306428909301758 + ], + [ + "▁excavation", + -13.306428909301758 + ], + [ + "▁Libraries", + -13.306469917297363 + ], + [ + "▁prénom", + -13.306476593017578 + ], + [ + "WOOD", + -13.30650806427002 + ], + [ + "meciul", + -13.306917190551758 + ], + [ + "Chef", + -13.306938171386719 + ], + [ + "▁SUPER", + -13.306940078735352 + ], + [ + "Appeals", + -13.30696964263916 + ], + [ + "terapia", + -13.307113647460938 + ], + [ + "▁relatii", + -13.30713939666748 + ], + [ + "modifying", + -13.30748462677002 + ], + [ + "▁Regulament", + -13.307662010192871 + ], + [ + "▁bănci", + -13.307662963867188 + ], + [ + "▁agility", + -13.307666778564453 + ], + [ + "▁Magnetic", + -13.307674407958984 + ], + [ + "▁piatra", + -13.30767822265625 + ], + [ + "▁Governance", + -13.307680130004883 + ], + [ + "▁clown", + -13.30772876739502 + ], + [ + "▁Choir", + -13.308337211608887 + ], + [ + "aujourd", + -13.308548927307129 + ], + [ + "▁vendeur", + -13.308732032775879 + ], + [ + "ndererseits", + -13.308859825134277 + ], + [ + "▁Bahrain", + -13.3088960647583 + ], + [ + "▁Timisoara", + -13.3088960647583 + ], + [ + "▁exklusive", + -13.3088960647583 + ], + [ + "▁Population", + -13.309001922607422 + ], + [ + "▁nepo", + -13.309073448181152 + ], + [ + "▁relish", + -13.309085845947266 + ], + [ + "▁Pumpkin", + -13.309571266174316 + ], + [ + "▁détente", + -13.309784889221191 + ], + [ + "▁episcop", + -13.309860229492188 + ], + [ + "patterned", + -13.309929847717285 + ], + [ + "▁THANK", + -13.310132026672363 + ], + [ + "▁Widerspruch", + -13.310132026672363 + ], + [ + "▁Crisis", + -13.310189247131348 + ], + [ + "▁goose", + -13.310226440429688 + ], + [ + "▁couture", + -13.310307502746582 + ], + [ + "▁hinweg", + -13.310446739196777 + ], + [ + "supplemental", + -13.310486793518066 + ], + [ + "shingles", + -13.31060791015625 + ], + [ + "investir", + -13.310635566711426 + ], + [ + "▁steriliz", + -13.310759544372559 + ], + [ + "tractors", + -13.310761451721191 + ], + [ + "cellules", + -13.31078815460205 + ], + [ + "▁Gloria", + -13.310888290405273 + ], + [ + "▁teilnehmen", + -13.311092376708984 + ], + [ + "companiile", + -13.311248779296875 + ], + [ + "surfacing", + -13.311279296875 + ], + [ + "▁nostalgic", + -13.311368942260742 + ], + [ + "▁Badezimmer", + -13.311369895935059 + ], + [ + "▁conjoint", + -13.311370849609375 + ], + [ + "vacancy", + -13.31145191192627 + ], + [ + "▁homeland", + -13.311582565307617 + ], + [ + "▁Abschnitt", + -13.311625480651855 + ], + [ + "Cartea", + -13.311653137207031 + ], + [ + "SIA", + -13.311782836914062 + ], + [ + "▁explode", + -13.311786651611328 + ], + [ + "fostering", + -13.311959266662598 + ], + [ + "▁ceilalti", + -13.31198787689209 + ], + [ + "▁gentil", + -13.31214714050293 + ], + [ + "oplasty", + -13.31218433380127 + ], + [ + "bodied", + -13.312424659729004 + ], + [ + "▁1906", + -13.312499046325684 + ], + [ + "▁BlackBerry", + -13.312607765197754 + ], + [ + "▁Presbyterian", + -13.312607765197754 + ], + [ + "▁berücksichtigt", + -13.312607765197754 + ], + [ + "▁compartiment", + -13.312607765197754 + ], + [ + "▁compulsory", + -13.312607765197754 + ], + [ + "Millennial", + -13.312609672546387 + ], + [ + "▁sanitar", + -13.312638282775879 + ], + [ + "▁stink", + -13.312975883483887 + ], + [ + "lius", + -13.313047409057617 + ], + [ + "thankfully", + -13.313136100769043 + ], + [ + "modalité", + -13.313173294067383 + ], + [ + 
"▁cunoaște", + -13.313226699829102 + ], + [ + "Infrastruktur", + -13.313227653503418 + ], + [ + "▁studenți", + -13.313253402709961 + ], + [ + "Bref", + -13.313270568847656 + ], + [ + "London", + -13.31360149383545 + ], + [ + "▁Arduino", + -13.313847541809082 + ], + [ + "▁cilantro", + -13.313847541809082 + ], + [ + "▁Rafael", + -13.313848495483398 + ], + [ + "▁untersucht", + -13.313861846923828 + ], + [ + "▁martyr", + -13.31389331817627 + ], + [ + "▁Mormon", + -13.313984870910645 + ], + [ + "▁wicket", + -13.313996315002441 + ], + [ + "cherished", + -13.314335823059082 + ], + [ + "liquid", + -13.314417839050293 + ], + [ + "▁dorinț", + -13.314571380615234 + ], + [ + "lehnt", + -13.314717292785645 + ], + [ + "meisterschaft", + -13.31493091583252 + ], + [ + "fondateur", + -13.314971923828125 + ], + [ + "câble", + -13.315078735351562 + ], + [ + "▁erreichbar", + -13.315091133117676 + ], + [ + "▁footsteps", + -13.315094947814941 + ], + [ + "▁Kloster", + -13.31519889831543 + ], + [ + "▁multiplayer", + -13.315218925476074 + ], + [ + "▁substitu", + -13.315276145935059 + ], + [ + "▁Frisch", + -13.315526962280273 + ], + [ + "▁arsenal", + -13.315712928771973 + ], + [ + "explication", + -13.315866470336914 + ], + [ + "▁conexiun", + -13.315986633300781 + ], + [ + "muddy", + -13.316045761108398 + ], + [ + "▁Reifen", + -13.316120147705078 + ], + [ + "auraient", + -13.316132545471191 + ], + [ + "▁biologic", + -13.316136360168457 + ], + [ + "▁acquainted", + -13.316332817077637 + ], + [ + "▁shelving", + -13.316341400146484 + ], + [ + "Stunning", + -13.316373825073242 + ], + [ + "▁Clothing", + -13.316394805908203 + ], + [ + "▁kidding", + -13.316431999206543 + ], + [ + "excellent", + -13.316452026367188 + ], + [ + "▁susțin", + -13.316487312316895 + ], + [ + "bătut", + -13.316502571105957 + ], + [ + "elusive", + -13.3165283203125 + ], + [ + "werbung", + -13.316743850708008 + ], + [ + "slipping", + -13.316813468933105 + ], + [ + "▁configura", + -13.316926956176758 + ], + [ + "▁proaspat", + -13.31695556640625 + ], + [ + "▁apporté", + -13.317120552062988 + ], + [ + "▁démarr", + -13.317328453063965 + ], + [ + "Spezialist", + -13.317578315734863 + ], + [ + "▁obligați", + -13.317578315734863 + ], + [ + "▁societăți", + -13.317578315734863 + ], + [ + "▁malpractice", + -13.31757926940918 + ], + [ + "Hundreds", + -13.317609786987305 + ], + [ + "▁3:1", + -13.318138122558594 + ], + [ + "▁computation", + -13.31817626953125 + ], + [ + "▁Heilig", + -13.318528175354004 + ], + [ + "▁Helsinki", + -13.318824768066406 + ], + [ + "▁firefighters", + -13.318824768066406 + ], + [ + "▁obedience", + -13.318824768066406 + ], + [ + "▁evacuate", + -13.318825721740723 + ], + [ + "▁Floyd", + -13.318840026855469 + ], + [ + "▁Disneyland", + -13.318859100341797 + ], + [ + "Cathy", + -13.319069862365723 + ], + [ + "▁Broken", + -13.319278717041016 + ], + [ + "cript", + -13.319952011108398 + ], + [ + "▁Gewähr", + -13.320073127746582 + ], + [ + "▁embarrassed", + -13.320073127746582 + ], + [ + "▁Leicht", + -13.32007884979248 + ], + [ + "▁témoign", + -13.320379257202148 + ], + [ + "▁viteze", + -13.3206148147583 + ], + [ + "▁hallmark", + -13.320731163024902 + ], + [ + "uploads", + -13.32082462310791 + ], + [ + "▁Submission", + -13.320929527282715 + ], + [ + "▁croissant", + -13.321049690246582 + ], + [ + "awning", + -13.32105827331543 + ], + [ + "detecting", + -13.321198463439941 + ], + [ + "▁Bahamas", + -13.321322441101074 + ], + [ + "▁Kathleen", + -13.321325302124023 + ], + [ + "▁latch", + -13.321377754211426 + ], + [ + "▁pronounce", + 
-13.321380615234375 + ], + [ + "▁choke", + -13.321428298950195 + ], + [ + "▁$50,000", + -13.3215970993042 + ], + [ + "▁historische", + -13.321642875671387 + ], + [ + "jugé", + -13.321829795837402 + ], + [ + "▁MasterCard", + -13.321949005126953 + ], + [ + "▁Horror", + -13.321955680847168 + ], + [ + "spoiled", + -13.321958541870117 + ], + [ + "▁apariți", + -13.32202434539795 + ], + [ + "geschaltet", + -13.3225736618042 + ], + [ + "▁Londra", + -13.322578430175781 + ], + [ + "viction", + -13.322580337524414 + ], + [ + "▁Disaster", + -13.322593688964844 + ], + [ + "▁desigur", + -13.322601318359375 + ], + [ + "▁substanț", + -13.322601318359375 + ], + [ + "▁compiler", + -13.322613716125488 + ], + [ + "▁vanzari", + -13.32262897491455 + ], + [ + "▁Simulation", + -13.322669982910156 + ], + [ + "Occasionally", + -13.322842597961426 + ], + [ + "Seite", + -13.322884559631348 + ], + [ + "Linked", + -13.322938919067383 + ], + [ + "Roll", + -13.323015213012695 + ], + [ + "▁trajet", + -13.323244094848633 + ], + [ + "Molecular", + -13.323834419250488 + ], + [ + "▁pragmatic", + -13.323843002319336 + ], + [ + "judecată", + -13.323915481567383 + ], + [ + "ров", + -13.32400894165039 + ], + [ + "serrurerie", + -13.324024200439453 + ], + [ + "▁reconstruct", + -13.324129104614258 + ], + [ + "▁heureuse", + -13.324179649353027 + ], + [ + "▁knight", + -13.32422924041748 + ], + [ + "knowingly", + -13.324431419372559 + ], + [ + "▁perspectiva", + -13.324453353881836 + ], + [ + "ordinary", + -13.324604034423828 + ], + [ + "▁chaudière", + -13.324721336364746 + ], + [ + "Neill", + -13.324727058410645 + ], + [ + "cellulose", + -13.325080871582031 + ], + [ + "▁Delicious", + -13.325080871582031 + ], + [ + "▁incearca", + -13.325080871582031 + ], + [ + "▁retrospective", + -13.325080871582031 + ], + [ + "▁mundane", + -13.325081825256348 + ], + [ + "▁definiert", + -13.32508659362793 + ], + [ + "▁cockpit", + -13.325088500976562 + ], + [ + "Aktionen", + -13.325363159179688 + ], + [ + "▁distanț", + -13.325654029846191 + ], + [ + "▁diplôme", + -13.325708389282227 + ], + [ + "prepaid", + -13.325737953186035 + ], + [ + "▁Tabellen", + -13.325758934020996 + ], + [ + "▁economie", + -13.325770378112793 + ], + [ + "December", + -13.325826644897461 + ], + [ + "Punkten", + -13.32613754272461 + ], + [ + "▁Punch", + -13.32614517211914 + ], + [ + "Martin", + -13.326154708862305 + ], + [ + "▁Espresso", + -13.326314926147461 + ], + [ + "▁ubiquitous", + -13.326335906982422 + ], + [ + "▁Mongolia", + -13.326337814331055 + ], + [ + "▁collabor", + -13.326635360717773 + ], + [ + "▁Vordergrund", + -13.32696533203125 + ], + [ + "cameră", + -13.327091217041016 + ], + [ + "represented", + -13.327268600463867 + ], + [ + "▁AUTO", + -13.327446937561035 + ], + [ + "▁Ofert", + -13.327542304992676 + ], + [ + "neig", + -13.327593803405762 + ], + [ + "▁Hazard", + -13.327595710754395 + ], + [ + "▁Constanta", + -13.327596664428711 + ], + [ + "▁tumour", + -13.32759952545166 + ], + [ + "▁Neighborhood", + -13.327603340148926 + ], + [ + "▁detaliat", + -13.327619552612305 + ], + [ + "▁extraordinaire", + -13.327665328979492 + ], + [ + "▁Therapeutic", + -13.327686309814453 + ], + [ + "predicting", + -13.327693939208984 + ], + [ + "▁institutii", + -13.32776165008545 + ], + [ + "ifizierung", + -13.327797889709473 + ], + [ + "wählt", + -13.328207015991211 + ], + [ + "▁remarquable", + -13.32822322845459 + ], + [ + "Invent", + -13.328512191772461 + ], + [ + "▁foloseșt", + -13.328514099121094 + ], + [ + "öfte", + -13.328703880310059 + ], + [ + "▁discreet", + -13.328853607177734 + 
], + [ + "▁Flickr", + -13.32885456085205 + ], + [ + "▁trésor", + -13.328856468200684 + ], + [ + "▁steroids", + -13.328872680664062 + ], + [ + "▁personnalité", + -13.328953742980957 + ], + [ + "▁Krankenhaus", + -13.32901668548584 + ], + [ + "▁affordability", + -13.329218864440918 + ], + [ + "deuten", + -13.329398155212402 + ], + [ + "Detailed", + -13.329412460327148 + ], + [ + "Walk", + -13.329444885253906 + ], + [ + "▁parallèle", + -13.329483032226562 + ], + [ + "thèse", + -13.329649925231934 + ], + [ + "▁gefördert", + -13.330117225646973 + ], + [ + "Greeting", + -13.33014965057373 + ], + [ + "gelistet", + -13.330172538757324 + ], + [ + "▁chlorine", + -13.330392837524414 + ], + [ + "behält", + -13.33039665222168 + ], + [ + "emption", + -13.330435752868652 + ], + [ + "▁mobilité", + -13.330601692199707 + ], + [ + "▁randonnée", + -13.330668449401855 + ], + [ + "habitant", + -13.330718040466309 + ], + [ + "zilla", + -13.331082344055176 + ], + [ + "▁Lili", + -13.331160545349121 + ], + [ + "▁répét", + -13.331341743469238 + ], + [ + "trucât", + -13.331376075744629 + ], + [ + "▁Hospice", + -13.331376075744629 + ], + [ + "▁grassroots", + -13.331377029418945 + ], + [ + "▁affiché", + -13.331393241882324 + ], + [ + "pears", + -13.331470489501953 + ], + [ + "▁linistit", + -13.331497192382812 + ], + [ + "▁Patron", + -13.331552505493164 + ], + [ + "▁Stalin", + -13.331626892089844 + ], + [ + "▁închiri", + -13.331751823425293 + ], + [ + "▁Apostol", + -13.332018852233887 + ], + [ + "▁poudre", + -13.332246780395508 + ], + [ + "▁piscin", + -13.332419395446777 + ], + [ + "merlin", + -13.33259391784668 + ], + [ + "limited", + -13.33260726928711 + ], + [ + "▁métallique", + -13.332639694213867 + ], + [ + "gazebo", + -13.33267879486084 + ], + [ + "weilige", + -13.332718849182129 + ], + [ + "prosecutors", + -13.33278751373291 + ], + [ + "Expert", + -13.33314323425293 + ], + [ + "Assemblée", + -13.333271980285645 + ], + [ + "▁fauna", + -13.333285331726074 + ], + [ + "▁Turtle", + -13.333353996276855 + ], + [ + "▁Consortium", + -13.333905220031738 + ], + [ + "▁assemblies", + -13.333905220031738 + ], + [ + "▁trajectory", + -13.333905220031738 + ], + [ + "▁Vineyard", + -13.333906173706055 + ], + [ + "▁Mehrwert", + -13.334037780761719 + ], + [ + "▁sunflower", + -13.334043502807617 + ], + [ + "develop", + -13.334060668945312 + ], + [ + "▁heroic", + -13.334100723266602 + ], + [ + "▁riscuri", + -13.334151268005371 + ], + [ + "oeuf", + -13.334300994873047 + ], + [ + "influence", + -13.334452629089355 + ], + [ + "▁Voraussetzung", + -13.334500312805176 + ], + [ + "utoritatea", + -13.334518432617188 + ], + [ + "Produsul", + -13.334654808044434 + ], + [ + "▁gewährleistet", + -13.335171699523926 + ], + [ + "▁brûl", + -13.335175514221191 + ], + [ + "▁Column", + -13.335184097290039 + ], + [ + "▁trousers", + -13.335209846496582 + ], + [ + "▁posterior", + -13.33521556854248 + ], + [ + "glyph", + -13.335251808166504 + ], + [ + "▁Happen", + -13.335280418395996 + ], + [ + "▁créateur", + -13.335667610168457 + ], + [ + "▁apostle", + -13.335898399353027 + ], + [ + "▁padding", + -13.335907936096191 + ], + [ + "▁Digitalisierung", + -13.335908889770508 + ], + [ + "▁Laurie", + -13.335915565490723 + ], + [ + "▁Erwerb", + -13.336065292358398 + ], + [ + "▁bătrân", + -13.336440086364746 + ], + [ + "▁harmonious", + -13.336441040039062 + ], + [ + "▁ailments", + -13.336456298828125 + ], + [ + "▁Venue", + -13.33650016784668 + ], + [ + "▁Motorcycle", + -13.336523056030273 + ], + [ + "▁cortex", + -13.336551666259766 + ], + [ + "▁Sunrise", + 
-13.336636543273926 + ], + [ + "Software", + -13.336775779724121 + ], + [ + "▁advocat", + -13.336934089660645 + ], + [ + "essentiellement", + -13.337422370910645 + ], + [ + "•", + -13.337494850158691 + ], + [ + "părut", + -13.337522506713867 + ], + [ + "▁Suffolk", + -13.337711334228516 + ], + [ + "▁righteousness", + -13.337711334228516 + ], + [ + "▁Shirley", + -13.337712287902832 + ], + [ + "▁Famous", + -13.337749481201172 + ], + [ + "▁emulate", + -13.337788581848145 + ], + [ + "vermögen", + -13.33788776397705 + ], + [ + "generated", + -13.337963104248047 + ], + [ + "Ecole", + -13.337977409362793 + ], + [ + "▁managerial", + -13.338086128234863 + ], + [ + "believe", + -13.338091850280762 + ], + [ + "▁récupére", + -13.338348388671875 + ], + [ + "▁recens", + -13.338531494140625 + ], + [ + "▁Barrett", + -13.338778495788574 + ], + [ + "▁courageous", + -13.338814735412598 + ], + [ + "9.95", + -13.338961601257324 + ], + [ + "▁Odyssey", + -13.338982582092285 + ], + [ + "▁Violence", + -13.338982582092285 + ], + [ + "▁concasseur", + -13.338982582092285 + ], + [ + "▁evacuation", + -13.338982582092285 + ], + [ + "▁kontinuierlich", + -13.338982582092285 + ], + [ + "▁epidemi", + -13.3389892578125 + ], + [ + "▁disconnected", + -13.339197158813477 + ], + [ + "frucht", + -13.339339256286621 + ], + [ + "Trustees", + -13.339348793029785 + ], + [ + "▁Massiv", + -13.339459419250488 + ], + [ + "gebucht", + -13.339473724365234 + ], + [ + "stütze", + -13.339526176452637 + ], + [ + "▁febr", + -13.339741706848145 + ], + [ + "honoured", + -13.339743614196777 + ], + [ + "▁digitiz", + -13.340079307556152 + ], + [ + "Image", + -13.34021282196045 + ], + [ + "▁Brunswick", + -13.34025764465332 + ], + [ + "▁Therapist", + -13.34026050567627 + ], + [ + "accessoire", + -13.340264320373535 + ], + [ + "▁croqu", + -13.340291023254395 + ], + [ + "Pflanz", + -13.34052848815918 + ], + [ + "dragging", + -13.340536117553711 + ], + [ + "▁Facilit", + -13.340750694274902 + ], + [ + "soucis", + -13.340765953063965 + ], + [ + "Asadar", + -13.34081745147705 + ], + [ + "▁Thames", + -13.341021537780762 + ], + [ + "▁cariera", + -13.341116905212402 + ], + [ + "▁mercury", + -13.341530799865723 + ], + [ + "▁Blessed", + -13.341533660888672 + ], + [ + "▁Whitney", + -13.341630935668945 + ], + [ + "▁géant", + -13.341926574707031 + ], + [ + "▁coordonnée", + -13.342217445373535 + ], + [ + "oidal", + -13.342623710632324 + ], + [ + "Wohnungen", + -13.342696189880371 + ], + [ + "▁Spectrum", + -13.34280776977539 + ], + [ + "▁Avengers", + -13.342808723449707 + ], + [ + "▁Gloucester", + -13.342808723449707 + ], + [ + "▁nützlich", + -13.342811584472656 + ], + [ + "▁toothbrush", + -13.342830657958984 + ], + [ + "▁Vanessa", + -13.342843055725098 + ], + [ + "Saxon", + -13.342947959899902 + ], + [ + "▁comunități", + -13.343165397644043 + ], + [ + "reprezentanţi", + -13.343175888061523 + ], + [ + "▁întâlnire", + -13.343225479125977 + ], + [ + "delve", + -13.343234062194824 + ], + [ + "▁technologique", + -13.343452453613281 + ], + [ + "Describe", + -13.343466758728027 + ], + [ + "▁constient", + -13.343501091003418 + ], + [ + "gestalt", + -13.343600273132324 + ], + [ + "▁Tribune", + -13.344090461730957 + ], + [ + "▁fiberglass", + -13.34412956237793 + ], + [ + "verbindung", + -13.344210624694824 + ], + [ + "sacrificing", + -13.344351768493652 + ], + [ + "▁Pablo", + -13.344470024108887 + ], + [ + "▁adanc", + -13.34525203704834 + ], + [ + "omia", + -13.345309257507324 + ], + [ + "hâte", + -13.345317840576172 + ], + [ + "▁Sanctuary", + -13.345366477966309 + ], + [ + 
"▁accolade", + -13.345368385314941 + ], + [ + "▁Wurzel", + -13.345398902893066 + ], + [ + "▁spacing", + -13.345433235168457 + ], + [ + "▁bedeutend", + -13.345481872558594 + ], + [ + "▁biased", + -13.345499992370605 + ], + [ + "randomized", + -13.345747947692871 + ], + [ + "▁agenți", + -13.345856666564941 + ], + [ + "▁excepţi", + -13.346012115478516 + ], + [ + "▁fișier", + -13.346028327941895 + ], + [ + "▁fisier", + -13.34664535522461 + ], + [ + "irrespective", + -13.346648216247559 + ], + [ + "▁Gardner", + -13.34665584564209 + ], + [ + "▁aprecia", + -13.346884727478027 + ], + [ + "▁Klu", + -13.347082138061523 + ], + [ + "▁apropie", + -13.347535133361816 + ], + [ + "▁echival", + -13.347784042358398 + ], + [ + "tauchen", + -13.347862243652344 + ], + [ + "▁hauptsächlich", + -13.347930908203125 + ], + [ + "▁pollutants", + -13.347930908203125 + ], + [ + "▁mammals", + -13.347931861877441 + ], + [ + "▁Landwirtschaft", + -13.347936630249023 + ], + [ + "▁stăpân", + -13.34793758392334 + ], + [ + "▁Prüf", + -13.347990989685059 + ], + [ + "▁Motorsport", + -13.34807300567627 + ], + [ + "Leaving", + -13.348352432250977 + ], + [ + "schädigung", + -13.348573684692383 + ], + [ + "▁calendrier", + -13.348573684692383 + ], + [ + "plikation", + -13.348655700683594 + ], + [ + "▁DOE", + -13.348655700683594 + ], + [ + "ред", + -13.348966598510742 + ], + [ + "Jahr", + -13.34913444519043 + ], + [ + "▁entitlement", + -13.34921646118164 + ], + [ + "schuldig", + -13.349217414855957 + ], + [ + "▁Münster", + -13.349218368530273 + ], + [ + "pository", + -13.349451065063477 + ], + [ + "▁numero", + -13.350220680236816 + ], + [ + "▁entsprechen", + -13.350383758544922 + ], + [ + "▁astronaut", + -13.350502967834473 + ], + [ + "▁hexagon", + -13.350502967834473 + ], + [ + "▁DAMAGE", + -13.350503921508789 + ], + [ + "▁Quartz", + -13.350504875183105 + ], + [ + "▁rédaction", + -13.350504875183105 + ], + [ + "▁replenish", + -13.350508689880371 + ], + [ + "▁amoureux", + -13.350523948669434 + ], + [ + "▁opțiun", + -13.350616455078125 + ], + [ + "Custom", + -13.350622177124023 + ], + [ + "▁Telekom", + -13.350639343261719 + ], + [ + "▁RFID", + -13.351163864135742 + ], + [ + "▁Scorpio", + -13.351264953613281 + ], + [ + "▁thirst", + -13.35152816772461 + ], + [ + "▁Kosovo", + -13.351791381835938 + ], + [ + "▁precursor", + -13.351794242858887 + ], + [ + "▁sarbatori", + -13.351810455322266 + ], + [ + "▁Daisy", + -13.351828575134277 + ], + [ + "▁Dropbox", + -13.351898193359375 + ], + [ + "Smith", + -13.351949691772461 + ], + [ + "contabil", + -13.352191925048828 + ], + [ + "▁monnaie", + -13.352437973022461 + ], + [ + "capsul", + -13.352577209472656 + ], + [ + "treff", + -13.352760314941406 + ], + [ + "beauftragte", + -13.352761268615723 + ], + [ + "industrial", + -13.353006362915039 + ], + [ + "responsables", + -13.353010177612305 + ], + [ + "▁FIRST", + -13.353080749511719 + ], + [ + "▁crezut", + -13.35308837890625 + ], + [ + "▁reseller", + -13.353107452392578 + ], + [ + "▁direcți", + -13.353154182434082 + ], + [ + "mouvoir", + -13.353294372558594 + ], + [ + "▁Invite", + -13.353431701660156 + ], + [ + "▁constructii", + -13.353440284729004 + ], + [ + "▁oublié", + -13.353577613830566 + ], + [ + "găseșt", + -13.353687286376953 + ], + [ + "▁végét", + -13.353755950927734 + ], + [ + "idine", + -13.35385799407959 + ], + [ + "▁Ajout", + -13.353951454162598 + ], + [ + "▁Shelf", + -13.354195594787598 + ], + [ + "HALL", + -13.35422420501709 + ], + [ + "▁nostalgia", + -13.35437297821045 + ], + [ + "▁ottoman", + -13.35437297821045 + ], + [ + "▁ambalaj", 
+ -13.354398727416992 + ], + [ + "municipiul", + -13.354405403137207 + ], + [ + "NOVA", + -13.354500770568848 + ], + [ + "▁disregard", + -13.354997634887695 + ], + [ + "▁bijuterii", + -13.355018615722656 + ], + [ + "▁sorgfältig", + -13.355018615722656 + ], + [ + "vraient", + -13.355307579040527 + ], + [ + "▁backsplash", + -13.355669975280762 + ], + [ + "▁nuisance", + -13.355679512023926 + ], + [ + "▁Territory", + -13.35568618774414 + ], + [ + "▁surprins", + -13.355693817138672 + ], + [ + "enchanting", + -13.35571002960205 + ], + [ + "trospecti", + -13.355847358703613 + ], + [ + "▁dvd", + -13.356199264526367 + ], + [ + "Totally", + -13.356329917907715 + ], + [ + "▁Edelstahl", + -13.35696029663086 + ], + [ + "▁sequencing", + -13.356961250305176 + ], + [ + "▁Circus", + -13.35696792602539 + ], + [ + "▁ashamed", + -13.35696792602539 + ], + [ + "▁horrific", + -13.357028007507324 + ], + [ + "▁taiat", + -13.357033729553223 + ], + [ + "▁Angehörige", + -13.357125282287598 + ], + [ + "Michel", + -13.357256889343262 + ], + [ + "▁communion", + -13.357298851013184 + ], + [ + "▁psiho", + -13.357378959655762 + ], + [ + "losigkeit", + -13.357405662536621 + ], + [ + "dipping", + -13.357512474060059 + ], + [ + "▁profesională", + -13.357608795166016 + ], + [ + "Indiferent", + -13.357609748840332 + ], + [ + "▁crestin", + -13.357723236083984 + ], + [ + "wholesome", + -13.357796669006348 + ], + [ + "▁Welfare", + -13.358257293701172 + ], + [ + "▁plentiful", + -13.358257293701172 + ], + [ + "▁Triumph", + -13.358258247375488 + ], + [ + "▁fascination", + -13.358260154724121 + ], + [ + "▁vicious", + -13.358291625976562 + ], + [ + "▁Höchst", + -13.358294486999512 + ], + [ + "▁Dunkel", + -13.358386039733887 + ], + [ + "▁harass", + -13.358406066894531 + ], + [ + "ambogia", + -13.358475685119629 + ], + [ + "▁synonymous", + -13.358598709106445 + ], + [ + "bottom", + -13.35879898071289 + ], + [ + "▁bénévole", + -13.358906745910645 + ], + [ + "▁suprafaț", + -13.358906745910645 + ], + [ + "▁umplut", + -13.358997344970703 + ], + [ + "▁Teddy", + -13.359162330627441 + ], + [ + "breathable", + -13.359292984008789 + ], + [ + "▁Toshiba", + -13.3595552444458 + ], + [ + "▁seismic", + -13.359569549560547 + ], + [ + "▁dringend", + -13.359583854675293 + ], + [ + "▁cultură", + -13.359585762023926 + ], + [ + "▁Waffen", + -13.359665870666504 + ], + [ + "▁Bubble", + -13.359702110290527 + ], + [ + "▁Brigade", + -13.359759330749512 + ], + [ + "▁Blatt", + -13.36012077331543 + ], + [ + "▁scénario", + -13.36020565032959 + ], + [ + "allah", + -13.360396385192871 + ], + [ + "▁superintendent", + -13.360855102539062 + ], + [ + "pflanzen", + -13.360856056213379 + ], + [ + "▁kurzfristig", + -13.360856056213379 + ], + [ + "▁raspberry", + -13.360876083374023 + ], + [ + "▁Evident", + -13.360904693603516 + ], + [ + "▁inutile", + -13.361076354980469 + ], + [ + "prouvé", + -13.361104011535645 + ], + [ + "▁obtien", + -13.36141300201416 + ], + [ + "▁Matthias", + -13.361506462097168 + ], + [ + "▁déclench", + -13.361506462097168 + ], + [ + "Situationen", + -13.361529350280762 + ], + [ + "▁Disclaimer", + -13.362156867980957 + ], + [ + "▁loneliness", + -13.362156867980957 + ], + [ + "▁Gothic", + -13.362164497375488 + ], + [ + "▁humility", + -13.362165451049805 + ], + [ + "▁machiaj", + -13.362175941467285 + ], + [ + "▁Sophia", + -13.362178802490234 + ], + [ + "▁Forecast", + -13.362265586853027 + ], + [ + "IBLE", + -13.362456321716309 + ], + [ + "ivism", + -13.362480163574219 + ], + [ + "israel", + -13.36278247833252 + ], + [ + "▁kümmern", + -13.362809181213379 + 
], + [ + "▁verbreitet", + -13.362825393676758 + ], + [ + "▁capacitor", + -13.362832069396973 + ], + [ + "deprived", + -13.3634614944458 + ], + [ + "unbiased", + -13.3634614944458 + ], + [ + "▁Dominique", + -13.3634614944458 + ], + [ + "▁Bamboo", + -13.363462448120117 + ], + [ + "▁Heinrich", + -13.363465309143066 + ], + [ + "individualized", + -13.363550186157227 + ], + [ + "▁ansprechen", + -13.363776206970215 + ], + [ + "ordinaire", + -13.363801002502441 + ], + [ + "▁Ucraina", + -13.364112854003906 + ], + [ + "▁militare", + -13.364115715026855 + ], + [ + "massif", + -13.364352226257324 + ], + [ + "▁emisiuni", + -13.364501953125 + ], + [ + "maladies", + -13.364622116088867 + ], + [ + "▁pneumonia", + -13.364765167236328 + ], + [ + "▁graffiti", + -13.364767074584961 + ], + [ + "▁Determine", + -13.3648099899292 + ], + [ + "▁Northwestern", + -13.364893913269043 + ], + [ + "▁grasimi", + -13.364897727966309 + ], + [ + "▁lebendig", + -13.364920616149902 + ], + [ + "▁cifre", + -13.364946365356445 + ], + [ + "▁accelerator", + -13.36533260345459 + ], + [ + "▁nib", + -13.365374565124512 + ], + [ + "▁Jocuri", + -13.365400314331055 + ], + [ + "▁außergewöhnlich", + -13.365402221679688 + ], + [ + "▁orchid", + -13.36542797088623 + ], + [ + "zugreifen", + -13.365530967712402 + ], + [ + "utilisent", + -13.365662574768066 + ], + [ + "▁nineteenth", + -13.366071701049805 + ], + [ + "improvisation", + -13.366072654724121 + ], + [ + "▁Disclosure", + -13.366072654724121 + ], + [ + "▁Überraschung", + -13.366072654724121 + ], + [ + "▁Casual", + -13.366093635559082 + ], + [ + "▁Witness", + -13.366093635559082 + ], + [ + "teacher", + -13.366125106811523 + ], + [ + "Printed", + -13.366129875183105 + ], + [ + "▁prețuri", + -13.366189956665039 + ], + [ + "rues", + -13.366216659545898 + ], + [ + "▁cerinte", + -13.366338729858398 + ], + [ + "rouvent", + -13.36662483215332 + ], + [ + "assembling", + -13.36673355102539 + ], + [ + "▁atenție", + -13.366769790649414 + ], + [ + "▁amintiri", + -13.366782188415527 + ], + [ + "▁sustinut", + -13.366805076599121 + ], + [ + "Digital", + -13.367257118225098 + ], + [ + "▁Deborah", + -13.36738109588623 + ], + [ + "gesichts", + -13.367382049560547 + ], + [ + "▁temperament", + -13.367440223693848 + ], + [ + "▁competency", + -13.367447853088379 + ], + [ + "▁dwarf", + -13.367515563964844 + ], + [ + "▁dureaz", + -13.367539405822754 + ], + [ + "habilit", + -13.367764472961426 + ], + [ + "leaned", + -13.3679838180542 + ], + [ + "▁illicit", + -13.368348121643066 + ], + [ + "Availability", + -13.368691444396973 + ], + [ + "▁Brașov", + -13.368691444396973 + ], + [ + "▁Pyramid", + -13.368691444396973 + ], + [ + "▁achievable", + -13.368691444396973 + ], + [ + "▁judiciaire", + -13.368691444396973 + ], + [ + "Übrigen", + -13.368693351745605 + ], + [ + "▁activism", + -13.368795394897461 + ], + [ + "▁boycott", + -13.368839263916016 + ], + [ + "Desigur", + -13.368927001953125 + ], + [ + "klingt", + -13.369264602661133 + ], + [ + "▁Leidenschaft", + -13.369346618652344 + ], + [ + "▁Richtig", + -13.369701385498047 + ], + [ + "▁Airbnb", + -13.370002746582031 + ], + [ + "▁învățământ", + -13.370002746582031 + ], + [ + "Kampagne", + -13.370004653930664 + ], + [ + "▁thumbnail", + -13.370014190673828 + ], + [ + "Bestimmungen", + -13.370016098022461 + ], + [ + "▁vollkommen", + -13.37001895904541 + ], + [ + "▁biomass", + -13.370027542114258 + ], + [ + "▁escalate", + -13.370030403137207 + ], + [ + "wächst", + -13.370085716247559 + ], + [ + "▁scăpa", + -13.370098114013672 + ], + [ + "▁résult", + -13.37014389038086 + 
], + [ + "▁shrine", + -13.370217323303223 + ], + [ + "maximizing", + -13.370370864868164 + ], + [ + "avoue", + -13.370492935180664 + ], + [ + "dirigeants", + -13.370665550231934 + ], + [ + "▁cerveau", + -13.370672225952148 + ], + [ + "▁proast", + -13.370955467224121 + ], + [ + "▁contaminants", + -13.371325492858887 + ], + [ + "effectue", + -13.37151050567627 + ], + [ + "ediție", + -13.371539115905762 + ], + [ + "monetiz", + -13.371772766113281 + ], + [ + "▁deplasare", + -13.371976852416992 + ], + [ + "▁Sfant", + -13.37209415435791 + ], + [ + "ROOM", + -13.372113227844238 + ], + [ + "bushes", + -13.372151374816895 + ], + [ + "mairie", + -13.37251091003418 + ], + [ + "obligate", + -13.372528076171875 + ], + [ + "▁tug", + -13.372573852539062 + ], + [ + "▁Collector", + -13.372632026672363 + ], + [ + "▁annoyed", + -13.372633934020996 + ], + [ + "▁aerobic", + -13.372654914855957 + ], + [ + "▁integer", + -13.372830390930176 + ], + [ + "▁Upload", + -13.373249053955078 + ], + [ + "▁impartial", + -13.37346076965332 + ], + [ + "▁discuţi", + -13.373623847961426 + ], + [ + "gastrointestinal", + -13.37394905090332 + ], + [ + "▁chiropractor", + -13.37394905090332 + ], + [ + "▁treptat", + -13.373950004577637 + ], + [ + "▁fishermen", + -13.37395191192627 + ], + [ + "levitra", + -13.3739595413208 + ], + [ + "Gruppe", + -13.373964309692383 + ], + [ + "▁Apostle", + -13.373970985412598 + ], + [ + "▁conseillé", + -13.374068260192871 + ], + [ + "Isra", + -13.37421703338623 + ], + [ + "▁Persönlichkeit", + -13.374431610107422 + ], + [ + "▁cantitati", + -13.374459266662598 + ], + [ + "▁incredibil", + -13.374614715576172 + ], + [ + "▁Berater", + -13.374800682067871 + ], + [ + "▁propuneri", + -13.374835014343262 + ], + [ + "MEDIA", + -13.375236511230469 + ], + [ + "▁opaque", + -13.37526798248291 + ], + [ + "▁Nielsen", + -13.375269889831543 + ], + [ + "▁cartofi", + -13.375277519226074 + ], + [ + "▁Whale", + -13.37533950805664 + ], + [ + "erzeugen", + -13.375890731811523 + ], + [ + "▁knack", + -13.375931739807129 + ], + [ + "Kandidat", + -13.375936508178711 + ], + [ + "▁tradițional", + -13.375937461853027 + ], + [ + "zählige", + -13.375983238220215 + ], + [ + "▁Petroleum", + -13.376588821411133 + ], + [ + "▁deficiencies", + -13.376588821411133 + ], + [ + "▁persecution", + -13.376588821411133 + ], + [ + "▁zgomot", + -13.376588821411133 + ], + [ + "▁reiterate", + -13.376592636108398 + ], + [ + "▁Slice", + -13.376670837402344 + ], + [ + "▁envy", + -13.376704216003418 + ], + [ + "▁stomac", + -13.376851081848145 + ], + [ + "Donnell", + -13.376914978027344 + ], + [ + "▁primordial", + -13.377249717712402 + ], + [ + "reclining", + -13.377274513244629 + ], + [ + "PASS", + -13.377861976623535 + ], + [ + "▁Resistance", + -13.377910614013672 + ], + [ + "▁Widerruf", + -13.377911567687988 + ], + [ + "▁vodka", + -13.377911567687988 + ], + [ + "▁yolk", + -13.377912521362305 + ], + [ + "ollywood", + -13.377915382385254 + ], + [ + "▁truffle", + -13.377933502197266 + ], + [ + "▁Sänger", + -13.377955436706543 + ], + [ + "▁Kenntnis", + -13.377968788146973 + ], + [ + "▁Kiel", + -13.37803840637207 + ], + [ + "▁Mutual", + -13.378044128417969 + ], + [ + "▁saliva", + -13.37816047668457 + ], + [ + "▁renforce", + -13.378411293029785 + ], + [ + "▁mulch", + -13.378680229187012 + ], + [ + "▁reviste", + -13.378875732421875 + ], + [ + "lucrarea", + -13.378978729248047 + ], + [ + "▁multiply", + -13.379130363464355 + ], + [ + "▁marshmallow", + -13.379234313964844 + ], + [ + "▁Durchschnitt", + -13.379288673400879 + ], + [ + "▁Authorities", + 
-13.379426002502441 + ], + [ + "▁greed", + -13.379521369934082 + ], + [ + "Visiting", + -13.379638671875 + ], + [ + "Carlton", + -13.379727363586426 + ], + [ + "▁splend", + -13.37975025177002 + ], + [ + "▁Erkenntnisse", + -13.379898071289062 + ], + [ + "▁Russie", + -13.379916191101074 + ], + [ + "Agence", + -13.38007926940918 + ], + [ + "schickt", + -13.380288124084473 + ], + [ + "##", + -13.3804931640625 + ], + [ + "▁Erweiterung", + -13.380560874938965 + ], + [ + "▁Franchise", + -13.380560874938965 + ], + [ + "Dedicated", + -13.380563735961914 + ], + [ + "▁Wisdom", + -13.380569458007812 + ], + [ + "▁gagnant", + -13.380592346191406 + ], + [ + "planetary", + -13.380598068237305 + ], + [ + "▁affinity", + -13.380619049072266 + ], + [ + "▁préférence", + -13.380739212036133 + ], + [ + "▁intellect", + -13.380810737609863 + ], + [ + "▁Translat", + -13.380830764770508 + ], + [ + "▁Sultan", + -13.38089370727539 + ], + [ + "▁birouri", + -13.38101577758789 + ], + [ + "▁Academie", + -13.381224632263184 + ], + [ + "▁consequential", + -13.38138484954834 + ], + [ + "▁festgestellt", + -13.381402015686035 + ], + [ + "▁Chanel", + -13.381444931030273 + ], + [ + "▁soutenu", + -13.381875038146973 + ], + [ + "▁Montessori", + -13.381888389587402 + ], + [ + "▁equitable", + -13.381892204284668 + ], + [ + "▁théorie", + -13.381893157958984 + ], + [ + "▁primavara", + -13.3818941116333 + ], + [ + "▁Daughter", + -13.38189697265625 + ], + [ + "▁Dixon", + -13.381898880004883 + ], + [ + "▁unravel", + -13.38190746307373 + ], + [ + "Olimp", + -13.381915092468262 + ], + [ + "▁disturbed", + -13.381916999816895 + ], + [ + "▁novelty", + -13.382004737854004 + ], + [ + "synchronous", + -13.382113456726074 + ], + [ + "relevant", + -13.382166862487793 + ], + [ + "bourgeois", + -13.38251781463623 + ], + [ + "▁Parfum", + -13.38255500793457 + ], + [ + "▁Polonia", + -13.382563591003418 + ], + [ + "▁monoton", + -13.382781028747559 + ], + [ + "tratare", + -13.38302230834961 + ], + [ + "dumping", + -13.38318157196045 + ], + [ + "▁Bibliothek", + -13.383217811584473 + ], + [ + "▁Saskatchewan", + -13.383217811584473 + ], + [ + "▁experiential", + -13.383217811584473 + ], + [ + "▁verursacht", + -13.383217811584473 + ], + [ + "intègre", + -13.383218765258789 + ], + [ + "▁Intermediate", + -13.383275032043457 + ], + [ + "Israel", + -13.383476257324219 + ], + [ + "lucreaza", + -13.383495330810547 + ], + [ + "▁quantify", + -13.383862495422363 + ], + [ + "▁zahăr", + -13.383882522583008 + ], + [ + "▁încadr", + -13.383902549743652 + ], + [ + "Personalized", + -13.383946418762207 + ], + [ + "▁Chronic", + -13.384309768676758 + ], + [ + "hôpital", + -13.384549140930176 + ], + [ + "▁diskutiert", + -13.384549140930176 + ], + [ + "electrique", + -13.3848876953125 + ], + [ + "ethos", + -13.384978294372559 + ], + [ + "Nase", + -13.385059356689453 + ], + [ + "atmosphère", + -13.385214805603027 + ], + [ + "▁ungefähr", + -13.385215759277344 + ], + [ + "évaluer", + -13.385251998901367 + ], + [ + "▁scuz", + -13.385321617126465 + ], + [ + "haltige", + -13.38533878326416 + ], + [ + "January", + -13.38557243347168 + ], + [ + "▁Sharma", + -13.385603904724121 + ], + [ + "▁seizures", + -13.385881423950195 + ], + [ + "▁zucchini", + -13.385881423950195 + ], + [ + "▁Stadi", + -13.385885238647461 + ], + [ + "▁eccentric", + -13.385885238647461 + ], + [ + "▁offensichtlich", + -13.385909080505371 + ], + [ + "▁Irvine", + -13.385920524597168 + ], + [ + "cuprinse", + -13.38601303100586 + ], + [ + "▁Arbitr", + -13.386157035827637 + ], + [ + "Buenos", + -13.386183738708496 + ], + [ 
+ "▁Shelter", + -13.386210441589355 + ], + [ + "CEPT", + -13.386454582214355 + ], + [ + "ouvri", + -13.386455535888672 + ], + [ + "acryl", + -13.386539459228516 + ], + [ + "▁Gourmet", + -13.38654899597168 + ], + [ + "scented", + -13.386595726013184 + ], + [ + "doubling", + -13.38659954071045 + ], + [ + "▁rafina", + -13.386608123779297 + ], + [ + "▁Vereinbarung", + -13.38721752166748 + ], + [ + "▁Dashboard", + -13.387218475341797 + ], + [ + "▁Sandwich", + -13.387218475341797 + ], + [ + "▁Riviera", + -13.387226104736328 + ], + [ + "échec", + -13.387237548828125 + ], + [ + "Giro", + -13.387253761291504 + ], + [ + "▁oasis", + -13.38725757598877 + ], + [ + "▁apology", + -13.3872709274292 + ], + [ + "▁YEAR", + -13.387272834777832 + ], + [ + "▁realtor", + -13.387504577636719 + ], + [ + "acheteur", + -13.38754653930664 + ], + [ + "▁larva", + -13.387613296508789 + ], + [ + "▁invitați", + -13.388097763061523 + ], + [ + "exhibiting", + -13.38830852508545 + ], + [ + "modernen", + -13.388331413269043 + ], + [ + "▁Collaboration", + -13.38855266571045 + ], + [ + "▁dezvălui", + -13.38855266571045 + ], + [ + "▁kiosk", + -13.38855266571045 + ], + [ + "▁Bermuda", + -13.388553619384766 + ], + [ + "Copiii", + -13.388564109802246 + ], + [ + "▁goddess", + -13.388581275939941 + ], + [ + "uplifting", + -13.388609886169434 + ], + [ + "▁simultan", + -13.388808250427246 + ], + [ + "▁episod", + -13.388884544372559 + ], + [ + "▁Braşov", + -13.38922119140625 + ], + [ + "cunoscută", + -13.389634132385254 + ], + [ + "▁Cherokee", + -13.389890670776367 + ], + [ + "▁Kazakhstan", + -13.389890670776367 + ], + [ + "▁Lauderdale", + -13.389890670776367 + ], + [ + "▁închisoare", + -13.389898300170898 + ], + [ + "▁Christchurch", + -13.389934539794922 + ], + [ + "▁influenţ", + -13.389982223510742 + ], + [ + "▁Meghan", + -13.390019416809082 + ], + [ + "▁Dienstleistung", + -13.390557289123535 + ], + [ + "▁cladiri", + -13.390564918518066 + ], + [ + "▁evrei", + -13.391148567199707 + ], + [ + "▁oatmeal", + -13.391230583190918 + ], + [ + "▁chronique", + -13.3912353515625 + ], + [ + "▁associée", + -13.391264915466309 + ], + [ + "▁Goose", + -13.391283988952637 + ], + [ + "gänz", + -13.391855239868164 + ], + [ + "▁Blätter", + -13.391901969909668 + ], + [ + "▁jurnalist", + -13.392212867736816 + ], + [ + "cedat", + -13.392263412475586 + ], + [ + "nommée", + -13.392315864562988 + ], + [ + "écrivain", + -13.392572402954102 + ], + [ + "▁epoxy", + -13.392577171325684 + ], + [ + "▁verlangt", + -13.392590522766113 + ], + [ + "Störung", + -13.392708778381348 + ], + [ + "▁Doyle", + -13.392729759216309 + ], + [ + "▁Philharmoni", + -13.392844200134277 + ], + [ + "▁déclare", + -13.393044471740723 + ], + [ + "effort", + -13.393045425415039 + ], + [ + "ström", + -13.393118858337402 + ], + [ + "▁cunoaşte", + -13.393244743347168 + ], + [ + "▁gigantic", + -13.3932466506958 + ], + [ + "któ", + -13.393378257751465 + ], + [ + "▁ilustr", + -13.393529891967773 + ], + [ + "▁frec", + -13.39371109008789 + ], + [ + "▁Syracuse", + -13.393916130065918 + ], + [ + "▁Einwilligung", + -13.393917083740234 + ], + [ + "▁miraculous", + -13.393917083740234 + ], + [ + "▁ökologisch", + -13.393917083740234 + ], + [ + "▁Simmons", + -13.393922805786133 + ], + [ + "▁albastru", + -13.393926620483398 + ], + [ + "besser", + -13.393962860107422 + ], + [ + "▁interioare", + -13.394006729125977 + ], + [ + "▁Trocken", + -13.394068717956543 + ], + [ + "niveau", + -13.39406967163086 + ], + [ + "▁Torah", + -13.394122123718262 + ], + [ + "▁beobachten", + -13.3945894241333 + ], + [ + "▁behandeln", 
+ -13.394637107849121 + ], + [ + "staffed", + -13.394742965698242 + ], + [ + "hütte", + -13.394824028015137 + ], + [ + "Central", + -13.394939422607422 + ], + [ + "▁Freiburg", + -13.395198822021484 + ], + [ + "▁Netanyahu", + -13.395261764526367 + ], + [ + "▁Lexington", + -13.395302772521973 + ], + [ + "▁insotit", + -13.395492553710938 + ], + [ + "▁depasi", + -13.39560604095459 + ], + [ + "sewage", + -13.395853996276855 + ], + [ + "erkrankung", + -13.395951271057129 + ], + [ + "▁părţi", + -13.396234512329102 + ], + [ + "▁Nixon", + -13.39661693572998 + ], + [ + "Byron", + -13.396905899047852 + ], + [ + "▁varietat", + -13.39724063873291 + ], + [ + "▁Bildschirm", + -13.397299766540527 + ], + [ + "▁accompli", + -13.397424697875977 + ], + [ + "affirmed", + -13.397525787353516 + ], + [ + "▁phyto", + -13.397533416748047 + ], + [ + "sectiune", + -13.397592544555664 + ], + [ + "abteilung", + -13.397932052612305 + ], + [ + "▁voastre", + -13.397957801818848 + ], + [ + "GitHub", + -13.397958755493164 + ], + [ + "▁Jorge", + -13.39796257019043 + ], + [ + "ACTION", + -13.397972106933594 + ], + [ + "voastra", + -13.397984504699707 + ], + [ + "▁Peanut", + -13.397987365722656 + ], + [ + "▁bilingual", + -13.398011207580566 + ], + [ + "▁nourriture", + -13.39803695678711 + ], + [ + "▁Asphalt", + -13.398640632629395 + ], + [ + "emballage", + -13.399310111999512 + ], + [ + "▁sanitation", + -13.399310111999512 + ], + [ + "▁Dessert", + -13.399313926696777 + ], + [ + "intitulé", + -13.399322509765625 + ], + [ + "▁acţiune", + -13.399374008178711 + ], + [ + "▁Übersetzung", + -13.399402618408203 + ], + [ + "destinate", + -13.39941692352295 + ], + [ + "▁Goddess", + -13.399504661560059 + ], + [ + "poziție", + -13.399576187133789 + ], + [ + "denumirea", + -13.400002479553223 + ], + [ + "cantitatea", + -13.40002727508545 + ], + [ + "▁Stereo", + -13.400223731994629 + ], + [ + "object", + -13.400373458862305 + ], + [ + "▁décè", + -13.40058708190918 + ], + [ + "▁Handeln", + -13.400665283203125 + ], + [ + "▁ambience", + -13.400697708129883 + ], + [ + "▁Lindsay", + -13.4006986618042 + ], + [ + "▁tensiune", + -13.400781631469727 + ], + [ + "▁thrift", + -13.400788307189941 + ], + [ + "▁Optimiz", + -13.400843620300293 + ], + [ + "▁beantworten", + -13.401338577270508 + ], + [ + "▁magistrat", + -13.401342391967773 + ], + [ + "évidence", + -13.402016639709473 + ], + [ + "▁Eclipse", + -13.402016639709473 + ], + [ + "▁Ribbon", + -13.402016639709473 + ], + [ + "▁condensation", + -13.402016639709473 + ], + [ + "▁innocence", + -13.402018547058105 + ], + [ + "▁mascara", + -13.402023315429688 + ], + [ + "▁seventeen", + -13.402290344238281 + ], + [ + "▁compétent", + -13.402694702148438 + ], + [ + "bewertet", + -13.402717590332031 + ], + [ + "▁Muzic", + -13.40285587310791 + ], + [ + "complexities", + -13.402928352355957 + ], + [ + "ddington", + -13.403324127197266 + ], + [ + "Entwickler", + -13.403372764587402 + ], + [ + "masonry", + -13.4033784866333 + ], + [ + "Führer", + -13.403386116027832 + ], + [ + "▁awakening", + -13.403388977050781 + ], + [ + "▁lovitur", + -13.403806686401367 + ], + [ + "gebrochen", + -13.404068946838379 + ], + [ + "indexed", + -13.404478073120117 + ], + [ + "campania", + -13.404515266418457 + ], + [ + "▁Fountain", + -13.404730796813965 + ], + [ + "▁Joomla", + -13.404730796813965 + ], + [ + "▁Superintendent", + -13.404730796813965 + ], + [ + "▁Dahl", + -13.404742240905762 + ], + [ + "▁Benefici", + -13.404863357543945 + ], + [ + "optimiser", + -13.404919624328613 + ], + [ + "bursting", + -13.405380249023438 + ], + [ + 
"diplom", + -13.405427932739258 + ], + [ + "microsoft", + -13.405621528625488 + ], + [ + "▁correlate", + -13.405776977539062 + ], + [ + "▁arhitectura", + -13.405848503112793 + ], + [ + "▁lunette", + -13.40611743927002 + ], + [ + "Statistical", + -13.406147003173828 + ], + [ + "▁iarnă", + -13.406201362609863 + ], + [ + "▁importanț", + -13.406932830810547 + ], + [ + "sistence", + -13.407366752624512 + ], + [ + "associated", + -13.407402992248535 + ], + [ + "Occident", + -13.407452583312988 + ], + [ + "▁Heidelberg", + -13.407452583312988 + ], + [ + "▁acquaintance", + -13.407452583312988 + ], + [ + "Introducing", + -13.407453536987305 + ], + [ + "▁ripple", + -13.407480239868164 + ], + [ + "▁Childhood", + -13.407563209533691 + ], + [ + "drywall", + -13.407577514648438 + ], + [ + "Vreau", + -13.40771770477295 + ], + [ + "▁compétence", + -13.407967567443848 + ], + [ + "▁asteapta", + -13.408135414123535 + ], + [ + "▁duhovnic", + -13.408135414123535 + ], + [ + "▁învăţământ", + -13.408141136169434 + ], + [ + "encompassing", + -13.40829849243164 + ], + [ + "1997)", + -13.408370018005371 + ], + [ + "▁atractiv", + -13.408515930175781 + ], + [ + "Majoritatea", + -13.408775329589844 + ], + [ + "▁bungalow", + -13.40881633758545 + ], + [ + "▁Introduce", + -13.408817291259766 + ], + [ + "▁culprit", + -13.408817291259766 + ], + [ + "▁malheureusement", + -13.408817291259766 + ], + [ + "▁voudrai", + -13.408817291259766 + ], + [ + "Europäische", + -13.408825874328613 + ], + [ + "wunsch", + -13.408880233764648 + ], + [ + "▁înțeles", + -13.408892631530762 + ], + [ + "▁infestation", + -13.40889835357666 + ], + [ + "Bringing", + -13.409186363220215 + ], + [ + "▁Mehrheit", + -13.409229278564453 + ], + [ + "ски", + -13.409456253051758 + ], + [ + "▁procéder", + -13.409499168395996 + ], + [ + "grupului", + -13.409504890441895 + ], + [ + "▁dispoziti", + -13.40964412689209 + ], + [ + "▁snug", + -13.409950256347656 + ], + [ + "▁Afrika", + -13.41018295288086 + ], + [ + "▁Madagascar", + -13.41018295288086 + ], + [ + "Părinte", + -13.410195350646973 + ], + [ + "▁Clayton", + -13.410223960876465 + ], + [ + "▁antagonist", + -13.410239219665527 + ], + [ + "termeni", + -13.410250663757324 + ], + [ + "▁Literary", + -13.410391807556152 + ], + [ + "▁Babylon", + -13.410452842712402 + ], + [ + "▁überprüfen", + -13.410865783691406 + ], + [ + "▁duminica", + -13.410879135131836 + ], + [ + "farbig", + -13.410970687866211 + ], + [ + "nennt", + -13.411064147949219 + ], + [ + "annual", + -13.411487579345703 + ], + [ + "▁Qualcomm", + -13.41154956817627 + ], + [ + "▁Slovakia", + -13.41154956817627 + ], + [ + "▁plictis", + -13.411552429199219 + ], + [ + "▁prairie", + -13.411554336547852 + ], + [ + "▁Schatten", + -13.411622047424316 + ], + [ + "▁compléter", + -13.41223430633545 + ], + [ + "inauguration", + -13.412376403808594 + ], + [ + "▁apărare", + -13.412407875061035 + ], + [ + "▁întăr", + -13.412412643432617 + ], + [ + "▁pronunciation", + -13.412919044494629 + ], + [ + "▁bewährt", + -13.412919998168945 + ], + [ + "▁Viertel", + -13.413084983825684 + ], + [ + "▁Heidi", + -13.413252830505371 + ], + [ + "▁Gummi", + -13.413507461547852 + ], + [ + "▁veggie", + -13.413552284240723 + ], + [ + "▁monsieur", + -13.413604736328125 + ], + [ + "éveil", + -13.413630485534668 + ], + [ + "shipments", + -13.413928985595703 + ], + [ + "▁Medikamente", + -13.414290428161621 + ], + [ + "▁Johannesburg", + -13.414314270019531 + ], + [ + "▁ermittelt", + -13.414321899414062 + ], + [ + "▁bataille", + -13.414440155029297 + ], + [ + "extrem", + -13.414609909057617 + ], 
+ [ + "▁1:2", + -13.414671897888184 + ], + [ + "Array", + -13.414725303649902 + ], + [ + "▁portail", + -13.414857864379883 + ], + [ + "▁găzdui", + -13.414977073669434 + ], + [ + "▁Calcium", + -13.41497802734375 + ], + [ + "▁Correction", + -13.415104866027832 + ], + [ + "bureaux", + -13.41528034210205 + ], + [ + "bestselling", + -13.415338516235352 + ], + [ + "Übungen", + -13.415420532226562 + ], + [ + "paramètres", + -13.415633201599121 + ], + [ + "▁Provincial", + -13.415663719177246 + ], + [ + "▁outrageous", + -13.415680885314941 + ], + [ + "▁Giveaway", + -13.415775299072266 + ], + [ + "▁LGBTQ", + -13.41589641571045 + ], + [ + "geklärt", + -13.416854858398438 + ], + [ + "▁Karlsruhe", + -13.417038917541504 + ], + [ + "▁esențial", + -13.417038917541504 + ], + [ + "avancée", + -13.41703987121582 + ], + [ + "hesitant", + -13.417040824890137 + ], + [ + "enlarged", + -13.417069435119629 + ], + [ + "▁inherit", + -13.417121887207031 + ], + [ + "Food", + -13.4171724319458 + ], + [ + "bucuria", + -13.417181015014648 + ], + [ + "▁BTW", + -13.417400360107422 + ], + [ + "associe", + -13.417579650878906 + ], + [ + "▁Möchte", + -13.417742729187012 + ], + [ + "demokrat", + -13.417789459228516 + ], + [ + "Turcia", + -13.417964935302734 + ], + [ + "forged", + -13.418370246887207 + ], + [ + "▁Zhao", + -13.418442726135254 + ], + [ + "▁cherries", + -13.418556213378906 + ], + [ + "▁evangelical", + -13.418631553649902 + ], + [ + "▁jüng", + -13.418792724609375 + ], + [ + "spans", + -13.41880989074707 + ], + [ + "▁străluc", + -13.41888427734375 + ], + [ + "▁geschie", + -13.41893196105957 + ], + [ + "▁Tattoo", + -13.419112205505371 + ], + [ + "sanitary", + -13.419114112854004 + ], + [ + "▁biopsy", + -13.419353485107422 + ], + [ + "▁imprumut", + -13.419795036315918 + ], + [ + "▁unreasonable", + -13.419795036315918 + ], + [ + "Funktion", + -13.419800758361816 + ], + [ + "▁prohibition", + -13.419904708862305 + ], + [ + "▁Prezent", + -13.419939041137695 + ], + [ + "boosted", + -13.419967651367188 + ], + [ + "▁chalet", + -13.420382499694824 + ], + [ + "▁tanar", + -13.420450210571289 + ], + [ + "Faktoren", + -13.420489311218262 + ], + [ + "▁Mozilla", + -13.420550346374512 + ], + [ + "▁Lambert", + -13.420760154724121 + ], + [ + "▁Cruci", + -13.420927047729492 + ], + [ + "▁Flugzeug", + -13.421198844909668 + ], + [ + "reassure", + -13.421205520629883 + ], + [ + "envisioned", + -13.421542167663574 + ], + [ + "Traditionally", + -13.421773910522461 + ], + [ + "▁parametri", + -13.42185115814209 + ], + [ + "▁unicorn", + -13.421891212463379 + ], + [ + "▁adéquat", + -13.421894073486328 + ], + [ + "▁Colonial", + -13.421915054321289 + ], + [ + "▁Kwa", + -13.422097206115723 + ], + [ + "▁SERV", + -13.422333717346191 + ], + [ + "tourism", + -13.422627449035645 + ], + [ + "▁Kiev", + -13.422974586486816 + ], + [ + "heightened", + -13.42309284210205 + ], + [ + "circulating", + -13.423099517822266 + ], + [ + "▁Kreditkarte", + -13.42310619354248 + ], + [ + "gedruckt", + -13.423110008239746 + ], + [ + "▁Depend", + -13.423120498657227 + ], + [ + "Style", + -13.423196792602539 + ], + [ + "▁Rettungs", + -13.42325496673584 + ], + [ + "wrongful", + -13.423418998718262 + ], + [ + "▁devour", + -13.423453330993652 + ], + [ + "▁manevr", + -13.423582077026367 + ], + [ + "carora", + -13.423628807067871 + ], + [ + "erfolgreichen", + -13.423723220825195 + ], + [ + "überwiegend", + -13.423942565917969 + ], + [ + "▁Sauvignon", + -13.423942565917969 + ], + [ + "händler", + -13.423944473266602 + ], + [ + "▁annotation", + -13.424009323120117 + ], + [ + 
"▁expans", + -13.424020767211914 + ], + [ + "▁recital", + -13.424080848693848 + ], + [ + "inhabited", + -13.424367904663086 + ], + [ + "OnePlus", + -13.424549102783203 + ], + [ + "Gästen", + -13.424588203430176 + ], + [ + "beliebig", + -13.424613952636719 + ], + [ + "▁Anonymous", + -13.424635887145996 + ], + [ + "▁Ansprechpartner", + -13.424635887145996 + ], + [ + "▁tamb", + -13.42464542388916 + ], + [ + "estimating", + -13.424670219421387 + ], + [ + "frequent", + -13.424769401550293 + ], + [ + "▁disciplin", + -13.425241470336914 + ], + [ + "▁plombier", + -13.425329208374023 + ], + [ + "▁teoretic", + -13.42533016204834 + ], + [ + "greift", + -13.425339698791504 + ], + [ + "▁Einschränkung", + -13.42537784576416 + ], + [ + "obscur", + -13.426115989685059 + ], + [ + "architecte", + -13.426233291625977 + ], + [ + "▁détour", + -13.42647647857666 + ], + [ + "▁spaghetti", + -13.426717758178711 + ], + [ + "croft", + -13.42693042755127 + ], + [ + "▁Grammar", + -13.426953315734863 + ], + [ + "▁investitii", + -13.427062034606934 + ], + [ + "▁glorif", + -13.427067756652832 + ], + [ + "architekt", + -13.427412033081055 + ], + [ + "Oricum", + -13.427451133728027 + ], + [ + "▁bruise", + -13.427692413330078 + ], + [ + "▁McCarthy", + -13.428107261657715 + ], + [ + "▁Uruguay", + -13.428107261657715 + ], + [ + "Produsele", + -13.428109169006348 + ], + [ + "▁Comparison", + -13.42811107635498 + ], + [ + "▁fondamental", + -13.42811107635498 + ], + [ + "▁stradă", + -13.428115844726562 + ], + [ + "▁Countries", + -13.428131103515625 + ], + [ + "▁guéri", + -13.42825698852539 + ], + [ + "▁bâti", + -13.428339004516602 + ], + [ + "▁blunt", + -13.428515434265137 + ], + [ + "▁Sistem", + -13.428645133972168 + ], + [ + "▁Betroffenen", + -13.428803443908691 + ], + [ + "efectuare", + -13.428823471069336 + ], + [ + "▁scharf", + -13.428899765014648 + ], + [ + "naps", + -13.429057121276855 + ], + [ + "▁plaid", + -13.429163932800293 + ], + [ + "▁investiții", + -13.429367065429688 + ], + [ + "evenimentele", + -13.42948055267334 + ], + [ + "▁Phuket", + -13.429499626159668 + ], + [ + "▁testosterone", + -13.429499626159668 + ], + [ + "▁scaffold", + -13.429500579833984 + ], + [ + "▁rasch", + -13.430022239685059 + ], + [ + "▁adânc", + -13.430076599121094 + ], + [ + "atteinte", + -13.430228233337402 + ], + [ + "▁educație", + -13.430320739746094 + ], + [ + "▁leopard", + -13.430893898010254 + ], + [ + "▁superioare", + -13.430893898010254 + ], + [ + "▁téléchargement", + -13.430893898010254 + ], + [ + "▁Weapon", + -13.431103706359863 + ], + [ + "favourable", + -13.431336402893066 + ], + [ + "nourishing", + -13.43143367767334 + ], + [ + "▁verfolgt", + -13.43160629272461 + ], + [ + "▁tablou", + -13.431633949279785 + ], + [ + "Algérie", + -13.431657791137695 + ], + [ + "Islam", + -13.431700706481934 + ], + [ + "faser", + -13.431825637817383 + ], + [ + "rhythm", + -13.432214736938477 + ], + [ + "▁Anthropolog", + -13.432291030883789 + ], + [ + "▁clôtur", + -13.432291030883789 + ], + [ + "spüren", + -13.432291984558105 + ], + [ + "▁Architectural", + -13.432294845581055 + ], + [ + "▁imaginary", + -13.432368278503418 + ], + [ + "cône", + -13.432456016540527 + ], + [ + "▁snuggl", + -13.432744026184082 + ], + [ + "disadvantaged", + -13.432745933532715 + ], + [ + "radically", + -13.4329195022583 + ], + [ + "Première", + -13.433011054992676 + ], + [ + "▁combinaison", + -13.433027267456055 + ], + [ + "▁Algeria", + -13.43303108215332 + ], + [ + "▁Wände", + -13.43317985534668 + ], + [ + "aesthetically", + -13.43336009979248 + ], + [ + "▁McKe", + 
-13.433368682861328 + ], + [ + "interroge", + -13.433473587036133 + ], + [ + "exclusive", + -13.433475494384766 + ], + [ + "▁Thomson", + -13.433688163757324 + ], + [ + "▁Gujarat", + -13.43368911743164 + ], + [ + "irgendwo", + -13.433690071105957 + ], + [ + "Severin", + -13.433767318725586 + ], + [ + "▁imitation", + -13.433926582336426 + ], + [ + "constructed", + -13.434194564819336 + ], + [ + "▁Montpellier", + -13.434388160705566 + ], + [ + "cedent", + -13.434539794921875 + ], + [ + "accelerating", + -13.434563636779785 + ], + [ + "dommages", + -13.4346284866333 + ], + [ + "lideri", + -13.434730529785156 + ], + [ + "▁Millennium", + -13.435089111328125 + ], + [ + "▁imprisonment", + -13.435089111328125 + ], + [ + "machining", + -13.435111999511719 + ], + [ + "▁anxiet", + -13.43521499633789 + ], + [ + "Contains", + -13.435298919677734 + ], + [ + "pleade", + -13.435563087463379 + ], + [ + "DOWN", + -13.43564510345459 + ], + [ + "geschehen", + -13.435797691345215 + ], + [ + "restaurant", + -13.435811996459961 + ], + [ + "Totusi", + -13.435839653015137 + ], + [ + "amintesc", + -13.436158180236816 + ], + [ + "▁Crisp", + -13.436233520507812 + ], + [ + "aduse", + -13.436278343200684 + ], + [ + "▁imposé", + -13.436351776123047 + ], + [ + "Jubiläum", + -13.436490058898926 + ], + [ + "▁Plaintiff", + -13.436491012573242 + ], + [ + "▁authoritative", + -13.436491966247559 + ], + [ + "▁rendition", + -13.436633110046387 + ], + [ + "Royce", + -13.436707496643066 + ], + [ + "1996)", + -13.436724662780762 + ], + [ + "Asociația", + -13.437192916870117 + ], + [ + "▁Gluten", + -13.437264442443848 + ], + [ + "feature", + -13.43741226196289 + ], + [ + "Behavioral", + -13.437454223632812 + ], + [ + "tearing", + -13.437763214111328 + ], + [ + "▁Entfernung", + -13.437894821166992 + ], + [ + "▁Responsibility", + -13.437894821166992 + ], + [ + "▁negligent", + -13.437894821166992 + ], + [ + "▁syllabus", + -13.437894821166992 + ], + [ + "▁Cycling", + -13.437895774841309 + ], + [ + "generell", + -13.438114166259766 + ], + [ + "customised", + -13.438392639160156 + ], + [ + "Management", + -13.43850326538086 + ], + [ + "▁timid", + -13.438518524169922 + ], + [ + "Tagged", + -13.438730239868164 + ], + [ + "▁susţinut", + -13.438809394836426 + ], + [ + "anchored", + -13.43892765045166 + ], + [ + "alternating", + -13.439055442810059 + ], + [ + "▁obligatoriu", + -13.439300537109375 + ], + [ + "▁reinstate", + -13.439456939697266 + ], + [ + "Können", + -13.43946361541748 + ], + [ + "▁Paol", + -13.439596176147461 + ], + [ + "öhr", + -13.439603805541992 + ], + [ + "▁Asociati", + -13.439876556396484 + ], + [ + "▁commenc", + -13.440285682678223 + ], + [ + "reinigt", + -13.440293312072754 + ], + [ + "commended", + -13.440350532531738 + ], + [ + "▁Proceed", + -13.440675735473633 + ], + [ + "beutel", + -13.440702438354492 + ], + [ + "▁Experimental", + -13.44070816040039 + ], + [ + "▁constellation", + -13.44070816040039 + ], + [ + "▁gepflegt", + -13.44070816040039 + ], + [ + "▁Ergänzung", + -13.440709114074707 + ], + [ + "Judith", + -13.440713882446289 + ], + [ + "▁Quartet", + -13.440720558166504 + ], + [ + "complemented", + -13.440742492675781 + ], + [ + "ausbildung", + -13.440750122070312 + ], + [ + "▁uncertainties", + -13.44077205657959 + ], + [ + "▁humiliat", + -13.440914154052734 + ], + [ + "luta", + -13.441121101379395 + ], + [ + "▁complexion", + -13.441482543945312 + ], + [ + "Serviciul", + -13.441612243652344 + ], + [ + "▁Toast", + -13.441722869873047 + ], + [ + "ummies", + -13.442425727844238 + ], + [ + "▁irit", + 
-13.442463874816895 + ], + [ + "producing", + -13.442585945129395 + ], + [ + "amenajare", + -13.442825317382812 + ], + [ + "▁béton", + -13.442828178405762 + ], + [ + "▁serpent", + -13.442851066589355 + ], + [ + "▁vizită", + -13.442996978759766 + ], + [ + "▁Beamte", + -13.443017959594727 + ], + [ + "▁Füße", + -13.443166732788086 + ], + [ + "▁Norwich", + -13.443531036376953 + ], + [ + "▁acronym", + -13.443531036376953 + ], + [ + "▁eradicate", + -13.443531036376953 + ], + [ + "▁solidarité", + -13.44353199005127 + ], + [ + "▁eggplant", + -13.443582534790039 + ], + [ + "▁sailors", + -13.443619728088379 + ], + [ + "waschen", + -13.444538116455078 + ], + [ + "Editura", + -13.444757461547852 + ], + [ + "▁erwerben", + -13.444944381713867 + ], + [ + "▁unconventional", + -13.444944381713867 + ], + [ + "▁boulder", + -13.444948196411133 + ], + [ + "Diplom", + -13.445013046264648 + ], + [ + "influx", + -13.446162223815918 + ], + [ + "▁Twelve", + -13.446361541748047 + ], + [ + "▁Sexual", + -13.44636344909668 + ], + [ + "numite", + -13.446369171142578 + ], + [ + "▁kontaktieren", + -13.446370124816895 + ], + [ + "▁strâns", + -13.44637680053711 + ], + [ + "▁précisément", + -13.446382522583008 + ], + [ + "empfindlich", + -13.446405410766602 + ], + [ + "▁divulg", + -13.446490287780762 + ], + [ + "▁delicat", + -13.446539878845215 + ], + [ + "compete", + -13.446542739868164 + ], + [ + "▁implique", + -13.446616172790527 + ], + [ + "implantation", + -13.44672966003418 + ], + [ + "frères", + -13.447328567504883 + ], + [ + "shedding", + -13.44758415222168 + ], + [ + "découvrez", + -13.447657585144043 + ], + [ + "rith", + -13.447735786437988 + ], + [ + "▁réglementation", + -13.447778701782227 + ], + [ + "▁transistor", + -13.447785377502441 + ], + [ + "inflated", + -13.447792053222656 + ], + [ + "▁Bluff", + -13.447887420654297 + ], + [ + "▁Aquarium", + -13.448526382446289 + ], + [ + "▁mananc", + -13.448638916015625 + ], + [ + "▁disinfect", + -13.448700904846191 + ], + [ + "tuft", + -13.448740005493164 + ], + [ + "Public", + -13.449081420898438 + ], + [ + "conceivabl", + -13.449197769165039 + ], + [ + "▁Cadillac", + -13.449197769165039 + ], + [ + "Assassin", + -13.449199676513672 + ], + [ + "issuance", + -13.449252128601074 + ], + [ + "▁Achtung", + -13.449287414550781 + ], + [ + "▁grundlegend", + -13.449909210205078 + ], + [ + "▁Băsescu", + -13.449910163879395 + ], + [ + "schaden", + -13.45014476776123 + ], + [ + "coached", + -13.450409889221191 + ], + [ + "▁betreffend", + -13.45046329498291 + ], + [ + "ergebnis", + -13.450541496276855 + ], + [ + "▁Lieutenant", + -13.4506196975708 + ], + [ + "WORLD", + -13.450620651245117 + ], + [ + "▁Moroccan", + -13.450620651245117 + ], + [ + "▁Butterfly", + -13.450621604919434 + ], + [ + "would", + -13.450737953186035 + ], + [ + "▁Metropol", + -13.451025009155273 + ], + [ + "lexic", + -13.451192855834961 + ], + [ + "comunitatea", + -13.45124340057373 + ], + [ + "vapeur", + -13.451456069946289 + ], + [ + "4.000", + -13.451559066772461 + ], + [ + "Pentru", + -13.451581954956055 + ], + [ + "üblichen", + -13.451613426208496 + ], + [ + "▁Général", + -13.451770782470703 + ], + [ + "▁Versailles", + -13.452046394348145 + ], + [ + "▁engraving", + -13.452046394348145 + ], + [ + "▁pédagogique", + -13.452192306518555 + ], + [ + "▁Policies", + -13.452759742736816 + ], + [ + "descending", + -13.453235626220703 + ], + [ + "stärkt", + -13.453349113464355 + ], + [ + "▁démocratie", + -13.453470230102539 + ], + [ + "▁granddaughter", + -13.453470230102539 + ], + [ + "▁buffalo", + -13.453474998474121 + 
], + [ + "Datorita", + -13.45347785949707 + ], + [ + "hydroxy", + -13.453537940979004 + ], + [ + "▁ganduri", + -13.453566551208496 + ], + [ + "▁hijack", + -13.453624725341797 + ], + [ + "zahn", + -13.453699111938477 + ], + [ + "poziția", + -13.45406436920166 + ], + [ + "▁Zähne", + -13.454184532165527 + ], + [ + "▁grossesse", + -13.454296112060547 + ], + [ + "embassy", + -13.4548978805542 + ], + [ + "▁cérémonie", + -13.4548978805542 + ], + [ + "Rhône", + -13.454898834228516 + ], + [ + "▁Cabernet", + -13.454898834228516 + ], + [ + "▁Namibia", + -13.454902648925781 + ], + [ + "▁pedestal", + -13.454902648925781 + ], + [ + "▁Fighting", + -13.45490550994873 + ], + [ + "▁Threat", + -13.454962730407715 + ], + [ + "▁ideological", + -13.455047607421875 + ], + [ + "▁restitu", + -13.455183029174805 + ], + [ + "gelangt", + -13.455510139465332 + ], + [ + "Mitgliedern", + -13.455537796020508 + ], + [ + "acquérir", + -13.455613136291504 + ], + [ + "▁inferioar", + -13.45561695098877 + ], + [ + "Thierry", + -13.455619812011719 + ], + [ + "▁Entspannung", + -13.455638885498047 + ], + [ + "frequency", + -13.45566177368164 + ], + [ + "▁Fluid", + -13.455686569213867 + ], + [ + "▁betreut", + -13.455901145935059 + ], + [ + "Biological", + -13.455965995788574 + ], + [ + "▁Constanţa", + -13.456328392028809 + ], + [ + "▁beschäftigen", + -13.456328392028809 + ], + [ + "▁undesirable", + -13.456328392028809 + ], + [ + "▁protégé", + -13.456365585327148 + ], + [ + "▁nautical", + -13.456474304199219 + ], + [ + "▁sniff", + -13.456507682800293 + ], + [ + "Decizi", + -13.456510543823242 + ], + [ + "▁căldur", + -13.45706558227539 + ], + [ + "▁ideologi", + -13.457335472106934 + ], + [ + "Fraktion", + -13.457545280456543 + ], + [ + "collegiate", + -13.45776081085205 + ], + [ + "▁sănătos", + -13.45776081085205 + ], + [ + "▁Observatory", + -13.45776653289795 + ], + [ + "▁saturation", + -13.457769393920898 + ], + [ + "organizate", + -13.457771301269531 + ], + [ + "mergem", + -13.458321571350098 + ], + [ + "Publish", + -13.458451271057129 + ], + [ + "▁rattle", + -13.458460807800293 + ], + [ + "▁întâlniri", + -13.458663940429688 + ], + [ + "emporte", + -13.458741188049316 + ], + [ + "▁înscris", + -13.459046363830566 + ], + [ + "▁Patterson", + -13.459195137023926 + ], + [ + "▁ehrenamtlich", + -13.459195137023926 + ], + [ + "linux", + -13.459213256835938 + ], + [ + "conduire", + -13.45921802520752 + ], + [ + "▁absolven", + -13.459223747253418 + ], + [ + "▁einzigartig", + -13.459598541259766 + ], + [ + "▁_____", + -13.459803581237793 + ], + [ + "▁Beschäftigung", + -13.459912300109863 + ], + [ + "▁erfasst", + -13.459927558898926 + ], + [ + "▁Datum", + -13.459992408752441 + ], + [ + "raportul", + -13.460284233093262 + ], + [ + "ennemi", + -13.460460662841797 + ], + [ + "default", + -13.460643768310547 + ], + [ + "icillin", + -13.46066951751709 + ], + [ + "▁diamant", + -13.460671424865723 + ], + [ + "amerika", + -13.460684776306152 + ], + [ + "▁pescuit", + -13.46070384979248 + ], + [ + "▁grappl", + -13.460797309875488 + ], + [ + "▁Homeland", + -13.46082592010498 + ], + [ + "▁tromb", + -13.46112060546875 + ], + [ + "▁reduzieren", + -13.461349487304688 + ], + [ + "▁Statut", + -13.461593627929688 + ], + [ + "booming", + -13.461670875549316 + ], + [ + "fenced", + -13.461723327636719 + ], + [ + "measure", + -13.461888313293457 + ], + [ + "témoin", + -13.462069511413574 + ], + [ + "▁Inventory", + -13.462069511413574 + ], + [ + "▁circonstance", + -13.462069511413574 + ], + [ + "▁téléphonique", + -13.462069511413574 + ], + [ + "▁împiedic", + 
-13.46207046508789 + ], + [ + "▁Settlement", + -13.462072372436523 + ], + [ + "kannte", + -13.462076187133789 + ], + [ + "▁substantive", + -13.462385177612305 + ], + [ + "miterea", + -13.462642669677734 + ], + [ + "▁noştri", + -13.462790489196777 + ], + [ + "▁plăcere", + -13.462791442871094 + ], + [ + "▁eticheta", + -13.462823867797852 + ], + [ + "quickest", + -13.462993621826172 + ], + [ + "▁pasageri", + -13.463089942932129 + ], + [ + "▁Publi", + -13.463495254516602 + ], + [ + "▁Suzanne", + -13.463509559631348 + ], + [ + "▁bucătări", + -13.463509559631348 + ], + [ + "Regulatory", + -13.463510513305664 + ], + [ + "▁Mandarin", + -13.463647842407227 + ], + [ + "surgical", + -13.463947296142578 + ], + [ + "▁Smash", + -13.463950157165527 + ], + [ + "▁mândr", + -13.46403694152832 + ], + [ + "▁Unterkunft", + -13.464315414428711 + ], + [ + "moos", + -13.464374542236328 + ], + [ + "Camere", + -13.464510917663574 + ], + [ + "/03/", + -13.464651107788086 + ], + [ + "▁ethno", + -13.464677810668945 + ], + [ + "▁Eröffnung", + -13.46495246887207 + ], + [ + "▁Snyder", + -13.46495246887207 + ], + [ + "▁Wilmington", + -13.46495246887207 + ], + [ + "▁Canberra", + -13.464953422546387 + ], + [ + "▁Tahoe", + -13.464953422546387 + ], + [ + "▁slippery", + -13.464953422546387 + ], + [ + "▁Snake", + -13.464957237243652 + ], + [ + "▁turmeric", + -13.464963912963867 + ], + [ + "▁Cartoon", + -13.46499252319336 + ], + [ + "▁scrisoare", + -13.46500015258789 + ], + [ + "▁reprend", + -13.465425491333008 + ], + [ + "▁Konkurrenz", + -13.46567440032959 + ], + [ + "▁raisins", + -13.465693473815918 + ], + [ + "▁Werkstatt", + -13.465713500976562 + ], + [ + "▁agresiv", + -13.465795516967773 + ], + [ + "hugs", + -13.46615219116211 + ], + [ + "cazurile", + -13.46618938446045 + ], + [ + "spirited", + -13.466232299804688 + ], + [ + "▁britisch", + -13.466307640075684 + ], + [ + "spritz", + -13.466367721557617 + ], + [ + "auxiliary", + -13.46639633178711 + ], + [ + "interprétation", + -13.46639633178711 + ], + [ + "▁verbindet", + -13.46639633178711 + ], + [ + "▁fuzzy", + -13.466429710388184 + ], + [ + "▁turmoil", + -13.466432571411133 + ], + [ + "▁redefine", + -13.466819763183594 + ], + [ + "▁Kiwi", + -13.466890335083008 + ], + [ + "oiseaux", + -13.46712875366211 + ], + [ + "▁pamper", + -13.467146873474121 + ], + [ + "▁desfaso", + -13.46719741821289 + ], + [ + "▁pragu", + -13.467576026916504 + ], + [ + "prevenirea", + -13.467730522155762 + ], + [ + "▁convergence", + -13.467846870422363 + ], + [ + "tufted", + -13.467878341674805 + ], + [ + "brewed", + -13.467981338500977 + ], + [ + "villagers", + -13.468003273010254 + ], + [ + "▁Irving", + -13.468170166015625 + ], + [ + "nigsten", + -13.468660354614258 + ], + [ + "▁embod", + -13.468742370605469 + ], + [ + "Alicia", + -13.468938827514648 + ], + [ + "probably", + -13.469009399414062 + ], + [ + "divider", + -13.46904468536377 + ], + [ + "Attempt", + -13.469223022460938 + ], + [ + "▁Cognitive", + -13.469292640686035 + ], + [ + "▁Recognition", + -13.469292640686035 + ], + [ + "▁concierge", + -13.469292640686035 + ], + [ + "▁Semester", + -13.4692964553833 + ], + [ + "Economie", + -13.469417572021484 + ], + [ + "sortiment", + -13.469460487365723 + ], + [ + "shortest", + -13.46961498260498 + ], + [ + "üchtig", + -13.469650268554688 + ], + [ + "▁conveyanc", + -13.469978332519531 + ], + [ + "▁Ferdinand", + -13.470017433166504 + ], + [ + "▁permanence", + -13.470019340515137 + ], + [ + "▁incadr", + -13.470145225524902 + ], + [ + "▁estrogen", + -13.470290184020996 + ], + [ + "February", + 
-13.470661163330078 + ], + [ + "gedeckt", + -13.470704078674316 + ], + [ + "▁reagieren", + -13.470743179321289 + ], + [ + "▁meditate", + -13.470980644226074 + ], + [ + "simulated", + -13.471010208129883 + ], + [ + "▁supprimer", + -13.471468925476074 + ], + [ + "▁bumbac", + -13.47146987915039 + ], + [ + "▁vânzări", + -13.471477508544922 + ], + [ + "▁Kapitel", + -13.471478462219238 + ], + [ + "▁Weltkrieg", + -13.471513748168945 + ], + [ + "déposer", + -13.471674919128418 + ], + [ + "Asus", + -13.4718017578125 + ], + [ + "▁Communicat", + -13.471851348876953 + ], + [ + "Finished", + -13.47188949584961 + ], + [ + "▁Telegraph", + -13.472054481506348 + ], + [ + "▁Competitive", + -13.472196578979492 + ], + [ + "▁collectivités", + -13.472197532653809 + ], + [ + "▁protège", + -13.472199440002441 + ], + [ + "▁scallop", + -13.472219467163086 + ], + [ + "Happy", + -13.472335815429688 + ], + [ + "tehnică", + -13.472352981567383 + ], + [ + "▁Gestalt", + -13.47270393371582 + ], + [ + "▁benign", + -13.47295093536377 + ], + [ + "kraut", + -13.473149299621582 + ], + [ + "louer", + -13.473221778869629 + ], + [ + "▁Printr", + -13.47326946258545 + ], + [ + "mputation", + -13.473346710205078 + ], + [ + "▁dicke", + -13.473429679870605 + ], + [ + "▁Halifax", + -13.473650932312012 + ], + [ + "▁bounty", + -13.473650932312012 + ], + [ + "▁cauliflower", + -13.473650932312012 + ], + [ + "▁Survival", + -13.473654747009277 + ], + [ + "▁Chandler", + -13.473684310913086 + ], + [ + "▁bemüh", + -13.473760604858398 + ], + [ + "phro", + -13.473855972290039 + ], + [ + "Friday", + -13.474018096923828 + ], + [ + "particularly", + -13.474032402038574 + ], + [ + "arteries", + -13.474197387695312 + ], + [ + "Lösung", + -13.474771499633789 + ], + [ + "▁causal", + -13.474817276000977 + ], + [ + "▁recueilli", + -13.475075721740723 + ], + [ + "Stylish", + -13.47510814666748 + ], + [ + "schränke", + -13.47510814666748 + ], + [ + "▁francophone", + -13.47510814666748 + ], + [ + "▁limousine", + -13.47510814666748 + ], + [ + "▁statistiques", + -13.47510814666748 + ], + [ + "▁Kleider", + -13.475111961364746 + ], + [ + "▁dunkel", + -13.475127220153809 + ], + [ + "tätigkeit", + -13.475190162658691 + ], + [ + "▁punished", + -13.475257873535156 + ], + [ + "▁implică", + -13.475539207458496 + ], + [ + "▁inițial", + -13.475568771362305 + ], + [ + "▁Eminescu", + -13.475837707519531 + ], + [ + "▁expliqué", + -13.475837707519531 + ], + [ + "▁Eduard", + -13.475839614868164 + ], + [ + "▁psychologique", + -13.475870132446289 + ], + [ + "▁protejeaz", + -13.476580619812012 + ], + [ + "spül", + -13.476709365844727 + ], + [ + "▁Virtu", + -13.477021217346191 + ], + [ + "▁régulière", + -13.477044105529785 + ], + [ + "▁Outreach", + -13.477130889892578 + ], + [ + "▁Apprentice", + -13.47729778289795 + ], + [ + "▁compréhension", + -13.47729778289795 + ], + [ + "▁zwölf", + -13.47729778289795 + ], + [ + "Surgical", + -13.477315902709961 + ], + [ + "latéral", + -13.477417945861816 + ], + [ + "▁Ceremony", + -13.47803020477295 + ], + [ + "▁Shampoo", + -13.47803783416748 + ], + [ + "Global", + -13.478239059448242 + ], + [ + "▁paradis", + -13.478302955627441 + ], + [ + "Developed", + -13.478493690490723 + ], + [ + "▁figurine", + -13.478549003601074 + ], + [ + "sujets", + -13.478574752807617 + ], + [ + "▁Naomi", + -13.478772163391113 + ], + [ + "financed", + -13.478838920593262 + ], + [ + "forestry", + -13.478896141052246 + ], + [ + "▁Anregung", + -13.479494094848633 + ], + [ + "▁spectateur", + -13.479804039001465 + ], + [ + "▁exercitii", + -13.479815483093262 + ], + [ + 
"▁russisch", + -13.479888916015625 + ], + [ + "gefunden", + -13.479988098144531 + ], + [ + "schleunig", + -13.480225563049316 + ], + [ + "▁géographique", + -13.480225563049316 + ], + [ + "▁Delphi", + -13.480317115783691 + ], + [ + "Freddie", + -13.4806489944458 + ], + [ + "▁muzici", + -13.480958938598633 + ], + [ + "▁Edmund", + -13.48095989227295 + ], + [ + "finanzielle", + -13.481032371520996 + ], + [ + "(2003)", + -13.481319427490234 + ], + [ + "accentuate", + -13.481437683105469 + ], + [ + "overlapping", + -13.48151969909668 + ], + [ + "▁Pluto", + -13.481595993041992 + ], + [ + "românii", + -13.481683731079102 + ], + [ + "▁Timişoara", + -13.48169231414795 + ], + [ + "▁poivr", + -13.481754302978516 + ], + [ + "▁repris", + -13.481852531433105 + ], + [ + "▁Geschlecht", + -13.482426643371582 + ], + [ + "▁thieves", + -13.482426643371582 + ], + [ + "▁Transformer", + -13.482431411743164 + ], + [ + "▁shortcomings", + -13.482438087463379 + ], + [ + "▁aptitude", + -13.48244571685791 + ], + [ + "pitfalls", + -13.482468605041504 + ], + [ + "▁manicure", + -13.482577323913574 + ], + [ + "mystical", + -13.482723236083984 + ], + [ + "▁abolish", + -13.482833862304688 + ], + [ + "▁Zielgruppe", + -13.482873916625977 + ], + [ + "▁naţionale", + -13.483160972595215 + ], + [ + "▁trandafir", + -13.483160972595215 + ], + [ + "▁matematic", + -13.483193397521973 + ], + [ + "▁Hirsch", + -13.483257293701172 + ], + [ + "Fahr", + -13.483458518981934 + ], + [ + "connaissent", + -13.483476638793945 + ], + [ + "browned", + -13.483846664428711 + ], + [ + "▁bearbeitet", + -13.483881950378418 + ], + [ + "▁usturoi", + -13.483896255493164 + ], + [ + "▁Surprise", + -13.48389720916748 + ], + [ + "▁Tehran", + -13.483899116516113 + ], + [ + "▁BLACK", + -13.483901023864746 + ], + [ + "▁abonament", + -13.483904838562012 + ], + [ + "▁mêl", + -13.483972549438477 + ], + [ + "Angebot", + -13.484091758728027 + ], + [ + "ajungi", + -13.48410415649414 + ], + [ + "▁Woodland", + -13.48420524597168 + ], + [ + "▁gradini", + -13.484305381774902 + ], + [ + "▁Marilyn", + -13.48464584350586 + ], + [ + "kilometer", + -13.484880447387695 + ], + [ + "tempered", + -13.485230445861816 + ], + [ + "▁intimacy", + -13.485371589660645 + ], + [ + "▁thunderstorm", + -13.485373497009277 + ], + [ + "▁Uttar", + -13.485413551330566 + ], + [ + "▁varnish", + -13.485535621643066 + ], + [ + "opathie", + -13.485982894897461 + ], + [ + "▁școlar", + -13.48611068725586 + ], + [ + "▁raisonnable", + -13.486114501953125 + ], + [ + "proactively", + -13.486490249633789 + ], + [ + "▁gib", + -13.486536979675293 + ], + [ + "▁hospice", + -13.48684310913086 + ], + [ + "▁constă", + -13.486896514892578 + ], + [ + "▁Crescent", + -13.48690128326416 + ], + [ + "▁ambasad", + -13.486933708190918 + ], + [ + "hotărâre", + -13.486969947814941 + ], + [ + "▁fraîche", + -13.48709774017334 + ], + [ + "▁bundesweit", + -13.487581253051758 + ], + [ + "nsbesondere", + -13.487812042236328 + ], + [ + "▁intoarce", + -13.487863540649414 + ], + [ + "▁Schokolade", + -13.488319396972656 + ], + [ + "▁adjective", + -13.488319396972656 + ], + [ + "▁incalzire", + -13.488319396972656 + ], + [ + "▁Qualification", + -13.488320350646973 + ], + [ + "▁Bolivia", + -13.488324165344238 + ], + [ + "▁cruelty", + -13.488334655761719 + ], + [ + "pläne", + -13.48834228515625 + ], + [ + "▁solitude", + -13.488354682922363 + ], + [ + "▁Bosnia", + -13.488568305969238 + ], + [ + "rohr", + -13.488643646240234 + ], + [ + "▁regrette", + -13.48877239227295 + ], + [ + "zusammengestellt", + -13.48924732208252 + ], + [ + 
"▁Kardashian", + -13.489798545837402 + ], + [ + "▁Picasso", + -13.489798545837402 + ], + [ + "▁unverbindlich", + -13.489798545837402 + ], + [ + "▁Headquarters", + -13.489799499511719 + ], + [ + "métrage", + -13.4898099899292 + ], + [ + "▁Magento", + -13.489816665649414 + ], + [ + "▁exhibitors", + -13.489898681640625 + ], + [ + "utty", + -13.490381240844727 + ], + [ + "▁Fünf", + -13.490538597106934 + ], + [ + "▁Peugeot", + -13.490538597106934 + ], + [ + "▁verdienen", + -13.490538597106934 + ], + [ + "▁absolviert", + -13.49053955078125 + ], + [ + "schutzerklärung", + -13.490679740905762 + ], + [ + "sistemele", + -13.49089241027832 + ], + [ + "▁concrète", + -13.491279602050781 + ], + [ + "▁rhyme", + -13.491279602050781 + ], + [ + "▁Continuous", + -13.49128246307373 + ], + [ + "versprechen", + -13.491312026977539 + ], + [ + "▁Melanie", + -13.49202823638916 + ], + [ + "▁clienţi", + -13.492046356201172 + ], + [ + "luckily", + -13.492205619812012 + ], + [ + "▁counterfeit", + -13.492762565612793 + ], + [ + "▁locomotive", + -13.492889404296875 + ], + [ + "▁reacți", + -13.492908477783203 + ], + [ + "ampered", + -13.493005752563477 + ], + [ + "atenția", + -13.493011474609375 + ], + [ + "Suppose", + -13.493062973022461 + ], + [ + "hinweis", + -13.493464469909668 + ], + [ + "verletzung", + -13.493504524230957 + ], + [ + "▁mănânc", + -13.493504524230957 + ], + [ + "▁provoac", + -13.493507385253906 + ], + [ + "▁regizor", + -13.493511199951172 + ], + [ + "kundig", + -13.49352741241455 + ], + [ + "embarqu", + -13.493584632873535 + ], + [ + "Radio", + -13.493690490722656 + ], + [ + "Ministrul", + -13.493896484375 + ], + [ + "weakened", + -13.494214057922363 + ], + [ + "▁translucent", + -13.494247436523438 + ], + [ + "George", + -13.494380950927734 + ], + [ + "▁bacterii", + -13.494402885437012 + ], + [ + "intervalul", + -13.494803428649902 + ], + [ + "▁vizualiz", + -13.494832038879395 + ], + [ + "▁Feuchtigkeit", + -13.494991302490234 + ], + [ + "▁choisissez", + -13.494991302490234 + ], + [ + "▁plausible", + -13.494991302490234 + ], + [ + "▁perpetu", + -13.495122909545898 + ], + [ + "▁bucati", + -13.495194435119629 + ], + [ + "▁Giovanni", + -13.495735168457031 + ], + [ + "▁bluetooth", + -13.495736122131348 + ], + [ + "▁translating", + -13.49573802947998 + ], + [ + "▁Kyoto", + -13.495739936828613 + ], + [ + "▁homosexual", + -13.495745658874512 + ], + [ + "treabă", + -13.495820045471191 + ], + [ + "ntrepid", + -13.495983123779297 + ], + [ + "▁fachlich", + -13.496664047241211 + ], + [ + "Vaccin", + -13.496774673461914 + ], + [ + "▁Treib", + -13.497248649597168 + ], + [ + "varsity", + -13.497272491455078 + ], + [ + "▁Tavern", + -13.497278213500977 + ], + [ + "▁ensue", + -13.497330665588379 + ], + [ + "flexibel", + -13.497971534729004 + ], + [ + "retrieved", + -13.498102188110352 + ], + [ + "traditionellen", + -13.498230934143066 + ], + [ + "▁circulati", + -13.498546600341797 + ], + [ + "▁Diagnose", + -13.498717308044434 + ], + [ + "▁Strawberry", + -13.498717308044434 + ], + [ + "Societatea", + -13.49871826171875 + ], + [ + "expertise", + -13.498849868774414 + ], + [ + "▁naturii", + -13.499464988708496 + ], + [ + "▁4:1", + -13.499515533447266 + ], + [ + "Frequently", + -13.500210762023926 + ], + [ + "disproportionate", + -13.500210762023926 + ], + [ + "▁LIMITED", + -13.500210762023926 + ], + [ + "▁ancestral", + -13.500227928161621 + ], + [ + "▁Logistik", + -13.500237464904785 + ], + [ + "▁recolt", + -13.50042724609375 + ], + [ + "▁liebevoll", + -13.500436782836914 + ], + [ + "importing", + -13.500452041625977 + ], 
+ [ + "aparatul", + -13.500458717346191 + ], + [ + "poziţia", + -13.500564575195312 + ], + [ + "facerilor", + -13.500658988952637 + ], + [ + "Submitted", + -13.50086784362793 + ], + [ + "ografia", + -13.501221656799316 + ], + [ + "onformément", + -13.50168228149414 + ], + [ + "▁dissemination", + -13.501708030700684 + ], + [ + "afli", + -13.501834869384766 + ], + [ + "luminous", + -13.502154350280762 + ], + [ + "▁draußen", + -13.502456665039062 + ], + [ + "▁Zauber", + -13.502535820007324 + ], + [ + "▁Ibrahim", + -13.503207206726074 + ], + [ + "▁eruption", + -13.503216743469238 + ], + [ + "écrite", + -13.50357723236084 + ], + [ + "avril", + -13.503898620605469 + ], + [ + "Increasing", + -13.504171371459961 + ], + [ + "hingeg", + -13.504411697387695 + ], + [ + "fidelity", + -13.504707336425781 + ], + [ + "étonnant", + -13.504707336425781 + ], + [ + "▁créativité", + -13.504707336425781 + ], + [ + "▁Required", + -13.504708290100098 + ], + [ + "▁Edison", + -13.504719734191895 + ], + [ + "▁Stuhl", + -13.504719734191895 + ], + [ + "outhwestern", + -13.506060600280762 + ], + [ + "▁Beschwerden", + -13.506210327148438 + ], + [ + "▁angajaţi", + -13.506210327148438 + ], + [ + "▁Currency", + -13.506211280822754 + ], + [ + "▁reagiert", + -13.506214141845703 + ], + [ + "Science", + -13.506229400634766 + ], + [ + "hospital", + -13.506253242492676 + ], + [ + "professionellen", + -13.50649356842041 + ], + [ + "▁Trouve", + -13.506768226623535 + ], + [ + "▁utopi", + -13.50683307647705 + ], + [ + "gypte", + -13.506928443908691 + ], + [ + "▁Konsequenz", + -13.506962776184082 + ], + [ + "▁pacienți", + -13.506962776184082 + ], + [ + "▁orizont", + -13.506988525390625 + ], + [ + "Corey", + -13.506999015808105 + ], + [ + "▁quartet", + -13.507009506225586 + ], + [ + "▁Sherlock", + -13.50710678100586 + ], + [ + "▁gagné", + -13.507237434387207 + ], + [ + "▁Jusqu", + -13.50732707977295 + ], + [ + "▁Clickfunnel", + -13.507465362548828 + ], + [ + "Survivor", + -13.507716178894043 + ], + [ + "▁Beethoven", + -13.507716178894043 + ], + [ + "▁Exemplar", + -13.507716178894043 + ], + [ + "▁Gonzalez", + -13.507716178894043 + ], + [ + "▁Illustrator", + -13.507716178894043 + ], + [ + "▁Verpflichtung", + -13.507718086242676 + ], + [ + "Possibly", + -13.507719993591309 + ], + [ + "Maintenant", + -13.507721900939941 + ], + [ + "▁incendiu", + -13.507721900939941 + ], + [ + "▁poêl", + -13.507747650146484 + ], + [ + "▁aşez", + -13.507757186889648 + ], + [ + "phenol", + -13.508248329162598 + ], + [ + "▁magician", + -13.508421897888184 + ], + [ + "éventuellement", + -13.508512496948242 + ], + [ + "▁amortiz", + -13.508736610412598 + ], + [ + "bouchage", + -13.50873851776123 + ], + [ + "▁Accommodation", + -13.509223937988281 + ], + [ + "▁Significant", + -13.509223937988281 + ], + [ + "▁rejoice", + -13.509223937988281 + ], + [ + "▁Lorraine", + -13.509224891662598 + ], + [ + "▁Necklace", + -13.509234428405762 + ], + [ + "▁hamburger", + -13.509273529052734 + ], + [ + "Enhanced", + -13.5095796585083 + ], + [ + "▁Audrey", + -13.509978294372559 + ], + [ + "▁considère", + -13.509986877441406 + ], + [ + "hafen", + -13.51050853729248 + ], + [ + "acordare", + -13.510509490966797 + ], + [ + "▁ediți", + -13.51075553894043 + ], + [ + "▁militia", + -13.510767936706543 + ], + [ + "captivate", + -13.510771751403809 + ], + [ + "▁rebellion", + -13.510777473449707 + ], + [ + "▁veranstalte", + -13.510844230651855 + ], + [ + "▁matelas", + -13.510859489440918 + ], + [ + "originating", + -13.510873794555664 + ], + [ + "Typical", + -13.51092529296875 + ], + [ + 
"▁législat", + -13.511360168457031 + ], + [ + "▁Kräfte", + -13.511488914489746 + ], + [ + "▁Eigentümer", + -13.511489868164062 + ], + [ + "▁gonfl", + -13.511608123779297 + ], + [ + "dispoziție", + -13.512028694152832 + ], + [ + "▁Fabulous", + -13.512246131896973 + ], + [ + "▁Guillaume", + -13.512246131896973 + ], + [ + "▁Genuine", + -13.512247085571289 + ], + [ + "selbe", + -13.512449264526367 + ], + [ + "(2002)", + -13.512616157531738 + ], + [ + "Einen", + -13.512908935546875 + ], + [ + "▁Snapdragon", + -13.513002395629883 + ], + [ + "▁plagiarism", + -13.513002395629883 + ], + [ + "▁Rendez", + -13.513019561767578 + ], + [ + "▁înregistrare", + -13.513033866882324 + ], + [ + "probiert", + -13.513081550598145 + ], + [ + "gestiegen", + -13.513153076171875 + ], + [ + "Teatrul", + -13.513370513916016 + ], + [ + "trove", + -13.513469696044922 + ], + [ + "ntsprechend", + -13.513566017150879 + ], + [ + "Städten", + -13.513691902160645 + ], + [ + "unforeseen", + -13.513760566711426 + ], + [ + "▁Meridian", + -13.513761520385742 + ], + [ + "▁Ministries", + -13.513763427734375 + ], + [ + "plaît", + -13.513769149780273 + ], + [ + "▁Telefonnummer", + -13.513772010803223 + ], + [ + "welded", + -13.513788223266602 + ], + [ + "pondere", + -13.513976097106934 + ], + [ + "▁funcţiona", + -13.514012336730957 + ], + [ + "▁politicieni", + -13.514187812805176 + ], + [ + "fleck", + -13.514240264892578 + ], + [ + "▁Nitro", + -13.514264106750488 + ], + [ + "wettbewerb", + -13.514518737792969 + ], + [ + "▁ingrijire", + -13.514518737792969 + ], + [ + "▁Gehirn", + -13.514521598815918 + ], + [ + "sigură", + -13.514904022216797 + ], + [ + "400,000", + -13.515237808227539 + ], + [ + "▁cataract", + -13.515277862548828 + ], + [ + "outskirt", + -13.515280723571777 + ], + [ + "▁Identification", + -13.515287399291992 + ], + [ + "▁imperfections", + -13.515317916870117 + ], + [ + "▁Dokumentation", + -13.515474319458008 + ], + [ + "Engine", + -13.515851974487305 + ], + [ + "extindere", + -13.516046524047852 + ], + [ + "bijoux", + -13.516797065734863 + ], + [ + "▁dărui", + -13.516802787780762 + ], + [ + "▁Moderator", + -13.516913414001465 + ], + [ + "biblio", + -13.517024040222168 + ], + [ + "енн", + -13.517024040222168 + ], + [ + "▁Relevan", + -13.51728630065918 + ], + [ + "ansprüche", + -13.517557144165039 + ], + [ + "épaisseur", + -13.517580032348633 + ], + [ + "▁emoţi", + -13.517677307128906 + ], + [ + "exacerbate", + -13.518318176269531 + ], + [ + "▁Wimbledon", + -13.518318176269531 + ], + [ + "▁Pandora", + -13.518319129943848 + ], + [ + "perhaps", + -13.518725395202637 + ], + [ + "certify", + -13.518762588500977 + ], + [ + "Strukturen", + -13.5189208984375 + ], + [ + "▁Kreativität", + -13.519079208374023 + ], + [ + "schlägt", + -13.51908016204834 + ], + [ + "▁certifié", + -13.51911735534668 + ], + [ + "/09/", + -13.519211769104004 + ], + [ + "▁suprafaţ", + -13.519493103027344 + ], + [ + "verständnis", + -13.519841194152832 + ], + [ + "presedintele", + -13.519842147827148 + ], + [ + "▁orthopedic", + -13.519842147827148 + ], + [ + "▁superioara", + -13.519843101501465 + ], + [ + "älteste", + -13.519903182983398 + ], + [ + "▁conducător", + -13.520153999328613 + ], + [ + "supplementary", + -13.520243644714355 + ], + [ + "wetlands", + -13.520438194274902 + ], + [ + "▁suprafete", + -13.520605087280273 + ], + [ + "▁aparțin", + -13.520951271057129 + ], + [ + "analiză", + -13.521014213562012 + ], + [ + "Uneori", + -13.52115535736084 + ], + [ + "Toujours", + -13.521368026733398 + ], + [ + "▁Nairobi", + -13.521368026733398 + ], + [ + 
"▁asparagus", + -13.521368026733398 + ], + [ + "▁crowdfunding", + -13.521368026733398 + ], + [ + "gutachten", + -13.521369934082031 + ], + [ + "smelling", + -13.521659851074219 + ], + [ + "▁elektrisch", + -13.521718978881836 + ], + [ + "begging", + -13.522055625915527 + ], + [ + "▁Renewable", + -13.522896766662598 + ], + [ + "▁Trouble", + -13.522896766662598 + ], + [ + "▁devastated", + -13.522896766662598 + ], + [ + "▁remplacé", + -13.522896766662598 + ], + [ + "▁schmeckt", + -13.522896766662598 + ], + [ + "▁exerciți", + -13.523005485534668 + ], + [ + "▁vermute", + -13.523650169372559 + ], + [ + "▁Constanța", + -13.523661613464355 + ], + [ + "expunere", + -13.523693084716797 + ], + [ + "▁Fitzgerald", + -13.52442741394043 + ], + [ + "▁Mechanism", + -13.524429321289062 + ], + [ + "▁underscore", + -13.524484634399414 + ], + [ + "poziţie", + -13.524901390075684 + ], + [ + "stöbern", + -13.525193214416504 + ], + [ + "▁littérature", + -13.525193214416504 + ], + [ + "▁împrumut", + -13.525193214416504 + ], + [ + "Vision", + -13.525771141052246 + ], + [ + "▁overwhelm", + -13.525773048400879 + ], + [ + "▁erweitern", + -13.525959968566895 + ], + [ + "skeletal", + -13.525960922241211 + ], + [ + "▁terrified", + -13.525960922241211 + ], + [ + "aggravate", + -13.525962829589844 + ], + [ + "▁Malawi", + -13.525969505310059 + ], + [ + "▁neuroscience", + -13.526009559631348 + ], + [ + "trecută", + -13.526097297668457 + ], + [ + "▁maestr", + -13.52634334564209 + ], + [ + "нов", + -13.526555061340332 + ], + [ + "▁Cobb", + -13.52667236328125 + ], + [ + "▁Schwangerschaft", + -13.526727676391602 + ], + [ + "▁internationaux", + -13.526727676391602 + ], + [ + "▁entspannen", + -13.526729583740234 + ], + [ + "▁Früchte", + -13.52676773071289 + ], + [ + "mâine", + -13.526805877685547 + ], + [ + "stützt", + -13.526938438415527 + ], + [ + "flipped", + -13.527076721191406 + ], + [ + "Palatul", + -13.527252197265625 + ], + [ + "▁Gérard", + -13.527496337890625 + ], + [ + "▁Kensington", + -13.527498245239258 + ], + [ + "chargée", + -13.52807331085205 + ], + [ + "iolo", + -13.528203964233398 + ], + [ + "▁excesiv", + -13.52904987335205 + ], + [ + "▁Gymnas", + -13.52962875366211 + ], + [ + "▁optimise", + -13.529678344726562 + ], + [ + "possibilités", + -13.529717445373535 + ], + [ + "▁periculoas", + -13.529810905456543 + ], + [ + "mechanical", + -13.529839515686035 + ], + [ + "▁confruntă", + -13.529868125915527 + ], + [ + "quatrième", + -13.530573844909668 + ], + [ + "▁Preservation", + -13.530573844909668 + ], + [ + "▁Juventus", + -13.530574798583984 + ], + [ + "vorsitzende", + -13.5305757522583 + ], + [ + "électora", + -13.530586242675781 + ], + [ + "▁fascinant", + -13.53061580657959 + ], + [ + "▁lagoon", + -13.530671119689941 + ], + [ + "referencing", + -13.53079605102539 + ], + [ + "appointed", + -13.530988693237305 + ], + [ + "Audible", + -13.531112670898438 + ], + [ + "sighted", + -13.531612396240234 + ], + [ + "▁gewünscht", + -13.532061576843262 + ], + [ + "▁Expedition", + -13.532115936279297 + ], + [ + "▁genunchi", + -13.532115936279297 + ], + [ + "▁PROVIDE", + -13.53211784362793 + ], + [ + "▁rosemary", + -13.532118797302246 + ], + [ + "▁cleanliness", + -13.532130241394043 + ], + [ + "commanded", + -13.53223991394043 + ], + [ + "ältere", + -13.532530784606934 + ], + [ + "ност", + -13.532547950744629 + ], + [ + "kühlen", + -13.532917976379395 + ], + [ + "mettez", + -13.533548355102539 + ], + [ + "connaitre", + -13.533661842346191 + ], + [ + "Qaeda", + -13.533662796020508 + ], + [ + "▁traumhaft", + -13.53366470336914 + ], 
+ [ + "kommst", + -13.533666610717773 + ], + [ + "▁Abbott", + -13.533669471740723 + ], + [ + "▁Fool", + -13.533686637878418 + ], + [ + "▁médaill", + -13.533687591552734 + ], + [ + "▁genotyp", + -13.533693313598633 + ], + [ + "▁Fälle", + -13.53375244140625 + ], + [ + "▁actuator", + -13.533843994140625 + ], + [ + "CLASS", + -13.534042358398438 + ], + [ + "progressively", + -13.534421920776367 + ], + [ + "negative", + -13.53469467163086 + ], + [ + "bundled", + -13.535009384155273 + ], + [ + "▁dezbatere", + -13.535208702087402 + ], + [ + "kamagra", + -13.535237312316895 + ], + [ + "gardinen", + -13.535250663757324 + ], + [ + "unsecured", + -13.535271644592285 + ], + [ + "Assisted", + -13.535298347473145 + ], + [ + "Gymnasium", + -13.535386085510254 + ], + [ + "▁brusc", + -13.535591125488281 + ], + [ + "prinzip", + -13.535655975341797 + ], + [ + "Torrent", + -13.535964965820312 + ], + [ + "Presented", + -13.535967826843262 + ], + [ + "▁impressionnant", + -13.53628921508789 + ], + [ + "charakter", + -13.536758422851562 + ], + [ + "▁Acoustic", + -13.536762237548828 + ], + [ + "▁appartient", + -13.536763191223145 + ], + [ + "gesteuert", + -13.536879539489746 + ], + [ + "▁condiți", + -13.537089347839355 + ], + [ + "authentic", + -13.537313461303711 + ], + [ + "▁Erholung", + -13.537534713745117 + ], + [ + "▁Veranstalter", + -13.537534713745117 + ], + [ + "▁Filial", + -13.537665367126465 + ], + [ + "ruhigen", + -13.537714958190918 + ], + [ + "symptôme", + -13.538311004638672 + ], + [ + "▁Efficiency", + -13.538311004638672 + ], + [ + "▁stunned", + -13.538311004638672 + ], + [ + "▁sympathique", + -13.538311004638672 + ], + [ + "Uploaded", + -13.538352966308594 + ], + [ + "▁geistig", + -13.538453102111816 + ], + [ + "Pläne", + -13.538509368896484 + ], + [ + "▁Apartament", + -13.53855037689209 + ], + [ + "▁ușoar", + -13.539119720458984 + ], + [ + "▁locuinț", + -13.539122581481934 + ], + [ + "épouse", + -13.539166450500488 + ], + [ + "îngrijire", + -13.539215087890625 + ], + [ + "Obtain", + -13.539261817932129 + ], + [ + "Detect", + -13.539590835571289 + ], + [ + "▁Dumitru", + -13.539865493774414 + ], + [ + "▁refrigeration", + -13.539865493774414 + ], + [ + "ärztliche", + -13.539881706237793 + ], + [ + "efficiency", + -13.540032386779785 + ], + [ + "▁snail", + -13.540328979492188 + ], + [ + "gelände", + -13.540419578552246 + ], + [ + "expected", + -13.540620803833008 + ], + [ + "kompetenz", + -13.540643692016602 + ], + [ + "▁sfânt", + -13.540643692016602 + ], + [ + "océan", + -13.540685653686523 + ], + [ + "▁Plasma", + -13.540717124938965 + ], + [ + "▁vulgar", + -13.54075813293457 + ], + [ + "▁slump", + -13.541083335876465 + ], + [ + "autoimmune", + -13.541422843933105 + ], + [ + "▁Cynthia", + -13.541422843933105 + ], + [ + "▁dimineaţ", + -13.541422843933105 + ], + [ + "▁whimsical", + -13.541422843933105 + ], + [ + "▁evaporate", + -13.541488647460938 + ], + [ + "▁calorii", + -13.54186725616455 + ], + [ + "portion", + -13.54187297821045 + ], + [ + "crowned", + -13.5419282913208 + ], + [ + "▁întâmpin", + -13.54220199584961 + ], + [ + "▁Centenar", + -13.542620658874512 + ], + [ + "▁Genehmigung", + -13.54298210144043 + ], + [ + "▁Wahrscheinlich", + -13.54298210144043 + ], + [ + "▁accompaniment", + -13.54298210144043 + ], + [ + "▁Negoti", + -13.542984962463379 + ], + [ + "▁Vanilla", + -13.543000221252441 + ], + [ + "▁Receiv", + -13.543014526367188 + ], + [ + "▁bestseller", + -13.543052673339844 + ], + [ + "tendons", + -13.543069839477539 + ], + [ + "Reilly", + -13.543192863464355 + ], + [ + "▁refroidi", + 
-13.543731689453125 + ], + [ + "▁überrascht", + -13.543763160705566 + ], + [ + "Gitarre", + -13.543828964233398 + ], + [ + "wände", + -13.544173240661621 + ], + [ + "veniturile", + -13.544321060180664 + ], + [ + "▁portofoliu", + -13.54454517364502 + ], + [ + "▁temporaire", + -13.54454517364502 + ], + [ + "▁Dawson", + -13.544546127319336 + ], + [ + "foreseeable", + -13.544547080993652 + ], + [ + "▁Gastgeber", + -13.545344352722168 + ], + [ + "Access", + -13.545432090759277 + ], + [ + "▁Defender", + -13.545537948608398 + ], + [ + "▁Quarry", + -13.546109199523926 + ], + [ + "▁trolley", + -13.546110153198242 + ], + [ + "▁carburant", + -13.546111106872559 + ], + [ + "▁titluri", + -13.54631233215332 + ], + [ + "comparatively", + -13.546327590942383 + ], + [ + "nachfolgend", + -13.54659652709961 + ], + [ + "anfang", + -13.546740531921387 + ], + [ + "▁faszinieren", + -13.546891212463379 + ], + [ + "trăiesc", + -13.547082901000977 + ], + [ + "▁Travail", + -13.547159194946289 + ], + [ + "Contact", + -13.547235488891602 + ], + [ + "fashion", + -13.547245025634766 + ], + [ + "▁épais", + -13.547585487365723 + ], + [ + "plattform", + -13.547676086425781 + ], + [ + "ventricular", + -13.547677040100098 + ], + [ + "▁Portsmouth", + -13.547677993774414 + ], + [ + "▁împărat", + -13.54767894744873 + ], + [ + "▁vândut", + -13.547698020935059 + ], + [ + "▁evidenț", + -13.547708511352539 + ], + [ + "Purchasing", + -13.547877311706543 + ], + [ + "discerning", + -13.54804801940918 + ], + [ + "odonti", + -13.548080444335938 + ], + [ + "distilled", + -13.548316955566406 + ], + [ + "saveur", + -13.548447608947754 + ], + [ + "▁récompense", + -13.54845905303955 + ], + [ + "confortul", + -13.548552513122559 + ], + [ + "arbeitete", + -13.548787117004395 + ], + [ + "partenerii", + -13.549064636230469 + ], + [ + "mirrored", + -13.54908561706543 + ], + [ + "Dienstleister", + -13.549243927001953 + ], + [ + "▁Jakarta", + -13.549243927001953 + ], + [ + "▁WEBSITE", + -13.549243927001953 + ], + [ + "▁Acquisition", + -13.549262046813965 + ], + [ + "▁Miranda", + -13.549287796020508 + ], + [ + "Syndic", + -13.549356460571289 + ], + [ + "▁stadiu", + -13.549450874328613 + ], + [ + "▁Parchet", + -13.549498558044434 + ], + [ + "Générale", + -13.54954719543457 + ], + [ + "▁jpl", + -13.549579620361328 + ], + [ + "attainable", + -13.549949645996094 + ], + [ + "École", + -13.550041198730469 + ], + [ + "Sphere", + -13.550538063049316 + ], + [ + "obtainable", + -13.550592422485352 + ], + [ + "▁Sapphire", + -13.55081558227539 + ], + [ + "▁aérienne", + -13.55081558227539 + ], + [ + "▁bărbați", + -13.55081558227539 + ], + [ + "▁irritating", + -13.55081558227539 + ], + [ + "▁ultraviolet", + -13.550816535949707 + ], + [ + "untouched", + -13.550817489624023 + ], + [ + "▁Ramsey", + -13.550819396972656 + ], + [ + "titres", + -13.551087379455566 + ], + [ + "▁Coordinat", + -13.551218032836914 + ], + [ + "believable", + -13.551358222961426 + ], + [ + "▁Grundsätzlich", + -13.551602363586426 + ], + [ + "▁konsequent", + -13.551602363586426 + ], + [ + "▁Cerceta", + -13.551909446716309 + ], + [ + "dirigé", + -13.552116394042969 + ], + [ + "▁disturb", + -13.552151679992676 + ], + [ + "conciliation", + -13.552210807800293 + ], + [ + "▁gelöscht", + -13.552390098571777 + ], + [ + "▁sauvegarde", + -13.552391052246094 + ], + [ + "▁cavities", + -13.552393913269043 + ], + [ + "stunde", + -13.55241584777832 + ], + [ + "▁foloseasc", + -13.552430152893066 + ], + [ + "▁simpati", + -13.552873611450195 + ], + [ + "Chacun", + -13.553032875061035 + ], + [ + "adversaire", + 
-13.553178787231445 + ], + [ + "Eigentlich", + -13.55319881439209 + ], + [ + "defense", + -13.553593635559082 + ], + [ + "consider", + -13.553672790527344 + ], + [ + "▁Trinidad", + -13.553966522216797 + ], + [ + "▁strategist", + -13.553966522216797 + ], + [ + "distorted", + -13.553967475891113 + ], + [ + "▁hypothetical", + -13.553967475891113 + ], + [ + "▁ramburs", + -13.55396842956543 + ], + [ + "▁Mallorca", + -13.553970336914062 + ], + [ + "▁Domino", + -13.554018020629883 + ], + [ + "arrondissement", + -13.554756164550781 + ], + [ + "konferenz", + -13.554756164550781 + ], + [ + "▁Beleuchtung", + -13.554756164550781 + ], + [ + "aggregat", + -13.55484676361084 + ], + [ + "subsidize", + -13.554896354675293 + ], + [ + "shri", + -13.555503845214844 + ], + [ + "Kaufentscheidung", + -13.555545806884766 + ], + [ + "▁Hernandez", + -13.555545806884766 + ], + [ + "▁Upholster", + -13.555546760559082 + ], + [ + "atlantic", + -13.555614471435547 + ], + [ + "▁locuinte", + -13.555652618408203 + ], + [ + "integrates", + -13.55583381652832 + ], + [ + "ewusst", + -13.555878639221191 + ], + [ + "▁Avocado", + -13.556337356567383 + ], + [ + "Decorative", + -13.557014465332031 + ], + [ + "▁Corinthians", + -13.557127952575684 + ], + [ + "▁clădire", + -13.557127952575684 + ], + [ + "▁plomberie", + -13.557127952575684 + ], + [ + "vases", + -13.557143211364746 + ], + [ + "▁crippl", + -13.557247161865234 + ], + [ + "cluttered", + -13.557487487792969 + ], + [ + "departed", + -13.557807922363281 + ], + [ + "▁entscheidet", + -13.5579195022583 + ], + [ + "Certaine", + -13.558243751525879 + ], + [ + "honda", + -13.558294296264648 + ], + [ + "triggering", + -13.558527946472168 + ], + [ + "▁Erdogan", + -13.558712005615234 + ], + [ + "▁Widerstand", + -13.558712005615234 + ], + [ + "▁Bhutan", + -13.558713912963867 + ], + [ + "▁ascunde", + -13.558736801147461 + ], + [ + "▁shading", + -13.558748245239258 + ], + [ + "behavioural", + -13.559172630310059 + ], + [ + "▁transfér", + -13.55960750579834 + ], + [ + "versichert", + -13.559623718261719 + ], + [ + "▁vinovat", + -13.559646606445312 + ], + [ + "▁airfare", + -13.560142517089844 + ], + [ + "▁simplistic", + -13.56030559539795 + ], + [ + "▁Asigura", + -13.560320854187012 + ], + [ + "Chauffe", + -13.560480117797852 + ], + [ + "scrisă", + -13.560585975646973 + ], + [ + "trouvez", + -13.560702323913574 + ], + [ + "greasy", + -13.560709953308105 + ], + [ + "bottled", + -13.560809135437012 + ], + [ + "grouped", + -13.560934066772461 + ], + [ + "▁beeinflussen", + -13.561092376708984 + ], + [ + "▁chronological", + -13.561114311218262 + ], + [ + "(2000)", + -13.56127643585205 + ], + [ + "sheltered", + -13.561298370361328 + ], + [ + "Historically", + -13.561931610107422 + ], + [ + "piled", + -13.562012672424316 + ], + [ + "publicate", + -13.562378883361816 + ], + [ + "▁étudié", + -13.56268310546875 + ], + [ + "▁vertraut", + -13.562688827514648 + ], + [ + "▁Anpassung", + -13.562697410583496 + ], + [ + "cifra", + -13.562705993652344 + ], + [ + "▁recueil", + -13.562762260437012 + ], + [ + "enforceable", + -13.563183784484863 + ], + [ + "Distinguished", + -13.56347942352295 + ], + [ + "Empfänger", + -13.56347942352295 + ], + [ + "▁Acrylic", + -13.56347942352295 + ], + [ + "▁Encyclopedia", + -13.56347942352295 + ], + [ + "▁proaspete", + -13.56347942352295 + ], + [ + "▁unrealistic", + -13.56347942352295 + ], + [ + "▁Assignment", + -13.563481330871582 + ], + [ + "▁incubator", + -13.563491821289062 + ], + [ + "▁unilateral", + -13.563501358032227 + ], + [ + "elasticity", + -13.564398765563965 + 
], + [ + "amintim", + -13.564475059509277 + ], + [ + "fournit", + -13.564553260803223 + ], + [ + "semblent", + -13.564763069152832 + ], + [ + "▁$69.", + -13.56496524810791 + ], + [ + "▁prominence", + -13.56507396697998 + ], + [ + "Übertragung", + -13.565075874328613 + ], + [ + "▁2014-11-", + -13.565075874328613 + ], + [ + "▁Giurgiu", + -13.565104484558105 + ], + [ + "étendue", + -13.565123558044434 + ], + [ + "ceputul", + -13.565187454223633 + ], + [ + "Schwierigkeiten", + -13.565872192382812 + ], + [ + "▁subtract", + -13.565881729125977 + ], + [ + "▁gesichert", + -13.56589126586914 + ], + [ + "▁uimit", + -13.565925598144531 + ], + [ + "▁mensuel", + -13.565967559814453 + ], + [ + "Vorgaben", + -13.566215515136719 + ], + [ + "▁legitimacy", + -13.566670417785645 + ], + [ + "▁Kendall", + -13.566673278808594 + ], + [ + "▁détach", + -13.566790580749512 + ], + [ + "▁kennenlernen", + -13.567469596862793 + ], + [ + "▁gewöhnlich", + -13.56747055053711 + ], + [ + "Octav", + -13.567917823791504 + ], + [ + "responsive", + -13.568169593811035 + ], + [ + "▁Mängel", + -13.568269729614258 + ], + [ + "▁mișcare", + -13.568269729614258 + ], + [ + "▁ludique", + -13.568270683288574 + ], + [ + "▁Exeter", + -13.568324089050293 + ], + [ + "▁respins", + -13.569114685058594 + ], + [ + "oraşului", + -13.569173812866211 + ], + [ + "▁sfârşit", + -13.56949520111084 + ], + [ + "BUSINESS", + -13.56987190246582 + ], + [ + "illustrating", + -13.56987190246582 + ], + [ + "▁Tottenham", + -13.56987190246582 + ], + [ + "▁pruning", + -13.569886207580566 + ], + [ + "▁Înainte", + -13.569904327392578 + ], + [ + "▁interesel", + -13.570096969604492 + ], + [ + "discovered", + -13.57031536102295 + ], + [ + "(0)", + -13.570572853088379 + ], + [ + "▁Bewerber", + -13.570673942565918 + ], + [ + "▁DESIGN", + -13.570673942565918 + ], + [ + "▁Orientierung", + -13.570686340332031 + ], + [ + "library", + -13.571041107177734 + ], + [ + "cheltuielile", + -13.571419715881348 + ], + [ + "▁Canterbury", + -13.571475982666016 + ], + [ + "▁intellectuelle", + -13.571477890014648 + ], + [ + "▁amalgam", + -13.571497917175293 + ], + [ + "▁Toledo", + -13.57150650024414 + ], + [ + "gezahlt", + -13.571531295776367 + ], + [ + "Veronica", + -13.571659088134766 + ], + [ + "deleting", + -13.571946144104004 + ], + [ + "▁Merlin", + -13.572442054748535 + ], + [ + "▁opérationnel", + -13.572554588317871 + ], + [ + "schmutz", + -13.572568893432617 + ], + [ + "hyroid", + -13.57279109954834 + ], + [ + "▁Compatible", + -13.57308292388916 + ], + [ + "▁Leopard", + -13.57308292388916 + ], + [ + "▁cylindrical", + -13.57308292388916 + ], + [ + "▁terrestrial", + -13.57308292388916 + ], + [ + "conferencing", + -13.573088645935059 + ], + [ + "▁Variety", + -13.573097229003906 + ], + [ + "▁Screw", + -13.573164939880371 + ], + [ + "character", + -13.573637962341309 + ], + [ + "shortened", + -13.573643684387207 + ], + [ + "▁întrerup", + -13.573736190795898 + ], + [ + "freude", + -13.573884010314941 + ], + [ + "▁dezbateri", + -13.573887825012207 + ], + [ + "viteză", + -13.574563026428223 + ], + [ + "formațiile", + -13.574600219726562 + ], + [ + "▁responsibly", + -13.574692726135254 + ], + [ + "Dimensiuni", + -13.574695587158203 + ], + [ + "Arrangement", + -13.57469654083252 + ], + [ + "▁Leisure", + -13.574712753295898 + ], + [ + "escaping", + -13.5750732421875 + ], + [ + "flexion", + -13.575104713439941 + ], + [ + "▁religieuse", + -13.575308799743652 + ], + [ + "crystalline", + -13.575457572937012 + ], + [ + "▁clasp", + -13.575520515441895 + ], + [ + "festigt", + -13.57554817199707 + 
], + [ + "▁trouvai", + -13.57596206665039 + ], + [ + "cutaneous", + -13.576305389404297 + ], + [ + "▁carcinoma", + -13.576305389404297 + ], + [ + "▁juxtapos", + -13.576305389404297 + ], + [ + "assemblage", + -13.576306343078613 + ], + [ + "▁Messiah", + -13.576306343078613 + ], + [ + "▁Sleeve", + -13.576306343078613 + ], + [ + "▁șofer", + -13.576386451721191 + ], + [ + "/05/", + -13.57666301727295 + ], + [ + "▁expoziți", + -13.576703071594238 + ], + [ + "▁pătrun", + -13.577343940734863 + ], + [ + "▁Lydia", + -13.57739543914795 + ], + [ + "▁grădini", + -13.577919006347656 + ], + [ + "▁toothpaste", + -13.577919960021973 + ], + [ + "ordained", + -13.577921867370605 + ], + [ + "▁Renovation", + -13.577922821044922 + ], + [ + "voicing", + -13.578327178955078 + ], + [ + "président", + -13.578595161437988 + ], + [ + "▁gestartet", + -13.578728675842285 + ], + [ + "Multi", + -13.579121589660645 + ], + [ + "itinéraire", + -13.579537391662598 + ], + [ + "▁influenza", + -13.579537391662598 + ], + [ + "▁psychiatrist", + -13.579537391662598 + ], + [ + "▁schizophrenia", + -13.579537391662598 + ], + [ + "▁Magnolia", + -13.57953929901123 + ], + [ + "▁Scottsdale", + -13.579541206359863 + ], + [ + "▁interessieren", + -13.579548835754395 + ], + [ + "▁asfalt", + -13.579643249511719 + ], + [ + "▁Journalism", + -13.57977294921875 + ], + [ + "Multe", + -13.580089569091797 + ], + [ + "Westfalen", + -13.580347061157227 + ], + [ + "▁Vorschriften", + -13.580348014831543 + ], + [ + "Angleterre", + -13.58034896850586 + ], + [ + "sustainable", + -13.580354690551758 + ], + [ + "▁Retour", + -13.580589294433594 + ], + [ + "▁pâr", + -13.5809965133667 + ], + [ + "steigert", + -13.581120491027832 + ], + [ + "▁AMAZING", + -13.581157684326172 + ], + [ + "▁turbulent", + -13.581157684326172 + ], + [ + "costing", + -13.58155345916748 + ], + [ + "▁Carolyn", + -13.581634521484375 + ], + [ + "utti", + -13.581802368164062 + ], + [ + "dürftig", + -13.581968307495117 + ], + [ + "Keep", + -13.582038879394531 + ], + [ + "▁Théâtre", + -13.582780838012695 + ], + [ + "▁combustibil", + -13.582780838012695 + ], + [ + "▁halloween", + -13.582780838012695 + ], + [ + "▁emulator", + -13.582785606384277 + ], + [ + "▁povești", + -13.582785606384277 + ], + [ + "broyeur", + -13.582810401916504 + ], + [ + "▁émerg", + -13.582927703857422 + ], + [ + "overwhelmingly", + -13.583025932312012 + ], + [ + "regulă", + -13.583124160766602 + ], + [ + "goutte", + -13.583125114440918 + ], + [ + "▁Fertigung", + -13.583593368530273 + ], + [ + "constituted", + -13.584304809570312 + ], + [ + "▁QuickBooks", + -13.584406852722168 + ], + [ + "▁genealogy", + -13.584407806396484 + ], + [ + "▁laundering", + -13.584432601928711 + ], + [ + "▁échéan", + -13.584491729736328 + ], + [ + "Account", + -13.584601402282715 + ], + [ + "oyons", + -13.584792137145996 + ], + [ + "nitro", + -13.584905624389648 + ], + [ + "▁corespund", + -13.585219383239746 + ], + [ + "▁suggér", + -13.58527660369873 + ], + [ + "manipulated", + -13.585348129272461 + ], + [ + "deseori", + -13.585817337036133 + ], + [ + "permeabil", + -13.585912704467773 + ], + [ + "Australia", + -13.58594799041748 + ], + [ + "▁Erasmus", + -13.586034774780273 + ], + [ + "▁disrespect", + -13.586034774780273 + ], + [ + "▁trimestre", + -13.586038589477539 + ], + [ + "▁emanat", + -13.586103439331055 + ], + [ + "Schraub", + -13.58624267578125 + ], + [ + "distinctly", + -13.586319923400879 + ], + [ + "Germain", + -13.586637496948242 + ], + [ + "▁pedepse", + -13.5868501663208 + ], + [ + "réglage", + -13.5868558883667 + ], + [ + 
"făcute", + -13.587308883666992 + ], + [ + "▁garanteaz", + -13.587434768676758 + ], + [ + "▁unterlieg", + -13.587701797485352 + ], + [ + "▁cheddar", + -13.587712287902832 + ], + [ + "▁refugi", + -13.587756156921387 + ], + [ + "▁inférieur", + -13.587836265563965 + ], + [ + "dimension", + -13.588440895080566 + ], + [ + "▁erkennt", + -13.588570594787598 + ], + [ + "amitié", + -13.588632583618164 + ], + [ + "▁predominant", + -13.588680267333984 + ], + [ + "nourishe", + -13.588800430297852 + ], + [ + "exerce", + -13.588907241821289 + ], + [ + "▁disguise", + -13.589225769042969 + ], + [ + "▁traditi", + -13.589289665222168 + ], + [ + "▁Intellectual", + -13.5892972946167 + ], + [ + "▁imunitar", + -13.589299201965332 + ], + [ + "▁Cushion", + -13.589300155639648 + ], + [ + "▁erwachsene", + -13.589517593383789 + ], + [ + "▁Internațional", + -13.590115547180176 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ] + ], + "byte_fallback": false + } +} \ No newline at end of file diff --git a/src/comfyui/comfy/text_encoders/t5_tokenizer/tokenizer_config.json b/src/comfyui/comfy/text_encoders/t5_tokenizer/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..02020eb6d20746871e1ea93f14c4475cf9368f98 --- /dev/null +++ b/src/comfyui/comfy/text_encoders/t5_tokenizer/tokenizer_config.json @@ -0,0 +1,939 @@ +{ + "added_tokens_decoder": { + "0": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true 
+ }, + "1": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "2": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32000": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32001": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32002": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32003": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32004": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32005": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32006": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32007": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32008": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32009": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32010": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32011": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32012": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32013": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32014": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32015": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32016": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32017": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32018": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32019": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32020": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32021": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32022": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32023": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": 
false, + "special": true + }, + "32024": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32025": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32026": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32027": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32028": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32029": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32030": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32031": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32032": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32033": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32034": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32035": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32036": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32037": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32038": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32039": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32040": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32041": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32042": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32043": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32044": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32045": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32046": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32047": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32048": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32049": { + "content": "", + "lstrip": false, + "normalized": false, + 
"rstrip": false, + "single_word": false, + "special": true + }, + "32050": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32051": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32052": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32053": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32054": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32055": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32056": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32057": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32058": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32059": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32060": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32061": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32062": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32063": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32064": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32065": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32066": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32067": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32068": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32069": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32070": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32071": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32072": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32073": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32074": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32075": { + "content": "", + 
"lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32076": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32077": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32078": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32079": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32080": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32081": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32082": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32083": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32084": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32085": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32086": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32087": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32088": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32089": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32090": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32091": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32092": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32093": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32094": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32095": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32096": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32097": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32098": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "32099": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + } + }, + "additional_special_tokens": [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + 
"", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "" + ], + "clean_up_tokenization_spaces": true, + "eos_token": "", + "extra_ids": 100, + "legacy": false, + "model_max_length": 512, + "pad_token": "", + "sp_model_kwargs": {}, + "tokenizer_class": "T5Tokenizer", + "unk_token": "" +} diff --git a/src/comfyui/comfy/utils.py b/src/comfyui/comfy/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..cc92e111529a1d189690e7eb5c0f4f620b86595c --- /dev/null +++ b/src/comfyui/comfy/utils.py @@ -0,0 +1,850 @@ +""" + This file is part of ComfyUI. + Copyright (C) 2024 Comfy + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . +""" + + +import torch +import math +import struct +import comfy.checkpoint_pickle +import safetensors.torch +import numpy as np +from PIL import Image +import logging +import itertools + +def load_torch_file(ckpt, safe_load=False, device=None): + if device is None: + device = torch.device("cpu") + if ckpt.lower().endswith(".safetensors") or ckpt.lower().endswith(".sft"): + sd = safetensors.torch.load_file(ckpt, device=device.type) + else: + if safe_load: + if not 'weights_only' in torch.load.__code__.co_varnames: + logging.warning("Warning torch.load doesn't support weights_only on this pytorch version, loading unsafely.") + safe_load = False + if safe_load: + pl_sd = torch.load(ckpt, map_location=device, weights_only=True) + else: + pl_sd = torch.load(ckpt, map_location=device, pickle_module=comfy.checkpoint_pickle) + if "global_step" in pl_sd: + logging.debug(f"Global Step: {pl_sd['global_step']}") + if "state_dict" in pl_sd: + sd = pl_sd["state_dict"] + else: + sd = pl_sd + return sd + +def save_torch_file(sd, ckpt, metadata=None): + if metadata is not None: + safetensors.torch.save_file(sd, ckpt, metadata=metadata) + else: + safetensors.torch.save_file(sd, ckpt) + +def calculate_parameters(sd, prefix=""): + params = 0 + for k in sd.keys(): + if k.startswith(prefix): + w = sd[k] + params += w.nelement() + return params + +def weight_dtype(sd, prefix=""): + dtypes = {} + for k in sd.keys(): + if k.startswith(prefix): + w = sd[k] + dtypes[w.dtype] = dtypes.get(w.dtype, 0) + w.numel() + + if len(dtypes) == 0: + return None + + return max(dtypes, key=dtypes.get) + +def state_dict_key_replace(state_dict, keys_to_replace): + for x in keys_to_replace: + if x in state_dict: + state_dict[keys_to_replace[x]] = state_dict.pop(x) + return state_dict + +def state_dict_prefix_replace(state_dict, replace_prefix, filter_keys=False): + if filter_keys: + out = {} + else: + out = state_dict + 
for rp in replace_prefix: + replace = list(map(lambda a: (a, "{}{}".format(replace_prefix[rp], a[len(rp):])), filter(lambda a: a.startswith(rp), state_dict.keys()))) + for x in replace: + w = state_dict.pop(x[0]) + out[x[1]] = w + return out + + +def transformers_convert(sd, prefix_from, prefix_to, number): + keys_to_replace = { + "{}positional_embedding": "{}embeddings.position_embedding.weight", + "{}token_embedding.weight": "{}embeddings.token_embedding.weight", + "{}ln_final.weight": "{}final_layer_norm.weight", + "{}ln_final.bias": "{}final_layer_norm.bias", + } + + for k in keys_to_replace: + x = k.format(prefix_from) + if x in sd: + sd[keys_to_replace[k].format(prefix_to)] = sd.pop(x) + + resblock_to_replace = { + "ln_1": "layer_norm1", + "ln_2": "layer_norm2", + "mlp.c_fc": "mlp.fc1", + "mlp.c_proj": "mlp.fc2", + "attn.out_proj": "self_attn.out_proj", + } + + for resblock in range(number): + for x in resblock_to_replace: + for y in ["weight", "bias"]: + k = "{}transformer.resblocks.{}.{}.{}".format(prefix_from, resblock, x, y) + k_to = "{}encoder.layers.{}.{}.{}".format(prefix_to, resblock, resblock_to_replace[x], y) + if k in sd: + sd[k_to] = sd.pop(k) + + for y in ["weight", "bias"]: + k_from = "{}transformer.resblocks.{}.attn.in_proj_{}".format(prefix_from, resblock, y) + if k_from in sd: + weights = sd.pop(k_from) + shape_from = weights.shape[0] // 3 + for x in range(3): + p = ["self_attn.q_proj", "self_attn.k_proj", "self_attn.v_proj"] + k_to = "{}encoder.layers.{}.{}.{}".format(prefix_to, resblock, p[x], y) + sd[k_to] = weights[shape_from*x:shape_from*(x + 1)] + + return sd + +def clip_text_transformers_convert(sd, prefix_from, prefix_to): + sd = transformers_convert(sd, prefix_from, "{}text_model.".format(prefix_to), 32) + + tp = "{}text_projection.weight".format(prefix_from) + if tp in sd: + sd["{}text_projection.weight".format(prefix_to)] = sd.pop(tp) + + tp = "{}text_projection".format(prefix_from) + if tp in sd: + sd["{}text_projection.weight".format(prefix_to)] = sd.pop(tp).transpose(0, 1).contiguous() + return sd + + +UNET_MAP_ATTENTIONS = { + "proj_in.weight", + "proj_in.bias", + "proj_out.weight", + "proj_out.bias", + "norm.weight", + "norm.bias", +} + +TRANSFORMER_BLOCKS = { + "norm1.weight", + "norm1.bias", + "norm2.weight", + "norm2.bias", + "norm3.weight", + "norm3.bias", + "attn1.to_q.weight", + "attn1.to_k.weight", + "attn1.to_v.weight", + "attn1.to_out.0.weight", + "attn1.to_out.0.bias", + "attn2.to_q.weight", + "attn2.to_k.weight", + "attn2.to_v.weight", + "attn2.to_out.0.weight", + "attn2.to_out.0.bias", + "ff.net.0.proj.weight", + "ff.net.0.proj.bias", + "ff.net.2.weight", + "ff.net.2.bias", +} + +UNET_MAP_RESNET = { + "in_layers.2.weight": "conv1.weight", + "in_layers.2.bias": "conv1.bias", + "emb_layers.1.weight": "time_emb_proj.weight", + "emb_layers.1.bias": "time_emb_proj.bias", + "out_layers.3.weight": "conv2.weight", + "out_layers.3.bias": "conv2.bias", + "skip_connection.weight": "conv_shortcut.weight", + "skip_connection.bias": "conv_shortcut.bias", + "in_layers.0.weight": "norm1.weight", + "in_layers.0.bias": "norm1.bias", + "out_layers.0.weight": "norm2.weight", + "out_layers.0.bias": "norm2.bias", +} + +UNET_MAP_BASIC = { + ("label_emb.0.0.weight", "class_embedding.linear_1.weight"), + ("label_emb.0.0.bias", "class_embedding.linear_1.bias"), + ("label_emb.0.2.weight", "class_embedding.linear_2.weight"), + ("label_emb.0.2.bias", "class_embedding.linear_2.bias"), + ("label_emb.0.0.weight", "add_embedding.linear_1.weight"), + ("label_emb.0.0.bias", 
"add_embedding.linear_1.bias"), + ("label_emb.0.2.weight", "add_embedding.linear_2.weight"), + ("label_emb.0.2.bias", "add_embedding.linear_2.bias"), + ("input_blocks.0.0.weight", "conv_in.weight"), + ("input_blocks.0.0.bias", "conv_in.bias"), + ("out.0.weight", "conv_norm_out.weight"), + ("out.0.bias", "conv_norm_out.bias"), + ("out.2.weight", "conv_out.weight"), + ("out.2.bias", "conv_out.bias"), + ("time_embed.0.weight", "time_embedding.linear_1.weight"), + ("time_embed.0.bias", "time_embedding.linear_1.bias"), + ("time_embed.2.weight", "time_embedding.linear_2.weight"), + ("time_embed.2.bias", "time_embedding.linear_2.bias") +} + +def unet_to_diffusers(unet_config): + if "num_res_blocks" not in unet_config: + return {} + num_res_blocks = unet_config["num_res_blocks"] + channel_mult = unet_config["channel_mult"] + transformer_depth = unet_config["transformer_depth"][:] + transformer_depth_output = unet_config["transformer_depth_output"][:] + num_blocks = len(channel_mult) + + transformers_mid = unet_config.get("transformer_depth_middle", None) + + diffusers_unet_map = {} + for x in range(num_blocks): + n = 1 + (num_res_blocks[x] + 1) * x + for i in range(num_res_blocks[x]): + for b in UNET_MAP_RESNET: + diffusers_unet_map["down_blocks.{}.resnets.{}.{}".format(x, i, UNET_MAP_RESNET[b])] = "input_blocks.{}.0.{}".format(n, b) + num_transformers = transformer_depth.pop(0) + if num_transformers > 0: + for b in UNET_MAP_ATTENTIONS: + diffusers_unet_map["down_blocks.{}.attentions.{}.{}".format(x, i, b)] = "input_blocks.{}.1.{}".format(n, b) + for t in range(num_transformers): + for b in TRANSFORMER_BLOCKS: + diffusers_unet_map["down_blocks.{}.attentions.{}.transformer_blocks.{}.{}".format(x, i, t, b)] = "input_blocks.{}.1.transformer_blocks.{}.{}".format(n, t, b) + n += 1 + for k in ["weight", "bias"]: + diffusers_unet_map["down_blocks.{}.downsamplers.0.conv.{}".format(x, k)] = "input_blocks.{}.0.op.{}".format(n, k) + + i = 0 + for b in UNET_MAP_ATTENTIONS: + diffusers_unet_map["mid_block.attentions.{}.{}".format(i, b)] = "middle_block.1.{}".format(b) + for t in range(transformers_mid): + for b in TRANSFORMER_BLOCKS: + diffusers_unet_map["mid_block.attentions.{}.transformer_blocks.{}.{}".format(i, t, b)] = "middle_block.1.transformer_blocks.{}.{}".format(t, b) + + for i, n in enumerate([0, 2]): + for b in UNET_MAP_RESNET: + diffusers_unet_map["mid_block.resnets.{}.{}".format(i, UNET_MAP_RESNET[b])] = "middle_block.{}.{}".format(n, b) + + num_res_blocks = list(reversed(num_res_blocks)) + for x in range(num_blocks): + n = (num_res_blocks[x] + 1) * x + l = num_res_blocks[x] + 1 + for i in range(l): + c = 0 + for b in UNET_MAP_RESNET: + diffusers_unet_map["up_blocks.{}.resnets.{}.{}".format(x, i, UNET_MAP_RESNET[b])] = "output_blocks.{}.0.{}".format(n, b) + c += 1 + num_transformers = transformer_depth_output.pop() + if num_transformers > 0: + c += 1 + for b in UNET_MAP_ATTENTIONS: + diffusers_unet_map["up_blocks.{}.attentions.{}.{}".format(x, i, b)] = "output_blocks.{}.1.{}".format(n, b) + for t in range(num_transformers): + for b in TRANSFORMER_BLOCKS: + diffusers_unet_map["up_blocks.{}.attentions.{}.transformer_blocks.{}.{}".format(x, i, t, b)] = "output_blocks.{}.1.transformer_blocks.{}.{}".format(n, t, b) + if i == l - 1: + for k in ["weight", "bias"]: + diffusers_unet_map["up_blocks.{}.upsamplers.0.conv.{}".format(x, k)] = "output_blocks.{}.{}.conv.{}".format(n, c, k) + n += 1 + + for k in UNET_MAP_BASIC: + diffusers_unet_map[k[1]] = k[0] + + return diffusers_unet_map + +def 
swap_scale_shift(weight): + shift, scale = weight.chunk(2, dim=0) + new_weight = torch.cat([scale, shift], dim=0) + return new_weight + +MMDIT_MAP_BASIC = { + ("context_embedder.bias", "context_embedder.bias"), + ("context_embedder.weight", "context_embedder.weight"), + ("t_embedder.mlp.0.bias", "time_text_embed.timestep_embedder.linear_1.bias"), + ("t_embedder.mlp.0.weight", "time_text_embed.timestep_embedder.linear_1.weight"), + ("t_embedder.mlp.2.bias", "time_text_embed.timestep_embedder.linear_2.bias"), + ("t_embedder.mlp.2.weight", "time_text_embed.timestep_embedder.linear_2.weight"), + ("x_embedder.proj.bias", "pos_embed.proj.bias"), + ("x_embedder.proj.weight", "pos_embed.proj.weight"), + ("y_embedder.mlp.0.bias", "time_text_embed.text_embedder.linear_1.bias"), + ("y_embedder.mlp.0.weight", "time_text_embed.text_embedder.linear_1.weight"), + ("y_embedder.mlp.2.bias", "time_text_embed.text_embedder.linear_2.bias"), + ("y_embedder.mlp.2.weight", "time_text_embed.text_embedder.linear_2.weight"), + ("pos_embed", "pos_embed.pos_embed"), + ("final_layer.adaLN_modulation.1.bias", "norm_out.linear.bias", swap_scale_shift), + ("final_layer.adaLN_modulation.1.weight", "norm_out.linear.weight", swap_scale_shift), + ("final_layer.linear.bias", "proj_out.bias"), + ("final_layer.linear.weight", "proj_out.weight"), +} + +MMDIT_MAP_BLOCK = { + ("context_block.adaLN_modulation.1.bias", "norm1_context.linear.bias"), + ("context_block.adaLN_modulation.1.weight", "norm1_context.linear.weight"), + ("context_block.attn.proj.bias", "attn.to_add_out.bias"), + ("context_block.attn.proj.weight", "attn.to_add_out.weight"), + ("context_block.mlp.fc1.bias", "ff_context.net.0.proj.bias"), + ("context_block.mlp.fc1.weight", "ff_context.net.0.proj.weight"), + ("context_block.mlp.fc2.bias", "ff_context.net.2.bias"), + ("context_block.mlp.fc2.weight", "ff_context.net.2.weight"), + ("context_block.attn.ln_q.weight", "attn.norm_added_q.weight"), + ("context_block.attn.ln_k.weight", "attn.norm_added_k.weight"), + ("x_block.adaLN_modulation.1.bias", "norm1.linear.bias"), + ("x_block.adaLN_modulation.1.weight", "norm1.linear.weight"), + ("x_block.attn.proj.bias", "attn.to_out.0.bias"), + ("x_block.attn.proj.weight", "attn.to_out.0.weight"), + ("x_block.attn.ln_q.weight", "attn.norm_q.weight"), + ("x_block.attn.ln_k.weight", "attn.norm_k.weight"), + ("x_block.attn2.proj.bias", "attn2.to_out.0.bias"), + ("x_block.attn2.proj.weight", "attn2.to_out.0.weight"), + ("x_block.attn2.ln_q.weight", "attn2.norm_q.weight"), + ("x_block.attn2.ln_k.weight", "attn2.norm_k.weight"), + ("x_block.mlp.fc1.bias", "ff.net.0.proj.bias"), + ("x_block.mlp.fc1.weight", "ff.net.0.proj.weight"), + ("x_block.mlp.fc2.bias", "ff.net.2.bias"), + ("x_block.mlp.fc2.weight", "ff.net.2.weight"), +} + +def mmdit_to_diffusers(mmdit_config, output_prefix=""): + key_map = {} + + depth = mmdit_config.get("depth", 0) + num_blocks = mmdit_config.get("num_blocks", depth) + for i in range(num_blocks): + block_from = "transformer_blocks.{}".format(i) + block_to = "{}joint_blocks.{}".format(output_prefix, i) + + offset = depth * 64 + + for end in ("weight", "bias"): + k = "{}.attn.".format(block_from) + qkv = "{}.x_block.attn.qkv.{}".format(block_to, end) + key_map["{}to_q.{}".format(k, end)] = (qkv, (0, 0, offset)) + key_map["{}to_k.{}".format(k, end)] = (qkv, (0, offset, offset)) + key_map["{}to_v.{}".format(k, end)] = (qkv, (0, offset * 2, offset)) + + qkv = "{}.context_block.attn.qkv.{}".format(block_to, end) + key_map["{}add_q_proj.{}".format(k, end)] = (qkv, 
(0, 0, offset)) + key_map["{}add_k_proj.{}".format(k, end)] = (qkv, (0, offset, offset)) + key_map["{}add_v_proj.{}".format(k, end)] = (qkv, (0, offset * 2, offset)) + + k = "{}.attn2.".format(block_from) + qkv = "{}.x_block.attn2.qkv.{}".format(block_to, end) + key_map["{}to_q.{}".format(k, end)] = (qkv, (0, 0, offset)) + key_map["{}to_k.{}".format(k, end)] = (qkv, (0, offset, offset)) + key_map["{}to_v.{}".format(k, end)] = (qkv, (0, offset * 2, offset)) + + for k in MMDIT_MAP_BLOCK: + key_map["{}.{}".format(block_from, k[1])] = "{}.{}".format(block_to, k[0]) + + map_basic = MMDIT_MAP_BASIC.copy() + map_basic.add(("joint_blocks.{}.context_block.adaLN_modulation.1.bias".format(depth - 1), "transformer_blocks.{}.norm1_context.linear.bias".format(depth - 1), swap_scale_shift)) + map_basic.add(("joint_blocks.{}.context_block.adaLN_modulation.1.weight".format(depth - 1), "transformer_blocks.{}.norm1_context.linear.weight".format(depth - 1), swap_scale_shift)) + + for k in map_basic: + if len(k) > 2: + key_map[k[1]] = ("{}{}".format(output_prefix, k[0]), None, k[2]) + else: + key_map[k[1]] = "{}{}".format(output_prefix, k[0]) + + return key_map + + +def auraflow_to_diffusers(mmdit_config, output_prefix=""): + n_double_layers = mmdit_config.get("n_double_layers", 0) + n_layers = mmdit_config.get("n_layers", 0) + + key_map = {} + for i in range(n_layers): + if i < n_double_layers: + index = i + prefix_from = "joint_transformer_blocks" + prefix_to = "{}double_layers".format(output_prefix) + block_map = { + "attn.to_q.weight": "attn.w2q.weight", + "attn.to_k.weight": "attn.w2k.weight", + "attn.to_v.weight": "attn.w2v.weight", + "attn.to_out.0.weight": "attn.w2o.weight", + "attn.add_q_proj.weight": "attn.w1q.weight", + "attn.add_k_proj.weight": "attn.w1k.weight", + "attn.add_v_proj.weight": "attn.w1v.weight", + "attn.to_add_out.weight": "attn.w1o.weight", + "ff.linear_1.weight": "mlpX.c_fc1.weight", + "ff.linear_2.weight": "mlpX.c_fc2.weight", + "ff.out_projection.weight": "mlpX.c_proj.weight", + "ff_context.linear_1.weight": "mlpC.c_fc1.weight", + "ff_context.linear_2.weight": "mlpC.c_fc2.weight", + "ff_context.out_projection.weight": "mlpC.c_proj.weight", + "norm1.linear.weight": "modX.1.weight", + "norm1_context.linear.weight": "modC.1.weight", + } + else: + index = i - n_double_layers + prefix_from = "single_transformer_blocks" + prefix_to = "{}single_layers".format(output_prefix) + + block_map = { + "attn.to_q.weight": "attn.w1q.weight", + "attn.to_k.weight": "attn.w1k.weight", + "attn.to_v.weight": "attn.w1v.weight", + "attn.to_out.0.weight": "attn.w1o.weight", + "norm1.linear.weight": "modCX.1.weight", + "ff.linear_1.weight": "mlp.c_fc1.weight", + "ff.linear_2.weight": "mlp.c_fc2.weight", + "ff.out_projection.weight": "mlp.c_proj.weight" + } + + for k in block_map: + key_map["{}.{}.{}".format(prefix_from, index, k)] = "{}.{}.{}".format(prefix_to, index, block_map[k]) + + MAP_BASIC = { + ("positional_encoding", "pos_embed.pos_embed"), + ("register_tokens", "register_tokens"), + ("t_embedder.mlp.0.weight", "time_step_proj.linear_1.weight"), + ("t_embedder.mlp.0.bias", "time_step_proj.linear_1.bias"), + ("t_embedder.mlp.2.weight", "time_step_proj.linear_2.weight"), + ("t_embedder.mlp.2.bias", "time_step_proj.linear_2.bias"), + ("cond_seq_linear.weight", "context_embedder.weight"), + ("init_x_linear.weight", "pos_embed.proj.weight"), + ("init_x_linear.bias", "pos_embed.proj.bias"), + ("final_linear.weight", "proj_out.weight"), + ("modF.1.weight", "norm_out.linear.weight", swap_scale_shift), + } 
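# ---- Illustrative aside (hypothetical sketch, not part of this commit) ----
# The values these *_to_diffusers maps produce come in three shapes:
#   "native.key"                           plain rename
#   ("native.key", None, transform)        rename plus a per-tensor transform
#                                          (e.g. swap_scale_shift above)
#   ("native.fused.qkv", (dim, start, n))  the Diffusers tensor fills the slice
#                                          [start, start + n) of a fused native
#                                          tensor along `dim`
# A consumer that rebuilds the native state dict from a Diffusers one could
# look like this; apply_key_map and its internals are invented for illustration:
import torch

def apply_key_map(diffusers_sd, key_map):
    native_sd = {}
    fused = {}  # fused native key -> list of (start, tensor, dim)
    for d_key, target in key_map.items():
        if d_key not in diffusers_sd:
            continue
        w = diffusers_sd[d_key]
        if isinstance(target, str):          # plain rename
            native_sd[target] = w
        elif target[1] is None:              # ("key", None, transform)
            native_sd[target[0]] = target[2](w)
        else:                                # ("key", (dim, start, n))
            dim, start, _ = target[1]
            fused.setdefault(target[0], []).append((start, w, dim))
    for key, parts in fused.items():
        parts.sort(key=lambda p: p[0])       # q, k, v (, mlp) in offset order
        native_sd[key] = torch.cat([p[1] for p in parts], dim=parts[0][2])
    return native_sd
# ----------------------------------------------------------------------------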
+ + for k in MAP_BASIC: + if len(k) > 2: + key_map[k[1]] = ("{}{}".format(output_prefix, k[0]), None, k[2]) + else: + key_map[k[1]] = "{}{}".format(output_prefix, k[0]) + + return key_map + +def flux_to_diffusers(mmdit_config, output_prefix=""): + n_double_layers = mmdit_config.get("depth", 0) + n_single_layers = mmdit_config.get("depth_single_blocks", 0) + hidden_size = mmdit_config.get("hidden_size", 0) + + key_map = {} + for index in range(n_double_layers): + prefix_from = "transformer_blocks.{}".format(index) + prefix_to = "{}double_blocks.{}".format(output_prefix, index) + + for end in ("weight", "bias"): + k = "{}.attn.".format(prefix_from) + qkv = "{}.img_attn.qkv.{}".format(prefix_to, end) + key_map["{}to_q.{}".format(k, end)] = (qkv, (0, 0, hidden_size)) + key_map["{}to_k.{}".format(k, end)] = (qkv, (0, hidden_size, hidden_size)) + key_map["{}to_v.{}".format(k, end)] = (qkv, (0, hidden_size * 2, hidden_size)) + + k = "{}.attn.".format(prefix_from) + qkv = "{}.txt_attn.qkv.{}".format(prefix_to, end) + key_map["{}add_q_proj.{}".format(k, end)] = (qkv, (0, 0, hidden_size)) + key_map["{}add_k_proj.{}".format(k, end)] = (qkv, (0, hidden_size, hidden_size)) + key_map["{}add_v_proj.{}".format(k, end)] = (qkv, (0, hidden_size * 2, hidden_size)) + + block_map = { + "attn.to_out.0.weight": "img_attn.proj.weight", + "attn.to_out.0.bias": "img_attn.proj.bias", + "norm1.linear.weight": "img_mod.lin.weight", + "norm1.linear.bias": "img_mod.lin.bias", + "norm1_context.linear.weight": "txt_mod.lin.weight", + "norm1_context.linear.bias": "txt_mod.lin.bias", + "attn.to_add_out.weight": "txt_attn.proj.weight", + "attn.to_add_out.bias": "txt_attn.proj.bias", + "ff.net.0.proj.weight": "img_mlp.0.weight", + "ff.net.0.proj.bias": "img_mlp.0.bias", + "ff.net.2.weight": "img_mlp.2.weight", + "ff.net.2.bias": "img_mlp.2.bias", + "ff_context.net.0.proj.weight": "txt_mlp.0.weight", + "ff_context.net.0.proj.bias": "txt_mlp.0.bias", + "ff_context.net.2.weight": "txt_mlp.2.weight", + "ff_context.net.2.bias": "txt_mlp.2.bias", + "attn.norm_q.weight": "img_attn.norm.query_norm.scale", + "attn.norm_k.weight": "img_attn.norm.key_norm.scale", + "attn.norm_added_q.weight": "txt_attn.norm.query_norm.scale", + "attn.norm_added_k.weight": "txt_attn.norm.key_norm.scale", + } + + for k in block_map: + key_map["{}.{}".format(prefix_from, k)] = "{}.{}".format(prefix_to, block_map[k]) + + for index in range(n_single_layers): + prefix_from = "single_transformer_blocks.{}".format(index) + prefix_to = "{}single_blocks.{}".format(output_prefix, index) + + for end in ("weight", "bias"): + k = "{}.attn.".format(prefix_from) + qkv = "{}.linear1.{}".format(prefix_to, end) + key_map["{}to_q.{}".format(k, end)] = (qkv, (0, 0, hidden_size)) + key_map["{}to_k.{}".format(k, end)] = (qkv, (0, hidden_size, hidden_size)) + key_map["{}to_v.{}".format(k, end)] = (qkv, (0, hidden_size * 2, hidden_size)) + key_map["{}.proj_mlp.{}".format(prefix_from, end)] = (qkv, (0, hidden_size * 3, hidden_size * 4)) + + block_map = { + "norm.linear.weight": "modulation.lin.weight", + "norm.linear.bias": "modulation.lin.bias", + "proj_out.weight": "linear2.weight", + "proj_out.bias": "linear2.bias", + "attn.norm_q.weight": "norm.query_norm.scale", + "attn.norm_k.weight": "norm.key_norm.scale", + } + + for k in block_map: + key_map["{}.{}".format(prefix_from, k)] = "{}.{}".format(prefix_to, block_map[k]) + + MAP_BASIC = { + ("final_layer.linear.bias", "proj_out.bias"), + ("final_layer.linear.weight", "proj_out.weight"), + ("img_in.bias", "x_embedder.bias"), + 
("img_in.weight", "x_embedder.weight"), + ("time_in.in_layer.bias", "time_text_embed.timestep_embedder.linear_1.bias"), + ("time_in.in_layer.weight", "time_text_embed.timestep_embedder.linear_1.weight"), + ("time_in.out_layer.bias", "time_text_embed.timestep_embedder.linear_2.bias"), + ("time_in.out_layer.weight", "time_text_embed.timestep_embedder.linear_2.weight"), + ("txt_in.bias", "context_embedder.bias"), + ("txt_in.weight", "context_embedder.weight"), + ("vector_in.in_layer.bias", "time_text_embed.text_embedder.linear_1.bias"), + ("vector_in.in_layer.weight", "time_text_embed.text_embedder.linear_1.weight"), + ("vector_in.out_layer.bias", "time_text_embed.text_embedder.linear_2.bias"), + ("vector_in.out_layer.weight", "time_text_embed.text_embedder.linear_2.weight"), + ("guidance_in.in_layer.bias", "time_text_embed.guidance_embedder.linear_1.bias"), + ("guidance_in.in_layer.weight", "time_text_embed.guidance_embedder.linear_1.weight"), + ("guidance_in.out_layer.bias", "time_text_embed.guidance_embedder.linear_2.bias"), + ("guidance_in.out_layer.weight", "time_text_embed.guidance_embedder.linear_2.weight"), + ("final_layer.adaLN_modulation.1.bias", "norm_out.linear.bias", swap_scale_shift), + ("final_layer.adaLN_modulation.1.weight", "norm_out.linear.weight", swap_scale_shift), + ("pos_embed_input.bias", "controlnet_x_embedder.bias"), + ("pos_embed_input.weight", "controlnet_x_embedder.weight"), + } + + for k in MAP_BASIC: + if len(k) > 2: + key_map[k[1]] = ("{}{}".format(output_prefix, k[0]), None, k[2]) + else: + key_map[k[1]] = "{}{}".format(output_prefix, k[0]) + + return key_map + +def repeat_to_batch_size(tensor, batch_size, dim=0): + if tensor.shape[dim] > batch_size: + return tensor.narrow(dim, 0, batch_size) + elif tensor.shape[dim] < batch_size: + return tensor.repeat(dim * [1] + [math.ceil(batch_size / tensor.shape[dim])] + [1] * (len(tensor.shape) - 1 - dim)).narrow(dim, 0, batch_size) + return tensor + +def resize_to_batch_size(tensor, batch_size): + in_batch_size = tensor.shape[0] + if in_batch_size == batch_size: + return tensor + + if batch_size <= 1: + return tensor[:batch_size] + + output = torch.empty([batch_size] + list(tensor.shape)[1:], dtype=tensor.dtype, device=tensor.device) + if batch_size < in_batch_size: + scale = (in_batch_size - 1) / (batch_size - 1) + for i in range(batch_size): + output[i] = tensor[min(round(i * scale), in_batch_size - 1)] + else: + scale = in_batch_size / batch_size + for i in range(batch_size): + output[i] = tensor[min(math.floor((i + 0.5) * scale), in_batch_size - 1)] + + return output + +def convert_sd_to(state_dict, dtype): + keys = list(state_dict.keys()) + for k in keys: + state_dict[k] = state_dict[k].to(dtype) + return state_dict + +def safetensors_header(safetensors_path, max_size=100*1024*1024): + with open(safetensors_path, "rb") as f: + header = f.read(8) + length_of_header = struct.unpack(' max_size: + return None + return f.read(length_of_header) + +def set_attr(obj, attr, value): + attrs = attr.split(".") + for name in attrs[:-1]: + obj = getattr(obj, name) + prev = getattr(obj, attrs[-1]) + setattr(obj, attrs[-1], value) + return prev + +def set_attr_param(obj, attr, value): + return set_attr(obj, attr, torch.nn.Parameter(value, requires_grad=False)) + +def copy_to_param(obj, attr, value): + # inplace update tensor instead of replacing it + attrs = attr.split(".") + for name in attrs[:-1]: + obj = getattr(obj, name) + prev = getattr(obj, attrs[-1]) + prev.data.copy_(value) + +def get_attr(obj, attr): + attrs = 
attr.split(".") + for name in attrs: + obj = getattr(obj, name) + return obj + +def bislerp(samples, width, height): + def slerp(b1, b2, r): + '''slerps batches b1, b2 according to ratio r, batches should be flat e.g. NxC''' + + c = b1.shape[-1] + + #norms + b1_norms = torch.norm(b1, dim=-1, keepdim=True) + b2_norms = torch.norm(b2, dim=-1, keepdim=True) + + #normalize + b1_normalized = b1 / b1_norms + b2_normalized = b2 / b2_norms + + #zero when norms are zero + b1_normalized[b1_norms.expand(-1,c) == 0.0] = 0.0 + b2_normalized[b2_norms.expand(-1,c) == 0.0] = 0.0 + + #slerp + dot = (b1_normalized*b2_normalized).sum(1) + omega = torch.acos(dot) + so = torch.sin(omega) + + #technically not mathematically correct, but more pleasing? + res = (torch.sin((1.0-r.squeeze(1))*omega)/so).unsqueeze(1)*b1_normalized + (torch.sin(r.squeeze(1)*omega)/so).unsqueeze(1) * b2_normalized + res *= (b1_norms * (1.0-r) + b2_norms * r).expand(-1,c) + + #edge cases for same or polar opposites + res[dot > 1 - 1e-5] = b1[dot > 1 - 1e-5] + res[dot < 1e-5 - 1] = (b1 * (1.0-r) + b2 * r)[dot < 1e-5 - 1] + return res + + def generate_bilinear_data(length_old, length_new, device): + coords_1 = torch.arange(length_old, dtype=torch.float32, device=device).reshape((1,1,1,-1)) + coords_1 = torch.nn.functional.interpolate(coords_1, size=(1, length_new), mode="bilinear") + ratios = coords_1 - coords_1.floor() + coords_1 = coords_1.to(torch.int64) + + coords_2 = torch.arange(length_old, dtype=torch.float32, device=device).reshape((1,1,1,-1)) + 1 + coords_2[:,:,:,-1] -= 1 + coords_2 = torch.nn.functional.interpolate(coords_2, size=(1, length_new), mode="bilinear") + coords_2 = coords_2.to(torch.int64) + return ratios, coords_1, coords_2 + + orig_dtype = samples.dtype + samples = samples.float() + n,c,h,w = samples.shape + h_new, w_new = (height, width) + + #linear w + ratios, coords_1, coords_2 = generate_bilinear_data(w, w_new, samples.device) + coords_1 = coords_1.expand((n, c, h, -1)) + coords_2 = coords_2.expand((n, c, h, -1)) + ratios = ratios.expand((n, 1, h, -1)) + + pass_1 = samples.gather(-1,coords_1).movedim(1, -1).reshape((-1,c)) + pass_2 = samples.gather(-1,coords_2).movedim(1, -1).reshape((-1,c)) + ratios = ratios.movedim(1, -1).reshape((-1,1)) + + result = slerp(pass_1, pass_2, ratios) + result = result.reshape(n, h, w_new, c).movedim(-1, 1) + + #linear h + ratios, coords_1, coords_2 = generate_bilinear_data(h, h_new, samples.device) + coords_1 = coords_1.reshape((1,1,-1,1)).expand((n, c, -1, w_new)) + coords_2 = coords_2.reshape((1,1,-1,1)).expand((n, c, -1, w_new)) + ratios = ratios.reshape((1,1,-1,1)).expand((n, 1, -1, w_new)) + + pass_1 = result.gather(-2,coords_1).movedim(1, -1).reshape((-1,c)) + pass_2 = result.gather(-2,coords_2).movedim(1, -1).reshape((-1,c)) + ratios = ratios.movedim(1, -1).reshape((-1,1)) + + result = slerp(pass_1, pass_2, ratios) + result = result.reshape(n, h_new, w_new, c).movedim(-1, 1) + return result.to(orig_dtype) + +def lanczos(samples, width, height): + images = [Image.fromarray(np.clip(255. 
* image.movedim(0, -1).cpu().numpy(), 0, 255).astype(np.uint8)) for image in samples] + images = [image.resize((width, height), resample=Image.Resampling.LANCZOS) for image in images] + images = [torch.from_numpy(np.array(image).astype(np.float32) / 255.0).movedim(-1, 0) for image in images] + result = torch.stack(images) + return result.to(samples.device, samples.dtype) + +def common_upscale(samples, width, height, upscale_method, crop): + orig_shape = tuple(samples.shape) + if len(orig_shape) > 4: + samples = samples.reshape(samples.shape[0], samples.shape[1], -1, samples.shape[-2], samples.shape[-1]) + samples = samples.movedim(2, 1) + samples = samples.reshape(-1, orig_shape[1], orig_shape[-2], orig_shape[-1]) + if crop == "center": + old_width = samples.shape[-1] + old_height = samples.shape[-2] + old_aspect = old_width / old_height + new_aspect = width / height + x = 0 + y = 0 + if old_aspect > new_aspect: + x = round((old_width - old_width * (new_aspect / old_aspect)) / 2) + elif old_aspect < new_aspect: + y = round((old_height - old_height * (old_aspect / new_aspect)) / 2) + s = samples.narrow(-2, y, old_height - y * 2).narrow(-1, x, old_width - x * 2) + else: + s = samples + + if upscale_method == "bislerp": + out = bislerp(s, width, height) + elif upscale_method == "lanczos": + out = lanczos(s, width, height) + else: + out = torch.nn.functional.interpolate(s, size=(height, width), mode=upscale_method) + + if len(orig_shape) == 4: + return out + + out = out.reshape((orig_shape[0], -1, orig_shape[1]) + (height, width)) + return out.movedim(2, 1).reshape(orig_shape[:-2] + (height, width)) + +def get_tiled_scale_steps(width, height, tile_x, tile_y, overlap): + rows = 1 if height <= tile_y else math.ceil((height - overlap) / (tile_y - overlap)) + cols = 1 if width <= tile_x else math.ceil((width - overlap) / (tile_x - overlap)) + return rows * cols + +@torch.inference_mode() +def tiled_scale_multidim(samples, function, tile=(64, 64), overlap = 8, upscale_amount = 4, out_channels = 3, output_device="cpu", pbar = None): + dims = len(tile) + + if not (isinstance(upscale_amount, (tuple, list))): + upscale_amount = [upscale_amount] * dims + + if not (isinstance(overlap, (tuple, list))): + overlap = [overlap] * dims + + def get_upscale(dim, val): + up = upscale_amount[dim] + if callable(up): + return up(val) + else: + return up * val + + def mult_list_upscale(a): + out = [] + for i in range(len(a)): + out.append(round(get_upscale(i, a[i]))) + return out + + output = torch.empty([samples.shape[0], out_channels] + mult_list_upscale(samples.shape[2:]), device=output_device) + + for b in range(samples.shape[0]): + s = samples[b:b+1] + + # handle entire input fitting in a single tile + if all(s.shape[d+2] <= tile[d] for d in range(dims)): + output[b:b+1] = function(s).to(output_device) + if pbar is not None: + pbar.update(1) + continue + + out = torch.zeros([s.shape[0], out_channels] + mult_list_upscale(s.shape[2:]), device=output_device) + out_div = torch.zeros([s.shape[0], out_channels] + mult_list_upscale(s.shape[2:]), device=output_device) + + positions = [range(0, s.shape[d+2], tile[d] - overlap[d]) if s.shape[d+2] > tile[d] else [0] for d in range(dims)] + + for it in itertools.product(*positions): + s_in = s + upscaled = [] + + for d in range(dims): + pos = max(0, min(s.shape[d + 2] - (overlap[d] + 1), it[d])) + l = min(tile[d], s.shape[d + 2] - pos) + s_in = s_in.narrow(d + 2, pos, l) + upscaled.append(round(get_upscale(d, pos))) + + ps = function(s_in).to(output_device) + mask = 
torch.ones_like(ps) + + for d in range(2, dims + 2): + feather = round(get_upscale(d - 2, overlap[d - 2])) + for t in range(feather): + a = (t + 1) / feather + mask.narrow(d, t, 1).mul_(a) + mask.narrow(d, mask.shape[d] - 1 - t, 1).mul_(a) + + o = out + o_d = out_div + for d in range(dims): + o = o.narrow(d + 2, upscaled[d], mask.shape[d + 2]) + o_d = o_d.narrow(d + 2, upscaled[d], mask.shape[d + 2]) + + o.add_(ps * mask) + o_d.add_(mask) + + if pbar is not None: + pbar.update(1) + + output[b:b+1] = out/out_div + return output + +def tiled_scale(samples, function, tile_x=64, tile_y=64, overlap = 8, upscale_amount = 4, out_channels = 3, output_device="cpu", pbar = None): + return tiled_scale_multidim(samples, function, (tile_y, tile_x), overlap, upscale_amount, out_channels, output_device, pbar) + +PROGRESS_BAR_ENABLED = True +def set_progress_bar_enabled(enabled): + global PROGRESS_BAR_ENABLED + PROGRESS_BAR_ENABLED = enabled + +PROGRESS_BAR_HOOK = None +def set_progress_bar_global_hook(function): + global PROGRESS_BAR_HOOK + PROGRESS_BAR_HOOK = function + +class ProgressBar: + def __init__(self, total): + global PROGRESS_BAR_HOOK + self.total = total + self.current = 0 + self.hook = PROGRESS_BAR_HOOK + + def update_absolute(self, value, total=None, preview=None): + if total is not None: + self.total = total + if value > self.total: + value = self.total + self.current = value + if self.hook is not None: + self.hook(self.current, self.total, preview) + + def update(self, value): + self.update_absolute(self.current + value) diff --git a/src/comfyui/comfy_execution/__pycache__/caching.cpython-310.pyc b/src/comfyui/comfy_execution/__pycache__/caching.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8f97f4cfa1c8c240a3fb7ebe5e5728a8942e5756 Binary files /dev/null and b/src/comfyui/comfy_execution/__pycache__/caching.cpython-310.pyc differ diff --git a/src/comfyui/comfy_execution/__pycache__/graph.cpython-310.pyc b/src/comfyui/comfy_execution/__pycache__/graph.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..898c1eae7c42767c6f92be7b3e035fed00fd86cf Binary files /dev/null and b/src/comfyui/comfy_execution/__pycache__/graph.cpython-310.pyc differ diff --git a/src/comfyui/comfy_execution/__pycache__/graph_utils.cpython-310.pyc b/src/comfyui/comfy_execution/__pycache__/graph_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b2f8081215e86832b9f8ce70ad8bf678913a87e4 Binary files /dev/null and b/src/comfyui/comfy_execution/__pycache__/graph_utils.cpython-310.pyc differ diff --git a/src/comfyui/comfy_execution/caching.py b/src/comfyui/comfy_execution/caching.py new file mode 100644 index 0000000000000000000000000000000000000000..630f280fc5e94ae7fd1cfa81f290a49f2a506e8a --- /dev/null +++ b/src/comfyui/comfy_execution/caching.py @@ -0,0 +1,318 @@ +import itertools +from typing import Sequence, Mapping, Dict +from comfy_execution.graph import DynamicPrompt + +import nodes + +from comfy_execution.graph_utils import is_link + +NODE_CLASS_CONTAINS_UNIQUE_ID: Dict[str, bool] = {} + + +def include_unique_id_in_input(class_type: str) -> bool: + if class_type in NODE_CLASS_CONTAINS_UNIQUE_ID: + return NODE_CLASS_CONTAINS_UNIQUE_ID[class_type] + class_def = nodes.NODE_CLASS_MAPPINGS[class_type] + NODE_CLASS_CONTAINS_UNIQUE_ID[class_type] = "UNIQUE_ID" in class_def.INPUT_TYPES().get("hidden", {}).values() + return NODE_CLASS_CONTAINS_UNIQUE_ID[class_type] + +class CacheKeySet: + def __init__(self, 
dynprompt, node_ids, is_changed_cache): + self.keys = {} + self.subcache_keys = {} + + def add_keys(self, node_ids): + raise NotImplementedError() + + def all_node_ids(self): + return set(self.keys.keys()) + + def get_used_keys(self): + return self.keys.values() + + def get_used_subcache_keys(self): + return self.subcache_keys.values() + + def get_data_key(self, node_id): + return self.keys.get(node_id, None) + + def get_subcache_key(self, node_id): + return self.subcache_keys.get(node_id, None) + +class Unhashable: + def __init__(self): + self.value = float("NaN") + +def to_hashable(obj): + # So that we don't infinitely recurse since frozenset and tuples + # are Sequences. + if isinstance(obj, (int, float, str, bool, type(None))): + return obj + elif isinstance(obj, Mapping): + return frozenset([(to_hashable(k), to_hashable(v)) for k, v in sorted(obj.items())]) + elif isinstance(obj, Sequence): + return frozenset(zip(itertools.count(), [to_hashable(i) for i in obj])) + else: + # TODO - Support other objects like tensors? + return Unhashable() + +class CacheKeySetID(CacheKeySet): + def __init__(self, dynprompt, node_ids, is_changed_cache): + super().__init__(dynprompt, node_ids, is_changed_cache) + self.dynprompt = dynprompt + self.add_keys(node_ids) + + def add_keys(self, node_ids): + for node_id in node_ids: + if node_id in self.keys: + continue + if not self.dynprompt.has_node(node_id): + continue + node = self.dynprompt.get_node(node_id) + self.keys[node_id] = (node_id, node["class_type"]) + self.subcache_keys[node_id] = (node_id, node["class_type"]) + +class CacheKeySetInputSignature(CacheKeySet): + def __init__(self, dynprompt, node_ids, is_changed_cache): + super().__init__(dynprompt, node_ids, is_changed_cache) + self.dynprompt = dynprompt + self.is_changed_cache = is_changed_cache + self.add_keys(node_ids) + + def include_node_id_in_input(self) -> bool: + return False + + def add_keys(self, node_ids): + for node_id in node_ids: + if node_id in self.keys: + continue + if not self.dynprompt.has_node(node_id): + continue + node = self.dynprompt.get_node(node_id) + self.keys[node_id] = self.get_node_signature(self.dynprompt, node_id) + self.subcache_keys[node_id] = (node_id, node["class_type"]) + + def get_node_signature(self, dynprompt, node_id): + signature = [] + ancestors, order_mapping = self.get_ordered_ancestry(dynprompt, node_id) + signature.append(self.get_immediate_node_signature(dynprompt, node_id, order_mapping)) + for ancestor_id in ancestors: + signature.append(self.get_immediate_node_signature(dynprompt, ancestor_id, order_mapping)) + return to_hashable(signature) + + def get_immediate_node_signature(self, dynprompt, node_id, ancestor_order_mapping): + if not dynprompt.has_node(node_id): + # This node doesn't exist -- we can't cache it. 
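# Aside (not part of this commit): the NaN here is deliberate. float("NaN")
# never compares equal to anything, itself included, so a signature built for
# a missing node can never match a stored cache key and always misses.
# Unhashable above gives unsupported input types the same property, since two
# distinct Unhashable instances never compare equal either.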
+ return [float("NaN")] + node = dynprompt.get_node(node_id) + class_type = node["class_type"] + class_def = nodes.NODE_CLASS_MAPPINGS[class_type] + signature = [class_type, self.is_changed_cache.get(node_id)] + if self.include_node_id_in_input() or (hasattr(class_def, "NOT_IDEMPOTENT") and class_def.NOT_IDEMPOTENT) or include_unique_id_in_input(class_type): + signature.append(node_id) + inputs = node["inputs"] + for key in sorted(inputs.keys()): + if is_link(inputs[key]): + (ancestor_id, ancestor_socket) = inputs[key] + ancestor_index = ancestor_order_mapping[ancestor_id] + signature.append((key,("ANCESTOR", ancestor_index, ancestor_socket))) + else: + signature.append((key, inputs[key])) + return signature + + # This function returns a list of all ancestors of the given node. The order of the list is + # deterministic based on which specific inputs the ancestor is connected by. + def get_ordered_ancestry(self, dynprompt, node_id): + ancestors = [] + order_mapping = {} + self.get_ordered_ancestry_internal(dynprompt, node_id, ancestors, order_mapping) + return ancestors, order_mapping + + def get_ordered_ancestry_internal(self, dynprompt, node_id, ancestors, order_mapping): + if not dynprompt.has_node(node_id): + return + inputs = dynprompt.get_node(node_id)["inputs"] + input_keys = sorted(inputs.keys()) + for key in input_keys: + if is_link(inputs[key]): + ancestor_id = inputs[key][0] + if ancestor_id not in order_mapping: + ancestors.append(ancestor_id) + order_mapping[ancestor_id] = len(ancestors) - 1 + self.get_ordered_ancestry_internal(dynprompt, ancestor_id, ancestors, order_mapping) + +class BasicCache: + def __init__(self, key_class): + self.key_class = key_class + self.initialized = False + self.dynprompt: DynamicPrompt + self.cache_key_set: CacheKeySet + self.cache = {} + self.subcaches = {} + + def set_prompt(self, dynprompt, node_ids, is_changed_cache): + self.dynprompt = dynprompt + self.cache_key_set = self.key_class(dynprompt, node_ids, is_changed_cache) + self.is_changed_cache = is_changed_cache + self.initialized = True + + def all_node_ids(self): + assert self.initialized + node_ids = self.cache_key_set.all_node_ids() + for subcache in self.subcaches.values(): + node_ids = node_ids.union(subcache.all_node_ids()) + return node_ids + + def _clean_cache(self): + preserve_keys = set(self.cache_key_set.get_used_keys()) + to_remove = [] + for key in self.cache: + if key not in preserve_keys: + to_remove.append(key) + for key in to_remove: + del self.cache[key] + + def _clean_subcaches(self): + preserve_subcaches = set(self.cache_key_set.get_used_subcache_keys()) + + to_remove = [] + for key in self.subcaches: + if key not in preserve_subcaches: + to_remove.append(key) + for key in to_remove: + del self.subcaches[key] + + def clean_unused(self): + assert self.initialized + self._clean_cache() + self._clean_subcaches() + + def _set_immediate(self, node_id, value): + assert self.initialized + cache_key = self.cache_key_set.get_data_key(node_id) + self.cache[cache_key] = value + + def _get_immediate(self, node_id): + if not self.initialized: + return None + cache_key = self.cache_key_set.get_data_key(node_id) + if cache_key in self.cache: + return self.cache[cache_key] + else: + return None + + def _ensure_subcache(self, node_id, children_ids): + subcache_key = self.cache_key_set.get_subcache_key(node_id) + subcache = self.subcaches.get(subcache_key, None) + if subcache is None: + subcache = BasicCache(self.key_class) + self.subcaches[subcache_key] = subcache + 
subcache.set_prompt(self.dynprompt, children_ids, self.is_changed_cache) + return subcache + + def _get_subcache(self, node_id): + assert self.initialized + subcache_key = self.cache_key_set.get_subcache_key(node_id) + if subcache_key in self.subcaches: + return self.subcaches[subcache_key] + else: + return None + + def recursive_debug_dump(self): + result = [] + for key in self.cache: + result.append({"key": key, "value": self.cache[key]}) + for key in self.subcaches: + result.append({"subcache_key": key, "subcache": self.subcaches[key].recursive_debug_dump()}) + return result + +class HierarchicalCache(BasicCache): + def __init__(self, key_class): + super().__init__(key_class) + + def _get_cache_for(self, node_id): + assert self.dynprompt is not None + parent_id = self.dynprompt.get_parent_node_id(node_id) + if parent_id is None: + return self + + hierarchy = [] + while parent_id is not None: + hierarchy.append(parent_id) + parent_id = self.dynprompt.get_parent_node_id(parent_id) + + cache = self + for parent_id in reversed(hierarchy): + cache = cache._get_subcache(parent_id) + if cache is None: + return None + return cache + + def get(self, node_id): + cache = self._get_cache_for(node_id) + if cache is None: + return None + return cache._get_immediate(node_id) + + def set(self, node_id, value): + cache = self._get_cache_for(node_id) + assert cache is not None + cache._set_immediate(node_id, value) + + def ensure_subcache_for(self, node_id, children_ids): + cache = self._get_cache_for(node_id) + assert cache is not None + return cache._ensure_subcache(node_id, children_ids) + +class LRUCache(BasicCache): + def __init__(self, key_class, max_size=100): + super().__init__(key_class) + self.max_size = max_size + self.min_generation = 0 + self.generation = 0 + self.used_generation = {} + self.children = {} + + def set_prompt(self, dynprompt, node_ids, is_changed_cache): + super().set_prompt(dynprompt, node_ids, is_changed_cache) + self.generation += 1 + for node_id in node_ids: + self._mark_used(node_id) + + def clean_unused(self): + while len(self.cache) > self.max_size and self.min_generation < self.generation: + self.min_generation += 1 + to_remove = [key for key in self.cache if self.used_generation[key] < self.min_generation] + for key in to_remove: + del self.cache[key] + del self.used_generation[key] + if key in self.children: + del self.children[key] + self._clean_subcaches() + + def get(self, node_id): + self._mark_used(node_id) + return self._get_immediate(node_id) + + def _mark_used(self, node_id): + cache_key = self.cache_key_set.get_data_key(node_id) + if cache_key is not None: + self.used_generation[cache_key] = self.generation + + def set(self, node_id, value): + self._mark_used(node_id) + return self._set_immediate(node_id, value) + + def ensure_subcache_for(self, node_id, children_ids): + # Just uses subcaches for tracking 'live' nodes + super()._ensure_subcache(node_id, children_ids) + + self.cache_key_set.add_keys(children_ids) + self._mark_used(node_id) + cache_key = self.cache_key_set.get_data_key(node_id) + self.children[cache_key] = [] + for child_id in children_ids: + self._mark_used(child_id) + self.children[cache_key].append(self.cache_key_set.get_data_key(child_id)) + return self + diff --git a/src/comfyui/comfy_execution/graph.py b/src/comfyui/comfy_execution/graph.py new file mode 100644 index 0000000000000000000000000000000000000000..0b5bf189906994a3b6c5db61929a174d4cc7b109 --- /dev/null +++ b/src/comfyui/comfy_execution/graph.py @@ -0,0 +1,270 @@ +import nodes + 
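# Aside on caching.py above (not part of this commit): the two cache flavors
# differ mainly in entry lifetime. HierarchicalCache scopes entries to the
# subcache of the subgraph that created them, so they vanish with their parent.
# LRUCache is generational: set_prompt() bumps self.generation, get()/set()
# stamp keys via _mark_used(), and clean_unused() raises min_generation until
# len(self.cache) <= max_size, evicting every key last used before that cutoff;
# in effect an LRU policy at prompt granularity rather than per-access.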
+from comfy_execution.graph_utils import is_link + +class DependencyCycleError(Exception): + pass + +class NodeInputError(Exception): + pass + +class NodeNotFoundError(Exception): + pass + +class DynamicPrompt: + def __init__(self, original_prompt): + # The original prompt provided by the user + self.original_prompt = original_prompt + # Any extra pieces of the graph created during execution + self.ephemeral_prompt = {} + self.ephemeral_parents = {} + self.ephemeral_display = {} + + def get_node(self, node_id): + if node_id in self.ephemeral_prompt: + return self.ephemeral_prompt[node_id] + if node_id in self.original_prompt: + return self.original_prompt[node_id] + raise NodeNotFoundError(f"Node {node_id} not found") + + def has_node(self, node_id): + return node_id in self.original_prompt or node_id in self.ephemeral_prompt + + def add_ephemeral_node(self, node_id, node_info, parent_id, display_id): + self.ephemeral_prompt[node_id] = node_info + self.ephemeral_parents[node_id] = parent_id + self.ephemeral_display[node_id] = display_id + + def get_real_node_id(self, node_id): + while node_id in self.ephemeral_parents: + node_id = self.ephemeral_parents[node_id] + return node_id + + def get_parent_node_id(self, node_id): + return self.ephemeral_parents.get(node_id, None) + + def get_display_node_id(self, node_id): + while node_id in self.ephemeral_display: + node_id = self.ephemeral_display[node_id] + return node_id + + def all_node_ids(self): + return set(self.original_prompt.keys()).union(set(self.ephemeral_prompt.keys())) + + def get_original_prompt(self): + return self.original_prompt + +def get_input_info(class_def, input_name): + valid_inputs = class_def.INPUT_TYPES() + input_info = None + input_category = None + if "required" in valid_inputs and input_name in valid_inputs["required"]: + input_category = "required" + input_info = valid_inputs["required"][input_name] + elif "optional" in valid_inputs and input_name in valid_inputs["optional"]: + input_category = "optional" + input_info = valid_inputs["optional"][input_name] + elif "hidden" in valid_inputs and input_name in valid_inputs["hidden"]: + input_category = "hidden" + input_info = valid_inputs["hidden"][input_name] + if input_info is None: + return None, None, None + input_type = input_info[0] + if len(input_info) > 1: + extra_info = input_info[1] + else: + extra_info = {} + return input_type, input_category, extra_info + +class TopologicalSort: + def __init__(self, dynprompt): + self.dynprompt = dynprompt + self.pendingNodes = {} + self.blockCount = {} # Number of nodes this node is directly blocked by + self.blocking = {} # Which nodes are blocked by this node + + def get_input_info(self, unique_id, input_name): + class_type = self.dynprompt.get_node(unique_id)["class_type"] + class_def = nodes.NODE_CLASS_MAPPINGS[class_type] + return get_input_info(class_def, input_name) + + def make_input_strong_link(self, to_node_id, to_input): + inputs = self.dynprompt.get_node(to_node_id)["inputs"] + if to_input not in inputs: + raise NodeInputError(f"Node {to_node_id} says it needs input {to_input}, but there is no input to that node at all") + value = inputs[to_input] + if not is_link(value): + raise NodeInputError(f"Node {to_node_id} says it needs input {to_input}, but that value is a constant") + from_node_id, from_socket = value + self.add_strong_link(from_node_id, from_socket, to_node_id) + + def add_strong_link(self, from_node_id, from_socket, to_node_id): + if not self.is_cached(from_node_id): + self.add_node(from_node_id) + if 
to_node_id not in self.blocking[from_node_id]: + self.blocking[from_node_id][to_node_id] = {} + self.blockCount[to_node_id] += 1 + self.blocking[from_node_id][to_node_id][from_socket] = True + + def add_node(self, node_unique_id, include_lazy=False, subgraph_nodes=None): + node_ids = [node_unique_id] + links = [] + + while len(node_ids) > 0: + unique_id = node_ids.pop() + if unique_id in self.pendingNodes: + continue + + self.pendingNodes[unique_id] = True + self.blockCount[unique_id] = 0 + self.blocking[unique_id] = {} + + inputs = self.dynprompt.get_node(unique_id)["inputs"] + for input_name in inputs: + value = inputs[input_name] + if is_link(value): + from_node_id, from_socket = value + if subgraph_nodes is not None and from_node_id not in subgraph_nodes: + continue + input_type, input_category, input_info = self.get_input_info(unique_id, input_name) + is_lazy = input_info is not None and "lazy" in input_info and input_info["lazy"] + if (include_lazy or not is_lazy) and not self.is_cached(from_node_id): + node_ids.append(from_node_id) + links.append((from_node_id, from_socket, unique_id)) + + for link in links: + self.add_strong_link(*link) + + def is_cached(self, node_id): + return False + + def get_ready_nodes(self): + return [node_id for node_id in self.pendingNodes if self.blockCount[node_id] == 0] + + def pop_node(self, unique_id): + del self.pendingNodes[unique_id] + for blocked_node_id in self.blocking[unique_id]: + self.blockCount[blocked_node_id] -= 1 + del self.blocking[unique_id] + + def is_empty(self): + return len(self.pendingNodes) == 0 + +class ExecutionList(TopologicalSort): + """ + ExecutionList implements a topological dissolve of the graph. After a node is staged for execution, + it can still be returned to the graph after having further dependencies added. + """ + def __init__(self, dynprompt, output_cache): + super().__init__(dynprompt) + self.output_cache = output_cache + self.staged_node_id = None + + def is_cached(self, node_id): + return self.output_cache.get(node_id) is not None + + def stage_node_execution(self): + assert self.staged_node_id is None + if self.is_empty(): + return None, None, None + available = self.get_ready_nodes() + if len(available) == 0: + cycled_nodes = self.get_nodes_in_cycle() + # Because cycles composed entirely of static nodes are caught during initial validation, + # we will 'blame' the first node in the cycle that is not a static node. + blamed_node = cycled_nodes[0] + for node_id in cycled_nodes: + display_node_id = self.dynprompt.get_display_node_id(node_id) + if display_node_id != node_id: + blamed_node = display_node_id + break + ex = DependencyCycleError("Dependency cycle detected") + error_details = { + "node_id": blamed_node, + "exception_message": str(ex), + "exception_type": "graph.DependencyCycleError", + "traceback": [], + "current_inputs": [] + } + return None, error_details, ex + + self.staged_node_id = self.ux_friendly_pick_node(available) + return self.staged_node_id, None, None + + def ux_friendly_pick_node(self, node_list): + # If an output node is available, do that first. + # Technically this has no effect on the overall length of execution, but it feels better as a user + # for a PreviewImage to display a result as soon as it can + # Some other heuristics could probably be used here to improve the UX further. 
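# Aside (not part of this commit): `available` comes from get_ready_nodes(),
# which is essentially Kahn's algorithm; blockCount tracks each pending node's
# in-degree over strong links, pop_node() decrements it when a node completes,
# and anything at zero in-degree is ready. The picker below then scans the
# ready set, looking up to two links ahead for an OUTPUT_NODE so previews
# appear as early as possible.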
+ def is_output(node_id): + class_type = self.dynprompt.get_node(node_id)["class_type"] + class_def = nodes.NODE_CLASS_MAPPINGS[class_type] + if hasattr(class_def, 'OUTPUT_NODE') and class_def.OUTPUT_NODE == True: + return True + return False + + for node_id in node_list: + if is_output(node_id): + return node_id + + #This should handle the VAEDecode -> preview case + for node_id in node_list: + for blocked_node_id in self.blocking[node_id]: + if is_output(blocked_node_id): + return node_id + + #This should handle the VAELoader -> VAEDecode -> preview case + for node_id in node_list: + for blocked_node_id in self.blocking[node_id]: + for blocked_node_id1 in self.blocking[blocked_node_id]: + if is_output(blocked_node_id1): + return node_id + + #TODO: this function should be improved + return node_list[0] + + def unstage_node_execution(self): + assert self.staged_node_id is not None + self.staged_node_id = None + + def complete_node_execution(self): + node_id = self.staged_node_id + self.pop_node(node_id) + self.staged_node_id = None + + def get_nodes_in_cycle(self): + # We'll dissolve the graph in reverse topological order to leave only the nodes in the cycle. + # We're skipping some of the performance optimizations from the original TopologicalSort to keep + # the code simple (and because having a cycle in the first place is a catastrophic error) + blocked_by = { node_id: {} for node_id in self.pendingNodes } + for from_node_id in self.blocking: + for to_node_id in self.blocking[from_node_id]: + if True in self.blocking[from_node_id][to_node_id].values(): + blocked_by[to_node_id][from_node_id] = True + to_remove = [node_id for node_id in blocked_by if len(blocked_by[node_id]) == 0] + while len(to_remove) > 0: + for node_id in to_remove: + for to_node_id in blocked_by: + if node_id in blocked_by[to_node_id]: + del blocked_by[to_node_id][node_id] + del blocked_by[node_id] + to_remove = [node_id for node_id in blocked_by if len(blocked_by[node_id]) == 0] + return list(blocked_by.keys()) + +class ExecutionBlocker: + """ + Return this from a node and any users will be blocked with the given error message. + If the message is None, execution will be blocked silently instead. + Generally, you should avoid using this functionality unless absolutely necessary. Whenever it's + possible, a lazy input will be more efficient and have a better user experience. + This functionality is useful in two cases: + 1. You want to conditionally prevent an output node from executing. (Particularly a built-in node + like SaveImage. For your own output nodes, I would recommend just adding a BOOL input and using + lazy evaluation to let it conditionally disable itself.) + 2. You have a node with multiple possible outputs, some of which are invalid and should not be used. + (I would recommend not making nodes like this in the future -- instead, make multiple nodes with + different outputs. Unfortunately, there are several popular existing nodes using this pattern.) 
+ """ + def __init__(self, message): + self.message = message + diff --git a/src/comfyui/comfy_execution/graph_utils.py b/src/comfyui/comfy_execution/graph_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..8595e942d32160152c3c0163c85ad9bcd2e68d45 --- /dev/null +++ b/src/comfyui/comfy_execution/graph_utils.py @@ -0,0 +1,139 @@ +def is_link(obj): + if not isinstance(obj, list): + return False + if len(obj) != 2: + return False + if not isinstance(obj[0], str): + return False + if not isinstance(obj[1], int) and not isinstance(obj[1], float): + return False + return True + +# The GraphBuilder is just a utility class that outputs graphs in the form expected by the ComfyUI back-end +class GraphBuilder: + _default_prefix_root = "" + _default_prefix_call_index = 0 + _default_prefix_graph_index = 0 + + def __init__(self, prefix = None): + if prefix is None: + self.prefix = GraphBuilder.alloc_prefix() + else: + self.prefix = prefix + self.nodes = {} + self.id_gen = 1 + + @classmethod + def set_default_prefix(cls, prefix_root, call_index, graph_index = 0): + cls._default_prefix_root = prefix_root + cls._default_prefix_call_index = call_index + cls._default_prefix_graph_index = graph_index + + @classmethod + def alloc_prefix(cls, root=None, call_index=None, graph_index=None): + if root is None: + root = GraphBuilder._default_prefix_root + if call_index is None: + call_index = GraphBuilder._default_prefix_call_index + if graph_index is None: + graph_index = GraphBuilder._default_prefix_graph_index + result = f"{root}.{call_index}.{graph_index}." + GraphBuilder._default_prefix_graph_index += 1 + return result + + def node(self, class_type, id=None, **kwargs): + if id is None: + id = str(self.id_gen) + self.id_gen += 1 + id = self.prefix + id + if id in self.nodes: + return self.nodes[id] + + node = Node(id, class_type, kwargs) + self.nodes[id] = node + return node + + def lookup_node(self, id): + id = self.prefix + id + return self.nodes.get(id) + + def finalize(self): + output = {} + for node_id, node in self.nodes.items(): + output[node_id] = node.serialize() + return output + + def replace_node_output(self, node_id, index, new_value): + node_id = self.prefix + node_id + to_remove = [] + for node in self.nodes.values(): + for key, value in node.inputs.items(): + if is_link(value) and value[0] == node_id and value[1] == index: + if new_value is None: + to_remove.append((node, key)) + else: + node.inputs[key] = new_value + for node, key in to_remove: + del node.inputs[key] + + def remove_node(self, id): + id = self.prefix + id + del self.nodes[id] + +class Node: + def __init__(self, id, class_type, inputs): + self.id = id + self.class_type = class_type + self.inputs = inputs + self.override_display_id = None + + def out(self, index): + return [self.id, index] + + def set_input(self, key, value): + if value is None: + if key in self.inputs: + del self.inputs[key] + else: + self.inputs[key] = value + + def get_input(self, key): + return self.inputs.get(key) + + def set_override_display_id(self, override_display_id): + self.override_display_id = override_display_id + + def serialize(self): + serialized = { + "class_type": self.class_type, + "inputs": self.inputs + } + if self.override_display_id is not None: + serialized["override_display_id"] = self.override_display_id + return serialized + +def add_graph_prefix(graph, outputs, prefix): + # Change the node IDs and any internal links + new_graph = {} + for node_id, node_info in graph.items(): + # Make sure the added nodes have 
unique IDs + new_node_id = prefix + node_id + new_node = { "class_type": node_info["class_type"], "inputs": {} } + for input_name, input_value in node_info.get("inputs", {}).items(): + if is_link(input_value): + new_node["inputs"][input_name] = [prefix + input_value[0], input_value[1]] + else: + new_node["inputs"][input_name] = input_value + new_graph[new_node_id] = new_node + + # Change the node IDs in the outputs + new_outputs = [] + for n in range(len(outputs)): + output = outputs[n] + if is_link(output): + new_outputs.append([prefix + output[0], output[1]]) + else: + new_outputs.append(output) + + return new_graph, tuple(new_outputs) + diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_advanced_samplers.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_advanced_samplers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c46e31e4ee061a7a2e3e263f6264ae424aab1767 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_advanced_samplers.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_align_your_steps.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_align_your_steps.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..66b8e24e186c5405542df25b632dee4a7b1d0ec2 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_align_your_steps.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_attention_multiply.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_attention_multiply.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6a2c6b013c1fe1dfe7d93c61df4668efbbf40414 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_attention_multiply.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_audio.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_audio.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0eb66accf9af17fd649b23da67b288df12e682ed Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_audio.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_canny.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_canny.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0c117260535060937e0d1dd899ddad04510481d8 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_canny.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_clip_sdxl.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_clip_sdxl.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9d5569ff552a9ef405c3b69709f8fb8a0e7a4edd Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_clip_sdxl.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_compositing.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_compositing.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9f54793c521cf6107a7f6d91ede8d5a33f18e24f Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_compositing.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_cond.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_cond.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..12b08e9fa12c090e5b27da852cc4fb345464571e Binary files /dev/null 
and b/src/comfyui/comfy_extras/__pycache__/nodes_cond.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_controlnet.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_controlnet.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0a5b75db76a171445575b14cd5c674eea13ba35a Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_controlnet.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_custom_sampler.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_custom_sampler.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cc39adb436ec6e9f768239de173e5a7925e71d48 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_custom_sampler.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_differential_diffusion.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_differential_diffusion.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d35325b13c141277223509d39f91559d65fc74db Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_differential_diffusion.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_flux.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_flux.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f9a381001e33d03e9565e6909e137e058d51287f Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_flux.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_freelunch.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_freelunch.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4622e47e7aea7d69286399369c6552de999cdb39 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_freelunch.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_gits.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_gits.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7091f1aa67e23c34a0ec784bdeba42af4136bfc2 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_gits.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_hunyuan.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_hunyuan.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..885c463d18f010d2d08eaf6d6c37d555aebeed63 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_hunyuan.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_hypernetwork.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_hypernetwork.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..68a11011ac931cba419344fb0b67bd25ea08343d Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_hypernetwork.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_hypertile.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_hypertile.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..790e3142bb3c62e6d77ac85aee7578fe18d8c415 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_hypertile.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_images.cpython-310.pyc 
b/src/comfyui/comfy_extras/__pycache__/nodes_images.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c1ae8fd52145c2b8b1c78e86595ef87253ad87e2 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_images.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_ip2p.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_ip2p.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..879de609580424ea8464d900a00ccd4b64524639 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_ip2p.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_latent.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_latent.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a8e465d45cc21100709b9f722350b6784f68ce1a Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_latent.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_lora_extract.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_lora_extract.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a2cd4866f55cc32387b3fc90441e5428ba2b8b68 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_lora_extract.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_mask.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_mask.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..318e175dac0a5e434ab1707a8210fe26d4c8af13 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_mask.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_mochi.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_mochi.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..91871bdb0299e11cec87f4df9e982aadb5bcf547 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_mochi.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_model_advanced.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_model_advanced.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..475b0d7108a866cce618ebd9a9a941a5e70f6bf3 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_model_advanced.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_model_downscale.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_model_downscale.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..79f965325ce69284b7da58e9331a8a921e9fd910 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_model_downscale.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_model_merging.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_model_merging.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..28bf540251835e7275f5fa30dc34857da7f1cff3 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_model_merging.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_model_merging_model_specific.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_model_merging_model_specific.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f98f2eca848037e8f0081e17ab652896a29cc6e3 Binary files /dev/null and 
b/src/comfyui/comfy_extras/__pycache__/nodes_model_merging_model_specific.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_morphology.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_morphology.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2e648391eb2dbf02b370d764cf8e81bd97215a32 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_morphology.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_pag.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_pag.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e4e66128bd3649bab65a97644d62af383064a3d3 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_pag.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_perpneg.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_perpneg.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..99bf0d8735f42d9e39b98ff2f107058c09e02efa Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_perpneg.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_photomaker.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_photomaker.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fb1340c6a87f1ee3304f76bbb0b0375e2c7b81b6 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_photomaker.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_post_processing.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_post_processing.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4e1b1dfa8446af8afdd28e00b7e70865cbae5969 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_post_processing.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_rebatch.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_rebatch.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f05df288c2bf238f30ef130ecc638ea4b418e074 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_rebatch.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_sag.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_sag.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b1f8fb56e03b5834b9777ecb3dde977c13b39485 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_sag.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_sd3.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_sd3.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3a55b3342c820bbfffa2aef09c1abdaa8b3c2dfa Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_sd3.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_sdupscale.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_sdupscale.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3bc186455b009a7292e9107f0660fb78fb0e7738 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_sdupscale.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_stable3d.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_stable3d.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..5dbd887c21d2fd113cf93aad05fe53f69845f8b9 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_stable3d.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_stable_cascade.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_stable_cascade.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8a94345cda4497bacb6b21b9c3bc0fb639ad9991 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_stable_cascade.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_tomesd.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_tomesd.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3ddf06b67fa98dbc7b3a61cb2043042c83d638c2 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_tomesd.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_torch_compile.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_torch_compile.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f7b0e2db97bc01614b27b4ad470a9b006e54f2be Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_torch_compile.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_upscale_model.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_upscale_model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ba5e8df924b389b737ca57b5723af6f6fad149e5 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_upscale_model.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_video_model.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_video_model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..21cc08c6c463f95c94693cc3826927ad9bceaafa Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_video_model.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/__pycache__/nodes_webcam.cpython-310.pyc b/src/comfyui/comfy_extras/__pycache__/nodes_webcam.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..efc28967765cb22ed4e99e9a71a8c21067ab4c64 Binary files /dev/null and b/src/comfyui/comfy_extras/__pycache__/nodes_webcam.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/chainner_models/__pycache__/model_loading.cpython-310.pyc b/src/comfyui/comfy_extras/chainner_models/__pycache__/model_loading.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1b95b5919f98260b360dbb56c0c4f094dda74134 Binary files /dev/null and b/src/comfyui/comfy_extras/chainner_models/__pycache__/model_loading.cpython-310.pyc differ diff --git a/src/comfyui/comfy_extras/chainner_models/model_loading.py b/src/comfyui/comfy_extras/chainner_models/model_loading.py new file mode 100644 index 0000000000000000000000000000000000000000..d48bc238ccc6efbb383232c46d83d9c29c54ce5d --- /dev/null +++ b/src/comfyui/comfy_extras/chainner_models/model_loading.py @@ -0,0 +1,5 @@ +from spandrel import ModelLoader + +def load_state_dict(state_dict): + print("WARNING: comfy_extras.chainner_models is deprecated and has been replaced by the spandrel library.") + return ModelLoader().load_from_state_dict(state_dict).eval() diff --git a/src/comfyui/comfy_extras/nodes_advanced_samplers.py b/src/comfyui/comfy_extras/nodes_advanced_samplers.py new file mode 100644 
index 0000000000000000000000000000000000000000..820c250ef3aa4b3099ab7e20c4443bd6ed9bff6b --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_advanced_samplers.py @@ -0,0 +1,112 @@ +import comfy.samplers +import comfy.utils +import torch +import numpy as np +from tqdm.auto import trange, tqdm +import math + + +@torch.no_grad() +def sample_lcm_upscale(model, x, sigmas, extra_args=None, callback=None, disable=None, total_upscale=2.0, upscale_method="bislerp", upscale_steps=None): + extra_args = {} if extra_args is None else extra_args + + if upscale_steps is None: + upscale_steps = max(len(sigmas) // 2 + 1, 2) + else: + upscale_steps += 1 + upscale_steps = min(upscale_steps, len(sigmas) + 1) + + upscales = np.linspace(1.0, total_upscale, upscale_steps)[1:] + + orig_shape = x.size() + s_in = x.new_ones([x.shape[0]]) + for i in trange(len(sigmas) - 1, disable=disable): + denoised = model(x, sigmas[i] * s_in, **extra_args) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i], 'denoised': denoised}) + + x = denoised + if i < len(upscales): + x = comfy.utils.common_upscale(x, round(orig_shape[-1] * upscales[i]), round(orig_shape[-2] * upscales[i]), upscale_method, "disabled") + + if sigmas[i + 1] > 0: + x += sigmas[i + 1] * torch.randn_like(x) + return x + + +class SamplerLCMUpscale: + upscale_methods = ["bislerp", "nearest-exact", "bilinear", "area", "bicubic"] + + @classmethod + def INPUT_TYPES(s): + return {"required": + {"scale_ratio": ("FLOAT", {"default": 1.0, "min": 0.1, "max": 20.0, "step": 0.01}), + "scale_steps": ("INT", {"default": -1, "min": -1, "max": 1000, "step": 1}), + "upscale_method": (s.upscale_methods,), + } + } + RETURN_TYPES = ("SAMPLER",) + CATEGORY = "sampling/custom_sampling/samplers" + + FUNCTION = "get_sampler" + + def get_sampler(self, scale_ratio, scale_steps, upscale_method): + if scale_steps < 0: + scale_steps = None + sampler = comfy.samplers.KSAMPLER(sample_lcm_upscale, extra_options={"total_upscale": scale_ratio, "upscale_steps": scale_steps, "upscale_method": upscale_method}) + return (sampler, ) + +from comfy.k_diffusion.sampling import to_d +import comfy.model_patcher + +@torch.no_grad() +def sample_euler_pp(model, x, sigmas, extra_args=None, callback=None, disable=None): + extra_args = {} if extra_args is None else extra_args + + temp = [0] + def post_cfg_function(args): + temp[0] = args["uncond_denoised"] + return args["denoised"] + + model_options = extra_args.get("model_options", {}).copy() + extra_args["model_options"] = comfy.model_patcher.set_model_options_post_cfg_function(model_options, post_cfg_function, disable_cfg1_optimization=True) + + s_in = x.new_ones([x.shape[0]]) + for i in trange(len(sigmas) - 1, disable=disable): + sigma_hat = sigmas[i] + denoised = model(x, sigma_hat * s_in, **extra_args) + d = to_d(x - denoised + temp[0], sigmas[i], denoised) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigma_hat, 'denoised': denoised}) + dt = sigmas[i + 1] - sigma_hat + x = x + d * dt + return x + + +class SamplerEulerCFGpp: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"version": (["regular", "alternative"],),} + } + RETURN_TYPES = ("SAMPLER",) + # CATEGORY = "sampling/custom_sampling/samplers" + CATEGORY = "_for_testing" + + FUNCTION = "get_sampler" + + def get_sampler(self, version): + if version == "alternative": + sampler = comfy.samplers.KSAMPLER(sample_euler_pp) + else: + sampler = comfy.samplers.ksampler("euler_cfg_pp") + return (sampler, ) + 
+NODE_CLASS_MAPPINGS = { + "SamplerLCMUpscale": SamplerLCMUpscale, + "SamplerEulerCFGpp": SamplerEulerCFGpp, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "SamplerEulerCFGpp": "SamplerEulerCFG++", +} diff --git a/src/comfyui/comfy_extras/nodes_align_your_steps.py b/src/comfyui/comfy_extras/nodes_align_your_steps.py new file mode 100644 index 0000000000000000000000000000000000000000..3ffe531878521bdb81e7fee4e9d03f7c37636abc --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_align_your_steps.py @@ -0,0 +1,53 @@ +#from: https://research.nvidia.com/labs/toronto-ai/AlignYourSteps/howto.html +import numpy as np +import torch + +def loglinear_interp(t_steps, num_steps): + """ + Performs log-linear interpolation of a given array of decreasing numbers. + """ + xs = np.linspace(0, 1, len(t_steps)) + ys = np.log(t_steps[::-1]) + + new_xs = np.linspace(0, 1, num_steps) + new_ys = np.interp(new_xs, xs, ys) + + interped_ys = np.exp(new_ys)[::-1].copy() + return interped_ys + +NOISE_LEVELS = {"SD1": [14.6146412293, 6.4745760956, 3.8636745985, 2.6946151520, 1.8841921177, 1.3943805092, 0.9642583904, 0.6523686016, 0.3977456272, 0.1515232662, 0.0291671582], + "SDXL":[14.6146412293, 6.3184485287, 3.7681790315, 2.1811480769, 1.3405244945, 0.8620721141, 0.5550693289, 0.3798540708, 0.2332364134, 0.1114188177, 0.0291671582], + "SVD": [700.00, 54.5, 15.886, 7.977, 4.248, 1.789, 0.981, 0.403, 0.173, 0.034, 0.002]} + +class AlignYourStepsScheduler: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"model_type": (["SD1", "SDXL", "SVD"], ), + "steps": ("INT", {"default": 10, "min": 10, "max": 10000}), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + } + } + RETURN_TYPES = ("SIGMAS",) + CATEGORY = "sampling/custom_sampling/schedulers" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, model_type, steps, denoise): + total_steps = steps + if denoise < 1.0: + if denoise <= 0.0: + return (torch.FloatTensor([]),) + total_steps = round(steps * denoise) + + sigmas = NOISE_LEVELS[model_type][:] + if (steps + 1) != len(sigmas): + sigmas = loglinear_interp(sigmas, steps + 1) + + sigmas = sigmas[-(total_steps + 1):] + sigmas[-1] = 0 + return (torch.FloatTensor(sigmas), ) + +NODE_CLASS_MAPPINGS = { + "AlignYourStepsScheduler": AlignYourStepsScheduler, +} diff --git a/src/comfyui/comfy_extras/nodes_attention_multiply.py b/src/comfyui/comfy_extras/nodes_attention_multiply.py new file mode 100644 index 0000000000000000000000000000000000000000..4747eb39568afe9e19d34fd583531fcab1acba69 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_attention_multiply.py @@ -0,0 +1,120 @@ + +def attention_multiply(attn, model, q, k, v, out): + m = model.clone() + sd = model.model_state_dict() + + for key in sd: + if key.endswith("{}.to_q.bias".format(attn)) or key.endswith("{}.to_q.weight".format(attn)): + m.add_patches({key: (None,)}, 0.0, q) + if key.endswith("{}.to_k.bias".format(attn)) or key.endswith("{}.to_k.weight".format(attn)): + m.add_patches({key: (None,)}, 0.0, k) + if key.endswith("{}.to_v.bias".format(attn)) or key.endswith("{}.to_v.weight".format(attn)): + m.add_patches({key: (None,)}, 0.0, v) + if key.endswith("{}.to_out.0.bias".format(attn)) or key.endswith("{}.to_out.0.weight".format(attn)): + m.add_patches({key: (None,)}, 0.0, out) + + return m + + +class UNetSelfAttentionMultiply: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "q": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "k": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 
10.0, "step": 0.01}), + "v": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "out": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "_for_testing/attention_experiments" + + def patch(self, model, q, k, v, out): + m = attention_multiply("attn1", model, q, k, v, out) + return (m, ) + +class UNetCrossAttentionMultiply: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "q": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "k": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "v": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "out": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "_for_testing/attention_experiments" + + def patch(self, model, q, k, v, out): + m = attention_multiply("attn2", model, q, k, v, out) + return (m, ) + +class CLIPAttentionMultiply: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip": ("CLIP",), + "q": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "k": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "v": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "out": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + }} + RETURN_TYPES = ("CLIP",) + FUNCTION = "patch" + + CATEGORY = "_for_testing/attention_experiments" + + def patch(self, clip, q, k, v, out): + m = clip.clone() + sd = m.patcher.model_state_dict() + + for key in sd: + if key.endswith("self_attn.q_proj.weight") or key.endswith("self_attn.q_proj.bias"): + m.add_patches({key: (None,)}, 0.0, q) + if key.endswith("self_attn.k_proj.weight") or key.endswith("self_attn.k_proj.bias"): + m.add_patches({key: (None,)}, 0.0, k) + if key.endswith("self_attn.v_proj.weight") or key.endswith("self_attn.v_proj.bias"): + m.add_patches({key: (None,)}, 0.0, v) + if key.endswith("self_attn.out_proj.weight") or key.endswith("self_attn.out_proj.bias"): + m.add_patches({key: (None,)}, 0.0, out) + return (m, ) + +class UNetTemporalAttentionMultiply: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "self_structural": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "self_temporal": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "cross_structural": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "cross_temporal": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "_for_testing/attention_experiments" + + def patch(self, model, self_structural, self_temporal, cross_structural, cross_temporal): + m = model.clone() + sd = model.model_state_dict() + + for k in sd: + if (k.endswith("attn1.to_out.0.bias") or k.endswith("attn1.to_out.0.weight")): + if '.time_stack.' in k: + m.add_patches({k: (None,)}, 0.0, self_temporal) + else: + m.add_patches({k: (None,)}, 0.0, self_structural) + elif (k.endswith("attn2.to_out.0.bias") or k.endswith("attn2.to_out.0.weight")): + if '.time_stack.' 
in k: + m.add_patches({k: (None,)}, 0.0, cross_temporal) + else: + m.add_patches({k: (None,)}, 0.0, cross_structural) + return (m, ) + +NODE_CLASS_MAPPINGS = { + "UNetSelfAttentionMultiply": UNetSelfAttentionMultiply, + "UNetCrossAttentionMultiply": UNetCrossAttentionMultiply, + "CLIPAttentionMultiply": CLIPAttentionMultiply, + "UNetTemporalAttentionMultiply": UNetTemporalAttentionMultiply, +} diff --git a/src/comfyui/comfy_extras/nodes_audio.py b/src/comfyui/comfy_extras/nodes_audio.py new file mode 100644 index 0000000000000000000000000000000000000000..e5cc4dffeb0b3a9754f3817502a1d8db5bf0d271 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_audio.py @@ -0,0 +1,228 @@ +import torchaudio +import torch +import comfy.model_management +import folder_paths +import os +import io +import json +import struct +import random +import hashlib +from comfy.cli_args import args + +class EmptyLatentAudio: + def __init__(self): + self.device = comfy.model_management.intermediate_device() + + @classmethod + def INPUT_TYPES(s): + return {"required": {"seconds": ("FLOAT", {"default": 47.6, "min": 1.0, "max": 1000.0, "step": 0.1}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096, "tooltip": "The number of latent images in the batch."}), + }} + RETURN_TYPES = ("LATENT",) + FUNCTION = "generate" + + CATEGORY = "latent/audio" + + def generate(self, seconds, batch_size): + length = round((seconds * 44100 / 2048) / 2) * 2 + latent = torch.zeros([batch_size, 64, length], device=self.device) + return ({"samples":latent, "type": "audio"}, ) + +class VAEEncodeAudio: + @classmethod + def INPUT_TYPES(s): + return {"required": { "audio": ("AUDIO", ), "vae": ("VAE", )}} + RETURN_TYPES = ("LATENT",) + FUNCTION = "encode" + + CATEGORY = "latent/audio" + + def encode(self, vae, audio): + sample_rate = audio["sample_rate"] + if 44100 != sample_rate: + waveform = torchaudio.functional.resample(audio["waveform"], sample_rate, 44100) + else: + waveform = audio["waveform"] + + t = vae.encode(waveform.movedim(1, -1)) + return ({"samples":t}, ) + +class VAEDecodeAudio: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples": ("LATENT", ), "vae": ("VAE", )}} + RETURN_TYPES = ("AUDIO",) + FUNCTION = "decode" + + CATEGORY = "latent/audio" + + def decode(self, vae, samples): + audio = vae.decode(samples["samples"]).movedim(-1, 1) + std = torch.std(audio, dim=[1,2], keepdim=True) * 5.0 + std[std < 1.0] = 1.0 + audio /= std + return ({"waveform": audio, "sample_rate": 44100}, ) + + +def create_vorbis_comment_block(comment_dict, last_block): + vendor_string = b'ComfyUI' + vendor_length = len(vendor_string) + + comments = [] + for key, value in comment_dict.items(): + comment = f"{key}={value}".encode('utf-8') + comments.append(struct.pack('<I', len(comment)) + comment) + + user_comment_list_length = len(comments) + user_comments = b''.join(comments) + + comment_data = struct.pack('<I', vendor_length) + vendor_string + struct.pack('<I', user_comment_list_length) + user_comments + if last_block: + id = b'\x84' + else: + id = b'\x04' + comment_block = id + struct.pack('>I', len(comment_data))[1:] + comment_data + + return comment_block + +def insert_or_replace_vorbis_comment(flac_io, comment_dict): + if len(comment_dict) == 0: + return flac_io + + flac_io.seek(4) + + blocks = [] + last_block = False + + while not last_block: + header = flac_io.read(4) + last_block = (header[0] & 0x80) != 0 + block_type = header[0] & 0x7F + block_length = struct.unpack('>I', b'\x00' + header[1:])[0] + block_data = flac_io.read(block_length) + + if block_type == 4 or block_type == 1: + pass + else: + header = bytes([(header[0] & (~0x80))]) + header[1:] + blocks.append(header + block_data) + + blocks.append(create_vorbis_comment_block(comment_dict, last_block=True)) + + new_flac_io = io.BytesIO() + new_flac_io.write(b'fLaC') + for block in blocks: +
new_flac_io.write(block) + + new_flac_io.write(flac_io.read()) + return new_flac_io + + +class SaveAudio: + def __init__(self): + self.output_dir = folder_paths.get_output_directory() + self.type = "output" + self.prefix_append = "" + + @classmethod + def INPUT_TYPES(s): + return {"required": { "audio": ("AUDIO", ), + "filename_prefix": ("STRING", {"default": "audio/ComfyUI"})}, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + RETURN_TYPES = () + FUNCTION = "save_audio" + + OUTPUT_NODE = True + + CATEGORY = "audio" + + def save_audio(self, audio, filename_prefix="ComfyUI", prompt=None, extra_pnginfo=None): + filename_prefix += self.prefix_append + full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, self.output_dir) + results = list() + + metadata = {} + if not args.disable_metadata: + if prompt is not None: + metadata["prompt"] = json.dumps(prompt) + if extra_pnginfo is not None: + for x in extra_pnginfo: + metadata[x] = json.dumps(extra_pnginfo[x]) + + for (batch_number, waveform) in enumerate(audio["waveform"].cpu()): + filename_with_batch_num = filename.replace("%batch_num%", str(batch_number)) + file = f"{filename_with_batch_num}_{counter:05}_.flac" + + buff = io.BytesIO() + torchaudio.save(buff, waveform, audio["sample_rate"], format="FLAC") + + buff = insert_or_replace_vorbis_comment(buff, metadata) + + with open(os.path.join(full_output_folder, file), 'wb') as f: + f.write(buff.getbuffer()) + + results.append({ + "filename": file, + "subfolder": subfolder, + "type": self.type + }) + counter += 1 + + return { "ui": { "audio": results } } + +class PreviewAudio(SaveAudio): + def __init__(self): + self.output_dir = folder_paths.get_temp_directory() + self.type = "temp" + self.prefix_append = "_temp_" + ''.join(random.choice("abcdefghijklmnopqrstupvxyz") for x in range(5)) + + @classmethod + def INPUT_TYPES(s): + return {"required": + {"audio": ("AUDIO", ), }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + +class LoadAudio: + @classmethod + def INPUT_TYPES(s): + input_dir = folder_paths.get_input_directory() + files = folder_paths.filter_files_content_types(os.listdir(input_dir), ["audio", "video"]) + return {"required": {"audio": (sorted(files), {"audio_upload": True})}} + + CATEGORY = "audio" + + RETURN_TYPES = ("AUDIO", ) + FUNCTION = "load" + + def load(self, audio): + audio_path = folder_paths.get_annotated_filepath(audio) + waveform, sample_rate = torchaudio.load(audio_path) + audio = {"waveform": waveform.unsqueeze(0), "sample_rate": sample_rate} + return (audio, ) + + @classmethod + def IS_CHANGED(s, audio): + image_path = folder_paths.get_annotated_filepath(audio) + m = hashlib.sha256() + with open(image_path, 'rb') as f: + m.update(f.read()) + return m.digest().hex() + + @classmethod + def VALIDATE_INPUTS(s, audio): + if not folder_paths.exists_annotated_filepath(audio): + return "Invalid audio file: {}".format(audio) + return True + +NODE_CLASS_MAPPINGS = { + "EmptyLatentAudio": EmptyLatentAudio, + "VAEEncodeAudio": VAEEncodeAudio, + "VAEDecodeAudio": VAEDecodeAudio, + "SaveAudio": SaveAudio, + "LoadAudio": LoadAudio, + "PreviewAudio": PreviewAudio, +} diff --git a/src/comfyui/comfy_extras/nodes_canny.py b/src/comfyui/comfy_extras/nodes_canny.py new file mode 100644 index 0000000000000000000000000000000000000000..d85e6b85691dcdcc55e31705039934846d466fc1 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_canny.py @@ -0,0 +1,25 @@ +from kornia.filters import 
canny +import comfy.model_management + + +class Canny: + @classmethod + def INPUT_TYPES(s): + return {"required": {"image": ("IMAGE",), + "low_threshold": ("FLOAT", {"default": 0.4, "min": 0.01, "max": 0.99, "step": 0.01}), + "high_threshold": ("FLOAT", {"default": 0.8, "min": 0.01, "max": 0.99, "step": 0.01}) + }} + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "detect_edge" + + CATEGORY = "image/preprocessors" + + def detect_edge(self, image, low_threshold, high_threshold): + output = canny(image.to(comfy.model_management.get_torch_device()).movedim(-1, 1), low_threshold, high_threshold) + img_out = output[1].to(comfy.model_management.intermediate_device()).repeat(1, 3, 1, 1).movedim(1, -1) + return (img_out,) + +NODE_CLASS_MAPPINGS = { + "Canny": Canny, +} diff --git a/src/comfyui/comfy_extras/nodes_clip_sdxl.py b/src/comfyui/comfy_extras/nodes_clip_sdxl.py new file mode 100644 index 0000000000000000000000000000000000000000..3087b917b41dba951d69029bdd278c258d97e40f --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_clip_sdxl.py @@ -0,0 +1,56 @@ +import torch +from nodes import MAX_RESOLUTION + +class CLIPTextEncodeSDXLRefiner: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "ascore": ("FLOAT", {"default": 6.0, "min": 0.0, "max": 1000.0, "step": 0.01}), + "width": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "height": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "text": ("STRING", {"multiline": True, "dynamicPrompts": True}), "clip": ("CLIP", ), + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "encode" + + CATEGORY = "advanced/conditioning" + + def encode(self, clip, ascore, width, height, text): + tokens = clip.tokenize(text) + cond, pooled = clip.encode_from_tokens(tokens, return_pooled=True) + return ([[cond, {"pooled_output": pooled, "aesthetic_score": ascore, "width": width,"height": height}]], ) + +class CLIPTextEncodeSDXL: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "width": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "height": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "crop_w": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION}), + "crop_h": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION}), + "target_width": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "target_height": ("INT", {"default": 1024.0, "min": 0, "max": MAX_RESOLUTION}), + "text_g": ("STRING", {"multiline": True, "dynamicPrompts": True}), "clip": ("CLIP", ), + "text_l": ("STRING", {"multiline": True, "dynamicPrompts": True}), "clip": ("CLIP", ), + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "encode" + + CATEGORY = "advanced/conditioning" + + def encode(self, clip, width, height, crop_w, crop_h, target_width, target_height, text_g, text_l): + tokens = clip.tokenize(text_g) + tokens["l"] = clip.tokenize(text_l)["l"] + if len(tokens["l"]) != len(tokens["g"]): + empty = clip.tokenize("") + while len(tokens["l"]) < len(tokens["g"]): + tokens["l"] += empty["l"] + while len(tokens["l"]) > len(tokens["g"]): + tokens["g"] += empty["g"] + cond, pooled = clip.encode_from_tokens(tokens, return_pooled=True) + return ([[cond, {"pooled_output": pooled, "width": width, "height": height, "crop_w": crop_w, "crop_h": crop_h, "target_width": target_width, "target_height": target_height}]], ) + +NODE_CLASS_MAPPINGS = { + "CLIPTextEncodeSDXLRefiner": CLIPTextEncodeSDXLRefiner, + "CLIPTextEncodeSDXL": CLIPTextEncodeSDXL, +} diff --git a/src/comfyui/comfy_extras/nodes_compositing.py 
b/src/comfyui/comfy_extras/nodes_compositing.py new file mode 100644 index 0000000000000000000000000000000000000000..48fe5e3ddc60e1e150299c35f044cae831c02300 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_compositing.py @@ -0,0 +1,215 @@ +import numpy as np +import torch +import comfy.utils +from enum import Enum + +def resize_mask(mask, shape): + return torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(shape[0], shape[1]), mode="bilinear").squeeze(1) + +class PorterDuffMode(Enum): + ADD = 0 + CLEAR = 1 + DARKEN = 2 + DST = 3 + DST_ATOP = 4 + DST_IN = 5 + DST_OUT = 6 + DST_OVER = 7 + LIGHTEN = 8 + MULTIPLY = 9 + OVERLAY = 10 + SCREEN = 11 + SRC = 12 + SRC_ATOP = 13 + SRC_IN = 14 + SRC_OUT = 15 + SRC_OVER = 16 + XOR = 17 + + +def porter_duff_composite(src_image: torch.Tensor, src_alpha: torch.Tensor, dst_image: torch.Tensor, dst_alpha: torch.Tensor, mode: PorterDuffMode): + # convert mask to alpha + src_alpha = 1 - src_alpha + dst_alpha = 1 - dst_alpha + # premultiply alpha + src_image = src_image * src_alpha + dst_image = dst_image * dst_alpha + + # composite ops below assume alpha-premultiplied images + if mode == PorterDuffMode.ADD: + out_alpha = torch.clamp(src_alpha + dst_alpha, 0, 1) + out_image = torch.clamp(src_image + dst_image, 0, 1) + elif mode == PorterDuffMode.CLEAR: + out_alpha = torch.zeros_like(dst_alpha) + out_image = torch.zeros_like(dst_image) + elif mode == PorterDuffMode.DARKEN: + out_alpha = src_alpha + dst_alpha - src_alpha * dst_alpha + out_image = (1 - dst_alpha) * src_image + (1 - src_alpha) * dst_image + torch.min(src_image, dst_image) + elif mode == PorterDuffMode.DST: + out_alpha = dst_alpha + out_image = dst_image + elif mode == PorterDuffMode.DST_ATOP: + out_alpha = src_alpha + out_image = src_alpha * dst_image + (1 - dst_alpha) * src_image + elif mode == PorterDuffMode.DST_IN: + out_alpha = src_alpha * dst_alpha + out_image = dst_image * src_alpha + elif mode == PorterDuffMode.DST_OUT: + out_alpha = (1 - src_alpha) * dst_alpha + out_image = (1 - src_alpha) * dst_image + elif mode == PorterDuffMode.DST_OVER: + out_alpha = dst_alpha + (1 - dst_alpha) * src_alpha + out_image = dst_image + (1 - dst_alpha) * src_image + elif mode == PorterDuffMode.LIGHTEN: + out_alpha = src_alpha + dst_alpha - src_alpha * dst_alpha + out_image = (1 - dst_alpha) * src_image + (1 - src_alpha) * dst_image + torch.max(src_image, dst_image) + elif mode == PorterDuffMode.MULTIPLY: + out_alpha = src_alpha * dst_alpha + out_image = src_image * dst_image + elif mode == PorterDuffMode.OVERLAY: + out_alpha = src_alpha + dst_alpha - src_alpha * dst_alpha + out_image = torch.where(2 * dst_image < dst_alpha, 2 * src_image * dst_image, + src_alpha * dst_alpha - 2 * (dst_alpha - src_image) * (src_alpha - dst_image)) + elif mode == PorterDuffMode.SCREEN: + out_alpha = src_alpha + dst_alpha - src_alpha * dst_alpha + out_image = src_image + dst_image - src_image * dst_image + elif mode == PorterDuffMode.SRC: + out_alpha = src_alpha + out_image = src_image + elif mode == PorterDuffMode.SRC_ATOP: + out_alpha = dst_alpha + out_image = dst_alpha * src_image + (1 - src_alpha) * dst_image + elif mode == PorterDuffMode.SRC_IN: + out_alpha = src_alpha * dst_alpha + out_image = src_image * dst_alpha + elif mode == PorterDuffMode.SRC_OUT: + out_alpha = (1 - dst_alpha) * src_alpha + out_image = (1 - dst_alpha) * src_image + elif mode == PorterDuffMode.SRC_OVER: + out_alpha = src_alpha + (1 - src_alpha) * dst_alpha + out_image = src_image + (1 - src_alpha) * 
dst_image + elif mode == PorterDuffMode.XOR: + out_alpha = (1 - dst_alpha) * src_alpha + (1 - src_alpha) * dst_alpha + out_image = (1 - dst_alpha) * src_image + (1 - src_alpha) * dst_image + else: + return None, None + + # back to non-premultiplied alpha + out_image = torch.where(out_alpha > 1e-5, out_image / out_alpha, torch.zeros_like(out_image)) + out_image = torch.clamp(out_image, 0, 1) + # convert alpha to mask + out_alpha = 1 - out_alpha + return out_image, out_alpha + + +class PorterDuffImageComposite: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "source": ("IMAGE",), + "source_alpha": ("MASK",), + "destination": ("IMAGE",), + "destination_alpha": ("MASK",), + "mode": ([mode.name for mode in PorterDuffMode], {"default": PorterDuffMode.DST.name}), + }, + } + + RETURN_TYPES = ("IMAGE", "MASK") + FUNCTION = "composite" + CATEGORY = "mask/compositing" + + def composite(self, source: torch.Tensor, source_alpha: torch.Tensor, destination: torch.Tensor, destination_alpha: torch.Tensor, mode): + batch_size = min(len(source), len(source_alpha), len(destination), len(destination_alpha)) + out_images = [] + out_alphas = [] + + for i in range(batch_size): + src_image = source[i] + dst_image = destination[i] + + assert src_image.shape[2] == dst_image.shape[2] # inputs need to have same number of channels + + src_alpha = source_alpha[i].unsqueeze(2) + dst_alpha = destination_alpha[i].unsqueeze(2) + + if dst_alpha.shape[:2] != dst_image.shape[:2]: + upscale_input = dst_alpha.unsqueeze(0).permute(0, 3, 1, 2) + upscale_output = comfy.utils.common_upscale(upscale_input, dst_image.shape[1], dst_image.shape[0], upscale_method='bicubic', crop='center') + dst_alpha = upscale_output.permute(0, 2, 3, 1).squeeze(0) + if src_image.shape != dst_image.shape: + upscale_input = src_image.unsqueeze(0).permute(0, 3, 1, 2) + upscale_output = comfy.utils.common_upscale(upscale_input, dst_image.shape[1], dst_image.shape[0], upscale_method='bicubic', crop='center') + src_image = upscale_output.permute(0, 2, 3, 1).squeeze(0) + if src_alpha.shape != dst_alpha.shape: + upscale_input = src_alpha.unsqueeze(0).permute(0, 3, 1, 2) + upscale_output = comfy.utils.common_upscale(upscale_input, dst_alpha.shape[1], dst_alpha.shape[0], upscale_method='bicubic', crop='center') + src_alpha = upscale_output.permute(0, 2, 3, 1).squeeze(0) + + out_image, out_alpha = porter_duff_composite(src_image, src_alpha, dst_image, dst_alpha, PorterDuffMode[mode]) + + out_images.append(out_image) + out_alphas.append(out_alpha.squeeze(2)) + + result = (torch.stack(out_images), torch.stack(out_alphas)) + return result + + +class SplitImageWithAlpha: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + } + } + + CATEGORY = "mask/compositing" + RETURN_TYPES = ("IMAGE", "MASK") + FUNCTION = "split_image_with_alpha" + + def split_image_with_alpha(self, image: torch.Tensor): + out_images = [i[:,:,:3] for i in image] + out_alphas = [i[:,:,3] if i.shape[2] > 3 else torch.ones_like(i[:,:,0]) for i in image] + result = (torch.stack(out_images), 1.0 - torch.stack(out_alphas)) + return result + + +class JoinImageWithAlpha: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "alpha": ("MASK",), + } + } + + CATEGORY = "mask/compositing" + RETURN_TYPES = ("IMAGE",) + FUNCTION = "join_image_with_alpha" + + def join_image_with_alpha(self, image: torch.Tensor, alpha: torch.Tensor): + batch_size = min(len(image), len(alpha)) + out_images = [] + + alpha = 1.0 - 
resize_mask(alpha, image.shape[1:]) + for i in range(batch_size): + out_images.append(torch.cat((image[i][:,:,:3], alpha[i].unsqueeze(2)), dim=2)) + + result = (torch.stack(out_images),) + return result + + +NODE_CLASS_MAPPINGS = { + "PorterDuffImageComposite": PorterDuffImageComposite, + "SplitImageWithAlpha": SplitImageWithAlpha, + "JoinImageWithAlpha": JoinImageWithAlpha, +} + + +NODE_DISPLAY_NAME_MAPPINGS = { + "PorterDuffImageComposite": "Porter-Duff Image Composite", + "SplitImageWithAlpha": "Split Image with Alpha", + "JoinImageWithAlpha": "Join Image with Alpha", +} diff --git a/src/comfyui/comfy_extras/nodes_cond.py b/src/comfyui/comfy_extras/nodes_cond.py new file mode 100644 index 0000000000000000000000000000000000000000..4c3a1d5bf63e732c1d738b49e47da8ff7714f8a1 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_cond.py @@ -0,0 +1,25 @@ + + +class CLIPTextEncodeControlnet: + @classmethod + def INPUT_TYPES(s): + return {"required": {"clip": ("CLIP", ), "conditioning": ("CONDITIONING", ), "text": ("STRING", {"multiline": True, "dynamicPrompts": True})}} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "encode" + + CATEGORY = "_for_testing/conditioning" + + def encode(self, clip, conditioning, text): + tokens = clip.tokenize(text) + cond, pooled = clip.encode_from_tokens(tokens, return_pooled=True) + c = [] + for t in conditioning: + n = [t[0], t[1].copy()] + n[1]['cross_attn_controlnet'] = cond + n[1]['pooled_output_controlnet'] = pooled + c.append(n) + return (c, ) + +NODE_CLASS_MAPPINGS = { + "CLIPTextEncodeControlnet": CLIPTextEncodeControlnet +} diff --git a/src/comfyui/comfy_extras/nodes_controlnet.py b/src/comfyui/comfy_extras/nodes_controlnet.py new file mode 100644 index 0000000000000000000000000000000000000000..2d20e1fed7c26c8115f4b9878b0b54911e7a2e7f --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_controlnet.py @@ -0,0 +1,60 @@ +from comfy.cldm.control_types import UNION_CONTROLNET_TYPES +import nodes +import comfy.utils + +class SetUnionControlNetType: + @classmethod + def INPUT_TYPES(s): + return {"required": {"control_net": ("CONTROL_NET", ), + "type": (["auto"] + list(UNION_CONTROLNET_TYPES.keys()),) + }} + + CATEGORY = "conditioning/controlnet" + RETURN_TYPES = ("CONTROL_NET",) + + FUNCTION = "set_controlnet_type" + + def set_controlnet_type(self, control_net, type): + control_net = control_net.copy() + type_number = UNION_CONTROLNET_TYPES.get(type, -1) + if type_number >= 0: + control_net.set_extra_arg("control_type", [type_number]) + else: + control_net.set_extra_arg("control_type", []) + + return (control_net,) + +class ControlNetInpaintingAliMamaApply(nodes.ControlNetApplyAdvanced): + @classmethod + def INPUT_TYPES(s): + return {"required": {"positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "control_net": ("CONTROL_NET", ), + "vae": ("VAE", ), + "image": ("IMAGE", ), + "mask": ("MASK", ), + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "start_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_percent": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}) + }} + + FUNCTION = "apply_inpaint_controlnet" + + CATEGORY = "conditioning/controlnet" + + def apply_inpaint_controlnet(self, positive, negative, control_net, vae, image, mask, strength, start_percent, end_percent): + extra_concat = [] + if control_net.concat_mask: + mask = 1.0 - mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])) + mask_apply = comfy.utils.common_upscale(mask, image.shape[2], 
image.shape[1], "bilinear", "center").round() + image = image * mask_apply.movedim(1, -1).repeat(1, 1, 1, image.shape[3]) + extra_concat = [mask] + + return self.apply_controlnet(positive, negative, control_net, image, strength, start_percent, end_percent, vae=vae, extra_concat=extra_concat) + + + +NODE_CLASS_MAPPINGS = { + "SetUnionControlNetType": SetUnionControlNetType, + "ControlNetInpaintingAliMamaApply": ControlNetInpaintingAliMamaApply, +} diff --git a/src/comfyui/comfy_extras/nodes_custom_sampler.py b/src/comfyui/comfy_extras/nodes_custom_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..c7ff9a4d8f9c56c5120f0032a202be79c851d5b3 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_custom_sampler.py @@ -0,0 +1,725 @@ +import comfy.samplers +import comfy.sample +from comfy.k_diffusion import sampling as k_diffusion_sampling +import latent_preview +import torch +import comfy.utils +import node_helpers + + +class BasicScheduler: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"model": ("MODEL",), + "scheduler": (comfy.samplers.SCHEDULER_NAMES, ), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + } + } + RETURN_TYPES = ("SIGMAS",) + CATEGORY = "sampling/custom_sampling/schedulers" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, model, scheduler, steps, denoise): + total_steps = steps + if denoise < 1.0: + if denoise <= 0.0: + return (torch.FloatTensor([]),) + total_steps = int(steps/denoise) + + sigmas = comfy.samplers.calculate_sigmas(model.get_model_object("model_sampling"), scheduler, total_steps).cpu() + sigmas = sigmas[-(steps + 1):] + return (sigmas, ) + + +class KarrasScheduler: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "sigma_max": ("FLOAT", {"default": 14.614642, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}), + "sigma_min": ("FLOAT", {"default": 0.0291675, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}), + "rho": ("FLOAT", {"default": 7.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + } + } + RETURN_TYPES = ("SIGMAS",) + CATEGORY = "sampling/custom_sampling/schedulers" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, steps, sigma_max, sigma_min, rho): + sigmas = k_diffusion_sampling.get_sigmas_karras(n=steps, sigma_min=sigma_min, sigma_max=sigma_max, rho=rho) + return (sigmas, ) + +class ExponentialScheduler: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "sigma_max": ("FLOAT", {"default": 14.614642, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}), + "sigma_min": ("FLOAT", {"default": 0.0291675, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}), + } + } + RETURN_TYPES = ("SIGMAS",) + CATEGORY = "sampling/custom_sampling/schedulers" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, steps, sigma_max, sigma_min): + sigmas = k_diffusion_sampling.get_sigmas_exponential(n=steps, sigma_min=sigma_min, sigma_max=sigma_max) + return (sigmas, ) + +class PolyexponentialScheduler: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "sigma_max": ("FLOAT", {"default": 14.614642, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}), + "sigma_min": ("FLOAT", {"default": 0.0291675, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}), + "rho": ("FLOAT", {"default": 1.0, 
"min": 0.0, "max": 100.0, "step":0.01, "round": False}), + } + } + RETURN_TYPES = ("SIGMAS",) + CATEGORY = "sampling/custom_sampling/schedulers" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, steps, sigma_max, sigma_min, rho): + sigmas = k_diffusion_sampling.get_sigmas_polyexponential(n=steps, sigma_min=sigma_min, sigma_max=sigma_max, rho=rho) + return (sigmas, ) + +class LaplaceScheduler: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "sigma_max": ("FLOAT", {"default": 14.614642, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}), + "sigma_min": ("FLOAT", {"default": 0.0291675, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}), + "mu": ("FLOAT", {"default": 0.0, "min": -10.0, "max": 10.0, "step":0.1, "round": False}), + "beta": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 10.0, "step":0.1, "round": False}), + } + } + RETURN_TYPES = ("SIGMAS",) + CATEGORY = "sampling/custom_sampling/schedulers" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, steps, sigma_max, sigma_min, mu, beta): + sigmas = k_diffusion_sampling.get_sigmas_laplace(n=steps, sigma_min=sigma_min, sigma_max=sigma_max, mu=mu, beta=beta) + return (sigmas, ) + + +class SDTurboScheduler: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"model": ("MODEL",), + "steps": ("INT", {"default": 1, "min": 1, "max": 10}), + "denoise": ("FLOAT", {"default": 1.0, "min": 0, "max": 1.0, "step": 0.01}), + } + } + RETURN_TYPES = ("SIGMAS",) + CATEGORY = "sampling/custom_sampling/schedulers" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, model, steps, denoise): + start_step = 10 - int(10 * denoise) + timesteps = torch.flip(torch.arange(1, 11) * 100 - 1, (0,))[start_step:start_step + steps] + sigmas = model.get_model_object("model_sampling").sigma(timesteps) + sigmas = torch.cat([sigmas, sigmas.new_zeros([1])]) + return (sigmas, ) + +class BetaSamplingScheduler: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"model": ("MODEL",), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "alpha": ("FLOAT", {"default": 0.6, "min": 0.0, "max": 50.0, "step":0.01, "round": False}), + "beta": ("FLOAT", {"default": 0.6, "min": 0.0, "max": 50.0, "step":0.01, "round": False}), + } + } + RETURN_TYPES = ("SIGMAS",) + CATEGORY = "sampling/custom_sampling/schedulers" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, model, steps, alpha, beta): + sigmas = comfy.samplers.beta_scheduler(model.get_model_object("model_sampling"), steps, alpha=alpha, beta=beta) + return (sigmas, ) + +class VPScheduler: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "beta_d": ("FLOAT", {"default": 19.9, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}), #TODO: fix default values + "beta_min": ("FLOAT", {"default": 0.1, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}), + "eps_s": ("FLOAT", {"default": 0.001, "min": 0.0, "max": 1.0, "step":0.0001, "round": False}), + } + } + RETURN_TYPES = ("SIGMAS",) + CATEGORY = "sampling/custom_sampling/schedulers" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, steps, beta_d, beta_min, eps_s): + sigmas = k_diffusion_sampling.get_sigmas_vp(n=steps, beta_d=beta_d, beta_min=beta_min, eps_s=eps_s) + return (sigmas, ) + +class SplitSigmas: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"sigmas": ("SIGMAS", ), + "step": ("INT", {"default": 0, "min": 0, "max": 10000}), + } + } + RETURN_TYPES = 
("SIGMAS","SIGMAS") + RETURN_NAMES = ("high_sigmas", "low_sigmas") + CATEGORY = "sampling/custom_sampling/sigmas" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, sigmas, step): + sigmas1 = sigmas[:step + 1] + sigmas2 = sigmas[step:] + return (sigmas1, sigmas2) + +class SplitSigmasDenoise: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"sigmas": ("SIGMAS", ), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + } + } + RETURN_TYPES = ("SIGMAS","SIGMAS") + RETURN_NAMES = ("high_sigmas", "low_sigmas") + CATEGORY = "sampling/custom_sampling/sigmas" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, sigmas, denoise): + steps = max(sigmas.shape[-1] - 1, 0) + total_steps = round(steps * denoise) + sigmas1 = sigmas[:-(total_steps)] + sigmas2 = sigmas[-(total_steps + 1):] + return (sigmas1, sigmas2) + +class FlipSigmas: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"sigmas": ("SIGMAS", ), + } + } + RETURN_TYPES = ("SIGMAS",) + CATEGORY = "sampling/custom_sampling/sigmas" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, sigmas): + if len(sigmas) == 0: + return (sigmas,) + + sigmas = sigmas.flip(0) + if sigmas[0] == 0: + sigmas[0] = 0.0001 + return (sigmas,) + +class KSamplerSelect: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"sampler_name": (comfy.samplers.SAMPLER_NAMES, ), + } + } + RETURN_TYPES = ("SAMPLER",) + CATEGORY = "sampling/custom_sampling/samplers" + + FUNCTION = "get_sampler" + + def get_sampler(self, sampler_name): + sampler = comfy.samplers.sampler_object(sampler_name) + return (sampler, ) + +class SamplerDPMPP_3M_SDE: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"eta": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "s_noise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "noise_device": (['gpu', 'cpu'], ), + } + } + RETURN_TYPES = ("SAMPLER",) + CATEGORY = "sampling/custom_sampling/samplers" + + FUNCTION = "get_sampler" + + def get_sampler(self, eta, s_noise, noise_device): + if noise_device == 'cpu': + sampler_name = "dpmpp_3m_sde" + else: + sampler_name = "dpmpp_3m_sde_gpu" + sampler = comfy.samplers.ksampler(sampler_name, {"eta": eta, "s_noise": s_noise}) + return (sampler, ) + +class SamplerDPMPP_2M_SDE: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"solver_type": (['midpoint', 'heun'], ), + "eta": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "s_noise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "noise_device": (['gpu', 'cpu'], ), + } + } + RETURN_TYPES = ("SAMPLER",) + CATEGORY = "sampling/custom_sampling/samplers" + + FUNCTION = "get_sampler" + + def get_sampler(self, solver_type, eta, s_noise, noise_device): + if noise_device == 'cpu': + sampler_name = "dpmpp_2m_sde" + else: + sampler_name = "dpmpp_2m_sde_gpu" + sampler = comfy.samplers.ksampler(sampler_name, {"eta": eta, "s_noise": s_noise, "solver_type": solver_type}) + return (sampler, ) + + +class SamplerDPMPP_SDE: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"eta": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "s_noise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "r": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "noise_device": (['gpu', 'cpu'], ), + } + } + RETURN_TYPES = ("SAMPLER",) + CATEGORY = 
"sampling/custom_sampling/samplers" + + FUNCTION = "get_sampler" + + def get_sampler(self, eta, s_noise, r, noise_device): + if noise_device == 'cpu': + sampler_name = "dpmpp_sde" + else: + sampler_name = "dpmpp_sde_gpu" + sampler = comfy.samplers.ksampler(sampler_name, {"eta": eta, "s_noise": s_noise, "r": r}) + return (sampler, ) + +class SamplerDPMPP_2S_Ancestral: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"eta": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "s_noise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + } + } + RETURN_TYPES = ("SAMPLER",) + CATEGORY = "sampling/custom_sampling/samplers" + + FUNCTION = "get_sampler" + + def get_sampler(self, eta, s_noise): + sampler = comfy.samplers.ksampler("dpmpp_2s_ancestral", {"eta": eta, "s_noise": s_noise}) + return (sampler, ) + +class SamplerEulerAncestral: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"eta": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "s_noise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + } + } + RETURN_TYPES = ("SAMPLER",) + CATEGORY = "sampling/custom_sampling/samplers" + + FUNCTION = "get_sampler" + + def get_sampler(self, eta, s_noise): + sampler = comfy.samplers.ksampler("euler_ancestral", {"eta": eta, "s_noise": s_noise}) + return (sampler, ) + +class SamplerEulerAncestralCFGPP: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "eta": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step":0.01, "round": False}), + "s_noise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step":0.01, "round": False}), + }} + RETURN_TYPES = ("SAMPLER",) + CATEGORY = "sampling/custom_sampling/samplers" + + FUNCTION = "get_sampler" + + def get_sampler(self, eta, s_noise): + sampler = comfy.samplers.ksampler( + "euler_ancestral_cfg_pp", + {"eta": eta, "s_noise": s_noise}) + return (sampler, ) + +class SamplerLMS: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"order": ("INT", {"default": 4, "min": 1, "max": 100}), + } + } + RETURN_TYPES = ("SAMPLER",) + CATEGORY = "sampling/custom_sampling/samplers" + + FUNCTION = "get_sampler" + + def get_sampler(self, order): + sampler = comfy.samplers.ksampler("lms", {"order": order}) + return (sampler, ) + +class SamplerDPMAdaptative: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"order": ("INT", {"default": 3, "min": 2, "max": 3}), + "rtol": ("FLOAT", {"default": 0.05, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "atol": ("FLOAT", {"default": 0.0078, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "h_init": ("FLOAT", {"default": 0.05, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "pcoeff": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "icoeff": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "dcoeff": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "accept_safety": ("FLOAT", {"default": 0.81, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "eta": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + "s_noise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.01, "round": False}), + } + } + RETURN_TYPES = ("SAMPLER",) + CATEGORY = "sampling/custom_sampling/samplers" + + FUNCTION = "get_sampler" + + def get_sampler(self, order, rtol, atol, 
h_init, pcoeff, icoeff, dcoeff, accept_safety, eta, s_noise): + sampler = comfy.samplers.ksampler("dpm_adaptive", {"order": order, "rtol": rtol, "atol": atol, "h_init": h_init, "pcoeff": pcoeff, + "icoeff": icoeff, "dcoeff": dcoeff, "accept_safety": accept_safety, "eta": eta, + "s_noise":s_noise }) + return (sampler, ) + +class Noise_EmptyNoise: + def __init__(self): + self.seed = 0 + + def generate_noise(self, input_latent): + latent_image = input_latent["samples"] + return torch.zeros(latent_image.shape, dtype=latent_image.dtype, layout=latent_image.layout, device="cpu") + + +class Noise_RandomNoise: + def __init__(self, seed): + self.seed = seed + + def generate_noise(self, input_latent): + latent_image = input_latent["samples"] + batch_inds = input_latent["batch_index"] if "batch_index" in input_latent else None + return comfy.sample.prepare_noise(latent_image, self.seed, batch_inds) + +class SamplerCustom: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"model": ("MODEL",), + "add_noise": ("BOOLEAN", {"default": True}), + "noise_seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "sampler": ("SAMPLER", ), + "sigmas": ("SIGMAS", ), + "latent_image": ("LATENT", ), + } + } + + RETURN_TYPES = ("LATENT","LATENT") + RETURN_NAMES = ("output", "denoised_output") + + FUNCTION = "sample" + + CATEGORY = "sampling/custom_sampling" + + def sample(self, model, add_noise, noise_seed, cfg, positive, negative, sampler, sigmas, latent_image): + latent = latent_image + latent_image = latent["samples"] + latent = latent.copy() + latent_image = comfy.sample.fix_empty_latent_channels(model, latent_image) + latent["samples"] = latent_image + + if not add_noise: + noise = Noise_EmptyNoise().generate_noise(latent) + else: + noise = Noise_RandomNoise(noise_seed).generate_noise(latent) + + noise_mask = None + if "noise_mask" in latent: + noise_mask = latent["noise_mask"] + + x0_output = {} + callback = latent_preview.prepare_callback(model, sigmas.shape[-1] - 1, x0_output) + + disable_pbar = not comfy.utils.PROGRESS_BAR_ENABLED + samples = comfy.sample.sample_custom(model, noise, cfg, sampler, sigmas, positive, negative, latent_image, noise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=noise_seed) + + out = latent.copy() + out["samples"] = samples + if "x0" in x0_output: + out_denoised = latent.copy() + out_denoised["samples"] = model.model.process_latent_out(x0_output["x0"].cpu()) + else: + out_denoised = out + return (out, out_denoised) + +class Guider_Basic(comfy.samplers.CFGGuider): + def set_conds(self, positive): + self.inner_set_conds({"positive": positive}) + +class BasicGuider: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"model": ("MODEL",), + "conditioning": ("CONDITIONING", ), + } + } + + RETURN_TYPES = ("GUIDER",) + + FUNCTION = "get_guider" + CATEGORY = "sampling/custom_sampling/guiders" + + def get_guider(self, model, conditioning): + guider = Guider_Basic(model) + guider.set_conds(conditioning) + return (guider,) + +class CFGGuider: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"model": ("MODEL",), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), + } + } + + RETURN_TYPES = ("GUIDER",) + + FUNCTION = "get_guider" + CATEGORY = 
"sampling/custom_sampling/guiders" + + def get_guider(self, model, positive, negative, cfg): + guider = comfy.samplers.CFGGuider(model) + guider.set_conds(positive, negative) + guider.set_cfg(cfg) + return (guider,) + +class Guider_DualCFG(comfy.samplers.CFGGuider): + def set_cfg(self, cfg1, cfg2): + self.cfg1 = cfg1 + self.cfg2 = cfg2 + + def set_conds(self, positive, middle, negative): + middle = node_helpers.conditioning_set_values(middle, {"prompt_type": "negative"}) + self.inner_set_conds({"positive": positive, "middle": middle, "negative": negative}) + + def predict_noise(self, x, timestep, model_options={}, seed=None): + negative_cond = self.conds.get("negative", None) + middle_cond = self.conds.get("middle", None) + + out = comfy.samplers.calc_cond_batch(self.inner_model, [negative_cond, middle_cond, self.conds.get("positive", None)], x, timestep, model_options) + return comfy.samplers.cfg_function(self.inner_model, out[1], out[0], self.cfg2, x, timestep, model_options=model_options, cond=middle_cond, uncond=negative_cond) + (out[2] - out[1]) * self.cfg1 + +class DualCFGGuider: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"model": ("MODEL",), + "cond1": ("CONDITIONING", ), + "cond2": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "cfg_conds": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), + "cfg_cond2_negative": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), + } + } + + RETURN_TYPES = ("GUIDER",) + + FUNCTION = "get_guider" + CATEGORY = "sampling/custom_sampling/guiders" + + def get_guider(self, model, cond1, cond2, negative, cfg_conds, cfg_cond2_negative): + guider = Guider_DualCFG(model) + guider.set_conds(cond1, cond2, negative) + guider.set_cfg(cfg_conds, cfg_cond2_negative) + return (guider,) + +class DisableNoise: + @classmethod + def INPUT_TYPES(s): + return {"required":{ + } + } + + RETURN_TYPES = ("NOISE",) + FUNCTION = "get_noise" + CATEGORY = "sampling/custom_sampling/noise" + + def get_noise(self): + return (Noise_EmptyNoise(),) + + +class RandomNoise(DisableNoise): + @classmethod + def INPUT_TYPES(s): + return {"required":{ + "noise_seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + } + } + + def get_noise(self, noise_seed): + return (Noise_RandomNoise(noise_seed),) + + +class SamplerCustomAdvanced: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"noise": ("NOISE", ), + "guider": ("GUIDER", ), + "sampler": ("SAMPLER", ), + "sigmas": ("SIGMAS", ), + "latent_image": ("LATENT", ), + } + } + + RETURN_TYPES = ("LATENT","LATENT") + RETURN_NAMES = ("output", "denoised_output") + + FUNCTION = "sample" + + CATEGORY = "sampling/custom_sampling" + + def sample(self, noise, guider, sampler, sigmas, latent_image): + latent = latent_image + latent_image = latent["samples"] + latent = latent.copy() + latent_image = comfy.sample.fix_empty_latent_channels(guider.model_patcher, latent_image) + latent["samples"] = latent_image + + noise_mask = None + if "noise_mask" in latent: + noise_mask = latent["noise_mask"] + + x0_output = {} + callback = latent_preview.prepare_callback(guider.model_patcher, sigmas.shape[-1] - 1, x0_output) + + disable_pbar = not comfy.utils.PROGRESS_BAR_ENABLED + samples = guider.sample(noise.generate_noise(latent), latent_image, sampler, sigmas, denoise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=noise.seed) + samples = samples.to(comfy.model_management.intermediate_device()) + + out = latent.copy() + 
out["samples"] = samples + if "x0" in x0_output: + out_denoised = latent.copy() + out_denoised["samples"] = guider.model_patcher.model.process_latent_out(x0_output["x0"].cpu()) + else: + out_denoised = out + return (out, out_denoised) + +class AddNoise: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"model": ("MODEL",), + "noise": ("NOISE", ), + "sigmas": ("SIGMAS", ), + "latent_image": ("LATENT", ), + } + } + + RETURN_TYPES = ("LATENT",) + + FUNCTION = "add_noise" + + CATEGORY = "_for_testing/custom_sampling/noise" + + def add_noise(self, model, noise, sigmas, latent_image): + if len(sigmas) == 0: + return latent_image + + latent = latent_image + latent_image = latent["samples"] + + noisy = noise.generate_noise(latent) + + model_sampling = model.get_model_object("model_sampling") + process_latent_out = model.get_model_object("process_latent_out") + process_latent_in = model.get_model_object("process_latent_in") + + if len(sigmas) > 1: + scale = torch.abs(sigmas[0] - sigmas[-1]) + else: + scale = sigmas[0] + + if torch.count_nonzero(latent_image) > 0: #Don't shift the empty latent image. + latent_image = process_latent_in(latent_image) + noisy = model_sampling.noise_scaling(scale, noisy, latent_image) + noisy = process_latent_out(noisy) + noisy = torch.nan_to_num(noisy, nan=0.0, posinf=0.0, neginf=0.0) + + out = latent.copy() + out["samples"] = noisy + return (out,) + + +NODE_CLASS_MAPPINGS = { + "SamplerCustom": SamplerCustom, + "BasicScheduler": BasicScheduler, + "KarrasScheduler": KarrasScheduler, + "ExponentialScheduler": ExponentialScheduler, + "PolyexponentialScheduler": PolyexponentialScheduler, + "LaplaceScheduler": LaplaceScheduler, + "VPScheduler": VPScheduler, + "BetaSamplingScheduler": BetaSamplingScheduler, + "SDTurboScheduler": SDTurboScheduler, + "KSamplerSelect": KSamplerSelect, + "SamplerEulerAncestral": SamplerEulerAncestral, + "SamplerEulerAncestralCFGPP": SamplerEulerAncestralCFGPP, + "SamplerLMS": SamplerLMS, + "SamplerDPMPP_3M_SDE": SamplerDPMPP_3M_SDE, + "SamplerDPMPP_2M_SDE": SamplerDPMPP_2M_SDE, + "SamplerDPMPP_SDE": SamplerDPMPP_SDE, + "SamplerDPMPP_2S_Ancestral": SamplerDPMPP_2S_Ancestral, + "SamplerDPMAdaptative": SamplerDPMAdaptative, + "SplitSigmas": SplitSigmas, + "SplitSigmasDenoise": SplitSigmasDenoise, + "FlipSigmas": FlipSigmas, + + "CFGGuider": CFGGuider, + "DualCFGGuider": DualCFGGuider, + "BasicGuider": BasicGuider, + "RandomNoise": RandomNoise, + "DisableNoise": DisableNoise, + "AddNoise": AddNoise, + "SamplerCustomAdvanced": SamplerCustomAdvanced, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "SamplerEulerAncestralCFGPP": "SamplerEulerAncestralCFG++", +} diff --git a/src/comfyui/comfy_extras/nodes_differential_diffusion.py b/src/comfyui/comfy_extras/nodes_differential_diffusion.py new file mode 100644 index 0000000000000000000000000000000000000000..98dbbf102dac861cfb65ed19ad1af499abf7465d --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_differential_diffusion.py @@ -0,0 +1,42 @@ +# code adapted from https://github.com/exx8/differential-diffusion + +import torch + +class DifferentialDiffusion(): + @classmethod + def INPUT_TYPES(s): + return {"required": {"model": ("MODEL", ), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "apply" + CATEGORY = "_for_testing" + INIT = False + + def apply(self, model): + model = model.clone() + model.set_model_denoise_mask_function(self.forward) + return (model,) + + def forward(self, sigma: torch.Tensor, denoise_mask: torch.Tensor, extra_options: dict): + model = extra_options["model"] + step_sigmas = 
extra_options["sigmas"] + sigma_to = model.inner_model.model_sampling.sigma_min + if step_sigmas[-1] > sigma_to: + sigma_to = step_sigmas[-1] + sigma_from = step_sigmas[0] + + ts_from = model.inner_model.model_sampling.timestep(sigma_from) + ts_to = model.inner_model.model_sampling.timestep(sigma_to) + current_ts = model.inner_model.model_sampling.timestep(sigma[0]) + + threshold = (current_ts - ts_to) / (ts_from - ts_to) + + return (denoise_mask >= threshold).to(denoise_mask.dtype) + + +NODE_CLASS_MAPPINGS = { + "DifferentialDiffusion": DifferentialDiffusion, +} +NODE_DISPLAY_NAME_MAPPINGS = { + "DifferentialDiffusion": "Differential Diffusion", +} diff --git a/src/comfyui/comfy_extras/nodes_flux.py b/src/comfyui/comfy_extras/nodes_flux.py new file mode 100644 index 0000000000000000000000000000000000000000..b690432b55bfc0249db3daa924069bb9f0456d7c --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_flux.py @@ -0,0 +1,47 @@ +import node_helpers + +class CLIPTextEncodeFlux: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "clip": ("CLIP", ), + "clip_l": ("STRING", {"multiline": True, "dynamicPrompts": True}), + "t5xxl": ("STRING", {"multiline": True, "dynamicPrompts": True}), + "guidance": ("FLOAT", {"default": 3.5, "min": 0.0, "max": 100.0, "step": 0.1}), + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "encode" + + CATEGORY = "advanced/conditioning/flux" + + def encode(self, clip, clip_l, t5xxl, guidance): + tokens = clip.tokenize(clip_l) + tokens["t5xxl"] = clip.tokenize(t5xxl)["t5xxl"] + + output = clip.encode_from_tokens(tokens, return_pooled=True, return_dict=True) + cond = output.pop("cond") + output["guidance"] = guidance + return ([[cond, output]], ) + +class FluxGuidance: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "conditioning": ("CONDITIONING", ), + "guidance": ("FLOAT", {"default": 3.5, "min": 0.0, "max": 100.0, "step": 0.1}), + }} + + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "append" + + CATEGORY = "advanced/conditioning/flux" + + def append(self, conditioning, guidance): + c = node_helpers.conditioning_set_values(conditioning, {"guidance": guidance}) + return (c, ) + + +NODE_CLASS_MAPPINGS = { + "CLIPTextEncodeFlux": CLIPTextEncodeFlux, + "FluxGuidance": FluxGuidance, +} diff --git a/src/comfyui/comfy_extras/nodes_freelunch.py b/src/comfyui/comfy_extras/nodes_freelunch.py new file mode 100644 index 0000000000000000000000000000000000000000..e3ac58447b29f604debb5bfc0aed3a5f100a4ae9 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_freelunch.py @@ -0,0 +1,113 @@ +#code originally taken from: https://github.com/ChenyangSi/FreeU (under MIT License) + +import torch +import logging + +def Fourier_filter(x, threshold, scale): + # FFT + x_freq = torch.fft.fftn(x.float(), dim=(-2, -1)) + x_freq = torch.fft.fftshift(x_freq, dim=(-2, -1)) + + B, C, H, W = x_freq.shape + mask = torch.ones((B, C, H, W), device=x.device) + + crow, ccol = H // 2, W //2 + mask[..., crow - threshold:crow + threshold, ccol - threshold:ccol + threshold] = scale + x_freq = x_freq * mask + + # IFFT + x_freq = torch.fft.ifftshift(x_freq, dim=(-2, -1)) + x_filtered = torch.fft.ifftn(x_freq, dim=(-2, -1)).real + + return x_filtered.to(x.dtype) + + +class FreeU: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "b1": ("FLOAT", {"default": 1.1, "min": 0.0, "max": 10.0, "step": 0.01}), + "b2": ("FLOAT", {"default": 1.2, "min": 0.0, "max": 10.0, "step": 0.01}), + "s1": ("FLOAT", {"default": 0.9, "min": 0.0, "max": 10.0, "step": 0.01}), + "s2": 
("FLOAT", {"default": 0.2, "min": 0.0, "max": 10.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "model_patches/unet" + + def patch(self, model, b1, b2, s1, s2): + model_channels = model.model.model_config.unet_config["model_channels"] + scale_dict = {model_channels * 4: (b1, s1), model_channels * 2: (b2, s2)} + on_cpu_devices = {} + + def output_block_patch(h, hsp, transformer_options): + scale = scale_dict.get(int(h.shape[1]), None) + if scale is not None: + h[:,:h.shape[1] // 2] = h[:,:h.shape[1] // 2] * scale[0] + if hsp.device not in on_cpu_devices: + try: + hsp = Fourier_filter(hsp, threshold=1, scale=scale[1]) + except: + logging.warning("Device {} does not support the torch.fft functions used in the FreeU node, switching to CPU.".format(hsp.device)) + on_cpu_devices[hsp.device] = True + hsp = Fourier_filter(hsp.cpu(), threshold=1, scale=scale[1]).to(hsp.device) + else: + hsp = Fourier_filter(hsp.cpu(), threshold=1, scale=scale[1]).to(hsp.device) + + return h, hsp + + m = model.clone() + m.set_model_output_block_patch(output_block_patch) + return (m, ) + +class FreeU_V2: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "b1": ("FLOAT", {"default": 1.3, "min": 0.0, "max": 10.0, "step": 0.01}), + "b2": ("FLOAT", {"default": 1.4, "min": 0.0, "max": 10.0, "step": 0.01}), + "s1": ("FLOAT", {"default": 0.9, "min": 0.0, "max": 10.0, "step": 0.01}), + "s2": ("FLOAT", {"default": 0.2, "min": 0.0, "max": 10.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "model_patches/unet" + + def patch(self, model, b1, b2, s1, s2): + model_channels = model.model.model_config.unet_config["model_channels"] + scale_dict = {model_channels * 4: (b1, s1), model_channels * 2: (b2, s2)} + on_cpu_devices = {} + + def output_block_patch(h, hsp, transformer_options): + scale = scale_dict.get(int(h.shape[1]), None) + if scale is not None: + hidden_mean = h.mean(1).unsqueeze(1) + B = hidden_mean.shape[0] + hidden_max, _ = torch.max(hidden_mean.view(B, -1), dim=-1, keepdim=True) + hidden_min, _ = torch.min(hidden_mean.view(B, -1), dim=-1, keepdim=True) + hidden_mean = (hidden_mean - hidden_min.unsqueeze(2).unsqueeze(3)) / (hidden_max - hidden_min).unsqueeze(2).unsqueeze(3) + + h[:,:h.shape[1] // 2] = h[:,:h.shape[1] // 2] * ((scale[0] - 1 ) * hidden_mean + 1) + + if hsp.device not in on_cpu_devices: + try: + hsp = Fourier_filter(hsp, threshold=1, scale=scale[1]) + except: + logging.warning("Device {} does not support the torch.fft functions used in the FreeU node, switching to CPU.".format(hsp.device)) + on_cpu_devices[hsp.device] = True + hsp = Fourier_filter(hsp.cpu(), threshold=1, scale=scale[1]).to(hsp.device) + else: + hsp = Fourier_filter(hsp.cpu(), threshold=1, scale=scale[1]).to(hsp.device) + + return h, hsp + + m = model.clone() + m.set_model_output_block_patch(output_block_patch) + return (m, ) + +NODE_CLASS_MAPPINGS = { + "FreeU": FreeU, + "FreeU_V2": FreeU_V2, +} diff --git a/src/comfyui/comfy_extras/nodes_gits.py b/src/comfyui/comfy_extras/nodes_gits.py new file mode 100644 index 0000000000000000000000000000000000000000..7bfae4ce688721c3b671676c2fc92ed0708f499f --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_gits.py @@ -0,0 +1,369 @@ +# from https://github.com/zju-pi/diff-sampler/tree/main/gits-main +import numpy as np +import torch + +def loglinear_interp(t_steps, num_steps): + """ + Performs log-linear interpolation of a given array of decreasing numbers. 
+ """ + xs = np.linspace(0, 1, len(t_steps)) + ys = np.log(t_steps[::-1]) + + new_xs = np.linspace(0, 1, num_steps) + new_ys = np.interp(new_xs, xs, ys) + + interped_ys = np.exp(new_ys)[::-1].copy() + return interped_ys + +NOISE_LEVELS = { + 0.80: [ + [14.61464119, 7.49001646, 0.02916753], + [14.61464119, 11.54541874, 6.77309084, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 3.07277966, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 2.05039096, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 2.05039096, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 8.75849152, 7.49001646, 5.85520077, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 8.75849152, 7.49001646, 5.85520077, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 5.85520077, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.1956799, 1.98035145, 0.86115354, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.1956799, 1.98035145, 0.86115354, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.07277966, 1.84880662, 0.83188516, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, 9.24142551, 8.75849152, 8.30717278, 7.88507891, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.07277966, 1.84880662, 0.83188516, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, 9.24142551, 8.75849152, 8.30717278, 7.88507891, 7.49001646, 6.77309084, 5.85520077, 4.86714602, 3.75677586, 2.84484982, 1.78698075, 0.803307, 0.02916753], + ], + 0.85: [ + [14.61464119, 7.49001646, 0.02916753], + [14.61464119, 7.49001646, 1.84880662, 0.02916753], + [14.61464119, 11.54541874, 6.77309084, 1.56271636, 0.02916753], + [14.61464119, 11.54541874, 7.11996698, 3.07277966, 1.24153244, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.09240818, 2.84484982, 0.95350921, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.09240818, 2.84484982, 
0.95350921, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.58536053, 3.1956799, 1.84880662, 0.803307, 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 8.75849152, 7.49001646, 5.58536053, 3.1956799, 1.84880662, 0.803307, 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 8.75849152, 7.49001646, 6.14220476, 4.65472794, 3.07277966, 1.84880662, 0.803307, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 8.75849152, 7.49001646, 6.14220476, 4.65472794, 3.07277966, 1.84880662, 0.803307, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.65472794, 3.07277966, 1.84880662, 0.803307, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.65472794, 3.07277966, 1.84880662, 0.803307, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.65472794, 3.07277966, 1.84880662, 0.803307, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.60512662, 2.6383388, 1.56271636, 0.72133851, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, 2.45070267, 1.56271636, 0.72133851, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, 2.45070267, 1.56271636, 0.72133851, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, 2.45070267, 1.56271636, 0.72133851, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, 2.45070267, 1.56271636, 0.72133851, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, 9.24142551, 8.75849152, 8.30717278, 7.88507891, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, 2.45070267, 1.56271636, 0.72133851, 0.02916753], + ], + 0.90: [ + [14.61464119, 6.77309084, 0.02916753], + [14.61464119, 7.49001646, 1.56271636, 0.02916753], + [14.61464119, 7.49001646, 3.07277966, 0.95350921, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.54230714, 0.89115214, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 4.86714602, 2.54230714, 0.89115214, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.09240818, 3.07277966, 1.61558151, 0.69515091, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.11996698, 4.86714602, 3.07277966, 1.61558151, 0.69515091, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 2.95596409, 1.61558151, 0.69515091, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.19988537, 1.24153244, 0.57119018, 0.02916753], + [14.61464119, 12.96784878, 10.90732002, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.19988537, 1.24153244, 0.57119018, 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.19988537, 
1.24153244, 0.57119018, 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.75677586, 2.84484982, 1.84880662, 1.08895338, 0.52423614, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.75677586, 2.84484982, 1.84880662, 1.08895338, 0.52423614, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.44769001, 5.58536053, 4.45427561, 3.32507086, 2.45070267, 1.61558151, 0.95350921, 0.45573691, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.44769001, 5.58536053, 4.45427561, 3.32507086, 2.45070267, 1.61558151, 0.95350921, 0.45573691, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.86714602, 3.91689563, 3.07277966, 2.27973175, 1.56271636, 0.95350921, 0.45573691, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.86714602, 3.91689563, 3.07277966, 2.27973175, 1.56271636, 0.95350921, 0.45573691, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.86714602, 3.91689563, 3.07277966, 2.27973175, 1.56271636, 0.95350921, 0.45573691, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 5.09240818, 4.45427561, 3.60512662, 2.95596409, 2.19988537, 1.51179266, 0.89115214, 0.43325692, 0.02916753], + ], + 0.95: [ + [14.61464119, 6.77309084, 0.02916753], + [14.61464119, 6.77309084, 1.56271636, 0.02916753], + [14.61464119, 7.49001646, 2.84484982, 0.89115214, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.36326075, 0.803307, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.95596409, 1.56271636, 0.64427125, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 4.86714602, 2.95596409, 1.56271636, 0.64427125, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 4.86714602, 3.07277966, 1.91321158, 1.08895338, 0.50118381, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.07277966, 1.91321158, 1.08895338, 0.50118381, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.07277966, 1.91321158, 1.08895338, 0.50118381, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.19988537, 1.41535246, 0.803307, 0.38853383, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.46139455, 2.6383388, 1.84880662, 1.24153244, 0.72133851, 0.34370604, 0.02916753], + [14.61464119, 12.96784878, 10.90732002, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.46139455, 2.6383388, 1.84880662, 1.24153244, 0.72133851, 0.34370604, 0.02916753], + [14.61464119, 12.96784878, 10.90732002, 8.75849152, 7.49001646, 6.14220476, 4.86714602, 3.75677586, 2.95596409, 2.19988537, 1.56271636, 1.05362725, 0.64427125, 0.32104823, 0.02916753], + [14.61464119, 12.96784878, 10.90732002, 8.75849152, 7.49001646, 6.44769001, 5.58536053, 4.65472794, 3.60512662, 2.95596409, 2.19988537, 1.56271636, 1.05362725, 0.64427125, 0.32104823, 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 6.44769001, 
5.58536053, 4.65472794, 3.60512662, 2.95596409, 2.19988537, 1.56271636, 1.05362725, 0.64427125, 0.32104823, 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 6.44769001, 5.58536053, 4.65472794, 3.75677586, 3.07277966, 2.45070267, 1.78698075, 1.24153244, 0.83188516, 0.50118381, 0.22545385, 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 5.09240818, 4.45427561, 3.60512662, 2.95596409, 2.36326075, 1.72759056, 1.24153244, 0.83188516, 0.50118381, 0.22545385, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 5.09240818, 4.45427561, 3.60512662, 2.95596409, 2.36326075, 1.72759056, 1.24153244, 0.83188516, 0.50118381, 0.22545385, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 5.09240818, 4.45427561, 3.75677586, 3.07277966, 2.45070267, 1.91321158, 1.46270394, 1.05362725, 0.72133851, 0.43325692, 0.19894916, 0.02916753], + ], + 1.00: [ + [14.61464119, 1.56271636, 0.02916753], + [14.61464119, 6.77309084, 0.95350921, 0.02916753], + [14.61464119, 6.77309084, 2.36326075, 0.803307, 0.02916753], + [14.61464119, 7.11996698, 3.07277966, 1.56271636, 0.59516323, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.41535246, 0.57119018, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.61558151, 0.86115354, 0.38853383, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 4.86714602, 2.84484982, 1.61558151, 0.86115354, 0.38853383, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 4.86714602, 3.07277966, 1.98035145, 1.24153244, 0.72133851, 0.34370604, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.07277966, 1.98035145, 1.24153244, 0.72133851, 0.34370604, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.27973175, 1.51179266, 0.95350921, 0.54755926, 0.25053367, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.36326075, 1.61558151, 1.08895338, 0.72133851, 0.41087446, 0.17026083, 0.02916753], + [14.61464119, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.36326075, 1.61558151, 1.08895338, 0.72133851, 0.41087446, 0.17026083, 0.02916753], + [14.61464119, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.84484982, 2.12350607, 1.56271636, 1.08895338, 0.72133851, 0.41087446, 0.17026083, 0.02916753], + [14.61464119, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.84484982, 2.19988537, 1.61558151, 1.162866, 0.803307, 0.50118381, 0.27464288, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.75677586, 3.07277966, 2.45070267, 1.84880662, 1.36964464, 1.01931262, 0.72133851, 0.45573691, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 8.75849152, 7.49001646, 6.14220476, 5.09240818, 4.26497746, 3.46139455, 2.84484982, 2.19988537, 1.67050016, 1.24153244, 0.92192322, 0.64427125, 0.43325692, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 8.75849152, 7.49001646, 6.14220476, 5.09240818, 4.26497746, 3.60512662, 2.95596409, 2.45070267, 1.91321158, 1.51179266, 1.12534678, 0.83188516, 0.59516323, 0.38853383, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 12.2308979, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 5.09240818, 4.26497746, 3.60512662, 2.95596409, 
2.45070267, 1.91321158, 1.51179266, 1.12534678, 0.83188516, 0.59516323, 0.38853383, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 12.2308979, 9.24142551, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 5.09240818, 4.26497746, 3.60512662, 2.95596409, 2.45070267, 1.91321158, 1.51179266, 1.12534678, 0.83188516, 0.59516323, 0.38853383, 0.22545385, 0.09824532, 0.02916753], + ], + 1.05: [ + [14.61464119, 0.95350921, 0.02916753], + [14.61464119, 6.77309084, 0.89115214, 0.02916753], + [14.61464119, 6.77309084, 2.05039096, 0.72133851, 0.02916753], + [14.61464119, 6.77309084, 2.84484982, 1.28281462, 0.52423614, 0.02916753], + [14.61464119, 6.77309084, 3.07277966, 1.61558151, 0.803307, 0.34370604, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.56271636, 0.803307, 0.34370604, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.61558151, 0.95350921, 0.52423614, 0.22545385, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.07277966, 1.98035145, 1.24153244, 0.74807048, 0.41087446, 0.17026083, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.27973175, 1.51179266, 0.95350921, 0.59516323, 0.34370604, 0.13792117, 0.02916753], + [14.61464119, 7.49001646, 5.09240818, 3.46139455, 2.45070267, 1.61558151, 1.08895338, 0.72133851, 0.45573691, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.09240818, 3.46139455, 2.45070267, 1.61558151, 1.08895338, 0.72133851, 0.45573691, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.36326075, 1.61558151, 1.08895338, 0.72133851, 0.45573691, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.45070267, 1.72759056, 1.24153244, 0.86115354, 0.59516323, 0.38853383, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.84484982, 2.19988537, 1.61558151, 1.162866, 0.83188516, 0.59516323, 0.38853383, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.84484982, 2.19988537, 1.67050016, 1.28281462, 0.95350921, 0.72133851, 0.52423614, 0.34370604, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.95596409, 2.36326075, 1.84880662, 1.41535246, 1.08895338, 0.83188516, 0.61951244, 0.45573691, 0.32104823, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.95596409, 2.45070267, 1.91321158, 1.51179266, 1.20157266, 0.95350921, 0.74807048, 0.57119018, 0.43325692, 0.29807833, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 8.30717278, 7.11996698, 5.85520077, 4.65472794, 3.60512662, 2.95596409, 2.45070267, 1.91321158, 1.51179266, 1.20157266, 0.95350921, 0.74807048, 0.57119018, 0.43325692, 0.29807833, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 8.30717278, 7.11996698, 5.85520077, 4.65472794, 3.60512662, 2.95596409, 2.45070267, 1.98035145, 1.61558151, 1.32549286, 1.08895338, 0.86115354, 0.69515091, 0.54755926, 0.41087446, 0.29807833, 0.19894916, 0.09824532, 0.02916753], + ], + 1.10: [ + [14.61464119, 0.89115214, 0.02916753], + [14.61464119, 2.36326075, 0.72133851, 0.02916753], + [14.61464119, 5.85520077, 1.61558151, 0.57119018, 0.02916753], + [14.61464119, 6.77309084, 2.45070267, 1.08895338, 0.45573691, 0.02916753], + [14.61464119, 6.77309084, 2.95596409, 1.56271636, 0.803307, 0.34370604, 
0.02916753], + [14.61464119, 6.77309084, 3.07277966, 1.61558151, 0.89115214, 0.4783645, 0.19894916, 0.02916753], + [14.61464119, 6.77309084, 3.07277966, 1.84880662, 1.08895338, 0.64427125, 0.34370604, 0.13792117, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.61558151, 0.95350921, 0.54755926, 0.27464288, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.95596409, 1.91321158, 1.24153244, 0.803307, 0.4783645, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.05039096, 1.41535246, 0.95350921, 0.64427125, 0.41087446, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.27973175, 1.61558151, 1.12534678, 0.803307, 0.54755926, 0.36617002, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.32507086, 2.45070267, 1.72759056, 1.24153244, 0.89115214, 0.64427125, 0.45573691, 0.32104823, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 5.09240818, 3.60512662, 2.84484982, 2.05039096, 1.51179266, 1.08895338, 0.803307, 0.59516323, 0.43325692, 0.29807833, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 5.09240818, 3.60512662, 2.84484982, 2.12350607, 1.61558151, 1.24153244, 0.95350921, 0.72133851, 0.54755926, 0.41087446, 0.29807833, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.45070267, 1.84880662, 1.41535246, 1.08895338, 0.83188516, 0.64427125, 0.50118381, 0.36617002, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.45070267, 1.91321158, 1.51179266, 1.20157266, 0.95350921, 0.74807048, 0.59516323, 0.45573691, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 5.85520077, 4.45427561, 3.46139455, 2.84484982, 2.19988537, 1.72759056, 1.36964464, 1.08895338, 0.86115354, 0.69515091, 0.54755926, 0.43325692, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.46139455, 2.84484982, 2.19988537, 1.72759056, 1.36964464, 1.08895338, 0.86115354, 0.69515091, 0.54755926, 0.43325692, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.46139455, 2.84484982, 2.19988537, 1.72759056, 1.36964464, 1.08895338, 0.89115214, 0.72133851, 0.59516323, 0.4783645, 0.38853383, 0.29807833, 0.22545385, 0.17026083, 0.09824532, 0.02916753], + ], + 1.15: [ + [14.61464119, 0.83188516, 0.02916753], + [14.61464119, 1.84880662, 0.59516323, 0.02916753], + [14.61464119, 5.85520077, 1.56271636, 0.52423614, 0.02916753], + [14.61464119, 5.85520077, 1.91321158, 0.83188516, 0.34370604, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.24153244, 0.59516323, 0.25053367, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.51179266, 0.803307, 0.41087446, 0.17026083, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.56271636, 0.89115214, 0.50118381, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 6.77309084, 3.07277966, 1.84880662, 1.12534678, 0.72133851, 0.43325692, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 6.77309084, 3.07277966, 1.91321158, 1.24153244, 0.803307, 0.52423614, 0.34370604, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.95596409, 1.91321158, 1.24153244, 0.803307, 0.52423614, 0.34370604, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.05039096, 1.36964464, 
0.95350921, 0.69515091, 0.4783645, 0.32104823, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.803307, 0.59516323, 0.43325692, 0.29807833, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.803307, 0.59516323, 0.45573691, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.19988537, 1.61558151, 1.24153244, 0.95350921, 0.74807048, 0.59516323, 0.45573691, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.78698075, 1.32549286, 1.01931262, 0.803307, 0.64427125, 0.50118381, 0.38853383, 0.29807833, 0.22545385, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.78698075, 1.32549286, 1.01931262, 0.803307, 0.64427125, 0.52423614, 0.41087446, 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.84880662, 1.41535246, 1.12534678, 0.89115214, 0.72133851, 0.59516323, 0.4783645, 0.38853383, 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.84880662, 1.41535246, 1.12534678, 0.89115214, 0.72133851, 0.59516323, 0.50118381, 0.41087446, 0.34370604, 0.27464288, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.84880662, 1.41535246, 1.12534678, 0.89115214, 0.72133851, 0.59516323, 0.50118381, 0.41087446, 0.34370604, 0.29807833, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + ], + 1.20: [ + [14.61464119, 0.803307, 0.02916753], + [14.61464119, 1.56271636, 0.52423614, 0.02916753], + [14.61464119, 2.36326075, 0.92192322, 0.36617002, 0.02916753], + [14.61464119, 2.84484982, 1.24153244, 0.59516323, 0.25053367, 0.02916753], + [14.61464119, 5.85520077, 2.05039096, 0.95350921, 0.45573691, 0.17026083, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.24153244, 0.64427125, 0.29807833, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.36964464, 0.803307, 0.45573691, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.61558151, 0.95350921, 0.59516323, 0.36617002, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.67050016, 1.08895338, 0.74807048, 0.50118381, 0.32104823, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.95596409, 1.84880662, 1.24153244, 0.83188516, 0.59516323, 0.41087446, 0.27464288, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 3.07277966, 1.98035145, 1.36964464, 0.95350921, 0.69515091, 0.50118381, 0.36617002, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 6.77309084, 3.46139455, 2.36326075, 1.56271636, 1.08895338, 0.803307, 0.59516323, 0.45573691, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 6.77309084, 3.46139455, 2.45070267, 1.61558151, 1.162866, 0.86115354, 0.64427125, 0.50118381, 0.38853383, 0.29807833, 0.22545385, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.83188516, 0.64427125, 0.50118381, 0.38853383, 0.29807833, 0.22545385, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 
0.83188516, 0.64427125, 0.50118381, 0.41087446, 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.83188516, 0.64427125, 0.50118381, 0.41087446, 0.34370604, 0.27464288, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.19988537, 1.61558151, 1.20157266, 0.92192322, 0.72133851, 0.57119018, 0.45573691, 0.36617002, 0.29807833, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.19988537, 1.61558151, 1.24153244, 0.95350921, 0.74807048, 0.59516323, 0.4783645, 0.38853383, 0.32104823, 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.19988537, 1.61558151, 1.24153244, 0.95350921, 0.74807048, 0.59516323, 0.50118381, 0.41087446, 0.34370604, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + ], + 1.25: [ + [14.61464119, 0.72133851, 0.02916753], + [14.61464119, 1.56271636, 0.50118381, 0.02916753], + [14.61464119, 2.05039096, 0.803307, 0.32104823, 0.02916753], + [14.61464119, 2.36326075, 0.95350921, 0.43325692, 0.17026083, 0.02916753], + [14.61464119, 2.84484982, 1.24153244, 0.59516323, 0.27464288, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.51179266, 0.803307, 0.43325692, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.36326075, 1.24153244, 0.72133851, 0.41087446, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.36964464, 0.83188516, 0.52423614, 0.34370604, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.61558151, 0.98595673, 0.64427125, 0.43325692, 0.27464288, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.67050016, 1.08895338, 0.74807048, 0.52423614, 0.36617002, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.72759056, 1.162866, 0.803307, 0.59516323, 0.45573691, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.95596409, 1.84880662, 1.24153244, 0.86115354, 0.64427125, 0.4783645, 0.36617002, 0.27464288, 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.95596409, 1.84880662, 1.28281462, 0.92192322, 0.69515091, 0.52423614, 0.41087446, 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.95596409, 1.91321158, 1.32549286, 0.95350921, 0.72133851, 0.54755926, 0.43325692, 0.34370604, 0.27464288, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.95596409, 1.91321158, 1.32549286, 0.95350921, 0.72133851, 0.57119018, 0.45573691, 0.36617002, 0.29807833, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.95596409, 1.91321158, 1.32549286, 0.95350921, 0.74807048, 0.59516323, 0.4783645, 0.38853383, 0.32104823, 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 3.07277966, 2.05039096, 1.41535246, 1.05362725, 0.803307, 0.61951244, 0.50118381, 0.41087446, 0.34370604, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 3.07277966, 2.05039096, 1.41535246, 1.05362725, 0.803307, 0.64427125, 0.52423614, 0.43325692, 0.36617002, 0.32104823, 
0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 3.07277966, 2.05039096, 1.46270394, 1.08895338, 0.83188516, 0.66947293, 0.54755926, 0.45573691, 0.38853383, 0.34370604, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + ], + 1.30: [ + [14.61464119, 0.72133851, 0.02916753], + [14.61464119, 1.24153244, 0.43325692, 0.02916753], + [14.61464119, 1.56271636, 0.59516323, 0.22545385, 0.02916753], + [14.61464119, 1.84880662, 0.803307, 0.36617002, 0.13792117, 0.02916753], + [14.61464119, 2.36326075, 1.01931262, 0.52423614, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.36964464, 0.74807048, 0.41087446, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.56271636, 0.89115214, 0.54755926, 0.34370604, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.61558151, 0.95350921, 0.61951244, 0.41087446, 0.27464288, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.36964464, 0.83188516, 0.54755926, 0.36617002, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.41535246, 0.92192322, 0.64427125, 0.45573691, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.6383388, 1.56271636, 1.01931262, 0.72133851, 0.50118381, 0.36617002, 0.27464288, 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.61558151, 1.05362725, 0.74807048, 0.54755926, 0.41087446, 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.61558151, 1.08895338, 0.77538133, 0.57119018, 0.43325692, 0.34370604, 0.27464288, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.59516323, 0.45573691, 0.36617002, 0.29807833, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.59516323, 0.4783645, 0.38853383, 0.32104823, 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.72759056, 1.162866, 0.83188516, 0.64427125, 0.50118381, 0.41087446, 0.34370604, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.72759056, 1.162866, 0.83188516, 0.64427125, 0.52423614, 0.43325692, 0.36617002, 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.78698075, 1.24153244, 0.92192322, 0.72133851, 0.57119018, 0.45573691, 0.38853383, 0.34370604, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.78698075, 1.24153244, 0.92192322, 0.72133851, 0.57119018, 0.4783645, 0.41087446, 0.36617002, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + ], + 1.35: [ + [14.61464119, 0.69515091, 0.02916753], + [14.61464119, 0.95350921, 0.34370604, 0.02916753], + [14.61464119, 1.56271636, 0.57119018, 0.19894916, 0.02916753], + [14.61464119, 1.61558151, 0.69515091, 0.29807833, 0.09824532, 0.02916753], + [14.61464119, 1.84880662, 0.83188516, 0.43325692, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 
1.162866, 0.64427125, 0.36617002, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.36964464, 0.803307, 0.50118381, 0.32104823, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.41535246, 0.83188516, 0.54755926, 0.36617002, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 0.95350921, 0.64427125, 0.45573691, 0.32104823, 0.22545385, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 0.95350921, 0.64427125, 0.45573691, 0.34370604, 0.25053367, 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.61558151, 1.01931262, 0.72133851, 0.52423614, 0.38853383, 0.29807833, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.61558151, 1.01931262, 0.72133851, 0.52423614, 0.41087446, 0.32104823, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.61558151, 1.05362725, 0.74807048, 0.54755926, 0.43325692, 0.34370604, 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.72759056, 1.12534678, 0.803307, 0.59516323, 0.45573691, 0.36617002, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.72759056, 1.12534678, 0.803307, 0.59516323, 0.4783645, 0.38853383, 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.51179266, 1.01931262, 0.74807048, 0.57119018, 0.45573691, 0.36617002, 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.6383388, 1.61558151, 1.08895338, 0.803307, 0.61951244, 0.50118381, 0.41087446, 0.34370604, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.6383388, 1.61558151, 1.08895338, 0.803307, 0.64427125, 0.52423614, 0.43325692, 0.36617002, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.6383388, 1.61558151, 1.08895338, 0.803307, 0.64427125, 0.52423614, 0.45573691, 0.38853383, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + ], + 1.40: [ + [14.61464119, 0.59516323, 0.02916753], + [14.61464119, 0.95350921, 0.34370604, 0.02916753], + [14.61464119, 1.08895338, 0.43325692, 0.13792117, 0.02916753], + [14.61464119, 1.56271636, 0.64427125, 0.27464288, 0.09824532, 0.02916753], + [14.61464119, 1.61558151, 0.803307, 0.43325692, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 2.05039096, 0.95350921, 0.54755926, 0.34370604, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.24153244, 0.72133851, 0.43325692, 0.27464288, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.24153244, 0.74807048, 0.50118381, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.28281462, 0.803307, 0.52423614, 0.36617002, 0.27464288, 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.28281462, 0.803307, 0.54755926, 0.38853383, 0.29807833, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.41535246, 0.86115354, 0.59516323, 0.43325692, 0.32104823, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 
0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.51179266, 0.95350921, 0.64427125, 0.45573691, 0.34370604, 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.51179266, 0.95350921, 0.64427125, 0.4783645, 0.36617002, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 0.98595673, 0.69515091, 0.52423614, 0.41087446, 0.34370604, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 1.01931262, 0.72133851, 0.54755926, 0.43325692, 0.36617002, 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.61558151, 1.05362725, 0.74807048, 0.57119018, 0.45573691, 0.38853383, 0.34370604, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.61951244, 0.50118381, 0.41087446, 0.36617002, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.61951244, 0.50118381, 0.43325692, 0.38853383, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.64427125, 0.52423614, 0.45573691, 0.41087446, 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + ], + 1.45: [ + [14.61464119, 0.59516323, 0.02916753], + [14.61464119, 0.803307, 0.25053367, 0.02916753], + [14.61464119, 0.95350921, 0.34370604, 0.09824532, 0.02916753], + [14.61464119, 1.24153244, 0.54755926, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 1.56271636, 0.72133851, 0.36617002, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 1.61558151, 0.803307, 0.45573691, 0.27464288, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 1.91321158, 0.95350921, 0.57119018, 0.36617002, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 2.19988537, 1.08895338, 0.64427125, 0.41087446, 0.27464288, 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.24153244, 0.74807048, 0.50118381, 0.34370604, 0.25053367, 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.24153244, 0.74807048, 0.50118381, 0.36617002, 0.27464288, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.28281462, 0.803307, 0.54755926, 0.41087446, 0.32104823, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.28281462, 0.803307, 0.57119018, 0.43325692, 0.34370604, 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.28281462, 0.83188516, 0.59516323, 0.45573691, 0.36617002, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.28281462, 0.83188516, 0.59516323, 0.45573691, 0.36617002, 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.51179266, 0.95350921, 0.69515091, 0.52423614, 0.41087446, 0.34370604, 0.29807833, 0.27464288, 0.25053367, 
0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.51179266, 0.95350921, 0.69515091, 0.52423614, 0.43325692, 0.36617002, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 0.98595673, 0.72133851, 0.54755926, 0.45573691, 0.38853383, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 1.01931262, 0.74807048, 0.57119018, 0.4783645, 0.41087446, 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 1.01931262, 0.74807048, 0.59516323, 0.50118381, 0.43325692, 0.38853383, 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + ], + 1.50: [ + [14.61464119, 0.54755926, 0.02916753], + [14.61464119, 0.803307, 0.25053367, 0.02916753], + [14.61464119, 0.86115354, 0.32104823, 0.09824532, 0.02916753], + [14.61464119, 1.24153244, 0.54755926, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 1.56271636, 0.72133851, 0.36617002, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 1.61558151, 0.803307, 0.45573691, 0.27464288, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 1.61558151, 0.83188516, 0.52423614, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 1.84880662, 0.95350921, 0.59516323, 0.38853383, 0.27464288, 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 1.84880662, 0.95350921, 0.59516323, 0.41087446, 0.29807833, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 1.84880662, 0.95350921, 0.61951244, 0.43325692, 0.32104823, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.19988537, 1.12534678, 0.72133851, 0.50118381, 0.36617002, 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.19988537, 1.12534678, 0.72133851, 0.50118381, 0.36617002, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.36326075, 1.24153244, 0.803307, 0.57119018, 0.43325692, 0.34370604, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.36326075, 1.24153244, 0.803307, 0.57119018, 0.43325692, 0.34370604, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.36326075, 1.24153244, 0.803307, 0.59516323, 0.45573691, 0.36617002, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.36326075, 1.24153244, 0.803307, 0.59516323, 0.45573691, 0.38853383, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.32549286, 0.86115354, 0.64427125, 0.50118381, 0.41087446, 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.36964464, 0.92192322, 0.69515091, 0.54755926, 0.45573691, 0.41087446, 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 
0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.41535246, 0.95350921, 0.72133851, 0.57119018, 0.4783645, 0.43325692, 0.38853383, 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + ], +} + +class GITSScheduler: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"coeff": ("FLOAT", {"default": 1.20, "min": 0.80, "max": 1.50, "step": 0.05}), + "steps": ("INT", {"default": 10, "min": 2, "max": 1000}), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + } + } + RETURN_TYPES = ("SIGMAS",) + CATEGORY = "sampling/custom_sampling/schedulers" + + FUNCTION = "get_sigmas" + + def get_sigmas(self, coeff, steps, denoise): + total_steps = steps + if denoise < 1.0: + if denoise <= 0.0: + return (torch.FloatTensor([]),) + total_steps = round(steps * denoise) + + if steps <= 20: + sigmas = NOISE_LEVELS[round(coeff, 2)][steps-2][:] + else: + sigmas = NOISE_LEVELS[round(coeff, 2)][-1][:] + sigmas = loglinear_interp(sigmas, steps + 1) + + sigmas = sigmas[-(total_steps + 1):] + sigmas[-1] = 0 + return (torch.FloatTensor(sigmas), ) + +NODE_CLASS_MAPPINGS = { + "GITSScheduler": GITSScheduler, +} diff --git a/src/comfyui/comfy_extras/nodes_hunyuan.py b/src/comfyui/comfy_extras/nodes_hunyuan.py new file mode 100644 index 0000000000000000000000000000000000000000..b03eaf6a2044d6a9c05413b9ca9e0b985bbdd26e --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_hunyuan.py @@ -0,0 +1,25 @@ +class CLIPTextEncodeHunyuanDiT: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "clip": ("CLIP", ), + "bert": ("STRING", {"multiline": True, "dynamicPrompts": True}), + "mt5xl": ("STRING", {"multiline": True, "dynamicPrompts": True}), + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "encode" + + CATEGORY = "advanced/conditioning" + + def encode(self, clip, bert, mt5xl): + tokens = clip.tokenize(bert) + tokens["mt5xl"] = clip.tokenize(mt5xl)["mt5xl"] + + output = clip.encode_from_tokens(tokens, return_pooled=True, return_dict=True) + cond = output.pop("cond") + return ([[cond, output]], ) + + +NODE_CLASS_MAPPINGS = { + "CLIPTextEncodeHunyuanDiT": CLIPTextEncodeHunyuanDiT, +} diff --git a/src/comfyui/comfy_extras/nodes_hypernetwork.py b/src/comfyui/comfy_extras/nodes_hypernetwork.py new file mode 100644 index 0000000000000000000000000000000000000000..665632292782e00fff4d73fa085e9c397a9bec4f --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_hypernetwork.py @@ -0,0 +1,120 @@ +import comfy.utils +import folder_paths +import torch +import logging + +def load_hypernetwork_patch(path, strength): + sd = comfy.utils.load_torch_file(path, safe_load=True) + activation_func = sd.get('activation_func', 'linear') + is_layer_norm = sd.get('is_layer_norm', False) + use_dropout = sd.get('use_dropout', False) + activate_output = sd.get('activate_output', False) + last_layer_dropout = sd.get('last_layer_dropout', False) + + valid_activation = { + "linear": torch.nn.Identity, + "relu": torch.nn.ReLU, + "leakyrelu": torch.nn.LeakyReLU, + "elu": torch.nn.ELU, + "swish": torch.nn.Hardswish, + "tanh": torch.nn.Tanh, + "sigmoid": torch.nn.Sigmoid, + "softsign": torch.nn.Softsign, + "mish": torch.nn.Mish, + } + + if activation_func not in valid_activation: + logging.error("Unsupported Hypernetwork format, if you report it I might implement it. 
{} {} {} {} {} {}".format(path, activation_func, is_layer_norm, use_dropout, activate_output, last_layer_dropout)) + return None + + out = {} + + for d in sd: + try: + dim = int(d) + except: + continue + + output = [] + for index in [0, 1]: + attn_weights = sd[dim][index] + keys = attn_weights.keys() + + linears = filter(lambda a: a.endswith(".weight"), keys) + linears = list(map(lambda a: a[:-len(".weight")], linears)) + layers = [] + + i = 0 + while i < len(linears): + lin_name = linears[i] + last_layer = (i == (len(linears) - 1)) + penultimate_layer = (i == (len(linears) - 2)) + + lin_weight = attn_weights['{}.weight'.format(lin_name)] + lin_bias = attn_weights['{}.bias'.format(lin_name)] + layer = torch.nn.Linear(lin_weight.shape[1], lin_weight.shape[0]) + layer.load_state_dict({"weight": lin_weight, "bias": lin_bias}) + layers.append(layer) + if activation_func != "linear": + if (not last_layer) or (activate_output): + layers.append(valid_activation[activation_func]()) + if is_layer_norm: + i += 1 + ln_name = linears[i] + ln_weight = attn_weights['{}.weight'.format(ln_name)] + ln_bias = attn_weights['{}.bias'.format(ln_name)] + ln = torch.nn.LayerNorm(ln_weight.shape[0]) + ln.load_state_dict({"weight": ln_weight, "bias": ln_bias}) + layers.append(ln) + if use_dropout: + if (not last_layer) and (not penultimate_layer or last_layer_dropout): + layers.append(torch.nn.Dropout(p=0.3)) + i += 1 + + output.append(torch.nn.Sequential(*layers)) + out[dim] = torch.nn.ModuleList(output) + + class hypernetwork_patch: + def __init__(self, hypernet, strength): + self.hypernet = hypernet + self.strength = strength + def __call__(self, q, k, v, extra_options): + dim = k.shape[-1] + if dim in self.hypernet: + hn = self.hypernet[dim] + k = k + hn[0](k) * self.strength + v = v + hn[1](v) * self.strength + + return q, k, v + + def to(self, device): + for d in self.hypernet.keys(): + self.hypernet[d] = self.hypernet[d].to(device) + return self + + return hypernetwork_patch(out, strength) + +class HypernetworkLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "hypernetwork_name": (folder_paths.get_filename_list("hypernetworks"), ), + "strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "load_hypernetwork" + + CATEGORY = "loaders" + + def load_hypernetwork(self, model, hypernetwork_name, strength): + hypernetwork_path = folder_paths.get_full_path_or_raise("hypernetworks", hypernetwork_name) + model_hypernetwork = model.clone() + patch = load_hypernetwork_patch(hypernetwork_path, strength) + if patch is not None: + model_hypernetwork.set_model_attn1_patch(patch) + model_hypernetwork.set_model_attn2_patch(patch) + return (model_hypernetwork,) + +NODE_CLASS_MAPPINGS = { + "HypernetworkLoader": HypernetworkLoader +} diff --git a/src/comfyui/comfy_extras/nodes_hypertile.py b/src/comfyui/comfy_extras/nodes_hypertile.py new file mode 100644 index 0000000000000000000000000000000000000000..227133f3978e156cebd7496c2f442916c35afda7 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_hypertile.py @@ -0,0 +1,83 @@ +#Taken from: https://github.com/tfernd/HyperTile/ + +import math +from einops import rearrange +# Use torch rng for consistency across generations +from torch import randint + +def random_divisor(value: int, min_value: int, /, max_options: int = 1) -> int: + min_value = min(min_value, value) + + # All big divisors of value (inclusive) + divisors = [i for i in range(min_value, value + 1) if value 
% i == 0] + + ns = [value // i for i in divisors[:max_options]] # has at least 1 element + + if len(ns) - 1 > 0: + idx = randint(low=0, high=len(ns) - 1, size=(1,)).item() + else: + idx = 0 + + return ns[idx] + +class HyperTile: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "tile_size": ("INT", {"default": 256, "min": 1, "max": 2048}), + "swap_size": ("INT", {"default": 2, "min": 1, "max": 128}), + "max_depth": ("INT", {"default": 0, "min": 0, "max": 10}), + "scale_depth": ("BOOLEAN", {"default": False}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "model_patches/unet" + + def patch(self, model, tile_size, swap_size, max_depth, scale_depth): + model_channels = model.model.model_config.unet_config["model_channels"] + + latent_tile_size = max(32, tile_size) // 8 + self.temp = None + + def hypertile_in(q, k, v, extra_options): + model_chans = q.shape[-2] + orig_shape = extra_options['original_shape'] + apply_to = [] + for i in range(max_depth + 1): + apply_to.append((orig_shape[-2] / (2 ** i)) * (orig_shape[-1] / (2 ** i))) + + if model_chans in apply_to: + shape = extra_options["original_shape"] + aspect_ratio = shape[-1] / shape[-2] + + hw = q.size(1) + h, w = round(math.sqrt(hw * aspect_ratio)), round(math.sqrt(hw / aspect_ratio)) + + factor = (2 ** apply_to.index(model_chans)) if scale_depth else 1 + nh = random_divisor(h, latent_tile_size * factor, swap_size) + nw = random_divisor(w, latent_tile_size * factor, swap_size) + + if nh * nw > 1: + q = rearrange(q, "b (nh h nw w) c -> (b nh nw) (h w) c", h=h // nh, w=w // nw, nh=nh, nw=nw) + self.temp = (nh, nw, h, w) + return q, k, v + + return q, k, v + def hypertile_out(out, extra_options): + if self.temp is not None: + nh, nw, h, w = self.temp + self.temp = None + out = rearrange(out, "(b nh nw) hw c -> b nh nw hw c", nh=nh, nw=nw) + out = rearrange(out, "b nh nw (h w) c -> b (nh h nw w) c", h=h // nh, w=w // nw) + return out + + + m = model.clone() + m.set_model_attn1_patch(hypertile_in) + m.set_model_attn1_output_patch(hypertile_out) + return (m, ) + +NODE_CLASS_MAPPINGS = { + "HyperTile": HyperTile, +} diff --git a/src/comfyui/comfy_extras/nodes_images.py b/src/comfyui/comfy_extras/nodes_images.py new file mode 100644 index 0000000000000000000000000000000000000000..af37666b29fcf5d6a572b715682fd031e28639bc --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_images.py @@ -0,0 +1,195 @@ +import nodes +import folder_paths +from comfy.cli_args import args + +from PIL import Image +from PIL.PngImagePlugin import PngInfo + +import numpy as np +import json +import os + +MAX_RESOLUTION = nodes.MAX_RESOLUTION + +class ImageCrop: + @classmethod + def INPUT_TYPES(s): + return {"required": { "image": ("IMAGE",), + "width": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), + "height": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), + "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + }} + RETURN_TYPES = ("IMAGE",) + FUNCTION = "crop" + + CATEGORY = "image/transform" + + def crop(self, image, width, height, x, y): + x = min(x, image.shape[2] - 1) + y = min(y, image.shape[1] - 1) + to_x = width + x + to_y = height + y + img = image[:,y:to_y, x:to_x, :] + return (img,) + +class RepeatImageBatch: + @classmethod + def INPUT_TYPES(s): + return {"required": { "image": ("IMAGE",), + "amount": ("INT", {"default": 1, "min": 1, "max": 4096}), + }} + 
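# IMAGE tensors are [batch, height, width, channels]; repeat() below tiles only + # the batch axis, so one input frame becomes "amount" identical frames. + 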
RETURN_TYPES = ("IMAGE",) + FUNCTION = "repeat" + + CATEGORY = "image/batch" + + def repeat(self, image, amount): + s = image.repeat((amount, 1,1,1)) + return (s,) + +class ImageFromBatch: + @classmethod + def INPUT_TYPES(s): + return {"required": { "image": ("IMAGE",), + "batch_index": ("INT", {"default": 0, "min": 0, "max": 4095}), + "length": ("INT", {"default": 1, "min": 1, "max": 4096}), + }} + RETURN_TYPES = ("IMAGE",) + FUNCTION = "frombatch" + + CATEGORY = "image/batch" + + def frombatch(self, image, batch_index, length): + s_in = image + batch_index = min(s_in.shape[0] - 1, batch_index) + length = min(s_in.shape[0] - batch_index, length) + s = s_in[batch_index:batch_index + length].clone() + return (s,) + +class SaveAnimatedWEBP: + def __init__(self): + self.output_dir = folder_paths.get_output_directory() + self.type = "output" + self.prefix_append = "" + + methods = {"default": 4, "fastest": 0, "slowest": 6} + @classmethod + def INPUT_TYPES(s): + return {"required": + {"images": ("IMAGE", ), + "filename_prefix": ("STRING", {"default": "ComfyUI"}), + "fps": ("FLOAT", {"default": 6.0, "min": 0.01, "max": 1000.0, "step": 0.01}), + "lossless": ("BOOLEAN", {"default": True}), + "quality": ("INT", {"default": 80, "min": 0, "max": 100}), + "method": (list(s.methods.keys()),), + # "num_frames": ("INT", {"default": 0, "min": 0, "max": 8192}), + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + RETURN_TYPES = () + FUNCTION = "save_images" + + OUTPUT_NODE = True + + CATEGORY = "image/animation" + + def save_images(self, images, fps, filename_prefix, lossless, quality, method, num_frames=0, prompt=None, extra_pnginfo=None): + method = self.methods.get(method) + filename_prefix += self.prefix_append + full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, self.output_dir, images[0].shape[1], images[0].shape[0]) + results = list() + pil_images = [] + for image in images: + i = 255. 
* image.cpu().numpy() + img = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8)) + pil_images.append(img) + + metadata = pil_images[0].getexif() + if not args.disable_metadata: + if prompt is not None: + metadata[0x0110] = "prompt:{}".format(json.dumps(prompt)) + if extra_pnginfo is not None: + inital_exif = 0x010f + for x in extra_pnginfo: + metadata[inital_exif] = "{}:{}".format(x, json.dumps(extra_pnginfo[x])) + inital_exif -= 1 + + if num_frames == 0: + num_frames = len(pil_images) + + c = len(pil_images) + for i in range(0, c, num_frames): + file = f"{filename}_{counter:05}_.webp" + pil_images[i].save(os.path.join(full_output_folder, file), save_all=True, duration=int(1000.0/fps), append_images=pil_images[i + 1:i + num_frames], exif=metadata, lossless=lossless, quality=quality, method=method) + results.append({ + "filename": file, + "subfolder": subfolder, + "type": self.type + }) + counter += 1 + + animated = num_frames != 1 + return { "ui": { "images": results, "animated": (animated,) } } + +class SaveAnimatedPNG: + def __init__(self): + self.output_dir = folder_paths.get_output_directory() + self.type = "output" + self.prefix_append = "" + + @classmethod + def INPUT_TYPES(s): + return {"required": + {"images": ("IMAGE", ), + "filename_prefix": ("STRING", {"default": "ComfyUI"}), + "fps": ("FLOAT", {"default": 6.0, "min": 0.01, "max": 1000.0, "step": 0.01}), + "compress_level": ("INT", {"default": 4, "min": 0, "max": 9}) + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + RETURN_TYPES = () + FUNCTION = "save_images" + + OUTPUT_NODE = True + + CATEGORY = "image/animation" + + def save_images(self, images, fps, compress_level, filename_prefix="ComfyUI", prompt=None, extra_pnginfo=None): + filename_prefix += self.prefix_append + full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, self.output_dir, images[0].shape[1], images[0].shape[0]) + results = list() + pil_images = [] + for image in images: + i = 255. 
* image.cpu().numpy() + img = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8)) + pil_images.append(img) + + metadata = None + if not args.disable_metadata: + metadata = PngInfo() + if prompt is not None: + metadata.add(b"comf", "prompt".encode("latin-1", "strict") + b"\0" + json.dumps(prompt).encode("latin-1", "strict"), after_idat=True) + if extra_pnginfo is not None: + for x in extra_pnginfo: + metadata.add(b"comf", x.encode("latin-1", "strict") + b"\0" + json.dumps(extra_pnginfo[x]).encode("latin-1", "strict"), after_idat=True) + + file = f"{filename}_{counter:05}_.png" + pil_images[0].save(os.path.join(full_output_folder, file), pnginfo=metadata, compress_level=compress_level, save_all=True, duration=int(1000.0/fps), append_images=pil_images[1:]) + results.append({ + "filename": file, + "subfolder": subfolder, + "type": self.type + }) + + return { "ui": { "images": results, "animated": (True,)} } + +NODE_CLASS_MAPPINGS = { + "ImageCrop": ImageCrop, + "RepeatImageBatch": RepeatImageBatch, + "ImageFromBatch": ImageFromBatch, + "SaveAnimatedWEBP": SaveAnimatedWEBP, + "SaveAnimatedPNG": SaveAnimatedPNG, +} diff --git a/src/comfyui/comfy_extras/nodes_ip2p.py b/src/comfyui/comfy_extras/nodes_ip2p.py new file mode 100644 index 0000000000000000000000000000000000000000..c2e70a84c10ca5cc1b3ca853a97adc3c64fbb315 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_ip2p.py @@ -0,0 +1,45 @@ +import torch + +class InstructPixToPixConditioning: + @classmethod + def INPUT_TYPES(s): + return {"required": {"positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "vae": ("VAE", ), + "pixels": ("IMAGE", ), + }} + + RETURN_TYPES = ("CONDITIONING","CONDITIONING","LATENT") + RETURN_NAMES = ("positive", "negative", "latent") + FUNCTION = "encode" + + CATEGORY = "conditioning/instructpix2pix" + + def encode(self, positive, negative, pixels, vae): + x = (pixels.shape[1] // 8) * 8 + y = (pixels.shape[2] // 8) * 8 + + if pixels.shape[1] != x or pixels.shape[2] != y: + x_offset = (pixels.shape[1] % 8) // 2 + y_offset = (pixels.shape[2] % 8) // 2 + pixels = pixels[:,x_offset:x + x_offset, y_offset:y + y_offset,:] + + concat_latent = vae.encode(pixels) + + out_latent = {} + out_latent["samples"] = torch.zeros_like(concat_latent) + + out = [] + for conditioning in [positive, negative]: + c = [] + for t in conditioning: + d = t[1].copy() + d["concat_latent_image"] = concat_latent + n = [t[0], d] + c.append(n) + out.append(c) + return (out[0], out[1], out_latent) + +NODE_CLASS_MAPPINGS = { + "InstructPixToPixConditioning": InstructPixToPixConditioning, +} diff --git a/src/comfyui/comfy_extras/nodes_latent.py b/src/comfyui/comfy_extras/nodes_latent.py new file mode 100644 index 0000000000000000000000000000000000000000..af27368182d9212b078d4f7eead3796ffca15fe5 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_latent.py @@ -0,0 +1,285 @@ +import comfy.utils +import comfy_extras.nodes_post_processing +import torch + +def reshape_latent_to(target_shape, latent): + if latent.shape[1:] != target_shape[1:]: + latent = comfy.utils.common_upscale(latent, target_shape[3], target_shape[2], "bilinear", "center") + return comfy.utils.repeat_to_batch_size(latent, target_shape[0]) + + +class LatentAdd: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples1": ("LATENT",), "samples2": ("LATENT",)}} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "op" + + CATEGORY = "latent/advanced" + + def op(self, samples1, samples2): + samples_out = samples1.copy() + + s1 = samples1["samples"] + s2 = samples2["samples"] + + 
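
# reshape_latent_to (defined at the top of this file) is what lets these
# arithmetic nodes accept mismatched inputs: it bilinearly rescales the second
# latent to the first one's spatial size, then repeats it along the batch
# axis, e.g. a (1, 4, 32, 32) s2 becomes (4, 4, 64, 64) to match a batch-4 s1.
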
s2 = reshape_latent_to(s1.shape, s2) + samples_out["samples"] = s1 + s2 + return (samples_out,) + +class LatentSubtract: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples1": ("LATENT",), "samples2": ("LATENT",)}} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "op" + + CATEGORY = "latent/advanced" + + def op(self, samples1, samples2): + samples_out = samples1.copy() + + s1 = samples1["samples"] + s2 = samples2["samples"] + + s2 = reshape_latent_to(s1.shape, s2) + samples_out["samples"] = s1 - s2 + return (samples_out,) + +class LatentMultiply: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples": ("LATENT",), + "multiplier": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + }} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "op" + + CATEGORY = "latent/advanced" + + def op(self, samples, multiplier): + samples_out = samples.copy() + + s1 = samples["samples"] + samples_out["samples"] = s1 * multiplier + return (samples_out,) + +class LatentInterpolate: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples1": ("LATENT",), + "samples2": ("LATENT",), + "ratio": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + }} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "op" + + CATEGORY = "latent/advanced" + + def op(self, samples1, samples2, ratio): + samples_out = samples1.copy() + + s1 = samples1["samples"] + s2 = samples2["samples"] + + s2 = reshape_latent_to(s1.shape, s2) + + m1 = torch.linalg.vector_norm(s1, dim=(1)) + m2 = torch.linalg.vector_norm(s2, dim=(1)) + + s1 = torch.nan_to_num(s1 / m1) + s2 = torch.nan_to_num(s2 / m2) + + t = (s1 * ratio + s2 * (1.0 - ratio)) + mt = torch.linalg.vector_norm(t, dim=(1)) + st = torch.nan_to_num(t / mt) + + samples_out["samples"] = st * (m1 * ratio + m2 * (1.0 - ratio)) + return (samples_out,) + +class LatentBatch: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples1": ("LATENT",), "samples2": ("LATENT",)}} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "batch" + + CATEGORY = "latent/batch" + + def batch(self, samples1, samples2): + samples_out = samples1.copy() + s1 = samples1["samples"] + s2 = samples2["samples"] + + if s1.shape[1:] != s2.shape[1:]: + s2 = comfy.utils.common_upscale(s2, s1.shape[3], s1.shape[2], "bilinear", "center") + s = torch.cat((s1, s2), dim=0) + samples_out["samples"] = s + samples_out["batch_index"] = samples1.get("batch_index", [x for x in range(0, s1.shape[0])]) + samples2.get("batch_index", [x for x in range(0, s2.shape[0])]) + return (samples_out,) + +class LatentBatchSeedBehavior: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples": ("LATENT",), + "seed_behavior": (["random", "fixed"],{"default": "fixed"}),}} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "op" + + CATEGORY = "latent/advanced" + + def op(self, samples, seed_behavior): + samples_out = samples.copy() + latent = samples["samples"] + if seed_behavior == "random": + if 'batch_index' in samples_out: + samples_out.pop('batch_index') + elif seed_behavior == "fixed": + batch_number = samples_out.get("batch_index", [0])[0] + samples_out["batch_index"] = [batch_number] * latent.shape[0] + + return (samples_out,) + +class LatentApplyOperation: + @classmethod + def INPUT_TYPES(s): + return {"required": { "samples": ("LATENT",), + "operation": ("LATENT_OPERATION",), + }} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "op" + + CATEGORY = "latent/advanced/operations" + EXPERIMENTAL = True + + def op(self, samples, operation): + samples_out = samples.copy() + + s1 = 
samples["samples"] + samples_out["samples"] = operation(latent=s1) + return (samples_out,) + +class LatentApplyOperationCFG: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "operation": ("LATENT_OPERATION",), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "latent/advanced/operations" + EXPERIMENTAL = True + + def patch(self, model, operation): + m = model.clone() + + def pre_cfg_function(args): + conds_out = args["conds_out"] + if len(conds_out) == 2: + conds_out[0] = operation(latent=(conds_out[0] - conds_out[1])) + conds_out[1] + else: + conds_out[0] = operation(latent=conds_out[0]) + return conds_out + + m.set_model_sampler_pre_cfg_function(pre_cfg_function) + return (m, ) + +class LatentOperationTonemapReinhard: + @classmethod + def INPUT_TYPES(s): + return {"required": { "multiplier": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step": 0.01}), + }} + + RETURN_TYPES = ("LATENT_OPERATION",) + FUNCTION = "op" + + CATEGORY = "latent/advanced/operations" + EXPERIMENTAL = True + + def op(self, multiplier): + def tonemap_reinhard(latent, **kwargs): + latent_vector_magnitude = (torch.linalg.vector_norm(latent, dim=(1)) + 0.0000000001)[:,None] + normalized_latent = latent / latent_vector_magnitude + + mean = torch.mean(latent_vector_magnitude, dim=(1,2,3), keepdim=True) + std = torch.std(latent_vector_magnitude, dim=(1,2,3), keepdim=True) + + top = (std * 5 + mean) * multiplier + + #reinhard + latent_vector_magnitude *= (1.0 / top) + new_magnitude = latent_vector_magnitude / (latent_vector_magnitude + 1.0) + new_magnitude *= top + + return normalized_latent * new_magnitude + return (tonemap_reinhard,) + +class LatentOperationSharpen: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "sharpen_radius": ("INT", { + "default": 9, + "min": 1, + "max": 31, + "step": 1 + }), + "sigma": ("FLOAT", { + "default": 1.0, + "min": 0.1, + "max": 10.0, + "step": 0.1 + }), + "alpha": ("FLOAT", { + "default": 0.1, + "min": 0.0, + "max": 5.0, + "step": 0.01 + }), + }} + + RETURN_TYPES = ("LATENT_OPERATION",) + FUNCTION = "op" + + CATEGORY = "latent/advanced/operations" + EXPERIMENTAL = True + + def op(self, sharpen_radius, sigma, alpha): + def sharpen(latent, **kwargs): + luminance = (torch.linalg.vector_norm(latent, dim=(1)) + 1e-6)[:,None] + normalized_latent = latent / luminance + channels = latent.shape[1] + + kernel_size = sharpen_radius * 2 + 1 + kernel = comfy_extras.nodes_post_processing.gaussian_kernel(kernel_size, sigma, device=luminance.device) + center = kernel_size // 2 + + kernel *= alpha * -10 + kernel[center, center] = kernel[center, center] - kernel.sum() + 1.0 + + padded_image = torch.nn.functional.pad(normalized_latent, (sharpen_radius,sharpen_radius,sharpen_radius,sharpen_radius), 'reflect') + sharpened = torch.nn.functional.conv2d(padded_image, kernel.repeat(channels, 1, 1).unsqueeze(1), padding=kernel_size // 2, groups=channels)[:,:,sharpen_radius:-sharpen_radius, sharpen_radius:-sharpen_radius] + + return luminance * sharpened + return (sharpen,) + +NODE_CLASS_MAPPINGS = { + "LatentAdd": LatentAdd, + "LatentSubtract": LatentSubtract, + "LatentMultiply": LatentMultiply, + "LatentInterpolate": LatentInterpolate, + "LatentBatch": LatentBatch, + "LatentBatchSeedBehavior": LatentBatchSeedBehavior, + "LatentApplyOperation": LatentApplyOperation, + "LatentApplyOperationCFG": LatentApplyOperationCFG, + "LatentOperationTonemapReinhard": LatentOperationTonemapReinhard, + "LatentOperationSharpen": LatentOperationSharpen, 
+} diff --git a/src/comfyui/comfy_extras/nodes_lora_extract.py b/src/comfyui/comfy_extras/nodes_lora_extract.py new file mode 100644 index 0000000000000000000000000000000000000000..dfd4fe9f4a5c4b7aff37d244fc25033e9a286119 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_lora_extract.py @@ -0,0 +1,119 @@ +import torch +import comfy.model_management +import comfy.utils +import folder_paths +import os +import logging +from enum import Enum + +CLAMP_QUANTILE = 0.99 + +def extract_lora(diff, rank): + conv2d = (len(diff.shape) == 4) + kernel_size = None if not conv2d else diff.size()[2:4] + conv2d_3x3 = conv2d and kernel_size != (1, 1) + out_dim, in_dim = diff.size()[0:2] + rank = min(rank, in_dim, out_dim) + + if conv2d: + if conv2d_3x3: + diff = diff.flatten(start_dim=1) + else: + diff = diff.squeeze() + + + U, S, Vh = torch.linalg.svd(diff.float()) + U = U[:, :rank] + S = S[:rank] + U = U @ torch.diag(S) + Vh = Vh[:rank, :] + + dist = torch.cat([U.flatten(), Vh.flatten()]) + hi_val = torch.quantile(dist, CLAMP_QUANTILE) + low_val = -hi_val + + U = U.clamp(low_val, hi_val) + Vh = Vh.clamp(low_val, hi_val) + if conv2d: + U = U.reshape(out_dim, rank, 1, 1) + Vh = Vh.reshape(rank, in_dim, kernel_size[0], kernel_size[1]) + return (U, Vh) + +class LORAType(Enum): + STANDARD = 0 + FULL_DIFF = 1 + +LORA_TYPES = {"standard": LORAType.STANDARD, + "full_diff": LORAType.FULL_DIFF} + +def calc_lora_model(model_diff, rank, prefix_model, prefix_lora, output_sd, lora_type, bias_diff=False): + comfy.model_management.load_models_gpu([model_diff], force_patch_weights=True) + sd = model_diff.model_state_dict(filter_prefix=prefix_model) + + for k in sd: + if k.endswith(".weight"): + weight_diff = sd[k] + if lora_type == LORAType.STANDARD: + if weight_diff.ndim < 2: + if bias_diff: + output_sd["{}{}.diff".format(prefix_lora, k[len(prefix_model):-7])] = weight_diff.contiguous().half().cpu() + continue + try: + out = extract_lora(weight_diff, rank) + output_sd["{}{}.lora_up.weight".format(prefix_lora, k[len(prefix_model):-7])] = out[0].contiguous().half().cpu() + output_sd["{}{}.lora_down.weight".format(prefix_lora, k[len(prefix_model):-7])] = out[1].contiguous().half().cpu() + except: + logging.warning("Could not generate lora weights for key {}, is the weight difference a zero?".format(k)) + elif lora_type == LORAType.FULL_DIFF: + output_sd["{}{}.diff".format(prefix_lora, k[len(prefix_model):-7])] = weight_diff.contiguous().half().cpu() + + elif bias_diff and k.endswith(".bias"): + output_sd["{}{}.diff_b".format(prefix_lora, k[len(prefix_model):-5])] = sd[k].contiguous().half().cpu() + return output_sd + +class LoraSave: + def __init__(self): + self.output_dir = folder_paths.get_output_directory() + + @classmethod + def INPUT_TYPES(s): + return {"required": {"filename_prefix": ("STRING", {"default": "loras/ComfyUI_extracted_lora"}), + "rank": ("INT", {"default": 8, "min": 1, "max": 4096, "step": 1}), + "lora_type": (tuple(LORA_TYPES.keys()),), + "bias_diff": ("BOOLEAN", {"default": True}), + }, + "optional": {"model_diff": ("MODEL", {"tooltip": "The ModelSubtract output to be converted to a lora."}), + "text_encoder_diff": ("CLIP", {"tooltip": "The CLIPSubtract output to be converted to a lora."})}, + } + RETURN_TYPES = () + FUNCTION = "save" + OUTPUT_NODE = True + + CATEGORY = "_for_testing" + + def save(self, filename_prefix, rank, lora_type, bias_diff, model_diff=None, text_encoder_diff=None): + if model_diff is None and text_encoder_diff is None: + return {} + + lora_type = LORA_TYPES.get(lora_type) + 
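
For intuition, extract_lora above is a plain truncated SVD of the weight delta: keep the top `rank` singular values, fold them into U, and quantile-clamp outliers, so up @ down approximately reconstructs the delta. A quick sketch using the extract_lora defined above, with made-up shapes:

import torch

diff = torch.randn(320, 768)             # hypothetical W_finetuned - W_base delta
up, down = extract_lora(diff, rank=8)    # up: (320, 8), down: (8, 768)
approx = up @ down                       # best rank-8 fit (modulo the clamp)

# at load time the patched weight is roughly W_base + strength * up @ down
print((diff - approx).norm() / diff.norm())
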
full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, self.output_dir) + + output_sd = {} + if model_diff is not None: + output_sd = calc_lora_model(model_diff, rank, "diffusion_model.", "diffusion_model.", output_sd, lora_type, bias_diff=bias_diff) + if text_encoder_diff is not None: + output_sd = calc_lora_model(text_encoder_diff.patcher, rank, "", "text_encoders.", output_sd, lora_type, bias_diff=bias_diff) + + output_checkpoint = f"{filename}_{counter:05}_.safetensors" + output_checkpoint = os.path.join(full_output_folder, output_checkpoint) + + comfy.utils.save_torch_file(output_sd, output_checkpoint, metadata=None) + return {} + +NODE_CLASS_MAPPINGS = { + "LoraSave": LoraSave +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "LoraSave": "Extract and Save Lora" +} diff --git a/src/comfyui/comfy_extras/nodes_mask.py b/src/comfyui/comfy_extras/nodes_mask.py new file mode 100644 index 0000000000000000000000000000000000000000..29589b4abade581e4c60ccc2cab23b582dd2f61a --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_mask.py @@ -0,0 +1,382 @@ +import numpy as np +import scipy.ndimage +import torch +import comfy.utils + +from nodes import MAX_RESOLUTION + +def composite(destination, source, x, y, mask = None, multiplier = 8, resize_source = False): + source = source.to(destination.device) + if resize_source: + source = torch.nn.functional.interpolate(source, size=(destination.shape[2], destination.shape[3]), mode="bilinear") + + source = comfy.utils.repeat_to_batch_size(source, destination.shape[0]) + + x = max(-source.shape[3] * multiplier, min(x, destination.shape[3] * multiplier)) + y = max(-source.shape[2] * multiplier, min(y, destination.shape[2] * multiplier)) + + left, top = (x // multiplier, y // multiplier) + right, bottom = (left + source.shape[3], top + source.shape[2],) + + if mask is None: + mask = torch.ones_like(source) + else: + mask = mask.to(destination.device, copy=True) + mask = torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(source.shape[2], source.shape[3]), mode="bilinear") + mask = comfy.utils.repeat_to_batch_size(mask, source.shape[0]) + + # calculate the bounds of the source that will be overlapping the destination + # this prevents the source trying to overwrite latent pixels that are out of bounds + # of the destination + visible_width, visible_height = (destination.shape[3] - left + min(0, x), destination.shape[2] - top + min(0, y),) + + mask = mask[:, :, :visible_height, :visible_width] + inverse_mask = torch.ones_like(mask) - mask + + source_portion = mask * source[:, :, :visible_height, :visible_width] + destination_portion = inverse_mask * destination[:, :, top:bottom, left:right] + + destination[:, :, top:bottom, left:right] = source_portion + destination_portion + return destination + +class LatentCompositeMasked: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "destination": ("LATENT",), + "source": ("LATENT",), + "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 8}), + "resize_source": ("BOOLEAN", {"default": False}), + }, + "optional": { + "mask": ("MASK",), + } + } + RETURN_TYPES = ("LATENT",) + FUNCTION = "composite" + + CATEGORY = "latent" + + def composite(self, destination, source, x, y, resize_source, mask = None): + output = destination.copy() + destination = destination["samples"].clone() + source = source["samples"] + output["samples"] 
= composite(destination, source, x, y, mask, 8, resize_source) + return (output,) + +class ImageCompositeMasked: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "destination": ("IMAGE",), + "source": ("IMAGE",), + "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "resize_source": ("BOOLEAN", {"default": False}), + }, + "optional": { + "mask": ("MASK",), + } + } + RETURN_TYPES = ("IMAGE",) + FUNCTION = "composite" + + CATEGORY = "image" + + def composite(self, destination, source, x, y, resize_source, mask = None): + destination = destination.clone().movedim(-1, 1) + output = composite(destination, source.movedim(-1, 1), x, y, mask, 1, resize_source).movedim(1, -1) + return (output,) + +class MaskToImage: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask": ("MASK",), + } + } + + CATEGORY = "mask" + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "mask_to_image" + + def mask_to_image(self, mask): + result = mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])).movedim(1, -1).expand(-1, -1, -1, 3) + return (result,) + +class ImageToMask: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "channel": (["red", "green", "blue", "alpha"],), + } + } + + CATEGORY = "mask" + + RETURN_TYPES = ("MASK",) + FUNCTION = "image_to_mask" + + def image_to_mask(self, image, channel): + channels = ["red", "green", "blue", "alpha"] + mask = image[:, :, :, channels.index(channel)] + return (mask,) + +class ImageColorToMask: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "color": ("INT", {"default": 0, "min": 0, "max": 0xFFFFFF, "step": 1, "display": "color"}), + } + } + + CATEGORY = "mask" + + RETURN_TYPES = ("MASK",) + FUNCTION = "image_to_mask" + + def image_to_mask(self, image, color): + temp = (torch.clamp(image, 0, 1.0) * 255.0).round().to(torch.int) + temp = torch.bitwise_left_shift(temp[:,:,:,0], 16) + torch.bitwise_left_shift(temp[:,:,:,1], 8) + temp[:,:,:,2] + mask = torch.where(temp == color, 255, 0).float() + return (mask,) + +class SolidMask: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "value": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "width": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), + "height": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), + } + } + + CATEGORY = "mask" + + RETURN_TYPES = ("MASK",) + + FUNCTION = "solid" + + def solid(self, value, width, height): + out = torch.full((1, height, width), value, dtype=torch.float32, device="cpu") + return (out,) + +class InvertMask: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "mask": ("MASK",), + } + } + + CATEGORY = "mask" + + RETURN_TYPES = ("MASK",) + + FUNCTION = "invert" + + def invert(self, mask): + out = 1.0 - mask + return (out,) + +class CropMask: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "mask": ("MASK",), + "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "width": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), + "height": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}), + } + } + + CATEGORY = "mask" + + RETURN_TYPES = ("MASK",) + + FUNCTION = "crop" + + def crop(self, mask, x, y, width, height): + mask = mask.reshape((-1, 
mask.shape[-2], mask.shape[-1])) + out = mask[:, y:y + height, x:x + width] + return (out,) + +class MaskComposite: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "destination": ("MASK",), + "source": ("MASK",), + "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "operation": (["multiply", "add", "subtract", "and", "or", "xor"],), + } + } + + CATEGORY = "mask" + + RETURN_TYPES = ("MASK",) + + FUNCTION = "combine" + + def combine(self, destination, source, x, y, operation): + output = destination.reshape((-1, destination.shape[-2], destination.shape[-1])).clone() + source = source.reshape((-1, source.shape[-2], source.shape[-1])) + + left, top = (x, y,) + right, bottom = (min(left + source.shape[-1], destination.shape[-1]), min(top + source.shape[-2], destination.shape[-2])) + visible_width, visible_height = (right - left, bottom - top,) + + source_portion = source[:, :visible_height, :visible_width] + destination_portion = destination[:, top:bottom, left:right] + + if operation == "multiply": + output[:, top:bottom, left:right] = destination_portion * source_portion + elif operation == "add": + output[:, top:bottom, left:right] = destination_portion + source_portion + elif operation == "subtract": + output[:, top:bottom, left:right] = destination_portion - source_portion + elif operation == "and": + output[:, top:bottom, left:right] = torch.bitwise_and(destination_portion.round().bool(), source_portion.round().bool()).float() + elif operation == "or": + output[:, top:bottom, left:right] = torch.bitwise_or(destination_portion.round().bool(), source_portion.round().bool()).float() + elif operation == "xor": + output[:, top:bottom, left:right] = torch.bitwise_xor(destination_portion.round().bool(), source_portion.round().bool()).float() + + output = torch.clamp(output, 0.0, 1.0) + + return (output,) + +class FeatherMask: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "mask": ("MASK",), + "left": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "top": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "right": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "bottom": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + } + } + + CATEGORY = "mask" + + RETURN_TYPES = ("MASK",) + + FUNCTION = "feather" + + def feather(self, mask, left, top, right, bottom): + output = mask.reshape((-1, mask.shape[-2], mask.shape[-1])).clone() + + left = min(left, output.shape[-1]) + right = min(right, output.shape[-1]) + top = min(top, output.shape[-2]) + bottom = min(bottom, output.shape[-2]) + + for x in range(left): + feather_rate = (x + 1.0) / left + output[:, :, x] *= feather_rate + + for x in range(right): + feather_rate = (x + 1) / right + output[:, :, -x] *= feather_rate + + for y in range(top): + feather_rate = (y + 1) / top + output[:, y, :] *= feather_rate + + for y in range(bottom): + feather_rate = (y + 1) / bottom + output[:, -y, :] *= feather_rate + + return (output,) + +class GrowMask: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "mask": ("MASK",), + "expand": ("INT", {"default": 0, "min": -MAX_RESOLUTION, "max": MAX_RESOLUTION, "step": 1}), + "tapered_corners": ("BOOLEAN", {"default": True}), + }, + } + + CATEGORY = "mask" + + RETURN_TYPES = ("MASK",) + + FUNCTION = "expand_mask" + + def expand_mask(self, mask, expand, tapered_corners): + c = 0 if 
tapered_corners else 1 + kernel = np.array([[c, 1, c], + [1, 1, 1], + [c, 1, c]]) + mask = mask.reshape((-1, mask.shape[-2], mask.shape[-1])) + out = [] + for m in mask: + output = m.numpy() + for _ in range(abs(expand)): + if expand < 0: + output = scipy.ndimage.grey_erosion(output, footprint=kernel) + else: + output = scipy.ndimage.grey_dilation(output, footprint=kernel) + output = torch.from_numpy(output) + out.append(output) + return (torch.stack(out, dim=0),) + +class ThresholdMask: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mask": ("MASK",), + "value": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + } + } + + CATEGORY = "mask" + + RETURN_TYPES = ("MASK",) + FUNCTION = "image_to_mask" + + def image_to_mask(self, mask, value): + mask = (mask > value).float() + return (mask,) + + +NODE_CLASS_MAPPINGS = { + "LatentCompositeMasked": LatentCompositeMasked, + "ImageCompositeMasked": ImageCompositeMasked, + "MaskToImage": MaskToImage, + "ImageToMask": ImageToMask, + "ImageColorToMask": ImageColorToMask, + "SolidMask": SolidMask, + "InvertMask": InvertMask, + "CropMask": CropMask, + "MaskComposite": MaskComposite, + "FeatherMask": FeatherMask, + "GrowMask": GrowMask, + "ThresholdMask": ThresholdMask, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "ImageToMask": "Convert Image to Mask", + "MaskToImage": "Convert Mask to Image", +} diff --git a/src/comfyui/comfy_extras/nodes_mochi.py b/src/comfyui/comfy_extras/nodes_mochi.py new file mode 100644 index 0000000000000000000000000000000000000000..4cbbea099968e4ea8b1ef410524fa7fa14f4fd73 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_mochi.py @@ -0,0 +1,26 @@ +import nodes +import torch +import comfy.model_management + +class EmptyMochiLatentVideo: + def __init__(self): + self.device = comfy.model_management.intermediate_device() + + @classmethod + def INPUT_TYPES(s): + return {"required": { "width": ("INT", {"default": 848, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 16}), + "height": ("INT", {"default": 480, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 16}), + "length": ("INT", {"default": 25, "min": 7, "max": nodes.MAX_RESOLUTION, "step": 6}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096})}} + RETURN_TYPES = ("LATENT",) + FUNCTION = "generate" + + CATEGORY = "latent/mochi" + + def generate(self, width, height, length, batch_size=1): + latent = torch.zeros([batch_size, 12, ((length - 1) // 6) + 1, height // 8, width // 8], device=self.device) + return ({"samples":latent}, ) + +NODE_CLASS_MAPPINGS = { + "EmptyMochiLatentVideo": EmptyMochiLatentVideo, +} diff --git a/src/comfyui/comfy_extras/nodes_model_advanced.py b/src/comfyui/comfy_extras/nodes_model_advanced.py new file mode 100644 index 0000000000000000000000000000000000000000..918e6085aada9ef87ef04c47270317ab16246450 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_model_advanced.py @@ -0,0 +1,326 @@ +import folder_paths +import comfy.sd +import comfy.model_sampling +import comfy.latent_formats +import nodes +import torch + +class LCM(comfy.model_sampling.EPS): + def calculate_denoised(self, sigma, model_output, model_input): + timestep = self.timestep(sigma).view(sigma.shape[:1] + (1,) * (model_output.ndim - 1)) + sigma = sigma.view(sigma.shape[:1] + (1,) * (model_output.ndim - 1)) + x0 = model_input - model_output * sigma + + sigma_data = 0.5 + scaled_timestep = timestep * 10.0 #timestep_scaling + + c_skip = sigma_data**2 / (scaled_timestep**2 + sigma_data**2) + c_out = scaled_timestep / (scaled_timestep**2 + sigma_data**2) ** 
0.5 + + return c_out * x0 + c_skip * model_input + +class X0(comfy.model_sampling.EPS): + def calculate_denoised(self, sigma, model_output, model_input): + return model_output + +class ModelSamplingDiscreteDistilled(comfy.model_sampling.ModelSamplingDiscrete): + original_timesteps = 50 + + def __init__(self, model_config=None): + super().__init__(model_config) + + self.skip_steps = self.num_timesteps // self.original_timesteps + + sigmas_valid = torch.zeros((self.original_timesteps), dtype=torch.float32) + for x in range(self.original_timesteps): + sigmas_valid[self.original_timesteps - 1 - x] = self.sigmas[self.num_timesteps - 1 - x * self.skip_steps] + + self.set_sigmas(sigmas_valid) + + def timestep(self, sigma): + log_sigma = sigma.log() + dists = log_sigma.to(self.log_sigmas.device) - self.log_sigmas[:, None] + return (dists.abs().argmin(dim=0).view(sigma.shape) * self.skip_steps + (self.skip_steps - 1)).to(sigma.device) + + def sigma(self, timestep): + t = torch.clamp(((timestep.float().to(self.log_sigmas.device) - (self.skip_steps - 1)) / self.skip_steps).float(), min=0, max=(len(self.sigmas) - 1)) + low_idx = t.floor().long() + high_idx = t.ceil().long() + w = t.frac() + log_sigma = (1 - w) * self.log_sigmas[low_idx] + w * self.log_sigmas[high_idx] + return log_sigma.exp().to(timestep.device) + + +def rescale_zero_terminal_snr_sigmas(sigmas): + alphas_cumprod = 1 / ((sigmas * sigmas) + 1) + alphas_bar_sqrt = alphas_cumprod.sqrt() + + # Store old values. + alphas_bar_sqrt_0 = alphas_bar_sqrt[0].clone() + alphas_bar_sqrt_T = alphas_bar_sqrt[-1].clone() + + # Shift so the last timestep is zero. + alphas_bar_sqrt -= (alphas_bar_sqrt_T) + + # Scale so the first timestep is back to the old value. + alphas_bar_sqrt *= alphas_bar_sqrt_0 / (alphas_bar_sqrt_0 - alphas_bar_sqrt_T) + + # Convert alphas_bar_sqrt to betas + alphas_bar = alphas_bar_sqrt**2 # Revert sqrt + alphas_bar[-1] = 4.8973451890853435e-08 + return ((1 - alphas_bar) / alphas_bar) ** 0.5 + +class ModelSamplingDiscrete: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "sampling": (["eps", "v_prediction", "lcm", "x0"],), + "zsnr": ("BOOLEAN", {"default": False}), + }} + + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "advanced/model" + + def patch(self, model, sampling, zsnr): + m = model.clone() + + sampling_base = comfy.model_sampling.ModelSamplingDiscrete + if sampling == "eps": + sampling_type = comfy.model_sampling.EPS + elif sampling == "v_prediction": + sampling_type = comfy.model_sampling.V_PREDICTION + elif sampling == "lcm": + sampling_type = LCM + sampling_base = ModelSamplingDiscreteDistilled + elif sampling == "x0": + sampling_type = X0 + + class ModelSamplingAdvanced(sampling_base, sampling_type): + pass + + model_sampling = ModelSamplingAdvanced(model.model.model_config) + if zsnr: + model_sampling.set_sigmas(rescale_zero_terminal_snr_sigmas(model_sampling.sigmas)) + + m.add_object_patch("model_sampling", model_sampling) + return (m, ) + +class ModelSamplingStableCascade: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "shift": ("FLOAT", {"default": 2.0, "min": 0.0, "max": 100.0, "step":0.01}), + }} + + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "advanced/model" + + def patch(self, model, shift): + m = model.clone() + + sampling_base = comfy.model_sampling.StableCascadeSampling + sampling_type = comfy.model_sampling.EPS + + class ModelSamplingAdvanced(sampling_base, sampling_type): + pass + + model_sampling = 
ModelSamplingAdvanced(model.model.model_config) + model_sampling.set_parameters(shift) + m.add_object_patch("model_sampling", model_sampling) + return (m, ) + +class ModelSamplingSD3: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "shift": ("FLOAT", {"default": 3.0, "min": 0.0, "max": 100.0, "step":0.01}), + }} + + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "advanced/model" + + def patch(self, model, shift, multiplier=1000): + m = model.clone() + + sampling_base = comfy.model_sampling.ModelSamplingDiscreteFlow + sampling_type = comfy.model_sampling.CONST + + class ModelSamplingAdvanced(sampling_base, sampling_type): + pass + + model_sampling = ModelSamplingAdvanced(model.model.model_config) + model_sampling.set_parameters(shift=shift, multiplier=multiplier) + m.add_object_patch("model_sampling", model_sampling) + return (m, ) + +class ModelSamplingAuraFlow(ModelSamplingSD3): + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "shift": ("FLOAT", {"default": 1.73, "min": 0.0, "max": 100.0, "step":0.01}), + }} + + FUNCTION = "patch_aura" + + def patch_aura(self, model, shift): + return self.patch(model, shift, multiplier=1.0) + +class ModelSamplingFlux: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "max_shift": ("FLOAT", {"default": 1.15, "min": 0.0, "max": 100.0, "step":0.01}), + "base_shift": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 100.0, "step":0.01}), + "width": ("INT", {"default": 1024, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 1024, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), + }} + + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "advanced/model" + + def patch(self, model, max_shift, base_shift, width, height): + m = model.clone() + + x1 = 256 + x2 = 4096 + mm = (max_shift - base_shift) / (x2 - x1) + b = base_shift - mm * x1 + shift = (width * height / (8 * 8 * 2 * 2)) * mm + b + + sampling_base = comfy.model_sampling.ModelSamplingFlux + sampling_type = comfy.model_sampling.CONST + + class ModelSamplingAdvanced(sampling_base, sampling_type): + pass + + model_sampling = ModelSamplingAdvanced(model.model.model_config) + model_sampling.set_parameters(shift=shift) + m.add_object_patch("model_sampling", model_sampling) + return (m, ) + + +class ModelSamplingContinuousEDM: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "sampling": (["v_prediction", "edm_playground_v2.5", "eps"],), + "sigma_max": ("FLOAT", {"default": 120.0, "min": 0.0, "max": 1000.0, "step":0.001, "round": False}), + "sigma_min": ("FLOAT", {"default": 0.002, "min": 0.0, "max": 1000.0, "step":0.001, "round": False}), + }} + + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "advanced/model" + + def patch(self, model, sampling, sigma_max, sigma_min): + m = model.clone() + + latent_format = None + sigma_data = 1.0 + if sampling == "eps": + sampling_type = comfy.model_sampling.EPS + elif sampling == "v_prediction": + sampling_type = comfy.model_sampling.V_PREDICTION + elif sampling == "edm_playground_v2.5": + sampling_type = comfy.model_sampling.EDM + sigma_data = 0.5 + latent_format = comfy.latent_formats.SDXL_Playground_2_5() + + class ModelSamplingAdvanced(comfy.model_sampling.ModelSamplingContinuousEDM, sampling_type): + pass + + model_sampling = ModelSamplingAdvanced(model.model.model_config) + model_sampling.set_parameters(sigma_min, sigma_max, sigma_data) + 
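
As a side note on ModelSamplingFlux above: its shift is linear in the number of 2x2 latent patches, anchored so that 256 tokens give base_shift and 4096 tokens give max_shift. A sketch working it through with the defaults:

max_shift, base_shift = 1.15, 0.5
x1, x2 = 256, 4096                            # anchor token counts
mm = (max_shift - base_shift) / (x2 - x1)     # slope
b = base_shift - mm * x1                      # intercept

tokens = (1024 * 1024) // (8 * 8 * 2 * 2)     # 1024x1024 image -> 4096 patches
shift = tokens * mm + b                       # ~1.15, i.e. max_shift
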
m.add_object_patch("model_sampling", model_sampling) + if latent_format is not None: + m.add_object_patch("latent_format", latent_format) + return (m, ) + +class ModelSamplingContinuousV: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "sampling": (["v_prediction"],), + "sigma_max": ("FLOAT", {"default": 500.0, "min": 0.0, "max": 1000.0, "step":0.001, "round": False}), + "sigma_min": ("FLOAT", {"default": 0.03, "min": 0.0, "max": 1000.0, "step":0.001, "round": False}), + }} + + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "advanced/model" + + def patch(self, model, sampling, sigma_max, sigma_min): + m = model.clone() + + latent_format = None + sigma_data = 1.0 + if sampling == "v_prediction": + sampling_type = comfy.model_sampling.V_PREDICTION + + class ModelSamplingAdvanced(comfy.model_sampling.ModelSamplingContinuousV, sampling_type): + pass + + model_sampling = ModelSamplingAdvanced(model.model.model_config) + model_sampling.set_parameters(sigma_min, sigma_max, sigma_data) + m.add_object_patch("model_sampling", model_sampling) + return (m, ) + +class RescaleCFG: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "multiplier": ("FLOAT", {"default": 0.7, "min": 0.0, "max": 1.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "advanced/model" + + def patch(self, model, multiplier): + def rescale_cfg(args): + cond = args["cond"] + uncond = args["uncond"] + cond_scale = args["cond_scale"] + sigma = args["sigma"] + sigma = sigma.view(sigma.shape[:1] + (1,) * (cond.ndim - 1)) + x_orig = args["input"] + + #rescale cfg has to be done on v-pred model output + x = x_orig / (sigma * sigma + 1.0) + cond = ((x - (x_orig - cond)) * (sigma ** 2 + 1.0) ** 0.5) / (sigma) + uncond = ((x - (x_orig - uncond)) * (sigma ** 2 + 1.0) ** 0.5) / (sigma) + + #rescalecfg + x_cfg = uncond + cond_scale * (cond - uncond) + ro_pos = torch.std(cond, dim=(1,2,3), keepdim=True) + ro_cfg = torch.std(x_cfg, dim=(1,2,3), keepdim=True) + + x_rescaled = x_cfg * (ro_pos / ro_cfg) + x_final = multiplier * x_rescaled + (1.0 - multiplier) * x_cfg + + return x_orig - (x - x_final * sigma / (sigma * sigma + 1.0) ** 0.5) + + m = model.clone() + m.set_model_sampler_cfg_function(rescale_cfg) + return (m, ) + +NODE_CLASS_MAPPINGS = { + "ModelSamplingDiscrete": ModelSamplingDiscrete, + "ModelSamplingContinuousEDM": ModelSamplingContinuousEDM, + "ModelSamplingContinuousV": ModelSamplingContinuousV, + "ModelSamplingStableCascade": ModelSamplingStableCascade, + "ModelSamplingSD3": ModelSamplingSD3, + "ModelSamplingAuraFlow": ModelSamplingAuraFlow, + "ModelSamplingFlux": ModelSamplingFlux, + "RescaleCFG": RescaleCFG, +} diff --git a/src/comfyui/comfy_extras/nodes_model_downscale.py b/src/comfyui/comfy_extras/nodes_model_downscale.py new file mode 100644 index 0000000000000000000000000000000000000000..15ffc4c8ee65edebd3fb36331a832bdb865d41b9 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_model_downscale.py @@ -0,0 +1,54 @@ +import torch +import comfy.utils + +class PatchModelAddDownscale: + upscale_methods = ["bicubic", "nearest-exact", "bilinear", "area", "bislerp"] + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "block_number": ("INT", {"default": 3, "min": 1, "max": 32, "step": 1}), + "downscale_factor": ("FLOAT", {"default": 2.0, "min": 0.1, "max": 9.0, "step": 0.001}), + "start_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_percent": 
("FLOAT", {"default": 0.35, "min": 0.0, "max": 1.0, "step": 0.001}), + "downscale_after_skip": ("BOOLEAN", {"default": True}), + "downscale_method": (s.upscale_methods,), + "upscale_method": (s.upscale_methods,), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "model_patches/unet" + + def patch(self, model, block_number, downscale_factor, start_percent, end_percent, downscale_after_skip, downscale_method, upscale_method): + model_sampling = model.get_model_object("model_sampling") + sigma_start = model_sampling.percent_to_sigma(start_percent) + sigma_end = model_sampling.percent_to_sigma(end_percent) + + def input_block_patch(h, transformer_options): + if transformer_options["block"][1] == block_number: + sigma = transformer_options["sigmas"][0].item() + if sigma <= sigma_start and sigma >= sigma_end: + h = comfy.utils.common_upscale(h, round(h.shape[-1] * (1.0 / downscale_factor)), round(h.shape[-2] * (1.0 / downscale_factor)), downscale_method, "disabled") + return h + + def output_block_patch(h, hsp, transformer_options): + if h.shape[2] != hsp.shape[2]: + h = comfy.utils.common_upscale(h, hsp.shape[-1], hsp.shape[-2], upscale_method, "disabled") + return h, hsp + + m = model.clone() + if downscale_after_skip: + m.set_model_input_block_patch_after_skip(input_block_patch) + else: + m.set_model_input_block_patch(input_block_patch) + m.set_model_output_block_patch(output_block_patch) + return (m, ) + +NODE_CLASS_MAPPINGS = { + "PatchModelAddDownscale": PatchModelAddDownscale, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + # Sampling + "PatchModelAddDownscale": "PatchModelAddDownscale (Kohya Deep Shrink)", +} diff --git a/src/comfyui/comfy_extras/nodes_model_merging.py b/src/comfyui/comfy_extras/nodes_model_merging.py new file mode 100644 index 0000000000000000000000000000000000000000..ccf601158d59ab707d07f1e438c739d4dbced131 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_model_merging.py @@ -0,0 +1,371 @@ +import comfy.sd +import comfy.utils +import comfy.model_base +import comfy.model_management +import comfy.model_sampling + +import torch +import folder_paths +import json +import os + +from comfy.cli_args import args + +class ModelMergeSimple: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model1": ("MODEL",), + "model2": ("MODEL",), + "ratio": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "merge" + + CATEGORY = "advanced/model_merging" + + def merge(self, model1, model2, ratio): + m = model1.clone() + kp = model2.get_key_patches("diffusion_model.") + for k in kp: + m.add_patches({k: kp[k]}, 1.0 - ratio, ratio) + return (m, ) + +class ModelSubtract: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model1": ("MODEL",), + "model2": ("MODEL",), + "multiplier": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "merge" + + CATEGORY = "advanced/model_merging" + + def merge(self, model1, model2, multiplier): + m = model1.clone() + kp = model2.get_key_patches("diffusion_model.") + for k in kp: + m.add_patches({k: kp[k]}, - multiplier, multiplier) + return (m, ) + +class ModelAdd: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model1": ("MODEL",), + "model2": ("MODEL",), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "merge" + + CATEGORY = "advanced/model_merging" + + def merge(self, model1, model2): + m = model1.clone() + kp = model2.get_key_patches("diffusion_model.") + for k in kp: + m.add_patches({k: 
kp[k]}, 1.0, 1.0) + return (m, ) + + +class CLIPMergeSimple: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip1": ("CLIP",), + "clip2": ("CLIP",), + "ratio": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + }} + RETURN_TYPES = ("CLIP",) + FUNCTION = "merge" + + CATEGORY = "advanced/model_merging" + + def merge(self, clip1, clip2, ratio): + m = clip1.clone() + kp = clip2.get_key_patches() + for k in kp: + if k.endswith(".position_ids") or k.endswith(".logit_scale"): + continue + m.add_patches({k: kp[k]}, 1.0 - ratio, ratio) + return (m, ) + + +class CLIPSubtract: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip1": ("CLIP",), + "clip2": ("CLIP",), + "multiplier": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + }} + RETURN_TYPES = ("CLIP",) + FUNCTION = "merge" + + CATEGORY = "advanced/model_merging" + + def merge(self, clip1, clip2, multiplier): + m = clip1.clone() + kp = clip2.get_key_patches() + for k in kp: + if k.endswith(".position_ids") or k.endswith(".logit_scale"): + continue + m.add_patches({k: kp[k]}, - multiplier, multiplier) + return (m, ) + + +class CLIPAdd: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip1": ("CLIP",), + "clip2": ("CLIP",), + }} + RETURN_TYPES = ("CLIP",) + FUNCTION = "merge" + + CATEGORY = "advanced/model_merging" + + def merge(self, clip1, clip2): + m = clip1.clone() + kp = clip2.get_key_patches() + for k in kp: + if k.endswith(".position_ids") or k.endswith(".logit_scale"): + continue + m.add_patches({k: kp[k]}, 1.0, 1.0) + return (m, ) + + +class ModelMergeBlocks: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model1": ("MODEL",), + "model2": ("MODEL",), + "input": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "middle": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "out": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}) + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "merge" + + CATEGORY = "advanced/model_merging" + + def merge(self, model1, model2, **kwargs): + m = model1.clone() + kp = model2.get_key_patches("diffusion_model.") + default_ratio = next(iter(kwargs.values())) + + for k in kp: + ratio = default_ratio + k_unet = k[len("diffusion_model."):] + + last_arg_size = 0 + for arg in kwargs: + if k_unet.startswith(arg) and last_arg_size < len(arg): + ratio = kwargs[arg] + last_arg_size = len(arg) + + m.add_patches({k: kp[k]}, 1.0 - ratio, ratio) + return (m, ) + +def save_checkpoint(model, clip=None, vae=None, clip_vision=None, filename_prefix=None, output_dir=None, prompt=None, extra_pnginfo=None): + full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, output_dir) + prompt_info = "" + if prompt is not None: + prompt_info = json.dumps(prompt) + + metadata = {} + + enable_modelspec = True + if isinstance(model.model, comfy.model_base.SDXL): + if isinstance(model.model, comfy.model_base.SDXL_instructpix2pix): + metadata["modelspec.architecture"] = "stable-diffusion-xl-v1-edit" + else: + metadata["modelspec.architecture"] = "stable-diffusion-xl-v1-base" + elif isinstance(model.model, comfy.model_base.SDXLRefiner): + metadata["modelspec.architecture"] = "stable-diffusion-xl-v1-refiner" + elif isinstance(model.model, comfy.model_base.SVD_img2vid): + metadata["modelspec.architecture"] = "stable-video-diffusion-img2vid-v1" + elif isinstance(model.model, comfy.model_base.SD3): + metadata["modelspec.architecture"] = 
"stable-diffusion-v3-medium" #TODO: other SD3 variants + else: + enable_modelspec = False + + if enable_modelspec: + metadata["modelspec.sai_model_spec"] = "1.0.0" + metadata["modelspec.implementation"] = "sgm" + metadata["modelspec.title"] = "{} {}".format(filename, counter) + + #TODO: + # "stable-diffusion-v1", "stable-diffusion-v1-inpainting", "stable-diffusion-v2-512", + # "stable-diffusion-v2-768-v", "stable-diffusion-v2-unclip-l", "stable-diffusion-v2-unclip-h", + # "v2-inpainting" + + extra_keys = {} + model_sampling = model.get_model_object("model_sampling") + if isinstance(model_sampling, comfy.model_sampling.ModelSamplingContinuousEDM): + if isinstance(model_sampling, comfy.model_sampling.V_PREDICTION): + extra_keys["edm_vpred.sigma_max"] = torch.tensor(model_sampling.sigma_max).float() + extra_keys["edm_vpred.sigma_min"] = torch.tensor(model_sampling.sigma_min).float() + + if model.model.model_type == comfy.model_base.ModelType.EPS: + metadata["modelspec.predict_key"] = "epsilon" + elif model.model.model_type == comfy.model_base.ModelType.V_PREDICTION: + metadata["modelspec.predict_key"] = "v" + + if not args.disable_metadata: + metadata["prompt"] = prompt_info + if extra_pnginfo is not None: + for x in extra_pnginfo: + metadata[x] = json.dumps(extra_pnginfo[x]) + + output_checkpoint = f"{filename}_{counter:05}_.safetensors" + output_checkpoint = os.path.join(full_output_folder, output_checkpoint) + + comfy.sd.save_checkpoint(output_checkpoint, model, clip, vae, clip_vision, metadata=metadata, extra_keys=extra_keys) + +class CheckpointSave: + def __init__(self): + self.output_dir = folder_paths.get_output_directory() + + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "clip": ("CLIP",), + "vae": ("VAE",), + "filename_prefix": ("STRING", {"default": "checkpoints/ComfyUI"}),}, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"},} + RETURN_TYPES = () + FUNCTION = "save" + OUTPUT_NODE = True + + CATEGORY = "advanced/model_merging" + + def save(self, model, clip, vae, filename_prefix, prompt=None, extra_pnginfo=None): + save_checkpoint(model, clip=clip, vae=vae, filename_prefix=filename_prefix, output_dir=self.output_dir, prompt=prompt, extra_pnginfo=extra_pnginfo) + return {} + +class CLIPSave: + def __init__(self): + self.output_dir = folder_paths.get_output_directory() + + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip": ("CLIP",), + "filename_prefix": ("STRING", {"default": "clip/ComfyUI"}),}, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"},} + RETURN_TYPES = () + FUNCTION = "save" + OUTPUT_NODE = True + + CATEGORY = "advanced/model_merging" + + def save(self, clip, filename_prefix, prompt=None, extra_pnginfo=None): + prompt_info = "" + if prompt is not None: + prompt_info = json.dumps(prompt) + + metadata = {} + if not args.disable_metadata: + metadata["format"] = "pt" + metadata["prompt"] = prompt_info + if extra_pnginfo is not None: + for x in extra_pnginfo: + metadata[x] = json.dumps(extra_pnginfo[x]) + + comfy.model_management.load_models_gpu([clip.load_model()], force_patch_weights=True) + clip_sd = clip.get_sd() + + for prefix in ["clip_l.", "clip_g.", ""]: + k = list(filter(lambda a: a.startswith(prefix), clip_sd.keys())) + current_clip_sd = {} + for x in k: + current_clip_sd[x] = clip_sd.pop(x) + if len(current_clip_sd) == 0: + continue + + p = prefix[:-1] + replace_prefix = {} + filename_prefix_ = filename_prefix + if len(p) > 0: + filename_prefix_ = "{}_{}".format(filename_prefix_, p) 
+ replace_prefix[prefix] = "" + replace_prefix["transformer."] = "" + + full_output_folder, filename, counter, subfolder, filename_prefix_ = folder_paths.get_save_image_path(filename_prefix_, self.output_dir) + + output_checkpoint = f"{filename}_{counter:05}_.safetensors" + output_checkpoint = os.path.join(full_output_folder, output_checkpoint) + + current_clip_sd = comfy.utils.state_dict_prefix_replace(current_clip_sd, replace_prefix) + + comfy.utils.save_torch_file(current_clip_sd, output_checkpoint, metadata=metadata) + return {} + +class VAESave: + def __init__(self): + self.output_dir = folder_paths.get_output_directory() + + @classmethod + def INPUT_TYPES(s): + return {"required": { "vae": ("VAE",), + "filename_prefix": ("STRING", {"default": "vae/ComfyUI_vae"}),}, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"},} + RETURN_TYPES = () + FUNCTION = "save" + OUTPUT_NODE = True + + CATEGORY = "advanced/model_merging" + + def save(self, vae, filename_prefix, prompt=None, extra_pnginfo=None): + full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, self.output_dir) + prompt_info = "" + if prompt is not None: + prompt_info = json.dumps(prompt) + + metadata = {} + if not args.disable_metadata: + metadata["prompt"] = prompt_info + if extra_pnginfo is not None: + for x in extra_pnginfo: + metadata[x] = json.dumps(extra_pnginfo[x]) + + output_checkpoint = f"{filename}_{counter:05}_.safetensors" + output_checkpoint = os.path.join(full_output_folder, output_checkpoint) + + comfy.utils.save_torch_file(vae.get_sd(), output_checkpoint, metadata=metadata) + return {} + +class ModelSave: + def __init__(self): + self.output_dir = folder_paths.get_output_directory() + + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "filename_prefix": ("STRING", {"default": "diffusion_models/ComfyUI"}),}, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"},} + RETURN_TYPES = () + FUNCTION = "save" + OUTPUT_NODE = True + + CATEGORY = "advanced/model_merging" + + def save(self, model, filename_prefix, prompt=None, extra_pnginfo=None): + save_checkpoint(model, filename_prefix=filename_prefix, output_dir=self.output_dir, prompt=prompt, extra_pnginfo=extra_pnginfo) + return {} + +NODE_CLASS_MAPPINGS = { + "ModelMergeSimple": ModelMergeSimple, + "ModelMergeBlocks": ModelMergeBlocks, + "ModelMergeSubtract": ModelSubtract, + "ModelMergeAdd": ModelAdd, + "CheckpointSave": CheckpointSave, + "CLIPMergeSimple": CLIPMergeSimple, + "CLIPMergeSubtract": CLIPSubtract, + "CLIPMergeAdd": CLIPAdd, + "CLIPSave": CLIPSave, + "VAESave": VAESave, + "ModelSave": ModelSave, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "CheckpointSave": "Save Checkpoint", +} diff --git a/src/comfyui/comfy_extras/nodes_model_merging_model_specific.py b/src/comfyui/comfy_extras/nodes_model_merging_model_specific.py new file mode 100644 index 0000000000000000000000000000000000000000..0e847192813183379e31264c2fa5f9f7cb300d77 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_model_merging_model_specific.py @@ -0,0 +1,134 @@ +import comfy_extras.nodes_model_merging + +class ModelMergeSD1(comfy_extras.nodes_model_merging.ModelMergeBlocks): + CATEGORY = "advanced/model_merging/model_specific" + @classmethod + def INPUT_TYPES(s): + arg_dict = { "model1": ("MODEL",), + "model2": ("MODEL",)} + + argument = ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}) + + arg_dict["time_embed."] = argument + arg_dict["label_emb."] = argument + + 
for i in range(12): + arg_dict["input_blocks.{}.".format(i)] = argument + + for i in range(3): + arg_dict["middle_block.{}.".format(i)] = argument + + for i in range(12): + arg_dict["output_blocks.{}.".format(i)] = argument + + arg_dict["out."] = argument + + return {"required": arg_dict} + + +class ModelMergeSDXL(comfy_extras.nodes_model_merging.ModelMergeBlocks): + CATEGORY = "advanced/model_merging/model_specific" + + @classmethod + def INPUT_TYPES(s): + arg_dict = { "model1": ("MODEL",), + "model2": ("MODEL",)} + + argument = ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}) + + arg_dict["time_embed."] = argument + arg_dict["label_emb."] = argument + + for i in range(9): + arg_dict["input_blocks.{}".format(i)] = argument + + for i in range(3): + arg_dict["middle_block.{}".format(i)] = argument + + for i in range(9): + arg_dict["output_blocks.{}".format(i)] = argument + + arg_dict["out."] = argument + + return {"required": arg_dict} + +class ModelMergeSD3_2B(comfy_extras.nodes_model_merging.ModelMergeBlocks): + CATEGORY = "advanced/model_merging/model_specific" + + @classmethod + def INPUT_TYPES(s): + arg_dict = { "model1": ("MODEL",), + "model2": ("MODEL",)} + + argument = ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}) + + arg_dict["pos_embed."] = argument + arg_dict["x_embedder."] = argument + arg_dict["context_embedder."] = argument + arg_dict["y_embedder."] = argument + arg_dict["t_embedder."] = argument + + for i in range(24): + arg_dict["joint_blocks.{}.".format(i)] = argument + + arg_dict["final_layer."] = argument + + return {"required": arg_dict} + +class ModelMergeFlux1(comfy_extras.nodes_model_merging.ModelMergeBlocks): + CATEGORY = "advanced/model_merging/model_specific" + + @classmethod + def INPUT_TYPES(s): + arg_dict = { "model1": ("MODEL",), + "model2": ("MODEL",)} + + argument = ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}) + + arg_dict["img_in."] = argument + arg_dict["time_in."] = argument + arg_dict["guidance_in"] = argument + arg_dict["vector_in."] = argument + arg_dict["txt_in."] = argument + + for i in range(19): + arg_dict["double_blocks.{}.".format(i)] = argument + + for i in range(38): + arg_dict["single_blocks.{}.".format(i)] = argument + + arg_dict["final_layer."] = argument + + return {"required": arg_dict} + +class ModelMergeSD35_Large(comfy_extras.nodes_model_merging.ModelMergeBlocks): + CATEGORY = "advanced/model_merging/model_specific" + + @classmethod + def INPUT_TYPES(s): + arg_dict = { "model1": ("MODEL",), + "model2": ("MODEL",)} + + argument = ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}) + + arg_dict["pos_embed."] = argument + arg_dict["x_embedder."] = argument + arg_dict["context_embedder."] = argument + arg_dict["y_embedder."] = argument + arg_dict["t_embedder."] = argument + + for i in range(38): + arg_dict["joint_blocks.{}.".format(i)] = argument + + arg_dict["final_layer."] = argument + + return {"required": arg_dict} + +NODE_CLASS_MAPPINGS = { + "ModelMergeSD1": ModelMergeSD1, + "ModelMergeSD2": ModelMergeSD1, #SD1 and SD2 have the same blocks + "ModelMergeSDXL": ModelMergeSDXL, + "ModelMergeSD3_2B": ModelMergeSD3_2B, + "ModelMergeFlux1": ModelMergeFlux1, + "ModelMergeSD35_Large": ModelMergeSD35_Large, +} diff --git a/src/comfyui/comfy_extras/nodes_morphology.py b/src/comfyui/comfy_extras/nodes_morphology.py new file mode 100644 index 0000000000000000000000000000000000000000..071521d879afe8f5cc956bd8b80d5dddd3f23d1a --- /dev/null +++ 
b/src/comfyui/comfy_extras/nodes_morphology.py @@ -0,0 +1,49 @@ +import torch +import comfy.model_management + +from kornia.morphology import dilation, erosion, opening, closing, gradient, top_hat, bottom_hat + + +class Morphology: + @classmethod + def INPUT_TYPES(s): + return {"required": {"image": ("IMAGE",), + "operation": (["erode", "dilate", "open", "close", "gradient", "bottom_hat", "top_hat"],), + "kernel_size": ("INT", {"default": 3, "min": 3, "max": 999, "step": 1}), + }} + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "process" + + CATEGORY = "image/postprocessing" + + def process(self, image, operation, kernel_size): + device = comfy.model_management.get_torch_device() + kernel = torch.ones(kernel_size, kernel_size, device=device) + image_k = image.to(device).movedim(-1, 1) + if operation == "erode": + output = erosion(image_k, kernel) + elif operation == "dilate": + output = dilation(image_k, kernel) + elif operation == "open": + output = opening(image_k, kernel) + elif operation == "close": + output = closing(image_k, kernel) + elif operation == "gradient": + output = gradient(image_k, kernel) + elif operation == "top_hat": + output = top_hat(image_k, kernel) + elif operation == "bottom_hat": + output = bottom_hat(image_k, kernel) + else: + raise ValueError(f"Invalid operation {operation} for morphology. Must be one of 'erode', 'dilate', 'open', 'close', 'gradient', 'top_hat', 'bottom_hat'") + img_out = output.to(comfy.model_management.intermediate_device()).movedim(1, -1) + return (img_out,) + +NODE_CLASS_MAPPINGS = { + "Morphology": Morphology, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "Morphology": "ImageMorphology", +} \ No newline at end of file diff --git a/src/comfyui/comfy_extras/nodes_pag.py b/src/comfyui/comfy_extras/nodes_pag.py new file mode 100644 index 0000000000000000000000000000000000000000..eb28196f41c56fd45fda051a42d0814b96558fb8 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_pag.py @@ -0,0 +1,56 @@ +#Modified/simplified version of the node from: https://github.com/pamparamm/sd-perturbed-attention +#If you want the one with more options see the above repo. + +#My modified one here is more basic but has less chance of breaking with ComfyUI updates.
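+# Why returning v implements "perturbed" self-attention: PAG replaces the
+# attention map softmax(QK^T/sqrt(d)) with the identity matrix, and I @ V == V.
+# Minimal check (shapes are illustrative, not tied to any real model):
+#   import torch
+#   v = torch.randn(1, 77, 64)                        # (batch, tokens, dim)
+#   eye = torch.eye(v.shape[1]).expand(v.shape[0], -1, -1)
+#   assert torch.allclose(eye @ v, v)                 # identity attention just passes v through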
+ +import comfy.model_patcher +import comfy.samplers + +class PerturbedAttentionGuidance: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "model": ("MODEL",), + "scale": ("FLOAT", {"default": 3.0, "min": 0.0, "max": 100.0, "step": 0.01, "round": 0.01}), + } + } + + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "model_patches/unet" + + def patch(self, model, scale): + unet_block = "middle" + unet_block_id = 0 + m = model.clone() + + def perturbed_attention(q, k, v, extra_options, mask=None): + return v + + def post_cfg_function(args): + model = args["model"] + cond_pred = args["cond_denoised"] + cond = args["cond"] + cfg_result = args["denoised"] + sigma = args["sigma"] + model_options = args["model_options"].copy() + x = args["input"] + + if scale == 0: + return cfg_result + + # Replace Self-attention with PAG + model_options = comfy.model_patcher.set_model_options_patch_replace(model_options, perturbed_attention, "attn1", unet_block, unet_block_id) + (pag,) = comfy.samplers.calc_cond_batch(model, [cond], x, sigma, model_options) + + return cfg_result + (cond_pred - pag) * scale + + m.set_model_sampler_post_cfg_function(post_cfg_function) + + return (m,) + +NODE_CLASS_MAPPINGS = { + "PerturbedAttentionGuidance": PerturbedAttentionGuidance, +} diff --git a/src/comfyui/comfy_extras/nodes_perpneg.py b/src/comfyui/comfy_extras/nodes_perpneg.py new file mode 100644 index 0000000000000000000000000000000000000000..762c402202d4fe245864ba67c6f68a8954a1d0ec --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_perpneg.py @@ -0,0 +1,129 @@ +import torch +import comfy.model_management +import comfy.sampler_helpers +import comfy.samplers +import comfy.utils +import node_helpers + +def perp_neg(x, noise_pred_pos, noise_pred_neg, noise_pred_nocond, neg_scale, cond_scale): + pos = noise_pred_pos - noise_pred_nocond + neg = noise_pred_neg - noise_pred_nocond + + perp = neg - ((torch.mul(neg, pos).sum())/(torch.norm(pos)**2)) * pos + perp_neg = perp * neg_scale + cfg_result = noise_pred_nocond + cond_scale*(pos - perp_neg) + return cfg_result + +#TODO: This node should be removed, it has been replaced with PerpNegGuider +class PerpNeg: + @classmethod + def INPUT_TYPES(s): + return {"required": {"model": ("MODEL", ), + "empty_conditioning": ("CONDITIONING", ), + "neg_scale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "_for_testing" + DEPRECATED = True + + def patch(self, model, empty_conditioning, neg_scale): + m = model.clone() + nocond = comfy.sampler_helpers.convert_cond(empty_conditioning) + + def cfg_function(args): + model = args["model"] + noise_pred_pos = args["cond_denoised"] + noise_pred_neg = args["uncond_denoised"] + cond_scale = args["cond_scale"] + x = args["input"] + sigma = args["sigma"] + model_options = args["model_options"] + nocond_processed = comfy.samplers.encode_model_conds(model.extra_conds, nocond, x, x.device, "negative") + + (noise_pred_nocond,) = comfy.samplers.calc_cond_batch(model, [nocond_processed], x, sigma, model_options) + + cfg_result = x - perp_neg(x, noise_pred_pos, noise_pred_neg, noise_pred_nocond, neg_scale, cond_scale) + return cfg_result + + m.set_model_sampler_cfg_function(cfg_function) + + return (m, ) + + +class Guider_PerpNeg(comfy.samplers.CFGGuider): + def set_conds(self, positive, negative, empty_negative_prompt): + empty_negative_prompt = node_helpers.conditioning_set_values(empty_negative_prompt, {"prompt_type": "negative"}) + 
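+# Quick numeric check (illustrative, not part of the node) of the projection
+# identity perp_neg() relies on above: removing neg's component along pos
+# leaves a vector orthogonal to the positive guidance direction.
+#   import torch
+#   pos, neg = torch.randn(8), torch.randn(8)
+#   perp = neg - (torch.mul(neg, pos).sum() / torch.norm(pos) ** 2) * pos
+#   assert torch.isclose((perp * pos).sum(), torch.tensor(0.0), atol=1e-5)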
self.inner_set_conds({"positive": positive, "empty_negative_prompt": empty_negative_prompt, "negative": negative}) + + def set_cfg(self, cfg, neg_scale): + self.cfg = cfg + self.neg_scale = neg_scale + + def predict_noise(self, x, timestep, model_options={}, seed=None): + # in CFGGuider.predict_noise, we call sampling_function(), which uses cfg_function() to compute pos & neg + # but we'd rather do a single batch of sampling pos, neg, and empty, so we call calc_cond_batch([pos,neg,empty]) directly + + positive_cond = self.conds.get("positive", None) + negative_cond = self.conds.get("negative", None) + empty_cond = self.conds.get("empty_negative_prompt", None) + + (noise_pred_pos, noise_pred_neg, noise_pred_empty) = \ + comfy.samplers.calc_cond_batch(self.inner_model, [positive_cond, negative_cond, empty_cond], x, timestep, model_options) + cfg_result = perp_neg(x, noise_pred_pos, noise_pred_neg, noise_pred_empty, self.neg_scale, self.cfg) + + # normally this would be done in cfg_function, but we skipped + # that for efficiency: we can compute the noise predictions in + # a single call to calc_cond_batch() (rather than two) + # so we replicate the hook here + for fn in model_options.get("sampler_post_cfg_function", []): + args = { + "denoised": cfg_result, + "cond": positive_cond, + "uncond": negative_cond, + "model": self.inner_model, + "uncond_denoised": noise_pred_neg, + "cond_denoised": noise_pred_pos, + "sigma": timestep, + "model_options": model_options, + "input": x, + # not in the original call in samplers.py:cfg_function, but made available for future hooks + "empty_cond": empty_cond, + "empty_cond_denoised": noise_pred_empty,} + cfg_result = fn(args) + + return cfg_result + +class PerpNegGuider: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"model": ("MODEL",), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "empty_conditioning": ("CONDITIONING", ), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}), + "neg_scale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step": 0.01}), + } + } + + RETURN_TYPES = ("GUIDER",) + + FUNCTION = "get_guider" + CATEGORY = "_for_testing" + + def get_guider(self, model, positive, negative, empty_conditioning, cfg, neg_scale): + guider = Guider_PerpNeg(model) + guider.set_conds(positive, negative, empty_conditioning) + guider.set_cfg(cfg, neg_scale) + return (guider,) + +NODE_CLASS_MAPPINGS = { + "PerpNeg": PerpNeg, + "PerpNegGuider": PerpNegGuider, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "PerpNeg": "Perp-Neg (DEPRECATED by PerpNegGuider)", +} diff --git a/src/comfyui/comfy_extras/nodes_photomaker.py b/src/comfyui/comfy_extras/nodes_photomaker.py new file mode 100644 index 0000000000000000000000000000000000000000..95d24dd221e95f8a399207a625a78b0edd955625 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_photomaker.py @@ -0,0 +1,187 @@ +import torch +import torch.nn as nn +import folder_paths +import comfy.clip_model +import comfy.clip_vision +import comfy.ops + +# code for model from: https://github.com/TencentARC/PhotoMaker/blob/main/photomaker/model.py under Apache License Version 2.0 +VISION_CONFIG_DICT = { + "hidden_size": 1024, + "image_size": 224, + "intermediate_size": 4096, + "num_attention_heads": 16, + "num_channels": 3, + "num_hidden_layers": 24, + "patch_size": 14, + "projection_dim": 768, + "hidden_act": "quick_gelu", +} + +class MLP(nn.Module): + def __init__(self, in_dim, out_dim, hidden_dim, use_residual=True, operations=comfy.ops): + super().__init__() 
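+# For reference, a sketch of the contract shared by the sampler post-CFG hooks
+# that Guider_PerpNeg.predict_noise replays above: each hook receives a dict
+# with keys such as "denoised", "cond_denoised", "uncond_denoised", "sigma",
+# "input" and "model_options", and returns the adjusted denoised tensor.
+#   def passthrough_post_cfg(args):
+#       return args["denoised"]  # no-op hook: keep the CFG result unchanged
+# registered on a model patcher via m.set_model_sampler_post_cfg_function(passthrough_post_cfg)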
+ if use_residual: + assert in_dim == out_dim + self.layernorm = operations.LayerNorm(in_dim) + self.fc1 = operations.Linear(in_dim, hidden_dim) + self.fc2 = operations.Linear(hidden_dim, out_dim) + self.use_residual = use_residual + self.act_fn = nn.GELU() + + def forward(self, x): + residual = x + x = self.layernorm(x) + x = self.fc1(x) + x = self.act_fn(x) + x = self.fc2(x) + if self.use_residual: + x = x + residual + return x + + +class FuseModule(nn.Module): + def __init__(self, embed_dim, operations): + super().__init__() + self.mlp1 = MLP(embed_dim * 2, embed_dim, embed_dim, use_residual=False, operations=operations) + self.mlp2 = MLP(embed_dim, embed_dim, embed_dim, use_residual=True, operations=operations) + self.layer_norm = operations.LayerNorm(embed_dim) + + def fuse_fn(self, prompt_embeds, id_embeds): + stacked_id_embeds = torch.cat([prompt_embeds, id_embeds], dim=-1) + stacked_id_embeds = self.mlp1(stacked_id_embeds) + prompt_embeds + stacked_id_embeds = self.mlp2(stacked_id_embeds) + stacked_id_embeds = self.layer_norm(stacked_id_embeds) + return stacked_id_embeds + + def forward( + self, + prompt_embeds, + id_embeds, + class_tokens_mask, + ) -> torch.Tensor: + # id_embeds shape: [b, max_num_inputs, 1, 2048] + id_embeds = id_embeds.to(prompt_embeds.dtype) + num_inputs = class_tokens_mask.sum().unsqueeze(0) # TODO: check for training case + batch_size, max_num_inputs = id_embeds.shape[:2] + # seq_length: 77 + seq_length = prompt_embeds.shape[1] + # flat_id_embeds shape: [b*max_num_inputs, 1, 2048] + flat_id_embeds = id_embeds.view( + -1, id_embeds.shape[-2], id_embeds.shape[-1] + ) + # valid_id_mask [b*max_num_inputs] + valid_id_mask = ( + torch.arange(max_num_inputs, device=flat_id_embeds.device)[None, :] + < num_inputs[:, None] + ) + valid_id_embeds = flat_id_embeds[valid_id_mask.flatten()] + + prompt_embeds = prompt_embeds.view(-1, prompt_embeds.shape[-1]) + class_tokens_mask = class_tokens_mask.view(-1) + valid_id_embeds = valid_id_embeds.view(-1, valid_id_embeds.shape[-1]) + # slice out the image token embeddings + image_token_embeds = prompt_embeds[class_tokens_mask] + stacked_id_embeds = self.fuse_fn(image_token_embeds, valid_id_embeds) + assert class_tokens_mask.sum() == stacked_id_embeds.shape[0], f"{class_tokens_mask.sum()} != {stacked_id_embeds.shape[0]}" + prompt_embeds.masked_scatter_(class_tokens_mask[:, None], stacked_id_embeds.to(prompt_embeds.dtype)) + updated_prompt_embeds = prompt_embeds.view(batch_size, seq_length, -1) + return updated_prompt_embeds + +class PhotoMakerIDEncoder(comfy.clip_model.CLIPVisionModelProjection): + def __init__(self): + self.load_device = comfy.model_management.text_encoder_device() + offload_device = comfy.model_management.text_encoder_offload_device() + dtype = comfy.model_management.text_encoder_dtype(self.load_device) + + super().__init__(VISION_CONFIG_DICT, dtype, offload_device, comfy.ops.manual_cast) + self.visual_projection_2 = comfy.ops.manual_cast.Linear(1024, 1280, bias=False) + self.fuse_module = FuseModule(2048, comfy.ops.manual_cast) + + def forward(self, id_pixel_values, prompt_embeds, class_tokens_mask): + b, num_inputs, c, h, w = id_pixel_values.shape + id_pixel_values = id_pixel_values.view(b * num_inputs, c, h, w) + + shared_id_embeds = self.vision_model(id_pixel_values)[2] + id_embeds = self.visual_projection(shared_id_embeds) + id_embeds_2 = self.visual_projection_2(shared_id_embeds) + + id_embeds = id_embeds.view(b, num_inputs, 1, -1) + id_embeds_2 = id_embeds_2.view(b, num_inputs, 1, -1) + + id_embeds = 
torch.cat((id_embeds, id_embeds_2), dim=-1) + updated_prompt_embeds = self.fuse_module(prompt_embeds, id_embeds, class_tokens_mask) + + return updated_prompt_embeds + + +class PhotoMakerLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "photomaker_model_name": (folder_paths.get_filename_list("photomaker"), )}} + + RETURN_TYPES = ("PHOTOMAKER",) + FUNCTION = "load_photomaker_model" + + CATEGORY = "_for_testing/photomaker" + + def load_photomaker_model(self, photomaker_model_name): + photomaker_model_path = folder_paths.get_full_path_or_raise("photomaker", photomaker_model_name) + photomaker_model = PhotoMakerIDEncoder() + data = comfy.utils.load_torch_file(photomaker_model_path, safe_load=True) + if "id_encoder" in data: + data = data["id_encoder"] + photomaker_model.load_state_dict(data) + return (photomaker_model,) + + +class PhotoMakerEncode: + @classmethod + def INPUT_TYPES(s): + return {"required": { "photomaker": ("PHOTOMAKER",), + "image": ("IMAGE",), + "clip": ("CLIP", ), + "text": ("STRING", {"multiline": True, "dynamicPrompts": True, "default": "photograph of photomaker"}), + }} + + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "apply_photomaker" + + CATEGORY = "_for_testing/photomaker" + + def apply_photomaker(self, photomaker, image, clip, text): + special_token = "photomaker" + pixel_values = comfy.clip_vision.clip_preprocess(image.to(photomaker.load_device)).float() + try: + index = text.split(" ").index(special_token) + 1 + except ValueError: + index = -1 + tokens = clip.tokenize(text, return_word_ids=True) + out_tokens = {} + for k in tokens: + out_tokens[k] = [] + for t in tokens[k]: + f = list(filter(lambda x: x[2] != index, t)) + while len(f) < len(t): + f.append(t[-1]) + out_tokens[k].append(f) + + cond, pooled = clip.encode_from_tokens(out_tokens, return_pooled=True) + + if index > 0: + token_index = index - 1 + num_id_images = 1 + class_tokens_mask = [True if token_index <= i < token_index+num_id_images else False for i in range(77)] + out = photomaker(id_pixel_values=pixel_values.unsqueeze(0), prompt_embeds=cond.to(photomaker.load_device), + class_tokens_mask=torch.tensor(class_tokens_mask, dtype=torch.bool, device=photomaker.load_device).unsqueeze(0)) + else: + out = cond + + return ([[out, {"pooled_output": pooled}]], ) + + +NODE_CLASS_MAPPINGS = { + "PhotoMakerLoader": PhotoMakerLoader, + "PhotoMakerEncode": PhotoMakerEncode, +} + diff --git a/src/comfyui/comfy_extras/nodes_post_processing.py b/src/comfyui/comfy_extras/nodes_post_processing.py new file mode 100644 index 0000000000000000000000000000000000000000..68f6ef51e791c78c16c179a292b70ee4ad39b656 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_post_processing.py @@ -0,0 +1,279 @@ +import numpy as np +import torch +import torch.nn.functional as F +from PIL import Image +import math + +import comfy.utils +import comfy.model_management + + +class Blend: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image1": ("IMAGE",), + "image2": ("IMAGE",), + "blend_factor": ("FLOAT", { + "default": 0.5, + "min": 0.0, + "max": 1.0, + "step": 0.01 + }), + "blend_mode": (["normal", "multiply", "screen", "overlay", "soft_light", "difference"],), + }, + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "blend_images" + + CATEGORY = "image/postprocessing" + + def blend_images(self, image1: torch.Tensor, image2: torch.Tensor, blend_factor: float, blend_mode: str): + image2 = image2.to(image1.device) + if image1.shape != image2.shape: + image2 = image2.permute(0, 3, 
1, 2) + image2 = comfy.utils.common_upscale(image2, image1.shape[2], image1.shape[1], upscale_method='bicubic', crop='center') + image2 = image2.permute(0, 2, 3, 1) + + blended_image = self.blend_mode(image1, image2, blend_mode) + blended_image = image1 * (1 - blend_factor) + blended_image * blend_factor + blended_image = torch.clamp(blended_image, 0, 1) + return (blended_image,) + + def blend_mode(self, img1, img2, mode): + if mode == "normal": + return img2 + elif mode == "multiply": + return img1 * img2 + elif mode == "screen": + return 1 - (1 - img1) * (1 - img2) + elif mode == "overlay": + return torch.where(img1 <= 0.5, 2 * img1 * img2, 1 - 2 * (1 - img1) * (1 - img2)) + elif mode == "soft_light": + return torch.where(img2 <= 0.5, img1 - (1 - 2 * img2) * img1 * (1 - img1), img1 + (2 * img2 - 1) * (self.g(img1) - img1)) + elif mode == "difference": + return img1 - img2 + else: + raise ValueError(f"Unsupported blend mode: {mode}") + + def g(self, x): + return torch.where(x <= 0.25, ((16 * x - 12) * x + 4) * x, torch.sqrt(x)) + +def gaussian_kernel(kernel_size: int, sigma: float, device=None): + x, y = torch.meshgrid(torch.linspace(-1, 1, kernel_size, device=device), torch.linspace(-1, 1, kernel_size, device=device), indexing="ij") + d = torch.sqrt(x * x + y * y) + g = torch.exp(-(d * d) / (2.0 * sigma * sigma)) + return g / g.sum() + +class Blur: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "blur_radius": ("INT", { + "default": 1, + "min": 1, + "max": 31, + "step": 1 + }), + "sigma": ("FLOAT", { + "default": 1.0, + "min": 0.1, + "max": 10.0, + "step": 0.1 + }), + }, + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "blur" + + CATEGORY = "image/postprocessing" + + def blur(self, image: torch.Tensor, blur_radius: int, sigma: float): + if blur_radius == 0: + return (image,) + + image = image.to(comfy.model_management.get_torch_device()) + batch_size, height, width, channels = image.shape + + kernel_size = blur_radius * 2 + 1 + kernel = gaussian_kernel(kernel_size, sigma, device=image.device).repeat(channels, 1, 1).unsqueeze(1) + + image = image.permute(0, 3, 1, 2) # Torch wants (B, C, H, W) we use (B, H, W, C) + padded_image = F.pad(image, (blur_radius,blur_radius,blur_radius,blur_radius), 'reflect') + blurred = F.conv2d(padded_image, kernel, padding=kernel_size // 2, groups=channels)[:,:,blur_radius:-blur_radius, blur_radius:-blur_radius] + blurred = blurred.permute(0, 2, 3, 1) + + return (blurred.to(comfy.model_management.intermediate_device()),) + +class Quantize: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "colors": ("INT", { + "default": 256, + "min": 1, + "max": 256, + "step": 1 + }), + "dither": (["none", "floyd-steinberg", "bayer-2", "bayer-4", "bayer-8", "bayer-16"],), + }, + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "quantize" + + CATEGORY = "image/postprocessing" + + def bayer(im, pal_im, order): + def normalized_bayer_matrix(n): + if n == 0: + return np.zeros((1,1), "float32") + else: + q = 4 ** n + m = q * normalized_bayer_matrix(n - 1) + return np.bmat(((m-1.5, m+0.5), (m+1.5, m-0.5))) / q + + num_colors = len(pal_im.getpalette()) // 3 + spread = 2 * 256 / num_colors + bayer_n = int(math.log2(order)) + bayer_matrix = torch.from_numpy(spread * normalized_bayer_matrix(bayer_n) + 0.5) + + result = torch.from_numpy(np.array(im).astype(np.float32)) + tw = math.ceil(result.shape[0] / bayer_matrix.shape[0]) + th = 
math.ceil(result.shape[1] / bayer_matrix.shape[1]) + tiled_matrix = bayer_matrix.tile(tw, th).unsqueeze(-1) + result.add_(tiled_matrix[:result.shape[0],:result.shape[1]]).clamp_(0, 255) + result = result.to(dtype=torch.uint8) + + im = Image.fromarray(result.cpu().numpy()) + im = im.quantize(palette=pal_im, dither=Image.Dither.NONE) + return im + + def quantize(self, image: torch.Tensor, colors: int, dither: str): + batch_size, height, width, _ = image.shape + result = torch.zeros_like(image) + + for b in range(batch_size): + im = Image.fromarray((image[b] * 255).to(torch.uint8).numpy(), mode='RGB') + + pal_im = im.quantize(colors=colors) # Required as described in https://github.com/python-pillow/Pillow/issues/5836 + + if dither == "none": + quantized_image = im.quantize(palette=pal_im, dither=Image.Dither.NONE) + elif dither == "floyd-steinberg": + quantized_image = im.quantize(palette=pal_im, dither=Image.Dither.FLOYDSTEINBERG) + elif dither.startswith("bayer"): + order = int(dither.split('-')[-1]) + quantized_image = Quantize.bayer(im, pal_im, order) + + quantized_array = torch.tensor(np.array(quantized_image.convert("RGB"))).float() / 255 + result[b] = quantized_array + + return (result,) + +class Sharpen: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("IMAGE",), + "sharpen_radius": ("INT", { + "default": 1, + "min": 1, + "max": 31, + "step": 1 + }), + "sigma": ("FLOAT", { + "default": 1.0, + "min": 0.1, + "max": 10.0, + "step": 0.01 + }), + "alpha": ("FLOAT", { + "default": 1.0, + "min": 0.0, + "max": 5.0, + "step": 0.01 + }), + }, + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "sharpen" + + CATEGORY = "image/postprocessing" + + def sharpen(self, image: torch.Tensor, sharpen_radius: int, sigma:float, alpha: float): + if sharpen_radius == 0: + return (image,) + + batch_size, height, width, channels = image.shape + image = image.to(comfy.model_management.get_torch_device()) + + kernel_size = sharpen_radius * 2 + 1 + kernel = gaussian_kernel(kernel_size, sigma, device=image.device) * -(alpha*10) + center = kernel_size // 2 + kernel[center, center] = kernel[center, center] - kernel.sum() + 1.0 + kernel = kernel.repeat(channels, 1, 1).unsqueeze(1) + + tensor_image = image.permute(0, 3, 1, 2) # Torch wants (B, C, H, W) we use (B, H, W, C) + tensor_image = F.pad(tensor_image, (sharpen_radius,sharpen_radius,sharpen_radius,sharpen_radius), 'reflect') + sharpened = F.conv2d(tensor_image, kernel, padding=center, groups=channels)[:,:,sharpen_radius:-sharpen_radius, sharpen_radius:-sharpen_radius] + sharpened = sharpened.permute(0, 2, 3, 1) + + result = torch.clamp(sharpened, 0, 1) + + return (result.to(comfy.model_management.intermediate_device()),) + +class ImageScaleToTotalPixels: + upscale_methods = ["nearest-exact", "bilinear", "area", "bicubic", "lanczos"] + crop_methods = ["disabled", "center"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { "image": ("IMAGE",), "upscale_method": (s.upscale_methods,), + "megapixels": ("FLOAT", {"default": 1.0, "min": 0.01, "max": 16.0, "step": 0.01}), + }} + RETURN_TYPES = ("IMAGE",) + FUNCTION = "upscale" + + CATEGORY = "image/upscaling" + + def upscale(self, image, upscale_method, megapixels): + samples = image.movedim(-1,1) + total = int(megapixels * 1024 * 1024) + + scale_by = math.sqrt(total / (samples.shape[3] * samples.shape[2])) + width = round(samples.shape[3] * scale_by) + height = round(samples.shape[2] * scale_by) + + s = comfy.utils.common_upscale(samples, width, height, 
upscale_method, "disabled") + s = s.movedim(1,-1) + return (s,) + +NODE_CLASS_MAPPINGS = { + "ImageBlend": Blend, + "ImageBlur": Blur, + "ImageQuantize": Quantize, + "ImageSharpen": Sharpen, + "ImageScaleToTotalPixels": ImageScaleToTotalPixels, +} diff --git a/src/comfyui/comfy_extras/nodes_rebatch.py b/src/comfyui/comfy_extras/nodes_rebatch.py new file mode 100644 index 0000000000000000000000000000000000000000..3010fbd4b69034399390894d74c1b8cc415b61f0 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_rebatch.py @@ -0,0 +1,138 @@ +import torch + +class LatentRebatch: + @classmethod + def INPUT_TYPES(s): + return {"required": { "latents": ("LATENT",), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}), + }} + RETURN_TYPES = ("LATENT",) + INPUT_IS_LIST = True + OUTPUT_IS_LIST = (True, ) + + FUNCTION = "rebatch" + + CATEGORY = "latent/batch" + + @staticmethod + def get_batch(latents, list_ind, offset): + '''prepare a batch out of the list of latents''' + samples = latents[list_ind]['samples'] + shape = samples.shape + mask = latents[list_ind]['noise_mask'] if 'noise_mask' in latents[list_ind] else torch.ones((shape[0], 1, shape[2]*8, shape[3]*8), device='cpu') + if mask.shape[-1] != shape[-1] * 8 or mask.shape[-2] != shape[-2]: + torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(shape[-2]*8, shape[-1]*8), mode="bilinear") + if mask.shape[0] < samples.shape[0]: + mask = mask.repeat((shape[0] - 1) // mask.shape[0] + 1, 1, 1, 1)[:shape[0]] + if 'batch_index' in latents[list_ind]: + batch_inds = latents[list_ind]['batch_index'] + else: + batch_inds = [x+offset for x in range(shape[0])] + return samples, mask, batch_inds + + @staticmethod + def get_slices(indexable, num, batch_size): + '''divides an indexable object into num slices of length batch_size, and a remainder''' + slices = [] + for i in range(num): + slices.append(indexable[i*batch_size:(i+1)*batch_size]) + if num * batch_size < len(indexable): + return slices, indexable[num * batch_size:] + else: + return slices, None + + @staticmethod + def slice_batch(batch, num, batch_size): + result = [LatentRebatch.get_slices(x, num, batch_size) for x in batch] + return list(zip(*result)) + + @staticmethod + def cat_batch(batch1, batch2): + if batch1[0] is None: + return batch2 + result = [torch.cat((b1, b2)) if torch.is_tensor(b1) else b1 + b2 for b1, b2 in zip(batch1, batch2)] + return result + + def rebatch(self, latents, batch_size): + batch_size = batch_size[0] + + output_list = [] + current_batch = (None, None, None) + processed = 0 + + for i in range(len(latents)): + # fetch new entry of list + #samples, masks, indices = self.get_batch(latents, i) + next_batch = self.get_batch(latents, i, processed) + processed += len(next_batch[2]) + # set to current if current is None + if current_batch[0] is None: + current_batch = next_batch + # add previous to list if dimensions do not match + elif next_batch[0].shape[-1] != current_batch[0].shape[-1] or next_batch[0].shape[-2] != current_batch[0].shape[-2]: + sliced, _ = self.slice_batch(current_batch, 1, batch_size) + output_list.append({'samples': sliced[0][0], 'noise_mask': sliced[1][0], 'batch_index': sliced[2][0]}) + current_batch = next_batch + # cat if everything checks out + else: + current_batch = self.cat_batch(current_batch, next_batch) + + # add to list if dimensions gone above target batch size + if current_batch[0].shape[0] > batch_size: + num = current_batch[0].shape[0] // batch_size + sliced, remainder = 
self.slice_batch(current_batch, num, batch_size) + + for i in range(num): + output_list.append({'samples': sliced[0][i], 'noise_mask': sliced[1][i], 'batch_index': sliced[2][i]}) + + current_batch = remainder + + #add remainder + if current_batch[0] is not None: + sliced, _ = self.slice_batch(current_batch, 1, batch_size) + output_list.append({'samples': sliced[0][0], 'noise_mask': sliced[1][0], 'batch_index': sliced[2][0]}) + + #get rid of empty masks + for s in output_list: + if s['noise_mask'].mean() == 1.0: + del s['noise_mask'] + + return (output_list,) + +class ImageRebatch: + @classmethod + def INPUT_TYPES(s): + return {"required": { "images": ("IMAGE",), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}), + }} + RETURN_TYPES = ("IMAGE",) + INPUT_IS_LIST = True + OUTPUT_IS_LIST = (True, ) + + FUNCTION = "rebatch" + + CATEGORY = "image/batch" + + def rebatch(self, images, batch_size): + batch_size = batch_size[0] + + output_list = [] + all_images = [] + for img in images: + for i in range(img.shape[0]): + all_images.append(img[i:i+1]) + + for i in range(0, len(all_images), batch_size): + output_list.append(torch.cat(all_images[i:i+batch_size], dim=0)) + + return (output_list,) + +NODE_CLASS_MAPPINGS = { + "RebatchLatents": LatentRebatch, + "RebatchImages": ImageRebatch, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "RebatchLatents": "Rebatch Latents", + "RebatchImages": "Rebatch Images", +} diff --git a/src/comfyui/comfy_extras/nodes_sag.py b/src/comfyui/comfy_extras/nodes_sag.py new file mode 100644 index 0000000000000000000000000000000000000000..5e15b99e56712f05a64b12e41bbd5304dae1d665 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_sag.py @@ -0,0 +1,169 @@ +import torch +from torch import einsum +import torch.nn.functional as F +import math + +from einops import rearrange, repeat +from comfy.ldm.modules.attention import optimized_attention +import comfy.samplers + +# from comfy/ldm/modules/attention.py +# but modified to return attention scores as well as output +def attention_basic_with_sim(q, k, v, heads, mask=None, attn_precision=None): + b, _, dim_head = q.shape + dim_head //= heads + scale = dim_head ** -0.5 + + h = heads + q, k, v = map( + lambda t: t.unsqueeze(3) + .reshape(b, -1, heads, dim_head) + .permute(0, 2, 1, 3) + .reshape(b * heads, -1, dim_head) + .contiguous(), + (q, k, v), + ) + + # force cast to fp32 to avoid overflowing + if attn_precision == torch.float32: + sim = einsum('b i d, b j d -> b i j', q.float(), k.float()) * scale + else: + sim = einsum('b i d, b j d -> b i j', q, k) * scale + + del q, k + + if mask is not None: + mask = rearrange(mask, 'b ... 
-> b (...)') + max_neg_value = -torch.finfo(sim.dtype).max + mask = repeat(mask, 'b j -> (b h) () j', h=h) + sim.masked_fill_(~mask, max_neg_value) + + # attention, what we cannot get enough of + sim = sim.softmax(dim=-1) + + out = einsum('b i j, b j d -> b i d', sim.to(v.dtype), v) + out = ( + out.unsqueeze(0) + .reshape(b, heads, -1, dim_head) + .permute(0, 2, 1, 3) + .reshape(b, -1, heads * dim_head) + ) + return (out, sim) + +def create_blur_map(x0, attn, sigma=3.0, threshold=1.0): + # reshape and GAP the attention map + _, hw1, hw2 = attn.shape + b, _, lh, lw = x0.shape + attn = attn.reshape(b, -1, hw1, hw2) + # Global Average Pool + mask = attn.mean(1, keepdim=False).sum(1, keepdim=False) > threshold + ratio = 2**(math.ceil(math.sqrt(lh * lw / hw1)) - 1).bit_length() + mid_shape = [math.ceil(lh / ratio), math.ceil(lw / ratio)] + + # Reshape + mask = ( + mask.reshape(b, *mid_shape) + .unsqueeze(1) + .type(attn.dtype) + ) + # Upsample + mask = F.interpolate(mask, (lh, lw)) + + blurred = gaussian_blur_2d(x0, kernel_size=9, sigma=sigma) + blurred = blurred * mask + x0 * (1 - mask) + return blurred + +def gaussian_blur_2d(img, kernel_size, sigma): + ksize_half = (kernel_size - 1) * 0.5 + + x = torch.linspace(-ksize_half, ksize_half, steps=kernel_size) + + pdf = torch.exp(-0.5 * (x / sigma).pow(2)) + + x_kernel = pdf / pdf.sum() + x_kernel = x_kernel.to(device=img.device, dtype=img.dtype) + + kernel2d = torch.mm(x_kernel[:, None], x_kernel[None, :]) + kernel2d = kernel2d.expand(img.shape[-3], 1, kernel2d.shape[0], kernel2d.shape[1]) + + padding = [kernel_size // 2, kernel_size // 2, kernel_size // 2, kernel_size // 2] + + img = F.pad(img, padding, mode="reflect") + img = F.conv2d(img, kernel2d, groups=img.shape[-3]) + return img + +class SelfAttentionGuidance: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "scale": ("FLOAT", {"default": 0.5, "min": -2.0, "max": 5.0, "step": 0.01}), + "blur_sigma": ("FLOAT", {"default": 2.0, "min": 0.0, "max": 10.0, "step": 0.1}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "_for_testing" + + def patch(self, model, scale, blur_sigma): + m = model.clone() + + attn_scores = None + + # TODO: make this work properly with chunked batches + # currently, we can only save the attn from one UNet call + def attn_and_record(q, k, v, extra_options): + nonlocal attn_scores + # if uncond, save the attention scores + heads = extra_options["n_heads"] + cond_or_uncond = extra_options["cond_or_uncond"] + b = q.shape[0] // len(cond_or_uncond) + if 1 in cond_or_uncond: + uncond_index = cond_or_uncond.index(1) + # do the entire attention operation, but save the attention scores to attn_scores + (out, sim) = attention_basic_with_sim(q, k, v, heads=heads, attn_precision=extra_options["attn_precision"]) + # when using a higher batch size, I BELIEVE the result batch dimension is [uc1, ... ucn, c1, ... 
cn] + n_slices = heads * b + attn_scores = sim[n_slices * uncond_index:n_slices * (uncond_index+1)] + return out + else: + return optimized_attention(q, k, v, heads=heads, attn_precision=extra_options["attn_precision"]) + + def post_cfg_function(args): + nonlocal attn_scores + uncond_attn = attn_scores + + sag_scale = scale + sag_sigma = blur_sigma + sag_threshold = 1.0 + model = args["model"] + uncond_pred = args["uncond_denoised"] + uncond = args["uncond"] + cfg_result = args["denoised"] + sigma = args["sigma"] + model_options = args["model_options"] + x = args["input"] + if min(cfg_result.shape[2:]) <= 4: #skip when too small to add padding + return cfg_result + + # create the adversarially blurred image + degraded = create_blur_map(uncond_pred, uncond_attn, sag_sigma, sag_threshold) + degraded_noised = degraded + x - uncond_pred + # call into the UNet + (sag,) = comfy.samplers.calc_cond_batch(model, [uncond], degraded_noised, sigma, model_options) + return cfg_result + (degraded - sag) * sag_scale + + m.set_model_sampler_post_cfg_function(post_cfg_function, disable_cfg1_optimization=True) + + # from diffusers: + # unet.mid_block.attentions[0].transformer_blocks[0].attn1.patch + m.set_model_attn1_replace(attn_and_record, "middle", 0, 0) + + return (m, ) + +NODE_CLASS_MAPPINGS = { + "SelfAttentionGuidance": SelfAttentionGuidance, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "SelfAttentionGuidance": "Self-Attention Guidance", +} diff --git a/src/comfyui/comfy_extras/nodes_sd3.py b/src/comfyui/comfy_extras/nodes_sd3.py new file mode 100644 index 0000000000000000000000000000000000000000..4d664093cd4340c2b732c4f97277425a694b9eb9 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_sd3.py @@ -0,0 +1,167 @@ +import folder_paths +import comfy.sd +import comfy.model_management +import nodes +import torch +import re +class TripleCLIPLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip_name1": (folder_paths.get_filename_list("clip"), ), "clip_name2": (folder_paths.get_filename_list("clip"), ), "clip_name3": (folder_paths.get_filename_list("clip"), ) + }} + RETURN_TYPES = ("CLIP",) + FUNCTION = "load_clip" + + CATEGORY = "advanced/loaders" + + def load_clip(self, clip_name1, clip_name2, clip_name3): + clip_path1 = folder_paths.get_full_path_or_raise("clip", clip_name1) + clip_path2 = folder_paths.get_full_path_or_raise("clip", clip_name2) + clip_path3 = folder_paths.get_full_path_or_raise("clip", clip_name3) + clip = comfy.sd.load_clip(ckpt_paths=[clip_path1, clip_path2, clip_path3], embedding_directory=folder_paths.get_folder_paths("embeddings")) + return (clip,) + +class EmptySD3LatentImage: + def __init__(self): + self.device = comfy.model_management.intermediate_device() + + @classmethod + def INPUT_TYPES(s): + return {"required": { "width": ("INT", {"default": 1024, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 16}), + "height": ("INT", {"default": 1024, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 16}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096})}} + RETURN_TYPES = ("LATENT",) + FUNCTION = "generate" + + CATEGORY = "latent/sd3" + + def generate(self, width, height, batch_size=1): + latent = torch.zeros([batch_size, 16, height // 8, width // 8], device=self.device) + return ({"samples":latent}, ) + +class CLIPTextEncodeSD3: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "clip": ("CLIP", ), + "clip_l": ("STRING", {"multiline": True, "dynamicPrompts": True}), + "clip_g": ("STRING", {"multiline": True, "dynamicPrompts": True}), + "t5xxl": 
("STRING", {"multiline": True, "dynamicPrompts": True}), + "empty_padding": (["none", "empty_prompt"], ) + }} + RETURN_TYPES = ("CONDITIONING",) + FUNCTION = "encode" + + CATEGORY = "advanced/conditioning" + + def encode(self, clip, clip_l, clip_g, t5xxl, empty_padding): + no_padding = empty_padding == "none" + + tokens = clip.tokenize(clip_g) + if len(clip_g) == 0 and no_padding: + tokens["g"] = [] + + if len(clip_l) == 0 and no_padding: + tokens["l"] = [] + else: + tokens["l"] = clip.tokenize(clip_l)["l"] + + if len(t5xxl) == 0 and no_padding: + tokens["t5xxl"] = [] + else: + tokens["t5xxl"] = clip.tokenize(t5xxl)["t5xxl"] + if len(tokens["l"]) != len(tokens["g"]): + empty = clip.tokenize("") + while len(tokens["l"]) < len(tokens["g"]): + tokens["l"] += empty["l"] + while len(tokens["l"]) > len(tokens["g"]): + tokens["g"] += empty["g"] + cond, pooled = clip.encode_from_tokens(tokens, return_pooled=True) + return ([[cond, {"pooled_output": pooled}]], ) + + +class ControlNetApplySD3(nodes.ControlNetApplyAdvanced): + @classmethod + def INPUT_TYPES(s): + return {"required": {"positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "control_net": ("CONTROL_NET", ), + "vae": ("VAE", ), + "image": ("IMAGE", ), + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "start_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_percent": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}) + }} + CATEGORY = "conditioning/controlnet" + DEPRECATED = True + +class SkipLayerGuidanceSD3: + ''' + Enhance guidance towards detailed dtructure by having another set of CFG negative with skipped layers. + Inspired by Perturbed Attention Guidance (https://arxiv.org/abs/2403.17377) + Experimental implementation by Dango233@StabilityAI. 
+ ''' + @classmethod + def INPUT_TYPES(s): + return {"required": {"model": ("MODEL", ), + "layers": ("STRING", {"default": "7, 8, 9", "multiline": False}), + "scale": ("FLOAT", {"default": 3.0, "min": 0.0, "max": 10.0, "step": 0.1}), + "start_percent": ("FLOAT", {"default": 0.01, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_percent": ("FLOAT", {"default": 0.15, "min": 0.0, "max": 1.0, "step": 0.001}) + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "skip_guidance" + + CATEGORY = "advanced/guidance" + + + def skip_guidance(self, model, layers, scale, start_percent, end_percent): + if layers == "" or layers == None: + return (model, ) + # check if layer is comma separated integers + def skip(args, extra_args): + return args + + model_sampling = model.get_model_object("model_sampling") + sigma_start = model_sampling.percent_to_sigma(start_percent) + sigma_end = model_sampling.percent_to_sigma(end_percent) + + def post_cfg_function(args): + model = args["model"] + cond_pred = args["cond_denoised"] + cond = args["cond"] + cfg_result = args["denoised"] + sigma = args["sigma"] + x = args["input"] + model_options = args["model_options"].copy() + + for layer in layers: + model_options = comfy.model_patcher.set_model_options_patch_replace(model_options, skip, "dit", "double_block", layer) + model_sampling.percent_to_sigma(start_percent) + + sigma_ = sigma[0].item() + if scale > 0 and sigma_ >= sigma_end and sigma_ <= sigma_start: + (slg,) = comfy.samplers.calc_cond_batch(model, [cond], x, sigma, model_options) + cfg_result = cfg_result + (cond_pred - slg) * scale + return cfg_result + + layers = re.findall(r'\d+', layers) + layers = [int(i) for i in layers] + m = model.clone() + m.set_model_sampler_post_cfg_function(post_cfg_function) + + return (m, ) + + +NODE_CLASS_MAPPINGS = { + "TripleCLIPLoader": TripleCLIPLoader, + "EmptySD3LatentImage": EmptySD3LatentImage, + "CLIPTextEncodeSD3": CLIPTextEncodeSD3, + "ControlNetApplySD3": ControlNetApplySD3, + "SkipLayerGuidanceSD3": SkipLayerGuidanceSD3, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + # Sampling + "ControlNetApplySD3": "Apply Controlnet with VAE", +} diff --git a/src/comfyui/comfy_extras/nodes_sdupscale.py b/src/comfyui/comfy_extras/nodes_sdupscale.py new file mode 100644 index 0000000000000000000000000000000000000000..bba67e8ddff8064a90ec0f8e71e953ca4e56c4c6 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_sdupscale.py @@ -0,0 +1,46 @@ +import torch +import comfy.utils + +class SD_4XUpscale_Conditioning: + @classmethod + def INPUT_TYPES(s): + return {"required": { "images": ("IMAGE",), + "positive": ("CONDITIONING",), + "negative": ("CONDITIONING",), + "scale_ratio": ("FLOAT", {"default": 4.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "noise_augmentation": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + }} + RETURN_TYPES = ("CONDITIONING", "CONDITIONING", "LATENT") + RETURN_NAMES = ("positive", "negative", "latent") + + FUNCTION = "encode" + + CATEGORY = "conditioning/upscale_diffusion" + + def encode(self, images, positive, negative, scale_ratio, noise_augmentation): + width = max(1, round(images.shape[-2] * scale_ratio)) + height = max(1, round(images.shape[-3] * scale_ratio)) + + pixels = comfy.utils.common_upscale((images.movedim(-1,1) * 2.0) - 1.0, width // 4, height // 4, "bilinear", "center") + + out_cp = [] + out_cn = [] + + for t in positive: + n = [t[0], t[1].copy()] + n[1]['concat_image'] = pixels + n[1]['noise_augmentation'] = noise_augmentation + out_cp.append(n) + + for t in negative: + n = [t[0], t[1].copy()] 
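+# (Refers to SkipLayerGuidanceSD3 above.) The update it applies is PAG-shaped:
+#   out = cfg_result + (cond_pred - skip_layer_pred) * scale
+# active only while sigma_end <= sigma <= sigma_start, with the listed DiT
+# double_blocks patched to an identity pass. The layer string is parsed exactly
+# as in the node:
+#   import re
+#   layers = [int(i) for i in re.findall(r'\d+', "7, 8, 9")]
+#   assert layers == [7, 8, 9]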
+ n[1]['concat_image'] = pixels + n[1]['noise_augmentation'] = noise_augmentation + out_cn.append(n) + + latent = torch.zeros([images.shape[0], 4, height // 4, width // 4]) + return (out_cp, out_cn, {"samples":latent}) + +NODE_CLASS_MAPPINGS = { + "SD_4XUpscale_Conditioning": SD_4XUpscale_Conditioning, +} diff --git a/src/comfyui/comfy_extras/nodes_stable3d.py b/src/comfyui/comfy_extras/nodes_stable3d.py new file mode 100644 index 0000000000000000000000000000000000000000..be2e34c28f49f160a21703c313305193ed00546f --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_stable3d.py @@ -0,0 +1,143 @@ +import torch +import nodes +import comfy.utils + +def camera_embeddings(elevation, azimuth): + elevation = torch.as_tensor([elevation]) + azimuth = torch.as_tensor([azimuth]) + embeddings = torch.stack( + [ + torch.deg2rad( + (90 - elevation) - (90) + ), # Zero123 polar is 90-elevation + torch.sin(torch.deg2rad(azimuth)), + torch.cos(torch.deg2rad(azimuth)), + torch.deg2rad( + 90 - torch.full_like(elevation, 0) + ), + ], dim=-1).unsqueeze(1) + + return embeddings + + +class StableZero123_Conditioning: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip_vision": ("CLIP_VISION",), + "init_image": ("IMAGE",), + "vae": ("VAE",), + "width": ("INT", {"default": 256, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 256, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}), + "elevation": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0, "step": 0.1, "round": False}), + "azimuth": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0, "step": 0.1, "round": False}), + }} + RETURN_TYPES = ("CONDITIONING", "CONDITIONING", "LATENT") + RETURN_NAMES = ("positive", "negative", "latent") + + FUNCTION = "encode" + + CATEGORY = "conditioning/3d_models" + + def encode(self, clip_vision, init_image, vae, width, height, batch_size, elevation, azimuth): + output = clip_vision.encode_image(init_image) + pooled = output.image_embeds.unsqueeze(0) + pixels = comfy.utils.common_upscale(init_image.movedim(-1,1), width, height, "bilinear", "center").movedim(1,-1) + encode_pixels = pixels[:,:,:,:3] + t = vae.encode(encode_pixels) + cam_embeds = camera_embeddings(elevation, azimuth) + cond = torch.cat([pooled, cam_embeds.to(pooled.device).repeat((pooled.shape[0], 1, 1))], dim=-1) + + positive = [[cond, {"concat_latent_image": t}]] + negative = [[torch.zeros_like(pooled), {"concat_latent_image": torch.zeros_like(t)}]] + latent = torch.zeros([batch_size, 4, height // 8, width // 8]) + return (positive, negative, {"samples":latent}) + +class StableZero123_Conditioning_Batched: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip_vision": ("CLIP_VISION",), + "init_image": ("IMAGE",), + "vae": ("VAE",), + "width": ("INT", {"default": 256, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 256, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}), + "elevation": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0, "step": 0.1, "round": False}), + "azimuth": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0, "step": 0.1, "round": False}), + "elevation_batch_increment": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0, "step": 0.1, "round": False}), + "azimuth_batch_increment": ("FLOAT", {"default": 0.0, "min": -180.0, "max": 180.0, "step": 0.1, "round": False}), + }} + RETURN_TYPES = 
("CONDITIONING", "CONDITIONING", "LATENT") + RETURN_NAMES = ("positive", "negative", "latent") + + FUNCTION = "encode" + + CATEGORY = "conditioning/3d_models" + + def encode(self, clip_vision, init_image, vae, width, height, batch_size, elevation, azimuth, elevation_batch_increment, azimuth_batch_increment): + output = clip_vision.encode_image(init_image) + pooled = output.image_embeds.unsqueeze(0) + pixels = comfy.utils.common_upscale(init_image.movedim(-1,1), width, height, "bilinear", "center").movedim(1,-1) + encode_pixels = pixels[:,:,:,:3] + t = vae.encode(encode_pixels) + + cam_embeds = [] + for i in range(batch_size): + cam_embeds.append(camera_embeddings(elevation, azimuth)) + elevation += elevation_batch_increment + azimuth += azimuth_batch_increment + + cam_embeds = torch.cat(cam_embeds, dim=0) + cond = torch.cat([comfy.utils.repeat_to_batch_size(pooled, batch_size), cam_embeds], dim=-1) + + positive = [[cond, {"concat_latent_image": t}]] + negative = [[torch.zeros_like(pooled), {"concat_latent_image": torch.zeros_like(t)}]] + latent = torch.zeros([batch_size, 4, height // 8, width // 8]) + return (positive, negative, {"samples":latent, "batch_index": [0] * batch_size}) + +class SV3D_Conditioning: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip_vision": ("CLIP_VISION",), + "init_image": ("IMAGE",), + "vae": ("VAE",), + "width": ("INT", {"default": 576, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 576, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), + "video_frames": ("INT", {"default": 21, "min": 1, "max": 4096}), + "elevation": ("FLOAT", {"default": 0.0, "min": -90.0, "max": 90.0, "step": 0.1, "round": False}), + }} + RETURN_TYPES = ("CONDITIONING", "CONDITIONING", "LATENT") + RETURN_NAMES = ("positive", "negative", "latent") + + FUNCTION = "encode" + + CATEGORY = "conditioning/3d_models" + + def encode(self, clip_vision, init_image, vae, width, height, video_frames, elevation): + output = clip_vision.encode_image(init_image) + pooled = output.image_embeds.unsqueeze(0) + pixels = comfy.utils.common_upscale(init_image.movedim(-1,1), width, height, "bilinear", "center").movedim(1,-1) + encode_pixels = pixels[:,:,:,:3] + t = vae.encode(encode_pixels) + + azimuth = 0 + azimuth_increment = 360 / (max(video_frames, 2) - 1) + + elevations = [] + azimuths = [] + for i in range(video_frames): + elevations.append(elevation) + azimuths.append(azimuth) + azimuth += azimuth_increment + + positive = [[pooled, {"concat_latent_image": t, "elevation": elevations, "azimuth": azimuths}]] + negative = [[torch.zeros_like(pooled), {"concat_latent_image": torch.zeros_like(t), "elevation": elevations, "azimuth": azimuths}]] + latent = torch.zeros([video_frames, 4, height // 8, width // 8]) + return (positive, negative, {"samples":latent}) + + +NODE_CLASS_MAPPINGS = { + "StableZero123_Conditioning": StableZero123_Conditioning, + "StableZero123_Conditioning_Batched": StableZero123_Conditioning_Batched, + "SV3D_Conditioning": SV3D_Conditioning, +} diff --git a/src/comfyui/comfy_extras/nodes_stable_cascade.py b/src/comfyui/comfy_extras/nodes_stable_cascade.py new file mode 100644 index 0000000000000000000000000000000000000000..0034032150e6eac48d18bb6bd35819114a01fe8d --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_stable_cascade.py @@ -0,0 +1,141 @@ +""" + This file is part of ComfyUI. 
+ Copyright (C) 2024 Stability AI + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see https://www.gnu.org/licenses/. +""" + +import torch +import nodes +import comfy.utils + + +class StableCascade_EmptyLatentImage: + def __init__(self, device="cpu"): + self.device = device + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "width": ("INT", {"default": 1024, "min": 256, "max": nodes.MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 1024, "min": 256, "max": nodes.MAX_RESOLUTION, "step": 8}), + "compression": ("INT", {"default": 42, "min": 4, "max": 128, "step": 1}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 4096}) + }} + RETURN_TYPES = ("LATENT", "LATENT") + RETURN_NAMES = ("stage_c", "stage_b") + FUNCTION = "generate" + + CATEGORY = "latent/stable_cascade" + + def generate(self, width, height, compression, batch_size=1): + c_latent = torch.zeros([batch_size, 16, height // compression, width // compression]) + b_latent = torch.zeros([batch_size, 4, height // 4, width // 4]) + return ({ + "samples": c_latent, + }, { + "samples": b_latent, + }) + +class StableCascade_StageC_VAEEncode: + def __init__(self, device="cpu"): + self.device = device + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image": ("IMAGE",), + "vae": ("VAE", ), + "compression": ("INT", {"default": 42, "min": 4, "max": 128, "step": 1}), + }} + RETURN_TYPES = ("LATENT", "LATENT") + RETURN_NAMES = ("stage_c", "stage_b") + FUNCTION = "generate" + + CATEGORY = "latent/stable_cascade" + + def generate(self, image, vae, compression): + width = image.shape[-2] + height = image.shape[-3] + out_width = (width // compression) * vae.downscale_ratio + out_height = (height // compression) * vae.downscale_ratio + + s = comfy.utils.common_upscale(image.movedim(-1,1), out_width, out_height, "bicubic", "center").movedim(1,-1) + + c_latent = vae.encode(s[:,:,:,:3]) + b_latent = torch.zeros([c_latent.shape[0], 4, (height // 8) * 2, (width // 8) * 2]) + return ({ + "samples": c_latent, + }, { + "samples": b_latent, + }) + +class StableCascade_StageB_Conditioning: + @classmethod + def INPUT_TYPES(s): + return {"required": { "conditioning": ("CONDITIONING",), + "stage_c": ("LATENT",), + }} + RETURN_TYPES = ("CONDITIONING",) + + FUNCTION = "set_prior" + + CATEGORY = "conditioning/stable_cascade" + + def set_prior(self, conditioning, stage_c): + c = [] + for t in conditioning: + d = t[1].copy() + d['stable_cascade_prior'] = stage_c['samples'] + n = [t[0], d] + c.append(n) + return (c, ) + +class StableCascade_SuperResolutionControlnet: + def __init__(self, device="cpu"): + self.device = device + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image": ("IMAGE",), + "vae": ("VAE", ), + }} + RETURN_TYPES = ("IMAGE", "LATENT", "LATENT") + RETURN_NAMES = ("controlnet_input", "stage_c", "stage_b") + FUNCTION = "generate" + + EXPERIMENTAL = True + CATEGORY = "_for_testing/stable_cascade" + + def generate(self, image, vae): + width = image.shape[-2] + height = image.shape[-3] 
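+# Shape arithmetic for the Stable Cascade latents above (a quick illustrative
+# check, not node code): stage C is image_size // compression with 16 channels,
+# stage B is a fixed image_size // 4 with 4 channels. With the node defaults
+# (1024 px, compression 42):
+#   w_px = h_px = 1024
+#   compression_ = 42
+#   assert (h_px // compression_, w_px // compression_) == (24, 24)  # stage_c: [B, 16, 24, 24]
+#   assert (h_px // 4, w_px // 4) == (256, 256)                      # stage_b: [B, 4, 256, 256]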
+ batch_size = image.shape[0] + controlnet_input = vae.encode(image[:,:,:,:3]).movedim(1, -1) + + c_latent = torch.zeros([batch_size, 16, height // 16, width // 16]) + b_latent = torch.zeros([batch_size, 4, height // 2, width // 2]) + return (controlnet_input, { + "samples": c_latent, + }, { + "samples": b_latent, + }) + +NODE_CLASS_MAPPINGS = { + "StableCascade_EmptyLatentImage": StableCascade_EmptyLatentImage, + "StableCascade_StageB_Conditioning": StableCascade_StageB_Conditioning, + "StableCascade_StageC_VAEEncode": StableCascade_StageC_VAEEncode, + "StableCascade_SuperResolutionControlnet": StableCascade_SuperResolutionControlnet, +} diff --git a/src/comfyui/comfy_extras/nodes_tomesd.py b/src/comfyui/comfy_extras/nodes_tomesd.py new file mode 100644 index 0000000000000000000000000000000000000000..ce7b32c773e2393669c51279b7b6da7705622b13 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_tomesd.py @@ -0,0 +1,177 @@ +#Taken from: https://github.com/dbolya/tomesd + +import torch +from typing import Tuple, Callable +import math + +def do_nothing(x: torch.Tensor, mode:str=None): + return x + + +def mps_gather_workaround(input, dim, index): + if input.shape[-1] == 1: + return torch.gather( + input.unsqueeze(-1), + dim - 1 if dim < 0 else dim, + index.unsqueeze(-1) + ).squeeze(-1) + else: + return torch.gather(input, dim, index) + + +def bipartite_soft_matching_random2d(metric: torch.Tensor, + w: int, h: int, sx: int, sy: int, r: int, + no_rand: bool = False) -> Tuple[Callable, Callable]: + """ + Partitions the tokens into src and dst and merges r tokens from src to dst. + Dst tokens are partitioned by choosing one randomy in each (sx, sy) region. + Args: + - metric [B, N, C]: metric to use for similarity + - w: image width in tokens + - h: image height in tokens + - sx: stride in the x dimension for dst, must divide w + - sy: stride in the y dimension for dst, must divide h + - r: number of tokens to remove (by merging) + - no_rand: if true, disable randomness (use top left corner only) + """ + B, N, _ = metric.shape + + if r <= 0 or w == 1 or h == 1: + return do_nothing, do_nothing + + gather = mps_gather_workaround if metric.device.type == "mps" else torch.gather + + with torch.no_grad(): + + hsy, wsx = h // sy, w // sx + + # For each sy by sx kernel, randomly assign one token to be dst and the rest src + if no_rand: + rand_idx = torch.zeros(hsy, wsx, 1, device=metric.device, dtype=torch.int64) + else: + rand_idx = torch.randint(sy*sx, size=(hsy, wsx, 1), device=metric.device) + + # The image might not divide sx and sy, so we need to work on a view of the top left if the idx buffer instead + idx_buffer_view = torch.zeros(hsy, wsx, sy*sx, device=metric.device, dtype=torch.int64) + idx_buffer_view.scatter_(dim=2, index=rand_idx, src=-torch.ones_like(rand_idx, dtype=rand_idx.dtype)) + idx_buffer_view = idx_buffer_view.view(hsy, wsx, sy, sx).transpose(1, 2).reshape(hsy * sy, wsx * sx) + + # Image is not divisible by sx or sy so we need to move it into a new buffer + if (hsy * sy) < h or (wsx * sx) < w: + idx_buffer = torch.zeros(h, w, device=metric.device, dtype=torch.int64) + idx_buffer[:(hsy * sy), :(wsx * sx)] = idx_buffer_view + else: + idx_buffer = idx_buffer_view + + # We set dst tokens to be -1 and src to be 0, so an argsort gives us dst|src indices + rand_idx = idx_buffer.reshape(1, -1, 1).argsort(dim=1) + + # We're finished with these + del idx_buffer, idx_buffer_view + + # rand_idx is currently dst|src, so split them + num_dst = hsy * wsx + a_idx = rand_idx[:, num_dst:, :] # src + 
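+# Bookkeeping sketch for the ToMe partition above: one dst token is picked per
+# (sy, sx) region, everything else is src, and at most r src tokens get merged
+# into their most similar dst. For a 64x64 token grid with this node's 2x2
+# stride and a ratio of 0.3 (numbers are illustrative):
+#   w_tok = h_tok = 64
+#   sx_, sy_, ratio_ = 2, 2, 0.3
+#   num_dst_ = (h_tok // sy_) * (w_tok // sx_)   # 1024 dst tokens
+#   r_ = int(w_tok * h_tok * ratio_)             # 1228 merge candidates
+#   r_ = min(r_, w_tok * h_tok - num_dst_)       # capped by the 3072 src tokens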
b_idx = rand_idx[:, :num_dst, :] # dst + + def split(x): + C = x.shape[-1] + src = gather(x, dim=1, index=a_idx.expand(B, N - num_dst, C)) + dst = gather(x, dim=1, index=b_idx.expand(B, num_dst, C)) + return src, dst + + # Cosine similarity between A and B + metric = metric / metric.norm(dim=-1, keepdim=True) + a, b = split(metric) + scores = a @ b.transpose(-1, -2) + + # Can't reduce more than the # tokens in src + r = min(a.shape[1], r) + + # Find the most similar greedily + node_max, node_idx = scores.max(dim=-1) + edge_idx = node_max.argsort(dim=-1, descending=True)[..., None] + + unm_idx = edge_idx[..., r:, :] # Unmerged Tokens + src_idx = edge_idx[..., :r, :] # Merged Tokens + dst_idx = gather(node_idx[..., None], dim=-2, index=src_idx) + + def merge(x: torch.Tensor, mode="mean") -> torch.Tensor: + src, dst = split(x) + n, t1, c = src.shape + + unm = gather(src, dim=-2, index=unm_idx.expand(n, t1 - r, c)) + src = gather(src, dim=-2, index=src_idx.expand(n, r, c)) + dst = dst.scatter_reduce(-2, dst_idx.expand(n, r, c), src, reduce=mode) + + return torch.cat([unm, dst], dim=1) + + def unmerge(x: torch.Tensor) -> torch.Tensor: + unm_len = unm_idx.shape[1] + unm, dst = x[..., :unm_len, :], x[..., unm_len:, :] + _, _, c = unm.shape + + src = gather(dst, dim=-2, index=dst_idx.expand(B, r, c)) + + # Combine back to the original shape + out = torch.zeros(B, N, c, device=x.device, dtype=x.dtype) + out.scatter_(dim=-2, index=b_idx.expand(B, num_dst, c), src=dst) + out.scatter_(dim=-2, index=gather(a_idx.expand(B, a_idx.shape[1], 1), dim=1, index=unm_idx).expand(B, unm_len, c), src=unm) + out.scatter_(dim=-2, index=gather(a_idx.expand(B, a_idx.shape[1], 1), dim=1, index=src_idx).expand(B, r, c), src=src) + + return out + + return merge, unmerge + + +def get_functions(x, ratio, original_shape): + b, c, original_h, original_w = original_shape + original_tokens = original_h * original_w + downsample = int(math.ceil(math.sqrt(original_tokens // x.shape[1]))) + stride_x = 2 + stride_y = 2 + max_downsample = 1 + + if downsample <= max_downsample: + w = int(math.ceil(original_w / downsample)) + h = int(math.ceil(original_h / downsample)) + r = int(x.shape[1] * ratio) + no_rand = False + m, u = bipartite_soft_matching_random2d(x, w, h, stride_x, stride_y, r, no_rand) + return m, u + + nothing = lambda y: y + return nothing, nothing + + + +class TomePatchModel: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "ratio": ("FLOAT", {"default": 0.3, "min": 0.0, "max": 1.0, "step": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "model_patches/unet" + + def patch(self, model, ratio): + self.u = None + def tomesd_m(q, k, v, extra_options): + #NOTE: In the reference code get_functions takes x (input of the transformer block) as the argument instead of q + #however from my basic testing it seems that using q instead gives better results + m, self.u = get_functions(q, ratio, extra_options["original_shape"]) + return m(q), k, v + def tomesd_u(n, extra_options): + return self.u(n) + + m = model.clone() + m.set_model_attn1_patch(tomesd_m) + m.set_model_attn1_output_patch(tomesd_u) + return (m, ) + + +NODE_CLASS_MAPPINGS = { + "TomePatchModel": TomePatchModel, +} diff --git a/src/comfyui/comfy_extras/nodes_torch_compile.py b/src/comfyui/comfy_extras/nodes_torch_compile.py new file mode 100644 index 0000000000000000000000000000000000000000..1fe6f42c725f193b1e1147846b5793437dbbfd57 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_torch_compile.py @@ -0,0 
+1,22 @@ +import torch + +class TorchCompileModel: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "backend": (["inductor", "cudagraphs"],), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "_for_testing" + EXPERIMENTAL = True + + def patch(self, model, backend): + m = model.clone() + m.add_object_patch("diffusion_model", torch.compile(model=m.get_model_object("diffusion_model"), backend=backend)) + return (m, ) + +NODE_CLASS_MAPPINGS = { + "TorchCompileModel": TorchCompileModel, +} diff --git a/src/comfyui/comfy_extras/nodes_upscale_model.py b/src/comfyui/comfy_extras/nodes_upscale_model.py new file mode 100644 index 0000000000000000000000000000000000000000..6ba3e404f2e1234fd5a3847f79c1a84fede0db15 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_upscale_model.py @@ -0,0 +1,84 @@ +import os +import logging +from spandrel import ModelLoader, ImageModelDescriptor +from comfy import model_management +import torch +import comfy.utils +import folder_paths + +try: + from spandrel_extra_arches import EXTRA_REGISTRY + from spandrel import MAIN_REGISTRY + MAIN_REGISTRY.add(*EXTRA_REGISTRY) + logging.info("Successfully imported spandrel_extra_arches: support for non-commercial upscale models.") +except Exception: + pass # spandrel_extra_arches is an optional dependency + +class UpscaleModelLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model_name": (folder_paths.get_filename_list("upscale_models"), ), + }} + RETURN_TYPES = ("UPSCALE_MODEL",) + FUNCTION = "load_model" + + CATEGORY = "loaders" + + def load_model(self, model_name): + model_path = folder_paths.get_full_path_or_raise("upscale_models", model_name) + sd = comfy.utils.load_torch_file(model_path, safe_load=True) + if "module.layers.0.residual_group.blocks.0.norm1.weight" in sd: + sd = comfy.utils.state_dict_prefix_replace(sd, {"module.":""}) + out = ModelLoader().load_from_state_dict(sd).eval() + + if not isinstance(out, ImageModelDescriptor): + raise Exception("Upscale model must be a single-image model.") + + return (out, ) + + +class ImageUpscaleWithModel: + @classmethod + def INPUT_TYPES(s): + return {"required": { "upscale_model": ("UPSCALE_MODEL",), + "image": ("IMAGE",), + }} + RETURN_TYPES = ("IMAGE",) + FUNCTION = "upscale" + + CATEGORY = "image/upscaling" + + def upscale(self, upscale_model, image): + device = model_management.get_torch_device() + + memory_required = model_management.module_size(upscale_model.model) + memory_required += (512 * 512 * 3) * image.element_size() * max(upscale_model.scale, 1.0) * 384.0 # The 384.0 is an estimate of how much some of these models take, TODO: make it more accurate + memory_required += image.nelement() * image.element_size() + model_management.free_memory(memory_required, device) + + upscale_model.to(device) + in_img = image.movedim(-1,-3).to(device) + + tile = 512 + overlap = 32 + + oom = True + while oom: + try: + steps = in_img.shape[0] * comfy.utils.get_tiled_scale_steps(in_img.shape[3], in_img.shape[2], tile_x=tile, tile_y=tile, overlap=overlap) + pbar = comfy.utils.ProgressBar(steps) + s = comfy.utils.tiled_scale(in_img, lambda a: upscale_model(a), tile_x=tile, tile_y=tile, overlap=overlap, upscale_amount=upscale_model.scale, pbar=pbar) + oom = False + except model_management.OOM_EXCEPTION as e: + tile //= 2 + if tile < 128: + raise e + + upscale_model.to("cpu") + s = torch.clamp(s.movedim(-3,-1), min=0, max=1.0) + return (s,) + +NODE_CLASS_MAPPINGS = { + "UpscaleModelLoader": UpscaleModelLoader, + "ImageUpscaleWithModel": ImageUpscaleWithModel +} diff --git
a/src/comfyui/comfy_extras/nodes_video_model.py b/src/comfyui/comfy_extras/nodes_video_model.py new file mode 100644 index 0000000000000000000000000000000000000000..e7a7ec181fca8ebb5662b394cffaf55a953b5f28 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_video_model.py @@ -0,0 +1,134 @@ +import nodes +import torch +import comfy.utils +import comfy.sd +import folder_paths +import comfy_extras.nodes_model_merging + + +class ImageOnlyCheckpointLoader: + @classmethod + def INPUT_TYPES(s): + return {"required": { "ckpt_name": (folder_paths.get_filename_list("checkpoints"), ), + }} + RETURN_TYPES = ("MODEL", "CLIP_VISION", "VAE") + FUNCTION = "load_checkpoint" + + CATEGORY = "loaders/video_models" + + def load_checkpoint(self, ckpt_name, output_vae=True, output_clip=True): + ckpt_path = folder_paths.get_full_path_or_raise("checkpoints", ckpt_name) + out = comfy.sd.load_checkpoint_guess_config(ckpt_path, output_vae=True, output_clip=False, output_clipvision=True, embedding_directory=folder_paths.get_folder_paths("embeddings")) + return (out[0], out[3], out[2]) + + +class SVD_img2vid_Conditioning: + @classmethod + def INPUT_TYPES(s): + return {"required": { "clip_vision": ("CLIP_VISION",), + "init_image": ("IMAGE",), + "vae": ("VAE",), + "width": ("INT", {"default": 1024, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 576, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), + "video_frames": ("INT", {"default": 14, "min": 1, "max": 4096}), + "motion_bucket_id": ("INT", {"default": 127, "min": 1, "max": 1023}), + "fps": ("INT", {"default": 6, "min": 1, "max": 1024}), + "augmentation_level": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 10.0, "step": 0.01}) + }} + RETURN_TYPES = ("CONDITIONING", "CONDITIONING", "LATENT") + RETURN_NAMES = ("positive", "negative", "latent") + + FUNCTION = "encode" + + CATEGORY = "conditioning/video_models" + + def encode(self, clip_vision, init_image, vae, width, height, video_frames, motion_bucket_id, fps, augmentation_level): + output = clip_vision.encode_image(init_image) + pooled = output.image_embeds.unsqueeze(0) + pixels = comfy.utils.common_upscale(init_image.movedim(-1,1), width, height, "bilinear", "center").movedim(1,-1) + encode_pixels = pixels[:,:,:,:3] + if augmentation_level > 0: + encode_pixels += torch.randn_like(pixels) * augmentation_level + t = vae.encode(encode_pixels) + positive = [[pooled, {"motion_bucket_id": motion_bucket_id, "fps": fps, "augmentation_level": augmentation_level, "concat_latent_image": t}]] + negative = [[torch.zeros_like(pooled), {"motion_bucket_id": motion_bucket_id, "fps": fps, "augmentation_level": augmentation_level, "concat_latent_image": torch.zeros_like(t)}]] + latent = torch.zeros([video_frames, 4, height // 8, width // 8]) + return (positive, negative, {"samples":latent}) + +class VideoLinearCFGGuidance: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "min_cfg": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.5, "round": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "sampling/video_models" + + def patch(self, model, min_cfg): + def linear_cfg(args): + cond = args["cond"] + uncond = args["uncond"] + cond_scale = args["cond_scale"] + + scale = torch.linspace(min_cfg, cond_scale, cond.shape[0], device=cond.device).reshape((cond.shape[0], 1, 1, 1)) + return uncond + scale * (cond - uncond) + + m = model.clone() + m.set_model_sampler_cfg_function(linear_cfg) + return (m, ) + +class 
VideoTriangleCFGGuidance: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "min_cfg": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step":0.5, "round": 0.01}), + }} + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "sampling/video_models" + + def patch(self, model, min_cfg): + def linear_cfg(args): + cond = args["cond"] + uncond = args["uncond"] + cond_scale = args["cond_scale"] + period = 1.0 + values = torch.linspace(0, 1, cond.shape[0], device=cond.device) + values = 2 * (values / period - torch.floor(values / period + 0.5)).abs() + scale = (values * (cond_scale - min_cfg) + min_cfg).reshape((cond.shape[0], 1, 1, 1)) + + return uncond + scale * (cond - uncond) + + m = model.clone() + m.set_model_sampler_cfg_function(linear_cfg) + return (m, ) + +class ImageOnlyCheckpointSave(comfy_extras.nodes_model_merging.CheckpointSave): + CATEGORY = "advanced/model_merging" + + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "clip_vision": ("CLIP_VISION",), + "vae": ("VAE",), + "filename_prefix": ("STRING", {"default": "checkpoints/ComfyUI"}),}, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"},} + + def save(self, model, clip_vision, vae, filename_prefix, prompt=None, extra_pnginfo=None): + comfy_extras.nodes_model_merging.save_checkpoint(model, clip_vision=clip_vision, vae=vae, filename_prefix=filename_prefix, output_dir=self.output_dir, prompt=prompt, extra_pnginfo=extra_pnginfo) + return {} + +NODE_CLASS_MAPPINGS = { + "ImageOnlyCheckpointLoader": ImageOnlyCheckpointLoader, + "SVD_img2vid_Conditioning": SVD_img2vid_Conditioning, + "VideoLinearCFGGuidance": VideoLinearCFGGuidance, + "VideoTriangleCFGGuidance": VideoTriangleCFGGuidance, + "ImageOnlyCheckpointSave": ImageOnlyCheckpointSave, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "ImageOnlyCheckpointLoader": "Image Only Checkpoint Loader (img2vid model)", +} diff --git a/src/comfyui/comfy_extras/nodes_webcam.py b/src/comfyui/comfy_extras/nodes_webcam.py new file mode 100644 index 0000000000000000000000000000000000000000..32a0ba2f67b1e9f6a84dc806d47dfcce61ab86b3 --- /dev/null +++ b/src/comfyui/comfy_extras/nodes_webcam.py @@ -0,0 +1,33 @@ +import nodes +import folder_paths + +MAX_RESOLUTION = nodes.MAX_RESOLUTION + + +class WebcamCapture(nodes.LoadImage): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "image": ("WEBCAM", {}), + "width": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "height": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}), + "capture_on_queue": ("BOOLEAN", {"default": True}), + } + } + RETURN_TYPES = ("IMAGE",) + FUNCTION = "load_capture" + + CATEGORY = "image" + + def load_capture(s, image, **kwargs): + return super().load_image(folder_paths.get_annotated_filepath(image)) + + +NODE_CLASS_MAPPINGS = { + "WebcamCapture": WebcamCapture, +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "WebcamCapture": "Webcam Capture", +} \ No newline at end of file diff --git a/src/comfyui/comfyui_screenshot.png b/src/comfyui/comfyui_screenshot.png new file mode 100644 index 0000000000000000000000000000000000000000..73272eae69339f92c88422c45ad166cbc5900adf Binary files /dev/null and b/src/comfyui/comfyui_screenshot.png differ diff --git a/src/comfyui/cuda_malloc.py b/src/comfyui/cuda_malloc.py new file mode 100644 index 0000000000000000000000000000000000000000..eb2857c5fe2db47c8e86fa64d3c53dcd7574408f --- /dev/null +++ b/src/comfyui/cuda_malloc.py @@ -0,0 +1,90 @@ +import os +import 
importlib.util +from comfy.cli_args import args +import subprocess + +# Can't use PyTorch to get the GPU names because cuda malloc has to be set before the first torch import. +def get_gpu_names(): + if os.name == 'nt': + import ctypes + + # Define necessary C structures and types + class DISPLAY_DEVICEA(ctypes.Structure): + _fields_ = [ + ('cb', ctypes.c_ulong), + ('DeviceName', ctypes.c_char * 32), + ('DeviceString', ctypes.c_char * 128), + ('StateFlags', ctypes.c_ulong), + ('DeviceID', ctypes.c_char * 128), + ('DeviceKey', ctypes.c_char * 128) + ] + + # Load user32.dll + user32 = ctypes.windll.user32 + + # Call EnumDisplayDevicesA + def enum_display_devices(): + device_info = DISPLAY_DEVICEA() + device_info.cb = ctypes.sizeof(device_info) + device_index = 0 + gpu_names = set() + + while user32.EnumDisplayDevicesA(None, device_index, ctypes.byref(device_info), 0): + device_index += 1 + gpu_names.add(device_info.DeviceString.decode('utf-8')) + return gpu_names + return enum_display_devices() + else: + gpu_names = set() + out = subprocess.check_output(['nvidia-smi', '-L']) + for l in out.split(b'\n'): + if len(l) > 0: + gpu_names.add(l.decode('utf-8').split(' (UUID')[0]) + return gpu_names + +blacklist = {"GeForce GTX TITAN X", "GeForce GTX 980", "GeForce GTX 970", "GeForce GTX 960", "GeForce GTX 950", "GeForce 945M", + "GeForce 940M", "GeForce 930M", "GeForce 920M", "GeForce 910M", "GeForce GTX 750", "GeForce GTX 745", "Quadro K620", + "Quadro K1200", "Quadro K2200", "Quadro M500", "Quadro M520", "Quadro M600", "Quadro M620", "Quadro M1000", + "Quadro M1200", "Quadro M2000", "Quadro M2200", "Quadro M3000", "Quadro M4000", "Quadro M5000", "Quadro M5500", "Quadro M6000", + "GeForce MX110", "GeForce MX130", "GeForce 830M", "GeForce 840M", "GeForce GTX 850M", "GeForce GTX 860M", + "GeForce GTX 1650", "GeForce GTX 1630", "Tesla M4", "Tesla M6", "Tesla M10", "Tesla M40", "Tesla M60" + } + +def cuda_malloc_supported(): + try: + names = get_gpu_names() + except Exception: + names = set() + for x in names: + if "NVIDIA" in x: + for b in blacklist: + if b in x: + return False + return True + + +if not args.cuda_malloc: + try: + version = "" + torch_spec = importlib.util.find_spec("torch") + for folder in torch_spec.submodule_search_locations: + ver_file = os.path.join(folder, "version.py") + if os.path.isfile(ver_file): + spec = importlib.util.spec_from_file_location("torch_version_import", ver_file) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + version = module.__version__ + if int(version[0]) >= 2: # enable by default for torch version 2.0 and up + args.cuda_malloc = cuda_malloc_supported() + except Exception: + pass + + +if args.cuda_malloc and not args.disable_cuda_malloc: + env_var = os.environ.get('PYTORCH_CUDA_ALLOC_CONF', None) + if env_var is None: + env_var = "backend:cudaMallocAsync" + else: + env_var += ",backend:cudaMallocAsync" + + os.environ['PYTORCH_CUDA_ALLOC_CONF'] = env_var diff --git a/src/comfyui/custom_nodes/ComfyUI-FaceSwap/.gitattributes b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/.gitattributes new file mode 100644 index 0000000000000000000000000000000000000000..dfe0770424b2a19faf507a501ebfc23be8f54e7b --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/.gitattributes @@ -0,0 +1,2 @@ +# Auto detect text files and perform LF normalization +* text=auto diff --git a/src/comfyui/custom_nodes/ComfyUI-FaceSwap/.gitignore b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/.gitignore new file mode 100644 index
0000000000000000000000000000000000000000..31fbb906306af718b2c9556c20ad9487dfe78328 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/.gitignore @@ -0,0 +1,157 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintainted in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
+#.idea/ +.idea/vcs.xml +.idea/modules.xml +.idea/misc.xml +.idea/inspectionProfiles/profiles_settings.xml +.idea/ComfyUI-FaceSwap.iml diff --git a/src/comfyui/custom_nodes/ComfyUI-FaceSwap/FaceSwapNode.py b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/FaceSwapNode.py new file mode 100644 index 0000000000000000000000000000000000000000..73555b65a2d4481f84cae6c50ca13952c902046b --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/FaceSwapNode.py @@ -0,0 +1,115 @@ +import insightface +import onnxruntime +import torch +import glob +import tempfile +import numpy as np +import cv2 +import os +from PIL import Image +from typing import List, Union, Dict, Set, Tuple +import folder_paths +import torchvision.transforms as T +from comfy import model_management + +providers = ["CPUExecutionProvider"] +model_path = folder_paths.models_dir +onnx_path = os.path.join(model_path, "roop") +FS_MODEL = None +CURRENT_FS_MODEL_PATH = None +device = model_management.get_torch_device() + + +def get_models(): + models_path = os.path.join(onnx_path, "*") + models = glob.glob(models_path) + models = [x for x in models if x.endswith(".onnx") or x.endswith(".pth")] + return models + + +# Stub kept from the a1111 roop port: always reports False so swap_face() proceeds. +def convert_to_sd(img): + return [False, tempfile.NamedTemporaryFile(delete=False, suffix=".png")] + + +class FaceSwapNode: + @classmethod + def INPUT_TYPES(s): + return {"required": {"face": ("IMAGE",), + "image": ("IMAGE",), + "source_face_index": ("INT", {"default": 0, "min": 0, "step": 1}), + "target_face_indices": ("STRING", {"multiline": False}), + }} + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "swap" + + CATEGORY = "image/faceswap" + + def swap(self, face: torch.Tensor, image: torch.Tensor, source_face_index=0, target_face_indices="0"): + models = get_models() + + target_faces = {int(x) for x in target_face_indices.strip(",").split(",") if x.isnumeric()} + result = swap_face(face, image, models[0], source_face_index, target_faces) + + result_tensor = np.array(result).astype(np.float32) / 255.0 + result_tensor = torch.from_numpy(result_tensor)[None,] + + return (result_tensor,) + + +def getFaceSwapModel(model_path: str): + global FS_MODEL + global CURRENT_FS_MODEL_PATH + if CURRENT_FS_MODEL_PATH is None or CURRENT_FS_MODEL_PATH != model_path: + CURRENT_FS_MODEL_PATH = model_path + FS_MODEL = insightface.model_zoo.get_model(model_path, providers=providers) + return FS_MODEL + + +def get_face_single(img_data: np.ndarray, face_index=0, det_size=(640, 640)): + face_analyser = insightface.app.FaceAnalysis(name="buffalo_l", providers=providers) + face_analyser.prepare(ctx_id=0, det_size=det_size) + + face = face_analyser.get(img_data) + + if len(face) == 0 and det_size[0] > 320 and det_size[1] > 320: + det_size_half = (det_size[0] // 2, det_size[1] // 2) + return get_face_single(img_data, face_index=face_index, det_size=det_size_half) + + try: + return sorted(face, key=lambda x: x.bbox[0])[face_index] + except IndexError: + return None + + +def swap_face( + source_img: torch.Tensor, + target_img: torch.Tensor, + model: Union[str, None] = None, + source_face: int = 0, + target_face_list: Set[int] = {0}, +) -> Image.Image: + result_image = target_img + converted = convert_to_sd(target_img) + scale, fn = converted[0], converted[1] + if model is not None and not scale: + + source_img = (source_img[0].detach().numpy() * 255).astype(np.uint8) + target_img = (target_img[0].detach().numpy() * 255).astype(np.uint8) + + source_img = cv2.cvtColor(source_img, cv2.COLOR_RGB2BGR) + target_img = cv2.cvtColor(target_img,
cv2.COLOR_RGB2BGR) + source_face = get_face_single(source_img, face_index=source_face) + + if source_face is not None: + result = target_img + model_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), model) + face_swapper = getFaceSwapModel(model_path) + + for face_num in target_face_list: + target_face = get_face_single(target_img, face_index=face_num) + if target_face is not None: + result = face_swapper.get(result, target_face, source_face) + result_image = cv2.cvtColor(result, cv2.COLOR_BGR2RGB) + + return result_image diff --git a/src/comfyui/custom_nodes/ComfyUI-FaceSwap/LICENSE b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..e20b431bcb69267acc39eaf8fdb94c30f40e3223 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/LICENSE @@ -0,0 +1,661 @@ +GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. 
This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published + by the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +<https://www.gnu.org/licenses/>. diff --git a/src/comfyui/custom_nodes/ComfyUI-FaceSwap/README.md b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/README.md new file mode 100644 index 0000000000000000000000000000000000000000..5077d4e2547f14c94846bb743f96d0e204ce51c3 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/README.md @@ -0,0 +1,18 @@ +# ComfyUI-FaceSwap +Very basic custom node to enable face swapping in ComfyUI. Only runs on CPU at the moment. GPU support will come later.
Mostly a direct port from the excellent a1111 plugin: https://github.com/s0md3v/sd-webui-roop. +At the moment Insightface has only released the 128x128 model, so results can be a bit janky at larger resolutions. Hopefully they release their higher-resolution models soon. Use at your own discretion; I am not responsible for what you create with this. + +# Installation + +cd /custom_nodes/ + +git clone https://github.com/imb101/ComfyUI-FaceSwap + +Restart ComfyUI and it should install everything it needs to work. + +# Usage + +Add a FaceSwapNode, give it an image containing one or more faces and an image to swap the face into. Pick which source face you want to use and then which faces to replace (indices start at 0 and are comma-separated). You can chain the nodes to replace multiple faces in a scene. Sample workflows are in the repo. + +![img.png](img.png) + diff --git a/src/comfyui/custom_nodes/ComfyUI-FaceSwap/__init__.py b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7172aa984c733aba96762b623c420ac10fa5730e --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/__init__.py @@ -0,0 +1,9 @@ +from .install import install +install() + +from .FaceSwapNode import FaceSwapNode + +NODE_CLASS_MAPPINGS = { + "FaceSwapNode": FaceSwapNode, +} + diff --git a/src/comfyui/custom_nodes/ComfyUI-FaceSwap/__pycache__/FaceSwapNode.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/__pycache__/FaceSwapNode.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e9bbac4531780d86e153a05cb0d8a7471815d374 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/__pycache__/FaceSwapNode.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-FaceSwap/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a0e9ccdb539f39e8e4e4988d10a2cf8c19d6876d Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-FaceSwap/__pycache__/install.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/__pycache__/install.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c8e2ec361c163fbcc3b3a2459335c61c12810be2 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/__pycache__/install.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-FaceSwap/img.png b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/img.png new file mode 100644 index 0000000000000000000000000000000000000000..33d4bd0bde7b3191b3aa064aa3a05b2646f33672 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/img.png differ diff --git a/src/comfyui/custom_nodes/ComfyUI-FaceSwap/install.py b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/install.py new file mode 100644 index 0000000000000000000000000000000000000000..cddf534bab04f8f88d2b93af53f9105046429024 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/install.py @@ -0,0 +1,41 @@ +import os +import sys +import subprocess + +comfy_path = '../..'
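+# Editor's note (assumption): '../..' is relative to the working directory, so it
+# resolves to the ComfyUI root only when this script runs from inside
+# custom_nodes/ComfyUI-FaceSwap; it is appended to sys.path below so that ComfyUI
+# modules such as folder_paths can be imported.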
+if sys.argv[0] == 'install.py': + sys.path.append('.') # for portable version + +impact_path = os.path.join(os.path.dirname(__file__), "modules") + +sys.path.append(impact_path) +sys.path.append(comfy_path) + +import platform +import folder_paths +from torchvision.datasets.utils import download_url + +print("### ComfyUI-FaceSwap: Check dependencies") + +if "python_embeded" in sys.executable or "python_embedded" in sys.executable: + pip_install = [sys.executable, '-s', '-m', 'pip', 'install'] +else: + pip_install = [sys.executable, '-m', 'pip', 'install'] + +def ensure_pip_packages(): + try: + import cython + except Exception: + my_path = os.path.dirname(__file__) + requirements_path = os.path.join(my_path, "requirements.txt") + subprocess.check_call(pip_install + ['-r', requirements_path]) + +def install(): + ensure_pip_packages() + # Download model + print("### ComfyUI-FaceSwap: Check basic models") + model_path = folder_paths.models_dir + onnx_path = os.path.join(model_path, "roop") + + if not os.path.exists(onnx_path): + download_url("https://huggingface.co/henryruhs/roop/resolve/main/inswapper_128.onnx", onnx_path) diff --git a/src/comfyui/custom_nodes/ComfyUI-FaceSwap/requirements.txt b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..3d0c6cc21ccead6372127add55b19c4475ef2c2d --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/requirements.txt @@ -0,0 +1,5 @@ +insightface==0.7.3 +onnx==1.14.0 +onnxruntime==1.15.0 +opencv-python==4.7.0.72 +cython diff --git a/src/comfyui/custom_nodes/ComfyUI-FaceSwap/workflows/latent-replace-workflow.json b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/workflows/latent-replace-workflow.json new file mode 100644 index 0000000000000000000000000000000000000000..175f21b2c231916d3832ee8816744a843d139ec3 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/workflows/latent-replace-workflow.json @@ -0,0 +1,474 @@ +{ + "last_node_id": 9, + "last_link_id": 13, + "nodes": [ + { + "id": 6, + "type": "CLIPTextEncode", + "pos": [ + 654, + 311 + ], + "size": [ + 241.670166015625, + 92.5670166015625 + ], + "flags": {}, + "order": 3, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 6 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 7 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "3people" + ] + }, + { + "id": 7, + "type": "CLIPTextEncode", + "pos": [ + 673, + 450 + ], + "size": [ + 241.65985107421875, + 108.9278564453125 + ], + "flags": {}, + "order": 4, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 9 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 8 + ], + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "deformed" + ] + }, + { + "id": 8, + "type": "EmptyLatentImage", + "pos": [ + 77, + 403 + ], + "size": { + "0": 315, + "1": 106 + }, + "flags": {}, + "order": 0, + "mode": 0, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 10 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 512, + 512, + 1 + ] + }, + { + "id": 2, + "type": "LoadImage", + "pos": [ + 168, + 619 + ], + "size": [ + 315, + 314 + ], + "flags": {}, + "order": 1, + "mode": 0, + "outputs": [ + { + "name": "IMAGE", + 
"type": "IMAGE", + "links": [ + 1 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "MASK", + "type": "MASK", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "LoadImage" + }, + "widgets_values": [ + "pexels-photo-11338397.jpeg", + "image" + ] + }, + { + "id": 5, + "type": "KSampler", + "pos": [ + 1013, + 221 + ], + "size": { + "0": 315, + "1": 262 + }, + "flags": {}, + "order": 5, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 5 + }, + { + "name": "positive", + "type": "CONDITIONING", + "link": 7 + }, + { + "name": "negative", + "type": "CONDITIONING", + "link": 8, + "slot_index": 2 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 10 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 11 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 1073406468390373, + "randomize", + 20, + 8, + "euler", + "normal", + 1 + ] + }, + { + "id": 4, + "type": "CheckpointLoaderSimple", + "pos": [ + 75, + 232 + ], + "size": { + "0": 315, + "1": 98 + }, + "flags": {}, + "order": 2, + "mode": 0, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 5 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 6, + 9 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 12 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "v1-5-pruned-emaonly.ckpt" + ] + }, + { + "id": 1, + "type": "FaceSwapNode", + "pos": [ + 1076, + 743 + ], + "size": { + "0": 315, + "1": 102 + }, + "flags": {}, + "order": 7, + "mode": 0, + "inputs": [ + { + "name": "face", + "type": "IMAGE", + "link": 1 + }, + { + "name": "image", + "type": "IMAGE", + "link": 13, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 3 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "FaceSwapNode" + }, + "widgets_values": [ + 1, + "0,1,2" + ] + }, + { + "id": 3, + "type": "PreviewImage", + "pos": [ + 1500, + 634 + ], + "size": [ + 210, + 246 + ], + "flags": {}, + "order": 8, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 3 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 9, + "type": "VAEDecode", + "pos": [ + 1398, + 236 + ], + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 6, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 11 + }, + { + "name": "vae", + "type": "VAE", + "link": 12 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 13 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + } + ], + "links": [ + [ + 1, + 2, + 0, + 1, + 0, + "IMAGE" + ], + [ + 3, + 1, + 0, + 3, + 0, + "IMAGE" + ], + [ + 5, + 4, + 0, + 5, + 0, + "MODEL" + ], + [ + 6, + 4, + 1, + 6, + 0, + "CLIP" + ], + [ + 7, + 6, + 0, + 5, + 1, + "CONDITIONING" + ], + [ + 8, + 7, + 0, + 5, + 2, + "CONDITIONING" + ], + [ + 9, + 4, + 1, + 7, + 0, + "CLIP" + ], + [ + 10, + 8, + 0, + 5, + 3, + "LATENT" + ], + [ + 11, + 5, + 0, + 9, + 0, + "LATENT" + ], + [ + 12, + 4, + 2, + 9, + 1, + "VAE" + ], + [ + 13, + 9, + 0, + 1, + 1, + "IMAGE" + ] + ], + "groups": [], + "config": {}, + "extra": {}, + "version": 0.4 +} \ No newline at end of file diff --git 
a/src/comfyui/custom_nodes/ComfyUI-FaceSwap/workflows/pexels-photo-11338397.jpeg b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/workflows/pexels-photo-11338397.jpeg new file mode 100644 index 0000000000000000000000000000000000000000..ed2924702a5748f973d36c8a681b1ebbf16f27e5 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/workflows/pexels-photo-11338397.jpeg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3b3f545388de11ae958fb53ac2aeb3dd73d1a1640164b1465fde3a29b590cf9b +size 4024433 diff --git a/src/comfyui/custom_nodes/ComfyUI-FaceSwap/workflows/simple-replace-workflow.json b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/workflows/simple-replace-workflow.json new file mode 100644 index 0000000000000000000000000000000000000000..7dc66cdd7cf6b4c6dba6f8d41894cc991a6503ee --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-FaceSwap/workflows/simple-replace-workflow.json @@ -0,0 +1,147 @@ +{ + "last_node_id": 3, + "last_link_id": 4, + "nodes": [ + { + "id": 3, + "type": "PreviewImage", + "pos": [ + 1102, + 365 + ], + "size": [ + 210, + 246 + ], + "flags": {}, + "order": 2, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 3 + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 2, + "type": "LoadImage", + "pos": [ + 136, + 255 + ], + "size": [ + 315, + 314 + ], + "flags": {}, + "order": 0, + "mode": 0, + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 1, + 4 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "MASK", + "type": "MASK", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "LoadImage" + }, + "widgets_values": [ + "pexels-photo-11338397.jpeg", + "image" + ] + }, + { + "id": 1, + "type": "FaceSwapNode", + "pos": [ + 627, + 334 + ], + "size": { + "0": 315, + "1": 102 + }, + "flags": {}, + "order": 1, + "mode": 0, + "inputs": [ + { + "name": "face", + "type": "IMAGE", + "link": 1 + }, + { + "name": "image", + "type": "IMAGE", + "link": 4, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 3 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "FaceSwapNode" + }, + "widgets_values": [ + 1, + "0,2" + ] + } + ], + "links": [ + [ + 1, + 2, + 0, + 1, + 0, + "IMAGE" + ], + [ + 3, + 1, + 0, + 3, + 0, + "IMAGE" + ], + [ + 4, + 2, + 0, + 1, + 1, + "IMAGE" + ] + ], + "groups": [], + "config": {}, + "extra": {}, + "version": 0.4 +} \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/.gitignore b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..341e31ecbc6b4128147af1fbeea6491b6b52ad04 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/.gitignore @@ -0,0 +1,2 @@ +__pycache__ +.idea \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/LICENSE b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..94a9ed024d3859793618152ea559a168bbcbb5e2 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. 
+ + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. 
+ + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. 
For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. 
You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. 
+ + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. 
+ + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. 
+ + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. 
+ + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + <program> Copyright (C) <year> <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box".
+ + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +<https://www.gnu.org/licenses/>. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +<https://www.gnu.org/licenses/why-not-lgpl.html>. diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/README.md b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/README.md new file mode 100644 index 0000000000000000000000000000000000000000..f3ecb52b3dca00aaf5a28d0770033a75296058fa --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/README.md @@ -0,0 +1,39 @@ +# ComfyUI-Flowty-LDSR + +This is a custom node that lets you take advantage of Latent Diffusion Super Resolution (LDSR) models inside ComfyUI. + +LDSR models have been known to produce significantly better results than other upscalers, but they tend to be much slower and require more sampling steps. Results may also vary based on the input image. + + +| ![example](example_lowres.png) | ![example](example_highres.png) | |--------------------------------|---------------------------------| + +I've created this node for experimentation; feel free to submit PRs for performance improvements, etc. + +### Installation: +* Install ComfyUI +* Clone this repo into ```custom_nodes```: + ```shell + $ cd ComfyUI/custom_nodes + $ git clone https://github.com/flowtyone/ComfyUI-Flowty-LDSR.git + ``` +* Install dependencies: + ```shell + $ cd ComfyUI-Flowty-LDSR + $ pip install -r requirements.txt + ``` +* [Download LDSR](https://heibox.uni-heidelberg.de/f/578df07c8fc04ffbadf3/?dl=1) and place it in ```ComfyUI/models/upscale_models``` +* Start ComfyUI (or restart) + +### Workflow: +![example](workflow.png) + +![example2](workflow2.png) + +This is a community project from [flowt.ai](https://flowt.ai). If you like it, check us out!
+ + + + + flowt.ai logo + \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f59abd3be8b240b37c5da40c44a1ff278a4fe218 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/__init__.py @@ -0,0 +1,141 @@ +import sys +from os import path +sys.path.insert(0, path.dirname(__file__)) +from .ldsrlib.LDSR import LDSR +from folder_paths import get_filename_list, get_full_path +from comfy.model_management import get_torch_device +from comfy.utils import ProgressBar +import torch + + +class LDSRModelLoader: + @classmethod + def INPUT_TYPES(s): + model_list = get_filename_list("upscale_models") + candidates = [name for name in model_list if 'last.ckpt' in name] + if len(candidates) > 0: + default_path = candidates[0] + else: + default_path = 'last.ckpt' + + return { + "required": { + "model": (model_list, {'default': default_path}), + } + } + + RETURN_TYPES = ("UPSCALE_MODEL",) + FUNCTION = "load" + + CATEGORY = "Flowty LDSR" + + def load(self, model): + model_path = get_full_path("upscale_models", model) + model = LDSR.load_model_from_path(model_path) + model['model'].cpu() + return (model, ) + + +class LDSRUpscale: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "upscale_model": ("UPSCALE_MODEL",), + "images": ("IMAGE",), + "steps": (["25", "50", "100", "250", "500", "1000"], {"default": "100"}), + "pre_downscale": (['None', '1/2', '1/4'], {"default": "None"}), + "post_downscale": (['None', 'Original Size', '1/2', '1/4'], {"default": "None"}), + "downsample_method": (['Nearest', 'Lanczos'], {"default": "Lanczos"}), + } + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("images",) + FUNCTION = "upscale" + + CATEGORY = "Flowty LDSR" + + def upscale(self, upscale_model, images, steps, pre_downscale="None", post_downscale="None", downsample_method="Lanczos"): + pbar = ProgressBar(int(steps)) + p = {"prev": 0} + + def prog(i): + i = i + 1 + if i < p["prev"]: + p["prev"] = 0 + pbar.update(i - p["prev"]) + p["prev"] = i + + ldsr = LDSR(model=upscale_model, on_progress=prog) + + outputs = [] + + for image in images: + outputs.append(ldsr.superResolution(image, int(steps), pre_downscale, post_downscale, downsample_method)) + + return (torch.stack(outputs),) + + +class LDSRUpscaler: + @classmethod + def INPUT_TYPES(s): + model_list = get_filename_list("upscale_models") + candidates = [name for name in model_list if 'last.ckpt' in name] + if len(candidates) > 0: + default_path = candidates[0] + else: + default_path = 'last.ckpt' + + return { + "required": { + "model": (model_list, {'default': default_path}), + "images": ("IMAGE",), + "steps": (["25", "50", "100", "250", "500", "1000"], {"default": "100"}), + "pre_downscale": (['None', '1/2', '1/4'], {"default": "None"}), + "post_downscale": (['None', 'Original Size', '1/2', '1/4'], {"default": "None"}), + "downsample_method": (['Nearest', 'Lanczos'], {"default": "Lanczos"}), + } + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("images",) + FUNCTION = "upscale" + + CATEGORY = "Flowty LDSR" + + def upscale(self, model, images, steps, pre_downscale="None", post_downscale="None", downsample_method="Lanczos"): + model_path = get_full_path("upscale_models", model) + pbar = ProgressBar(int(steps)) + p = {"prev": 0} + + def prog(i): + i = i + 1 + if i < p["prev"]: + p["prev"] = 0 + pbar.update(i - p["prev"]) + p["prev"] = i + + ldsr = LDSR(modelPath=model_path, 
torchdevice=get_torch_device(), on_progress=prog) + + outputs = [] + + for image in images: + outputs.append(ldsr.superResolution(image, int(steps), pre_downscale, post_downscale, downsample_method)) + + return (torch.stack(outputs),) + + +NODE_CLASS_MAPPINGS = { + "LDSRUpscaler": LDSRUpscaler, + "LDSRModelLoader": LDSRModelLoader, + "LDSRUpscale": LDSRUpscale +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "LDSRUpscaler": "LDSR Upscale (all-in-one)", + "LDSRModelLoader": "Load LDSR Model", + "LDSRUpscale": "LDSR Upscale" +} + +__all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS'] diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9e050bd90b6b0c2b1117aa7f358e14d8ef3ae100 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/example_highres.png b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/example_highres.png new file mode 100644 index 0000000000000000000000000000000000000000..9b505e4d6629f310c8085f697994743eb78f9ea7 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/example_highres.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:160d461cbb8fa0ee1d283d85dfd0995a9736762119e06e11baaf9fc2968b16c5 +size 6358219 diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/example_lowres.png b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/example_lowres.png new file mode 100644 index 0000000000000000000000000000000000000000..d907bf7d1fdbfe3ba6088ba78896638c72d1c5e4 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/example_lowres.png differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/LDSR.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/LDSR.py new file mode 100644 index 0000000000000000000000000000000000000000..9493b3d44e97a2eadb5b67a22c04ae85c57069ae --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/LDSR.py @@ -0,0 +1,343 @@ +import PIL.Image + +import nodes +from .ldm.util import instantiate_from_config +from .ldm.models.diffusion.ddim import DDIMSampler +from .ldm.util import ismap + +from PIL import Image +from einops import rearrange, repeat +import torch, torchvision +import time +from omegaconf import OmegaConf +import numpy as np +from os import path +import warnings +from comfy import model_management +import comfy +from PIL import ImageOps + +warnings.filterwarnings("ignore", category=UserWarning) + + +class LDSR(): + def __init__(self, modelPath=None, model=None, torchdevice=model_management.get_torch_device(), on_progress=None, yamlPath=path.join(path.dirname(__file__), "config.yaml")): + self.modelPath = modelPath + self.model = model + self.yamlPath = yamlPath + self.torchdevice = torchdevice + self.progress_hook = on_progress if on_progress else None + + @staticmethod + def normalize_image(image): + w, h = image.size + + # ensure (min length > 128) + if h < w and h < 128: + scale_ratio = 128 / h + h = 128 + w = int(scale_ratio * w) + elif w < 128: + scale_ratio = 128 / w + w = 128 + h = int(scale_ratio * h) + + resample = (Image.Resampling.LANCZOS if hasattr(Image, 'Resampling') else Image.LANCZOS) + image = image.resize((w, h), resample=resample) + + # ensure (multiply of 64) + w_pad = 64 - w % 64 + h_pad = 64 - h % 64 + + padded_image = 
Image.new("RGB", (w + w_pad, h + h_pad), color="black") + padded_image.paste(image, (0, 0)) + + return padded_image, w_pad, h_pad + + @staticmethod + def remove_padding(prev_pil, image, w_pad, h_pad): + if w_pad == 0 and h_pad == 0: + return image + + w1, h1 = prev_pil.size + h2, w2, _ = image.size() + + scale_ratio = h2 / h1 + w_pad = float.__ceil__(w_pad * scale_ratio) + h_pad = float.__ceil__(h_pad * scale_ratio) + + return image[:h2-h_pad, :w2-w_pad, :] + + + @staticmethod + def load_model_from_path(modelPath, device=model_management.get_torch_device(), yamlPath=path.join(path.dirname(__file__), "config.yaml")): + print(f"Loading model from {modelPath}") + pl_sd = torch.load(modelPath, map_location="cpu") + sd = pl_sd["state_dict"] + config = OmegaConf.load(yamlPath) + model = instantiate_from_config(config.model) + model.load_state_dict(sd, strict=False) + + model.to(device) + model.eval() + + return {"model": model} + + def load_model_from_config(self): + if self.model is None: + self.model = LDSR.load_model_from_path(self.modelPath, self.torchdevice) + else: + self.model['model'].to(self.torchdevice) + + return self.model + + def progress_callback(self, i): + if self.progress_hook: + self.progress_hook(i) + + def run(self, model, image, task, custom_steps, eta, resize_enabled=False, classifier_ckpt=None, global_step=None): + def make_convolutional_sample(batch, model, mode="vanilla", custom_steps=None, eta=1.0, swap_mode=False, + masked=False, + invert_mask=True, quantize_x0=False, custom_schedule=None, decode_interval=1000, + resize_enabled=False, custom_shape=None, temperature=1., noise_dropout=0., + corrector=None, + corrector_kwargs=None, x_T=None, save_intermediate_vid=False, make_progrow=True, + ddim_use_x0_pred=False): + log = dict() + + z, c, x, xrec, xc = model.get_input(batch, model.first_stage_key, + return_first_stage_outputs=True, + force_c_encode=not (hasattr(model, 'split_input_params') + and model.cond_stage_key == 'coordinates_bbox'), + return_original_cond=True) + + log_every_t = 1 if save_intermediate_vid else None + + if custom_shape is not None: + z = torch.randn(custom_shape) + # print(f"Generating {custom_shape[0]} samples of shape {custom_shape[1:]}") + + z0 = None + + log["input"] = x + log["reconstruction"] = xrec + + if ismap(xc): + log["original_conditioning"] = model.to_rgb(xc) + if hasattr(model, 'cond_stage_key'): + log[model.cond_stage_key] = model.to_rgb(xc) + + else: + log["original_conditioning"] = xc if xc is not None else torch.zeros_like(x) + if model.cond_stage_model: + log[model.cond_stage_key] = xc if xc is not None else torch.zeros_like(x) + if model.cond_stage_key == 'class_label': + log[model.cond_stage_key] = xc[model.cond_stage_key] + + with model.ema_scope("Plotting"): + t0 = time.time() + img_cb = None + sample, intermediates = convsample_ddim(model, c, steps=custom_steps, shape=z.shape, + eta=eta, + callback=self.progress_callback, + quantize_x0=quantize_x0, img_callback=img_cb, mask=None, x0=z0, + temperature=temperature, noise_dropout=noise_dropout, + score_corrector=corrector, corrector_kwargs=corrector_kwargs, + x_T=x_T, log_every_t=log_every_t) + t1 = time.time() + + if ddim_use_x0_pred: + sample = intermediates['pred_x0'][-1] + + x_sample = model.decode_first_stage(sample) + + try: + x_sample_noquant = model.decode_first_stage(sample, force_not_quantize=True) + log["sample_noquant"] = x_sample_noquant + log["sample_diff"] = torch.abs(x_sample_noquant - x_sample) + except: + pass + + log["sample"] = x_sample + log["time"] = t1 - 
t0 + + return log + + def convsample_ddim(model, cond, steps, shape, eta=1.0, callback=None, normals_sequence=None, + mask=None, x0=None, quantize_x0=False, img_callback=None, + temperature=1., noise_dropout=0., score_corrector=None, + corrector_kwargs=None, x_T=None, log_every_t=None + ): + ddim = DDIMSampler(model) + bs = shape[0] # dont know where this comes from but wayne + shape = shape[1:] # cut batch dim + print(f"Sampling with eta = {eta}; steps: {steps}") + samples, intermediates = ddim.sample(steps, batch_size=bs, shape=shape, conditioning=cond, + callback=callback, + normals_sequence=normals_sequence, quantize_x0=quantize_x0, eta=eta, + mask=mask, x0=x0, temperature=temperature, verbose=False, + score_corrector=score_corrector, + corrector_kwargs=corrector_kwargs, x_T=x_T) + return samples, intermediates + + # global stride + def get_cond(mode, img): + example = dict() + if mode == "superresolution": + up_f = 4 + # visualize_cond_img(selected_path) + + c = img.convert('RGB') + c = torch.unsqueeze(torchvision.transforms.ToTensor()(c), 0) + + c_up = torchvision.transforms.functional.resize(c, size=[up_f * c.shape[2], up_f * c.shape[3]], + antialias=True) + c_up = rearrange(c_up, '1 c h w -> 1 h w c') + c = rearrange(c, '1 c h w -> 1 h w c') + c = 2. * c - 1. + c = c.to(self.torchdevice) + example["LR_image"] = c + example["image"] = c_up + + return example + + example = get_cond(task, image) + + save_intermediate_vid = False + n_runs = 1 + masked = False + guider = None + ckwargs = None + mode = 'ddim' + ddim_use_x0_pred = False + temperature = 1. + eta = eta + make_progrow = True + custom_shape = None + + height, width = example["image"].shape[1:3] + split_input = height >= 128 and width >= 128 + + if split_input: + ks = 128 + stride = 64 + vqf = 4 # + model.split_input_params = {"ks": (ks, ks), "stride": (stride, stride), + "vqf": vqf, + "patch_distributed_vq": True, + "tie_braker": False, + "clip_max_weight": 0.5, + "clip_min_weight": 0.01, + "clip_max_tie_weight": 0.5, + "clip_min_tie_weight": 0.01} + else: + if hasattr(model, "split_input_params"): + delattr(model, "split_input_params") + + invert_mask = False + + x_T = None + for n in range(n_runs): + if custom_shape is not None: + x_T = torch.randn(1, custom_shape[1], custom_shape[2], custom_shape[3]).to(model.device) + x_T = repeat(x_T, '1 c h w -> b c h w', b=custom_shape[0]) + + logs = make_convolutional_sample(example, model, + mode=mode, custom_steps=custom_steps, + eta=eta, swap_mode=False, masked=masked, + invert_mask=invert_mask, quantize_x0=False, + custom_schedule=None, decode_interval=10, + resize_enabled=resize_enabled, custom_shape=custom_shape, + temperature=temperature, noise_dropout=0., + corrector=guider, corrector_kwargs=ckwargs, x_T=x_T, + save_intermediate_vid=save_intermediate_vid, + make_progrow=make_progrow, ddim_use_x0_pred=ddim_use_x0_pred + ) + return logs + + @torch.no_grad() + def superResolution(self, image, ddimSteps=100, preDownScale='None', postDownScale='None', downsampleMethod="Lanczos"): + diffMode = 'superresolution' + model = self.load_model_from_config() + + # Run settings + + diffusion_steps = int(ddimSteps) # @param [25, 50, 100, 250, 500, 1000] + eta = 1.0 # @param {type: 'raw'} + stride = 0 # not working atm + + # ####Scaling options: + # Downsampling to 256px first will often improve the final image and runs faster. + + # You can improve sharpness without upscaling by upscaling and then downsampling to the original size (i.e. 
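# get_cond above assembles the conditioning for the superresolution task: the
# LR image rescaled to [-1, 1] in 1xHxWxC layout, plus a 4x antialiased
# upsample left in [0, 1] as the target-size "image" entry. A standalone
# sketch of that preprocessing (`make_sr_cond` is a hypothetical name):
import torchvision
from einops import rearrange
from PIL import Image

def make_sr_cond(pil_img: Image.Image, up_f: int = 4) -> dict:
    c = torchvision.transforms.ToTensor()(pil_img.convert("RGB")).unsqueeze(0)  # 1 c h w, [0, 1]
    c_up = torchvision.transforms.functional.resize(
        c, size=[up_f * c.shape[2], up_f * c.shape[3]], antialias=True)
    return {"LR_image": rearrange(2. * c - 1., '1 c h w -> 1 h w c'),
            "image": rearrange(c_up, '1 c h w -> 1 h w c')}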
Super Resolution) + pre_downsample = preDownScale # @param ['None', '1/2', '1/4'] + + post_downsample = postDownScale # @param ['None', 'Original Size', '1/2', '1/4'] + + # Nearest gives sharper results, but may look more pixelated. Lanczos is much higher quality, but the result may be less crisp. + downsample_method = downsampleMethod # @param ['Nearest', 'Lanczos'] + + i = 255. * image.cpu().numpy() + im_og = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8)) + width_og, height_og = im_og.size + + # Downsample Pre + if pre_downsample == '1/2': + downsample_rate = 2 + elif pre_downsample == '1/4': + downsample_rate = 4 + else: + downsample_rate = 1 + + width_downsampled_pre = width_og // downsample_rate + height_downsampled_pre = height_og // downsample_rate + if downsample_rate != 1: + print(f'Downsampling from [{width_og}, {height_og}] to [{width_downsampled_pre}, {height_downsampled_pre}]') + im_og = im_og.resize((width_downsampled_pre, height_downsampled_pre), Image.LANCZOS) + + im_og, w_pad, h_pad = LDSR.normalize_image(im_og) + + logs = self.run(model["model"], im_og, diffMode, diffusion_steps, eta) + + sample = logs["sample"] + sample = sample.detach().cpu() + sample = torch.clamp(sample, -1., 1.) + sample = (sample + 1.) / 2. * 255 + sample = sample.numpy().astype(np.uint8) + sample = np.transpose(sample, (0, 2, 3, 1)) + a = Image.fromarray(sample[0]) + + # Downsample Post + if post_downsample == '1/2': + downsample_rate = 2 + elif post_downsample == '1/4': + downsample_rate = 4 + else: + downsample_rate = 1 + + width, height = a.size + width_downsampled_post = width // downsample_rate + height_downsampled_post = height // downsample_rate + + if downsample_method == 'Lanczos': + aliasing = Image.LANCZOS + else: + aliasing = Image.NEAREST + + if downsample_rate != 1: + print(f'Downsampling from [{width}, {height}] to [{width_downsampled_post}, {height_downsampled_post}]') + a = a.resize((width_downsampled_post, height_downsampled_post), aliasing) + elif post_downsample == 'Original Size': + print(f'Downsampling from [{width}, {height}] to Original Size [{width_og}, {height_og}]') + a = a.resize((width_og+w_pad, height_og+h_pad), aliasing) + + out = np.array(a).astype(np.float32) / 255.0 + + # Finalize + result_image = torch.from_numpy(out) + result_image = LDSR.remove_padding(im_og, result_image, w_pad, h_pad) + + model['model'].cpu() + result_image = result_image.cpu() + + return result_image diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/__pycache__/LDSR.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/__pycache__/LDSR.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..18323c1c0576f8eca6c5496558b86efdbfce5287 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/__pycache__/LDSR.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..34032846ea0de9698f2961a91ebcd8752f0fdec3 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/__pycache__/__init__.cpython-310.pyc differ diff --git
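# The block above maps the decoded sample from [-1, 1] model space back to
# 8-bit RGB: clamp, rescale to [0, 255], cast, and reorder NCHW -> NHWC.
# The same steps as a standalone sketch (`sample_to_pil` is hypothetical):
import numpy as np
import torch
from PIL import Image

def sample_to_pil(sample: torch.Tensor) -> Image.Image:
    x = torch.clamp(sample.detach().cpu(), -1., 1.)
    x = ((x + 1.) / 2. * 255).numpy().astype(np.uint8)
    return Image.fromarray(np.transpose(x, (0, 2, 3, 1))[0])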
a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/config.yaml b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..195b2f5b3d8be67ce27ee77dd48050dfdedf33ef --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/config.yaml @@ -0,0 +1,56 @@ +model: + base_learning_rate: 1.0e-06 + target: ldsrlib.ldm.models.diffusion.ddpm.LatentDiffusion + params: + linear_start: 0.0015 + linear_end: 0.0155 + log_every_t: 100 + timesteps: 1000 + loss_type: l2 + first_stage_key: image + cond_stage_key: LR_image + image_size: 64 + channels: 3 + concat_mode: true + cond_stage_trainable: false + unet_config: + target: ldsrlib.ldm.modules.diffusionmodules.openaimodel.UNetModel + params: + image_size: 64 + in_channels: 6 + out_channels: 3 + model_channels: 160 + attention_resolutions: + - 16 + - 8 + num_res_blocks: 2 + channel_mult: + - 1 + - 2 + - 2 + - 4 + num_head_channels: 32 + first_stage_config: + target: ldsrlib.ldm.models.autoencoder.VQModelInterface + params: + embed_dim: 3 + n_embed: 8192 + monitor: val/rec_loss + ddconfig: + double_z: false + z_channels: 3 + resolution: 256 + in_channels: 3 + out_ch: 3 + ch: 128 + ch_mult: + - 1 + - 2 + - 4 + num_res_blocks: 2 + attn_resolutions: [] + dropout: 0.0 + lossconfig: + target: torch.nn.Module # todo + cond_stage_config: + target: torch.nn.Identity \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..61f2fe8203598a94c3e1472d49e14a1c6ffe7f8d Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/__pycache__/util.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/__pycache__/util.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..30002c53b6aeafff15e94c0658296e338a0c5647 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/__pycache__/util.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..af50fe84d724effede121f7014b195c0f624c803 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/__pycache__/autoencoder.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/__pycache__/autoencoder.cpython-310.pyc new file mode 100644 
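# The config.yaml above drives model construction through the target/params
# convention used by ldm.util.instantiate_from_config: "target" is a dotted
# import path and "params" the constructor kwargs. A minimal sketch of that
# mechanism, assuming the usual CompVis-style implementation:
import importlib

def instantiate_from_config(config):
    module_path, cls_name = config["target"].rsplit(".", 1)
    cls = getattr(importlib.import_module(module_path), cls_name)
    return cls(**config.get("params", dict()))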
index 0000000000000000000000000000000000000000..60cc0bcc5b3a6baaa5bdb72661a3d5587d4bd5f7 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/__pycache__/autoencoder.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/autoencoder.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/autoencoder.py new file mode 100644 index 0000000000000000000000000000000000000000..f85b86d3c6ce3a7d048840f15889485faf504223 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/autoencoder.py @@ -0,0 +1,443 @@ +import torch +import pytorch_lightning as pl +import torch.nn.functional as F +from contextlib import contextmanager + +from ...taming.modules.vqvae.quantize import VectorQuantizer2 as VectorQuantizer + +from ..modules.diffusionmodules.model import Encoder, Decoder +from ..modules.distributions.distributions import DiagonalGaussianDistribution + +from ..util import instantiate_from_config + + +class VQModel(pl.LightningModule): + def __init__(self, + ddconfig, + lossconfig, + n_embed, + embed_dim, + ckpt_path=None, + ignore_keys=[], + image_key="image", + colorize_nlabels=None, + monitor=None, + batch_resize_range=None, + scheduler_config=None, + lr_g_factor=1.0, + remap=None, + sane_index_shape=False, # tell vector quantizer to return indices as bhw + use_ema=False + ): + super().__init__() + self.embed_dim = embed_dim + self.n_embed = n_embed + self.image_key = image_key + self.encoder = Encoder(**ddconfig) + self.decoder = Decoder(**ddconfig) + self.loss = instantiate_from_config(lossconfig) + self.quantize = VectorQuantizer(n_embed, embed_dim, beta=0.25, + remap=remap, + sane_index_shape=sane_index_shape) + self.quant_conv = torch.nn.Conv2d(ddconfig["z_channels"], embed_dim, 1) + self.post_quant_conv = torch.nn.Conv2d(embed_dim, ddconfig["z_channels"], 1) + if colorize_nlabels is not None: + assert type(colorize_nlabels)==int + self.register_buffer("colorize", torch.randn(3, colorize_nlabels, 1, 1)) + if monitor is not None: + self.monitor = monitor + self.batch_resize_range = batch_resize_range + if self.batch_resize_range is not None: + print(f"{self.__class__.__name__}: Using per-batch resizing in range {batch_resize_range}.") + + self.use_ema = use_ema + if self.use_ema: + self.model_ema = LitEma(self) + print(f"Keeping EMAs of {len(list(self.model_ema.buffers()))}.") + + if ckpt_path is not None: + self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys) + self.scheduler_config = scheduler_config + self.lr_g_factor = lr_g_factor + + @contextmanager + def ema_scope(self, context=None): + if self.use_ema: + self.model_ema.store(self.parameters()) + self.model_ema.copy_to(self) + if context is not None: + print(f"{context}: Switched to EMA weights") + try: + yield None + finally: + if self.use_ema: + self.model_ema.restore(self.parameters()) + if context is not None: + print(f"{context}: Restored training weights") + + def init_from_ckpt(self, path, ignore_keys=list()): + sd = torch.load(path, map_location="cpu")["state_dict"] + keys = list(sd.keys()) + for k in keys: + for ik in ignore_keys: + if k.startswith(ik): + print("Deleting key {} from state_dict.".format(k)) + del sd[k] + missing, unexpected = self.load_state_dict(sd, strict=False) + print(f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys") + if len(missing) > 0: + print(f"Missing Keys: {missing}") + print(f"Unexpected Keys: {unexpected}") + + def 
on_train_batch_end(self, *args, **kwargs): + if self.use_ema: + self.model_ema(self) + + def encode(self, x): + h = self.encoder(x) + h = self.quant_conv(h) + quant, emb_loss, info = self.quantize(h) + return quant, emb_loss, info + + def encode_to_prequant(self, x): + h = self.encoder(x) + h = self.quant_conv(h) + return h + + def decode(self, quant): + quant = self.post_quant_conv(quant) + dec = self.decoder(quant) + return dec + + def decode_code(self, code_b): + quant_b = self.quantize.embed_code(code_b) + dec = self.decode(quant_b) + return dec + + def forward(self, input, return_pred_indices=False): + quant, diff, (_,_,ind) = self.encode(input) + dec = self.decode(quant) + if return_pred_indices: + return dec, diff, ind + return dec, diff + + def get_input(self, batch, k): + x = batch[k] + if len(x.shape) == 3: + x = x[..., None] + x = x.permute(0, 3, 1, 2).to(memory_format=torch.contiguous_format).float() + if self.batch_resize_range is not None: + lower_size = self.batch_resize_range[0] + upper_size = self.batch_resize_range[1] + if self.global_step <= 4: + # do the first few batches with max size to avoid later oom + new_resize = upper_size + else: + new_resize = np.random.choice(np.arange(lower_size, upper_size+16, 16)) + if new_resize != x.shape[2]: + x = F.interpolate(x, size=new_resize, mode="bicubic") + x = x.detach() + return x + + def training_step(self, batch, batch_idx, optimizer_idx): + # https://github.com/pytorch/pytorch/issues/37142 + # try not to fool the heuristics + x = self.get_input(batch, self.image_key) + xrec, qloss, ind = self(x, return_pred_indices=True) + + if optimizer_idx == 0: + # autoencode + aeloss, log_dict_ae = self.loss(qloss, x, xrec, optimizer_idx, self.global_step, + last_layer=self.get_last_layer(), split="train", + predicted_indices=ind) + + self.log_dict(log_dict_ae, prog_bar=False, logger=True, on_step=True, on_epoch=True) + return aeloss + + if optimizer_idx == 1: + # discriminator + discloss, log_dict_disc = self.loss(qloss, x, xrec, optimizer_idx, self.global_step, + last_layer=self.get_last_layer(), split="train") + self.log_dict(log_dict_disc, prog_bar=False, logger=True, on_step=True, on_epoch=True) + return discloss + + def validation_step(self, batch, batch_idx): + log_dict = self._validation_step(batch, batch_idx) + with self.ema_scope(): + log_dict_ema = self._validation_step(batch, batch_idx, suffix="_ema") + return log_dict + + def _validation_step(self, batch, batch_idx, suffix=""): + x = self.get_input(batch, self.image_key) + xrec, qloss, ind = self(x, return_pred_indices=True) + aeloss, log_dict_ae = self.loss(qloss, x, xrec, 0, + self.global_step, + last_layer=self.get_last_layer(), + split="val"+suffix, + predicted_indices=ind + ) + + discloss, log_dict_disc = self.loss(qloss, x, xrec, 1, + self.global_step, + last_layer=self.get_last_layer(), + split="val"+suffix, + predicted_indices=ind + ) + rec_loss = log_dict_ae[f"val{suffix}/rec_loss"] + self.log(f"val{suffix}/rec_loss", rec_loss, + prog_bar=True, logger=True, on_step=False, on_epoch=True, sync_dist=True) + self.log(f"val{suffix}/aeloss", aeloss, + prog_bar=True, logger=True, on_step=False, on_epoch=True, sync_dist=True) + if version.parse(pl.__version__) >= version.parse('1.4.0'): + del log_dict_ae[f"val{suffix}/rec_loss"] + self.log_dict(log_dict_ae) + self.log_dict(log_dict_disc) + return self.log_dict + + def configure_optimizers(self): + lr_d = self.learning_rate + lr_g = self.lr_g_factor*self.learning_rate + print("lr_d", lr_d) + print("lr_g", lr_g) + opt_ae = 
torch.optim.Adam(list(self.encoder.parameters())+ + list(self.decoder.parameters())+ + list(self.quantize.parameters())+ + list(self.quant_conv.parameters())+ + list(self.post_quant_conv.parameters()), + lr=lr_g, betas=(0.5, 0.9)) + opt_disc = torch.optim.Adam(self.loss.discriminator.parameters(), + lr=lr_d, betas=(0.5, 0.9)) + + if self.scheduler_config is not None: + scheduler = instantiate_from_config(self.scheduler_config) + + print("Setting up LambdaLR scheduler...") + scheduler = [ + { + 'scheduler': LambdaLR(opt_ae, lr_lambda=scheduler.schedule), + 'interval': 'step', + 'frequency': 1 + }, + { + 'scheduler': LambdaLR(opt_disc, lr_lambda=scheduler.schedule), + 'interval': 'step', + 'frequency': 1 + }, + ] + return [opt_ae, opt_disc], scheduler + return [opt_ae, opt_disc], [] + + def get_last_layer(self): + return self.decoder.conv_out.weight + + def log_images(self, batch, only_inputs=False, plot_ema=False, **kwargs): + log = dict() + x = self.get_input(batch, self.image_key) + x = x.to(self.device) + if only_inputs: + log["inputs"] = x + return log + xrec, _ = self(x) + if x.shape[1] > 3: + # colorize with random projection + assert xrec.shape[1] > 3 + x = self.to_rgb(x) + xrec = self.to_rgb(xrec) + log["inputs"] = x + log["reconstructions"] = xrec + if plot_ema: + with self.ema_scope(): + xrec_ema, _ = self(x) + if x.shape[1] > 3: xrec_ema = self.to_rgb(xrec_ema) + log["reconstructions_ema"] = xrec_ema + return log + + def to_rgb(self, x): + assert self.image_key == "segmentation" + if not hasattr(self, "colorize"): + self.register_buffer("colorize", torch.randn(3, x.shape[1], 1, 1).to(x)) + x = F.conv2d(x, weight=self.colorize) + x = 2.*(x-x.min())/(x.max()-x.min()) - 1. + return x + + +class VQModelInterface(VQModel): + def __init__(self, embed_dim, *args, **kwargs): + super().__init__(embed_dim=embed_dim, *args, **kwargs) + self.embed_dim = embed_dim + + def encode(self, x): + h = self.encoder(x) + h = self.quant_conv(h) + return h + + def decode(self, h, force_not_quantize=False): + # also go through quantization layer + if not force_not_quantize: + quant, emb_loss, info = self.quantize(h) + else: + quant = h + quant = self.post_quant_conv(quant) + dec = self.decoder(quant) + return dec + + +class AutoencoderKL(pl.LightningModule): + def __init__(self, + ddconfig, + lossconfig, + embed_dim, + ckpt_path=None, + ignore_keys=[], + image_key="image", + colorize_nlabels=None, + monitor=None, + ): + super().__init__() + self.image_key = image_key + self.encoder = Encoder(**ddconfig) + self.decoder = Decoder(**ddconfig) + self.loss = instantiate_from_config(lossconfig) + assert ddconfig["double_z"] + self.quant_conv = torch.nn.Conv2d(2*ddconfig["z_channels"], 2*embed_dim, 1) + self.post_quant_conv = torch.nn.Conv2d(embed_dim, ddconfig["z_channels"], 1) + self.embed_dim = embed_dim + if colorize_nlabels is not None: + assert type(colorize_nlabels)==int + self.register_buffer("colorize", torch.randn(3, colorize_nlabels, 1, 1)) + if monitor is not None: + self.monitor = monitor + if ckpt_path is not None: + self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys) + + def init_from_ckpt(self, path, ignore_keys=list()): + sd = torch.load(path, map_location="cpu")["state_dict"] + keys = list(sd.keys()) + for k in keys: + for ik in ignore_keys: + if k.startswith(ik): + print("Deleting key {} from state_dict.".format(k)) + del sd[k] + self.load_state_dict(sd, strict=False) + print(f"Restored from {path}") + + def encode(self, x): + h = self.encoder(x) + moments = self.quant_conv(h) + posterior 
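# AutoencoderKL.encode below packs mean and log-variance along the channel
# axis (2 * embed_dim channels) and hands them to DiagonalGaussianDistribution,
# which samples via the reparameterization trick. A sketch of that sampling,
# assuming the usual clamping of logvar for numerical stability:
import torch

def sample_diagonal_gaussian(moments: torch.Tensor) -> torch.Tensor:
    mean, logvar = torch.chunk(moments, 2, dim=1)
    std = torch.exp(0.5 * logvar.clamp(-30., 20.))
    return mean + std * torch.randn_like(mean)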
= DiagonalGaussianDistribution(moments) + return posterior + + def decode(self, z): + z = self.post_quant_conv(z) + dec = self.decoder(z) + return dec + + def forward(self, input, sample_posterior=True): + posterior = self.encode(input) + if sample_posterior: + z = posterior.sample() + else: + z = posterior.mode() + dec = self.decode(z) + return dec, posterior + + def get_input(self, batch, k): + x = batch[k] + if len(x.shape) == 3: + x = x[..., None] + x = x.permute(0, 3, 1, 2).to(memory_format=torch.contiguous_format).float() + return x + + def training_step(self, batch, batch_idx, optimizer_idx): + inputs = self.get_input(batch, self.image_key) + reconstructions, posterior = self(inputs) + + if optimizer_idx == 0: + # train encoder+decoder+logvar + aeloss, log_dict_ae = self.loss(inputs, reconstructions, posterior, optimizer_idx, self.global_step, + last_layer=self.get_last_layer(), split="train") + self.log("aeloss", aeloss, prog_bar=True, logger=True, on_step=True, on_epoch=True) + self.log_dict(log_dict_ae, prog_bar=False, logger=True, on_step=True, on_epoch=False) + return aeloss + + if optimizer_idx == 1: + # train the discriminator + discloss, log_dict_disc = self.loss(inputs, reconstructions, posterior, optimizer_idx, self.global_step, + last_layer=self.get_last_layer(), split="train") + + self.log("discloss", discloss, prog_bar=True, logger=True, on_step=True, on_epoch=True) + self.log_dict(log_dict_disc, prog_bar=False, logger=True, on_step=True, on_epoch=False) + return discloss + + def validation_step(self, batch, batch_idx): + inputs = self.get_input(batch, self.image_key) + reconstructions, posterior = self(inputs) + aeloss, log_dict_ae = self.loss(inputs, reconstructions, posterior, 0, self.global_step, + last_layer=self.get_last_layer(), split="val") + + discloss, log_dict_disc = self.loss(inputs, reconstructions, posterior, 1, self.global_step, + last_layer=self.get_last_layer(), split="val") + + self.log("val/rec_loss", log_dict_ae["val/rec_loss"]) + self.log_dict(log_dict_ae) + self.log_dict(log_dict_disc) + return self.log_dict + + def configure_optimizers(self): + lr = self.learning_rate + opt_ae = torch.optim.Adam(list(self.encoder.parameters())+ + list(self.decoder.parameters())+ + list(self.quant_conv.parameters())+ + list(self.post_quant_conv.parameters()), + lr=lr, betas=(0.5, 0.9)) + opt_disc = torch.optim.Adam(self.loss.discriminator.parameters(), + lr=lr, betas=(0.5, 0.9)) + return [opt_ae, opt_disc], [] + + def get_last_layer(self): + return self.decoder.conv_out.weight + + @torch.no_grad() + def log_images(self, batch, only_inputs=False, **kwargs): + log = dict() + x = self.get_input(batch, self.image_key) + x = x.to(self.device) + if not only_inputs: + xrec, posterior = self(x) + if x.shape[1] > 3: + # colorize with random projection + assert xrec.shape[1] > 3 + x = self.to_rgb(x) + xrec = self.to_rgb(xrec) + log["samples"] = self.decode(torch.randn_like(posterior.sample())) + log["reconstructions"] = xrec + log["inputs"] = x + return log + + def to_rgb(self, x): + assert self.image_key == "segmentation" + if not hasattr(self, "colorize"): + self.register_buffer("colorize", torch.randn(3, x.shape[1], 1, 1).to(x)) + x = F.conv2d(x, weight=self.colorize) + x = 2.*(x-x.min())/(x.max()-x.min()) - 1. 
+ return x + + +class IdentityFirstStage(torch.nn.Module): + def __init__(self, *args, vq_interface=False, **kwargs): + self.vq_interface = vq_interface # TODO: Should be true by default but check to not break older stuff + super().__init__() + + def encode(self, x, *args, **kwargs): + return x + + def decode(self, x, *args, **kwargs): + return x + + def quantize(self, x, *args, **kwargs): + if self.vq_interface: + return x, None, [None, None, None] + return x + + def forward(self, x, *args, **kwargs): + return x diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/diffusion/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/diffusion/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/diffusion/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/diffusion/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d50d386e4e35dcdb3794f5a68ab03ea54a16910f Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/diffusion/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/diffusion/__pycache__/ddim.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/diffusion/__pycache__/ddim.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..891dac9096f347d467e57e456b9a602cd17801d1 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/diffusion/__pycache__/ddim.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/diffusion/__pycache__/ddpm.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/diffusion/__pycache__/ddpm.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..96d4d14ff89ce63fb6b35caf5756d6de0864c41c Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/diffusion/__pycache__/ddpm.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/diffusion/ddim.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/diffusion/ddim.py new file mode 100644 index 0000000000000000000000000000000000000000..92f59a6e64a42442cb102da32a310fe7b8c9c54f --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/diffusion/ddim.py @@ -0,0 +1,202 @@ +"""SAMPLING ONLY.""" + +import torch +import numpy as np +from tqdm import tqdm + +from ...modules.diffusionmodules.util import make_ddim_sampling_parameters, make_ddim_timesteps, noise_like + + +class DDIMSampler(object): + def __init__(self, model, schedule="linear", **kwargs): + super().__init__() + self.model = model + self.ddpm_num_timesteps = model.num_timesteps + self.schedule = schedule + + def register_buffer(self, name, attr): + if type(attr) == torch.Tensor: + if attr.device == torch.device("cuda"): + attr = attr.to(torch.device("cuda")) + setattr(self, name, attr) + + def make_schedule(self, ddim_num_steps, ddim_discretize="uniform", ddim_eta=0., verbose=True): + self.ddim_timesteps = make_ddim_timesteps(ddim_discr_method=ddim_discretize, num_ddim_timesteps=ddim_num_steps, + num_ddpm_timesteps=self.ddpm_num_timesteps,verbose=verbose) + 
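# make_ddim_timesteps with the default "uniform" discretization subsamples
# the DDPM steps at a fixed stride, shifted by one so the final alpha is
# included. Sketch assuming the standard ldm implementation:
import numpy as np

def uniform_ddim_timesteps(num_ddim_steps: int, num_ddpm_steps: int = 1000) -> np.ndarray:
    c = num_ddpm_steps // num_ddim_steps
    return np.asarray(list(range(0, num_ddpm_steps, c))) + 1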
alphas_cumprod = self.model.alphas_cumprod + assert alphas_cumprod.shape[0] == self.ddpm_num_timesteps, 'alphas have to be defined for each timestep' + to_torch = lambda x: x.clone().detach().to(torch.float32).to(self.model.device) + + self.register_buffer('betas', to_torch(self.model.betas)) + self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod)) + self.register_buffer('alphas_cumprod_prev', to_torch(self.model.alphas_cumprod_prev)) + + # calculations for diffusion q(x_t | x_{t-1}) and others + self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod.cpu()))) + self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod.cpu()))) + self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod.cpu()))) + self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu()))) + self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu() - 1))) + + # ddim sampling parameters + ddim_sigmas, ddim_alphas, ddim_alphas_prev = make_ddim_sampling_parameters(alphacums=alphas_cumprod.cpu(), + ddim_timesteps=self.ddim_timesteps, + eta=ddim_eta,verbose=verbose) + self.register_buffer('ddim_sigmas', ddim_sigmas) + self.register_buffer('ddim_alphas', ddim_alphas) + self.register_buffer('ddim_alphas_prev', ddim_alphas_prev) + self.register_buffer('ddim_sqrt_one_minus_alphas', np.sqrt(1. - ddim_alphas)) + sigmas_for_original_sampling_steps = ddim_eta * torch.sqrt( + (1 - self.alphas_cumprod_prev) / (1 - self.alphas_cumprod) * ( + 1 - self.alphas_cumprod / self.alphas_cumprod_prev)) + self.register_buffer('ddim_sigmas_for_original_num_steps', sigmas_for_original_sampling_steps) + + @torch.no_grad() + def sample(self, + S, + batch_size, + shape, + conditioning=None, + callback=None, + normals_sequence=None, + img_callback=None, + quantize_x0=False, + eta=0., + mask=None, + x0=None, + temperature=1., + noise_dropout=0., + score_corrector=None, + corrector_kwargs=None, + verbose=True, + x_T=None, + log_every_t=100, + unconditional_guidance_scale=1., + unconditional_conditioning=None, + # this has to come in the same format as the conditioning, # e.g. as encoded tokens, ... 
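# make_ddim_sampling_parameters gathers the cumulative alphas at the chosen
# DDIM timesteps and derives the per-step noise scale
# sigma_t = eta * sqrt((1 - a_prev) / (1 - a_t)) * sqrt(1 - a_t / a_prev),
# the same expression computed above for the original-step sigmas. Sketch
# assuming the standard ldm formulation:
import numpy as np

def ddim_sampling_parameters(alphacums: np.ndarray, ddim_timesteps: np.ndarray, eta: float):
    alphas = alphacums[ddim_timesteps]
    alphas_prev = np.asarray([alphacums[0]] + alphacums[ddim_timesteps[:-1]].tolist())
    sigmas = eta * np.sqrt((1 - alphas_prev) / (1 - alphas) * (1 - alphas / alphas_prev))
    return sigmas, alphas, alphas_prev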
+ **kwargs + ): + if conditioning is not None: + if isinstance(conditioning, dict): + cbs = conditioning[list(conditioning.keys())[0]].shape[0] + if cbs != batch_size: + print(f"Warning: Got {cbs} conditionings but batch-size is {batch_size}") + else: + if conditioning.shape[0] != batch_size: + print(f"Warning: Got {conditioning.shape[0]} conditionings but batch-size is {batch_size}") + + self.make_schedule(ddim_num_steps=S, ddim_eta=eta, verbose=verbose) + # sampling + C, H, W = shape + size = (batch_size, C, H, W) + print(f'Data shape for DDIM sampling is {size}, eta {eta}') + + samples, intermediates = self.ddim_sampling(conditioning, size, + callback=callback, + img_callback=img_callback, + quantize_denoised=quantize_x0, + mask=mask, x0=x0, + ddim_use_original_steps=False, + noise_dropout=noise_dropout, + temperature=temperature, + score_corrector=score_corrector, + corrector_kwargs=corrector_kwargs, + x_T=x_T, + log_every_t=log_every_t, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning, + ) + return samples, intermediates + + @torch.no_grad() + def ddim_sampling(self, cond, shape, + x_T=None, ddim_use_original_steps=False, + callback=None, timesteps=None, quantize_denoised=False, + mask=None, x0=None, img_callback=None, log_every_t=100, + temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None, + unconditional_guidance_scale=1., unconditional_conditioning=None,): + device = self.model.betas.device + b = shape[0] + if x_T is None: + img = torch.randn(shape, device=device) + else: + img = x_T + + if timesteps is None: + timesteps = self.ddpm_num_timesteps if ddim_use_original_steps else self.ddim_timesteps + elif timesteps is not None and not ddim_use_original_steps: + subset_end = int(min(timesteps / self.ddim_timesteps.shape[0], 1) * self.ddim_timesteps.shape[0]) - 1 + timesteps = self.ddim_timesteps[:subset_end] + + intermediates = {'x_inter': [img], 'pred_x0': [img]} + time_range = reversed(range(0,timesteps)) if ddim_use_original_steps else np.flip(timesteps) + total_steps = timesteps if ddim_use_original_steps else timesteps.shape[0] + print(f"Running DDIM Sampling with {total_steps} timesteps") + + iterator = tqdm(time_range, desc='DDIM Sampler', total=total_steps) + + for i, step in enumerate(iterator): + index = total_steps - i - 1 + ts = torch.full((b,), step, device=device, dtype=torch.long) + + if mask is not None: + assert x0 is not None + img_orig = self.model.q_sample(x0, ts) # TODO: deterministic forward pass? + img = img_orig * mask + (1. 
- mask) * img + + outs = self.p_sample_ddim(img, cond, ts, index=index, use_original_steps=ddim_use_original_steps, + quantize_denoised=quantize_denoised, temperature=temperature, + noise_dropout=noise_dropout, score_corrector=score_corrector, + corrector_kwargs=corrector_kwargs, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning) + img, pred_x0 = outs + if callback: callback(i) + if img_callback: img_callback(pred_x0, i) + + if index % log_every_t == 0 or index == total_steps - 1: + intermediates['x_inter'].append(img) + intermediates['pred_x0'].append(pred_x0) + + return img, intermediates + + @torch.no_grad() + def p_sample_ddim(self, x, c, t, index, repeat_noise=False, use_original_steps=False, quantize_denoised=False, + temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None, + unconditional_guidance_scale=1., unconditional_conditioning=None): + b, *_, device = *x.shape, x.device + + if unconditional_conditioning is None or unconditional_guidance_scale == 1.: + e_t = self.model.apply_model(x, t, c) + else: + x_in = torch.cat([x] * 2) + t_in = torch.cat([t] * 2) + c_in = torch.cat([unconditional_conditioning, c]) + e_t_uncond, e_t = self.model.apply_model(x_in, t_in, c_in).chunk(2) + e_t = e_t_uncond + unconditional_guidance_scale * (e_t - e_t_uncond) + + if score_corrector is not None: + assert self.model.parameterization == "eps" + e_t = score_corrector.modify_score(self.model, e_t, x, t, c, **corrector_kwargs) + + alphas = self.model.alphas_cumprod if use_original_steps else self.ddim_alphas + alphas_prev = self.model.alphas_cumprod_prev if use_original_steps else self.ddim_alphas_prev + sqrt_one_minus_alphas = self.model.sqrt_one_minus_alphas_cumprod if use_original_steps else self.ddim_sqrt_one_minus_alphas + sigmas = self.model.ddim_sigmas_for_original_num_steps if use_original_steps else self.ddim_sigmas + # select parameters corresponding to the currently considered timestep + a_t = torch.full((b, 1, 1, 1), alphas[index], device=device) + a_prev = torch.full((b, 1, 1, 1), alphas_prev[index], device=device) + sigma_t = torch.full((b, 1, 1, 1), sigmas[index], device=device) + sqrt_one_minus_at = torch.full((b, 1, 1, 1), sqrt_one_minus_alphas[index],device=device) + + # current prediction for x_0 + pred_x0 = (x - sqrt_one_minus_at * e_t) / a_t.sqrt() + if quantize_denoised: + pred_x0, _, *_ = self.model.first_stage_model.quantize(pred_x0) + # direction pointing to x_t + dir_xt = (1. 
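# Classifier-free guidance above runs unconditional and conditional inputs
# through a single batched model call, then mixes the two noise predictions.
# The mixing step in isolation (`eps_uncond` / `eps_cond` are hypothetical
# names for the two chunks):
import torch

def cfg_mix(eps_uncond: torch.Tensor, eps_cond: torch.Tensor, scale: float) -> torch.Tensor:
    # scale == 1 recovers the plain conditional prediction
    return eps_uncond + scale * (eps_cond - eps_uncond)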
- a_prev - sigma_t**2).sqrt() * e_t + noise = sigma_t * noise_like(x.shape, device, repeat_noise) * temperature + if noise_dropout > 0.: + noise = torch.nn.functional.dropout(noise, p=noise_dropout) + x_prev = a_prev.sqrt() * pred_x0 + dir_xt + noise + return x_prev, pred_x0 diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/diffusion/ddpm.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/diffusion/ddpm.py new file mode 100644 index 0000000000000000000000000000000000000000..005178bffef2fee1ae3e1353be0e428f872fc24f --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/models/diffusion/ddpm.py @@ -0,0 +1,1445 @@ +""" +wild mixture of +https://github.com/lucidrains/denoising-diffusion-pytorch/blob/7706bdfc6f527f58d33f84b7b522e61e6e3164b3/denoising_diffusion_pytorch/denoising_diffusion_pytorch.py +https://github.com/openai/improved-diffusion/blob/e94489283bb876ac1477d5dd7709bbbd2d9902ce/improved_diffusion/gaussian_diffusion.py +https://github.com/CompVis/taming-transformers +-- merci +""" + +import torch +import torch.nn as nn +import numpy as np +import pytorch_lightning as pl +from torch.optim.lr_scheduler import LambdaLR +from einops import rearrange, repeat +from contextlib import contextmanager +from functools import partial +from tqdm import tqdm +from torchvision.utils import make_grid +from pytorch_lightning.utilities.rank_zero import rank_zero_only + +from ...util import log_txt_as_img, exists, default, ismap, isimage, mean_flat, count_params, instantiate_from_config +from ...modules.ema import LitEma +from ...modules.distributions.distributions import normal_kl, DiagonalGaussianDistribution +from ..autoencoder import VQModelInterface, IdentityFirstStage, AutoencoderKL +from ...modules.diffusionmodules.util import make_beta_schedule, extract_into_tensor, noise_like +from .ddim import DDIMSampler + + +__conditioning_keys__ = {'concat': 'c_concat', + 'crossattn': 'c_crossattn', + 'adm': 'y'} + + +def disabled_train(self, mode=True): + """Overwrite model.train with this function to make sure train/eval mode + does not change anymore.""" + return self + + +def uniform_on_device(r1, r2, shape, device): + return (r1 - r2) * torch.rand(*shape, device=device) + r2 + + +class DDPM(pl.LightningModule): + # classic DDPM with Gaussian diffusion, in image space + def __init__(self, + unet_config, + timesteps=1000, + beta_schedule="linear", + loss_type="l2", + ckpt_path=None, + ignore_keys=[], + load_only_unet=False, + monitor="val/loss", + use_ema=True, + first_stage_key="image", + image_size=256, + channels=3, + log_every_t=100, + clip_denoised=True, + linear_start=1e-4, + linear_end=2e-2, + cosine_s=8e-3, + given_betas=None, + original_elbo_weight=0., + v_posterior=0., # weight for choosing posterior variance as sigma = (1-v) * beta_tilde + v * beta + l_simple_weight=1., + conditioning_key=None, + parameterization="eps", # all assuming fixed variance schedules + scheduler_config=None, + use_positional_encodings=False, + learn_logvar=False, + logvar_init=0., + ): + super().__init__() + assert parameterization in ["eps", "x0"], 'currently only supporting "eps" and "x0"' + self.parameterization = parameterization + print(f"{self.__class__.__name__}: Running in {self.parameterization}-prediction mode") + self.cond_stage_model = None + self.clip_denoised = clip_denoised + self.log_every_t = log_every_t + self.first_stage_key = first_stage_key + self.image_size = image_size # try conv? 
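# One DDIM update as computed in p_sample_ddim above, collected into a single
# function: predict x_0 from the eps-prediction, take the deterministic
# direction to x_{t-1}, and add the sigma-scaled stochastic term. A sketch
# that omits temperature and noise dropout (`ddim_step` is hypothetical):
import torch

def ddim_step(x, e_t, a_t, a_prev, sigma_t):
    pred_x0 = (x - (1. - a_t).sqrt() * e_t) / a_t.sqrt()
    dir_xt = (1. - a_prev - sigma_t ** 2).sqrt() * e_t
    x_prev = a_prev.sqrt() * pred_x0 + dir_xt + sigma_t * torch.randn_like(x)
    return x_prev, pred_x0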
+ self.channels = channels + self.use_positional_encodings = use_positional_encodings + self.model = DiffusionWrapper(unet_config, conditioning_key) + count_params(self.model, verbose=True) + self.use_ema = use_ema + if self.use_ema: + self.model_ema = LitEma(self.model) + print(f"Keeping EMAs of {len(list(self.model_ema.buffers()))}.") + + self.use_scheduler = scheduler_config is not None + if self.use_scheduler: + self.scheduler_config = scheduler_config + + self.v_posterior = v_posterior + self.original_elbo_weight = original_elbo_weight + self.l_simple_weight = l_simple_weight + + if monitor is not None: + self.monitor = monitor + if ckpt_path is not None: + self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys, only_model=load_only_unet) + + self.register_schedule(given_betas=given_betas, beta_schedule=beta_schedule, timesteps=timesteps, + linear_start=linear_start, linear_end=linear_end, cosine_s=cosine_s) + + self.loss_type = loss_type + + self.learn_logvar = learn_logvar + self.logvar = torch.full(fill_value=logvar_init, size=(self.num_timesteps,)) + if self.learn_logvar: + self.logvar = nn.Parameter(self.logvar, requires_grad=True) + + + def register_schedule(self, given_betas=None, beta_schedule="linear", timesteps=1000, + linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3): + if exists(given_betas): + betas = given_betas + else: + betas = make_beta_schedule(beta_schedule, timesteps, linear_start=linear_start, linear_end=linear_end, + cosine_s=cosine_s) + alphas = 1. - betas + alphas_cumprod = np.cumprod(alphas, axis=0) + alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1]) + + timesteps, = betas.shape + self.num_timesteps = int(timesteps) + self.linear_start = linear_start + self.linear_end = linear_end + assert alphas_cumprod.shape[0] == self.num_timesteps, 'alphas have to be defined for each timestep' + + to_torch = partial(torch.tensor, dtype=torch.float32) + + self.register_buffer('betas', to_torch(betas)) + self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod)) + self.register_buffer('alphas_cumprod_prev', to_torch(alphas_cumprod_prev)) + + # calculations for diffusion q(x_t | x_{t-1}) and others + self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod))) + self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod))) + self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod))) + self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod))) + self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod - 1))) + + # calculations for posterior q(x_{t-1} | x_t, x_0) + posterior_variance = (1 - self.v_posterior) * betas * (1. - alphas_cumprod_prev) / ( + 1. - alphas_cumprod) + self.v_posterior * betas + # above: equal to 1. / (1. / (1. - alpha_cumprod_tm1) + alpha_t / beta_t) + self.register_buffer('posterior_variance', to_torch(posterior_variance)) + # below: log calculation clipped because the posterior variance is 0 at the beginning of the diffusion chain + self.register_buffer('posterior_log_variance_clipped', to_torch(np.log(np.maximum(posterior_variance, 1e-20)))) + self.register_buffer('posterior_mean_coef1', to_torch( + betas * np.sqrt(alphas_cumprod_prev) / (1. - alphas_cumprod))) + self.register_buffer('posterior_mean_coef2', to_torch( + (1. - alphas_cumprod_prev) * np.sqrt(alphas) / (1. 
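# register_schedule here derives every fixed diffusion quantity from the
# betas. Sketch of the v_posterior == 0 case, assuming ldm's "linear"
# make_beta_schedule (linear in sqrt space) and the linear_start/linear_end
# values from the config.yaml above:
import numpy as np

def linear_schedule(timesteps: int = 1000, linear_start: float = 0.0015, linear_end: float = 0.0155):
    betas = np.linspace(linear_start ** 0.5, linear_end ** 0.5, timesteps) ** 2
    alphas_cumprod = np.cumprod(1. - betas)
    alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1])
    # variance of q(x_{t-1} | x_t, x_0), i.e. beta_tilde
    posterior_variance = betas * (1. - alphas_cumprod_prev) / (1. - alphas_cumprod)
    return betas, alphas_cumprod, posterior_variance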
- alphas_cumprod))) + + if self.parameterization == "eps": + lvlb_weights = self.betas ** 2 / ( + 2 * self.posterior_variance * to_torch(alphas) * (1 - self.alphas_cumprod)) + elif self.parameterization == "x0": + lvlb_weights = 0.5 * np.sqrt(torch.Tensor(alphas_cumprod)) / (2. * 1 - torch.Tensor(alphas_cumprod)) + else: + raise NotImplementedError("mu not supported") + # TODO how to choose this term + lvlb_weights[0] = lvlb_weights[1] + self.register_buffer('lvlb_weights', lvlb_weights, persistent=False) + assert not torch.isnan(self.lvlb_weights).all() + + @contextmanager + def ema_scope(self, context=None): + if self.use_ema: + self.model_ema.store(self.model.parameters()) + self.model_ema.copy_to(self.model) + if context is not None: + print(f"{context}: Switched to EMA weights") + try: + yield None + finally: + if self.use_ema: + self.model_ema.restore(self.model.parameters()) + if context is not None: + print(f"{context}: Restored training weights") + + def init_from_ckpt(self, path, ignore_keys=list(), only_model=False): + sd = torch.load(path, map_location="cpu") + if "state_dict" in list(sd.keys()): + sd = sd["state_dict"] + keys = list(sd.keys()) + for k in keys: + for ik in ignore_keys: + if k.startswith(ik): + print("Deleting key {} from state_dict.".format(k)) + del sd[k] + missing, unexpected = self.load_state_dict(sd, strict=False) if not only_model else self.model.load_state_dict( + sd, strict=False) + print(f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys") + if len(missing) > 0: + print(f"Missing Keys: {missing}") + if len(unexpected) > 0: + print(f"Unexpected Keys: {unexpected}") + + def q_mean_variance(self, x_start, t): + """ + Get the distribution q(x_t | x_0). + :param x_start: the [N x C x ...] tensor of noiseless inputs. + :param t: the number of diffusion steps (minus 1). Here, 0 means one step. + :return: A tuple (mean, variance, log_variance), all of x_start's shape. + """ + mean = (extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start) + variance = extract_into_tensor(1.0 - self.alphas_cumprod, t, x_start.shape) + log_variance = extract_into_tensor(self.log_one_minus_alphas_cumprod, t, x_start.shape) + return mean, variance, log_variance + + def predict_start_from_noise(self, x_t, t, noise): + return ( + extract_into_tensor(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t - + extract_into_tensor(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape) * noise + ) + + def q_posterior(self, x_start, x_t, t): + posterior_mean = ( + extract_into_tensor(self.posterior_mean_coef1, t, x_t.shape) * x_start + + extract_into_tensor(self.posterior_mean_coef2, t, x_t.shape) * x_t + ) + posterior_variance = extract_into_tensor(self.posterior_variance, t, x_t.shape) + posterior_log_variance_clipped = extract_into_tensor(self.posterior_log_variance_clipped, t, x_t.shape) + return posterior_mean, posterior_variance, posterior_log_variance_clipped + + def p_mean_variance(self, x, t, clip_denoised: bool): + model_out = self.model(x, t) + if self.parameterization == "eps": + x_recon = self.predict_start_from_noise(x, t=t, noise=model_out) + elif self.parameterization == "x0": + x_recon = model_out + if clip_denoised: + x_recon.clamp_(-1., 1.) 
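# extract_into_tensor, used by q_mean_variance and the other closed-form
# helpers here, gathers the per-timestep scalar for each batch element and
# reshapes it to broadcast against image tensors. Sketch assuming the usual
# ldm helper:
import torch

def extract_into_tensor(a: torch.Tensor, t: torch.Tensor, x_shape) -> torch.Tensor:
    b = t.shape[0]
    out = a.gather(-1, t)
    return out.reshape(b, *((1,) * (len(x_shape) - 1)))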
+ + model_mean, posterior_variance, posterior_log_variance = self.q_posterior(x_start=x_recon, x_t=x, t=t) + return model_mean, posterior_variance, posterior_log_variance + + @torch.no_grad() + def p_sample(self, x, t, clip_denoised=True, repeat_noise=False): + b, *_, device = *x.shape, x.device + model_mean, _, model_log_variance = self.p_mean_variance(x=x, t=t, clip_denoised=clip_denoised) + noise = noise_like(x.shape, device, repeat_noise) + # no noise when t == 0 + nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1))) + return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise + + @torch.no_grad() + def p_sample_loop(self, shape, return_intermediates=False): + device = self.betas.device + b = shape[0] + img = torch.randn(shape, device=device) + intermediates = [img] + for i in tqdm(reversed(range(0, self.num_timesteps)), desc='Sampling t', total=self.num_timesteps): + img = self.p_sample(img, torch.full((b,), i, device=device, dtype=torch.long), + clip_denoised=self.clip_denoised) + if i % self.log_every_t == 0 or i == self.num_timesteps - 1: + intermediates.append(img) + if return_intermediates: + return img, intermediates + return img + + @torch.no_grad() + def sample(self, batch_size=16, return_intermediates=False): + image_size = self.image_size + channels = self.channels + return self.p_sample_loop((batch_size, channels, image_size, image_size), + return_intermediates=return_intermediates) + + def q_sample(self, x_start, t, noise=None): + noise = default(noise, lambda: torch.randn_like(x_start)) + return (extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start + + extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) * noise) + + def get_loss(self, pred, target, mean=True): + if self.loss_type == 'l1': + loss = (target - pred).abs() + if mean: + loss = loss.mean() + elif self.loss_type == 'l2': + if mean: + loss = torch.nn.functional.mse_loss(target, pred) + else: + loss = torch.nn.functional.mse_loss(target, pred, reduction='none') + else: + raise NotImplementedError(f"unknown loss type '{self.loss_type}'") + + return loss + + def p_losses(self, x_start, t, noise=None): + noise = default(noise, lambda: torch.randn_like(x_start)) + x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise) + model_out = self.model(x_noisy, t) + + loss_dict = {} + if self.parameterization == "eps": + target = noise + elif self.parameterization == "x0": + target = x_start + else: + raise NotImplementedError(f"Parameterization {self.parameterization} not yet supported") + + loss = self.get_loss(model_out, target, mean=False).mean(dim=[1, 2, 3]) + + log_prefix = 'train' if self.training else 'val' + + loss_dict.update({f'{log_prefix}/loss_simple': loss.mean()}) + loss_simple = loss.mean() * self.l_simple_weight + + loss_vlb = (self.lvlb_weights[t] * loss).mean() + loss_dict.update({f'{log_prefix}/loss_vlb': loss_vlb}) + + loss = loss_simple + self.original_elbo_weight * loss_vlb + + loss_dict.update({f'{log_prefix}/loss': loss}) + + return loss, loss_dict + + def forward(self, x, *args, **kwargs): + # b, c, h, w, device, img_size, = *x.shape, x.device, self.image_size + # assert h == img_size and w == img_size, f'height and width of image must be {img_size}' + t = torch.randint(0, self.num_timesteps, (x.shape[0],), device=self.device).long() + return self.p_losses(x, t, *args, **kwargs) + + def get_input(self, batch, k): + x = batch[k] + if len(x.shape) == 3: + x = x[..., None] + x = rearrange(x, 'b h w c -> b c h w') + x =
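# q_sample above draws x_t from x_0 in closed form,
# x_t = sqrt(abar_t) * x_0 + sqrt(1 - abar_t) * eps, which is what makes
# training on random timesteps cheap. The identity in isolation:
import torch

def forward_noise(x0: torch.Tensor, abar_t: torch.Tensor, noise: torch.Tensor) -> torch.Tensor:
    # abar_t must broadcast against x0, e.g. shape (b, 1, 1, 1)
    return abar_t.sqrt() * x0 + (1. - abar_t).sqrt() * noise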
x.to(memory_format=torch.contiguous_format).float() + return x + + def shared_step(self, batch): + x = self.get_input(batch, self.first_stage_key) + loss, loss_dict = self(x) + return loss, loss_dict + + def training_step(self, batch, batch_idx): + loss, loss_dict = self.shared_step(batch) + + self.log_dict(loss_dict, prog_bar=True, + logger=True, on_step=True, on_epoch=True) + + self.log("global_step", self.global_step, + prog_bar=True, logger=True, on_step=True, on_epoch=False) + + if self.use_scheduler: + lr = self.optimizers().param_groups[0]['lr'] + self.log('lr_abs', lr, prog_bar=True, logger=True, on_step=True, on_epoch=False) + + return loss + + @torch.no_grad() + def validation_step(self, batch, batch_idx): + _, loss_dict_no_ema = self.shared_step(batch) + with self.ema_scope(): + _, loss_dict_ema = self.shared_step(batch) + loss_dict_ema = {key + '_ema': loss_dict_ema[key] for key in loss_dict_ema} + self.log_dict(loss_dict_no_ema, prog_bar=False, logger=True, on_step=False, on_epoch=True) + self.log_dict(loss_dict_ema, prog_bar=False, logger=True, on_step=False, on_epoch=True) + + def on_train_batch_end(self, *args, **kwargs): + if self.use_ema: + self.model_ema(self.model) + + def _get_rows_from_list(self, samples): + n_imgs_per_row = len(samples) + denoise_grid = rearrange(samples, 'n b c h w -> b n c h w') + denoise_grid = rearrange(denoise_grid, 'b n c h w -> (b n) c h w') + denoise_grid = make_grid(denoise_grid, nrow=n_imgs_per_row) + return denoise_grid + + @torch.no_grad() + def log_images(self, batch, N=8, n_row=2, sample=True, return_keys=None, **kwargs): + log = dict() + x = self.get_input(batch, self.first_stage_key) + N = min(x.shape[0], N) + n_row = min(x.shape[0], n_row) + x = x.to(self.device)[:N] + log["inputs"] = x + + # get diffusion row + diffusion_row = list() + x_start = x[:n_row] + + for t in range(self.num_timesteps): + if t % self.log_every_t == 0 or t == self.num_timesteps - 1: + t = repeat(torch.tensor([t]), '1 -> b', b=n_row) + t = t.to(self.device).long() + noise = torch.randn_like(x_start) + x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise) + diffusion_row.append(x_noisy) + + log["diffusion_row"] = self._get_rows_from_list(diffusion_row) + + if sample: + # get denoise row + with self.ema_scope("Plotting"): + samples, denoise_row = self.sample(batch_size=N, return_intermediates=True) + + log["samples"] = samples + log["denoise_row"] = self._get_rows_from_list(denoise_row) + + if return_keys: + if np.intersect1d(list(log.keys()), return_keys).shape[0] == 0: + return log + else: + return {key: log[key] for key in return_keys} + return log + + def configure_optimizers(self): + lr = self.learning_rate + params = list(self.model.parameters()) + if self.learn_logvar: + params = params + [self.logvar] + opt = torch.optim.AdamW(params, lr=lr) + return opt + + +class LatentDiffusion(DDPM): + """main class""" + def __init__(self, + first_stage_config, + cond_stage_config, + num_timesteps_cond=None, + cond_stage_key="image", + cond_stage_trainable=False, + concat_mode=True, + cond_stage_forward=None, + conditioning_key=None, + scale_factor=1.0, + scale_by_std=False, + *args, **kwargs): + self.num_timesteps_cond = default(num_timesteps_cond, 1) + self.scale_by_std = scale_by_std + assert self.num_timesteps_cond <= kwargs['timesteps'] + # for backwards compatibility after implementation of DiffusionWrapper + if conditioning_key is None: + conditioning_key = 'concat' if concat_mode else 'crossattn' + if cond_stage_config == '__is_unconditional__': + 
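# _get_rows_from_list above turns a list of intermediate sample batches into
# one grid image: stack to (n, b, c, h, w), reorder batch-major, merge, and
# let make_grid lay out n images per row. An equivalent sketch:
import torch
from einops import rearrange
from torchvision.utils import make_grid

def rows_from_list(samples):  # samples: list of (b, c, h, w) tensors
    grid = rearrange(torch.stack(samples), 'n b c h w -> (b n) c h w')
    return make_grid(grid, nrow=len(samples))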
conditioning_key = None + ckpt_path = kwargs.pop("ckpt_path", None) + ignore_keys = kwargs.pop("ignore_keys", []) + super().__init__(conditioning_key=conditioning_key, *args, **kwargs) + self.concat_mode = concat_mode + self.cond_stage_trainable = cond_stage_trainable + self.cond_stage_key = cond_stage_key + try: + self.num_downs = len(first_stage_config.params.ddconfig.ch_mult) - 1 + except: + self.num_downs = 0 + if not scale_by_std: + self.scale_factor = scale_factor + else: + self.register_buffer('scale_factor', torch.tensor(scale_factor)) + self.instantiate_first_stage(first_stage_config) + self.instantiate_cond_stage(cond_stage_config) + self.cond_stage_forward = cond_stage_forward + self.clip_denoised = False + self.bbox_tokenizer = None + + self.restarted_from_ckpt = False + if ckpt_path is not None: + self.init_from_ckpt(ckpt_path, ignore_keys) + self.restarted_from_ckpt = True + + def make_cond_schedule(self, ): + self.cond_ids = torch.full(size=(self.num_timesteps,), fill_value=self.num_timesteps - 1, dtype=torch.long) + ids = torch.round(torch.linspace(0, self.num_timesteps - 1, self.num_timesteps_cond)).long() + self.cond_ids[:self.num_timesteps_cond] = ids + + @rank_zero_only + @torch.no_grad() + def on_train_batch_start(self, batch, batch_idx, dataloader_idx): + # only for very first batch + if self.scale_by_std and self.current_epoch == 0 and self.global_step == 0 and batch_idx == 0 and not self.restarted_from_ckpt: + assert self.scale_factor == 1., 'rather not use custom rescaling and std-rescaling simultaneously' + # set rescale weight to 1./std of encodings + print("### USING STD-RESCALING ###") + x = super().get_input(batch, self.first_stage_key) + x = x.to(self.device) + encoder_posterior = self.encode_first_stage(x) + z = self.get_first_stage_encoding(encoder_posterior).detach() + del self.scale_factor + self.register_buffer('scale_factor', 1. 
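# The std-rescaling on the first batch sets scale_factor = 1 / std(z) so the
# scaled latents are roughly unit-variance before diffusion. A tiny numeric
# sketch of that invariant:
import torch

z = torch.randn(4, 3, 64, 64) * 5.  # stand-in for first-batch latents
scale_factor = 1. / z.flatten().std()
assert abs((scale_factor * z).flatten().std().item() - 1.) < 1e-3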
/ z.flatten().std()) + print(f"setting self.scale_factor to {self.scale_factor}") + print("### USING STD-RESCALING ###") + + def register_schedule(self, + given_betas=None, beta_schedule="linear", timesteps=1000, + linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3): + super().register_schedule(given_betas, beta_schedule, timesteps, linear_start, linear_end, cosine_s) + + self.shorten_cond_schedule = self.num_timesteps_cond > 1 + if self.shorten_cond_schedule: + self.make_cond_schedule() + + def instantiate_first_stage(self, config): + model = instantiate_from_config(config) + self.first_stage_model = model.eval() + self.first_stage_model.train = disabled_train + for param in self.first_stage_model.parameters(): + param.requires_grad = False + + def instantiate_cond_stage(self, config): + if not self.cond_stage_trainable: + if config == "__is_first_stage__": + print("Using first stage also as cond stage.") + self.cond_stage_model = self.first_stage_model + elif config == "__is_unconditional__": + print(f"Training {self.__class__.__name__} as an unconditional model.") + self.cond_stage_model = None + # self.be_unconditional = True + else: + model = instantiate_from_config(config) + self.cond_stage_model = model.eval() + self.cond_stage_model.train = disabled_train + for param in self.cond_stage_model.parameters(): + param.requires_grad = False + else: + assert config != '__is_first_stage__' + assert config != '__is_unconditional__' + model = instantiate_from_config(config) + self.cond_stage_model = model + + def _get_denoise_row_from_list(self, samples, desc='', force_no_decoder_quantization=False): + denoise_row = [] + for zd in tqdm(samples, desc=desc): + denoise_row.append(self.decode_first_stage(zd.to(self.device), + force_not_quantize=force_no_decoder_quantization)) + n_imgs_per_row = len(denoise_row) + denoise_row = torch.stack(denoise_row) # n_log_step, n_row, C, H, W + denoise_grid = rearrange(denoise_row, 'n b c h w -> b n c h w') + denoise_grid = rearrange(denoise_grid, 'b n c h w -> (b n) c h w') + denoise_grid = make_grid(denoise_grid, nrow=n_imgs_per_row) + return denoise_grid + + def get_first_stage_encoding(self, encoder_posterior): + if isinstance(encoder_posterior, DiagonalGaussianDistribution): + z = encoder_posterior.sample() + elif isinstance(encoder_posterior, torch.Tensor): + z = encoder_posterior + else: + raise NotImplementedError(f"encoder_posterior of type '{type(encoder_posterior)}' not yet implemented") + return self.scale_factor * z + + def get_learned_conditioning(self, c): + if self.cond_stage_forward is None: + if hasattr(self.cond_stage_model, 'encode') and callable(self.cond_stage_model.encode): + c = self.cond_stage_model.encode(c) + if isinstance(c, DiagonalGaussianDistribution): + c = c.mode() + else: + c = self.cond_stage_model(c) + else: + assert hasattr(self.cond_stage_model, self.cond_stage_forward) + c = getattr(self.cond_stage_model, self.cond_stage_forward)(c) + return c + + def meshgrid(self, h, w): + y = torch.arange(0, h).view(h, 1, 1).repeat(1, w, 1) + x = torch.arange(0, w).view(1, w, 1).repeat(h, 1, 1) + + arr = torch.cat([y, x], dim=-1) + return arr + + def delta_border(self, h, w): + """ + :param h: height + :param w: width + :return: normalized distance to image border, + with min distance = 0 at the border and max distance = 0.5 at the image center + """ + lower_right_corner = torch.tensor([h - 1, w - 1]).view(1, 1, 2) + arr = self.meshgrid(h, w) / lower_right_corner + dist_left_up = torch.min(arr, dim=-1, keepdims=True)[0] + dist_right_down =
torch.min(1 - arr, dim=-1, keepdims=True)[0] + edge_dist = torch.min(torch.cat([dist_left_up, dist_right_down], dim=-1), dim=-1)[0] + return edge_dist + + def get_weighting(self, h, w, Ly, Lx, device): + weighting = self.delta_border(h, w) + weighting = torch.clip(weighting, self.split_input_params["clip_min_weight"], + self.split_input_params["clip_max_weight"], ) + weighting = weighting.view(1, h * w, 1).repeat(1, 1, Ly * Lx).to(device) + + if self.split_input_params["tie_braker"]: + L_weighting = self.delta_border(Ly, Lx) + L_weighting = torch.clip(L_weighting, + self.split_input_params["clip_min_tie_weight"], + self.split_input_params["clip_max_tie_weight"]) + + L_weighting = L_weighting.view(1, 1, Ly * Lx).to(device) + weighting = weighting * L_weighting + return weighting + + def get_fold_unfold(self, x, kernel_size, stride, uf=1, df=1): # todo load once not every time, shorten code + """ + :param x: img of size (bs, c, h, w) + :return: n img crops of size (n, bs, c, kernel_size[0], kernel_size[1]) + """ + bs, nc, h, w = x.shape + + # number of crops in image + Ly = (h - kernel_size[0]) // stride[0] + 1 + Lx = (w - kernel_size[1]) // stride[1] + 1 + + if uf == 1 and df == 1: + fold_params = dict(kernel_size=kernel_size, dilation=1, padding=0, stride=stride) + unfold = torch.nn.Unfold(**fold_params) + + fold = torch.nn.Fold(output_size=x.shape[2:], **fold_params) + + weighting = self.get_weighting(kernel_size[0], kernel_size[1], Ly, Lx, x.device).to(x.dtype) + normalization = fold(weighting).view(1, 1, h, w) # normalizes the overlap + weighting = weighting.view((1, 1, kernel_size[0], kernel_size[1], Ly * Lx)) + + elif uf > 1 and df == 1: + fold_params = dict(kernel_size=kernel_size, dilation=1, padding=0, stride=stride) + unfold = torch.nn.Unfold(**fold_params) + + fold_params2 = dict(kernel_size=(kernel_size[0] * uf, kernel_size[0] * uf), + dilation=1, padding=0, + stride=(stride[0] * uf, stride[1] * uf)) + fold = torch.nn.Fold(output_size=(x.shape[2] * uf, x.shape[3] * uf), **fold_params2) + + weighting = self.get_weighting(kernel_size[0] * uf, kernel_size[1] * uf, Ly, Lx, x.device).to(x.dtype) + normalization = fold(weighting).view(1, 1, h * uf, w * uf) # normalizes the overlap + weighting = weighting.view((1, 1, kernel_size[0] * uf, kernel_size[1] * uf, Ly * Lx)) + + elif df > 1 and uf == 1: + fold_params = dict(kernel_size=kernel_size, dilation=1, padding=0, stride=stride) + unfold = torch.nn.Unfold(**fold_params) + + fold_params2 = dict(kernel_size=(kernel_size[0] // df, kernel_size[0] // df), + dilation=1, padding=0, + stride=(stride[0] // df, stride[1] // df)) + fold = torch.nn.Fold(output_size=(x.shape[2] // df, x.shape[3] // df), **fold_params2) + + weighting = self.get_weighting(kernel_size[0] // df, kernel_size[1] // df, Ly, Lx, x.device).to(x.dtype) + normalization = fold(weighting).view(1, 1, h // df, w // df) # normalizes the overlap + weighting = weighting.view((1, 1, kernel_size[0] // df, kernel_size[1] // df, Ly * Lx)) + + else: + raise NotImplementedError + + return fold, unfold, normalization, weighting + + @torch.no_grad() + def get_input(self, batch, k, return_first_stage_outputs=False, force_c_encode=False, + cond_key=None, return_original_cond=False, bs=None): + x = super().get_input(batch, k) + if bs is not None: + x = x[:bs] + x = x.to(self.device) + encoder_posterior = self.encode_first_stage(x) + z = self.get_first_stage_encoding(encoder_posterior).detach() + + if self.model.conditioning_key is not None: + if cond_key is None: + cond_key = 
self.cond_stage_key + if cond_key != self.first_stage_key: + if cond_key in ['caption', 'coordinates_bbox']: + xc = batch[cond_key] + elif cond_key == 'class_label': + xc = batch + else: + xc = super().get_input(batch, cond_key).to(self.device) + else: + xc = x + if not self.cond_stage_trainable or force_c_encode: + if isinstance(xc, dict) or isinstance(xc, list): + # import pudb; pudb.set_trace() + c = self.get_learned_conditioning(xc) + else: + c = self.get_learned_conditioning(xc.to(self.device)) + else: + c = xc + if bs is not None: + c = c[:bs] + + if self.use_positional_encodings: + pos_x, pos_y = self.compute_latent_shifts(batch) + ckey = __conditioning_keys__[self.model.conditioning_key] + c = {ckey: c, 'pos_x': pos_x, 'pos_y': pos_y} + + else: + c = None + xc = None + if self.use_positional_encodings: + pos_x, pos_y = self.compute_latent_shifts(batch) + c = {'pos_x': pos_x, 'pos_y': pos_y} + out = [z, c] + if return_first_stage_outputs: + xrec = self.decode_first_stage(z) + out.extend([x, xrec]) + if return_original_cond: + out.append(xc) + return out + + @torch.no_grad() + def decode_first_stage(self, z, predict_cids=False, force_not_quantize=False): + if predict_cids: + if z.dim() == 4: + z = torch.argmax(z.exp(), dim=1).long() + z = self.first_stage_model.quantize.get_codebook_entry(z, shape=None) + z = rearrange(z, 'b h w c -> b c h w').contiguous() + + z = 1. / self.scale_factor * z + + if hasattr(self, "split_input_params"): + if self.split_input_params["patch_distributed_vq"]: + ks = self.split_input_params["ks"] # eg. (128, 128) + stride = self.split_input_params["stride"] # eg. (64, 64) + uf = self.split_input_params["vqf"] + bs, nc, h, w = z.shape + if ks[0] > h or ks[1] > w: + ks = (min(ks[0], h), min(ks[1], w)) + print("reducing Kernel") + + if stride[0] > h or stride[1] > w: + stride = (min(stride[0], h), min(stride[1], w)) + print("reducing stride") + + fold, unfold, normalization, weighting = self.get_fold_unfold(z, ks, stride, uf=uf) + + z = unfold(z) # (bn, nc * prod(**ks), L) + # 1. Reshape to img shape + z = z.view((z.shape[0], -1, ks[0], ks[1], z.shape[-1])) # (bn, nc, ks[0], ks[1], L ) + + # 2. apply model loop over last dim + if isinstance(self.first_stage_model, VQModelInterface): + output_list = [self.first_stage_model.decode(z[:, :, :, :, i], + force_not_quantize=predict_cids or force_not_quantize) + for i in range(z.shape[-1])] + else: + + output_list = [self.first_stage_model.decode(z[:, :, :, :, i]) + for i in range(z.shape[-1])] + + o = torch.stack(output_list, axis=-1) # # (bn, nc, ks[0], ks[1], L) + o = o * weighting + # Reverse 1. 
reshape to img shape + o = o.view((o.shape[0], -1, o.shape[-1])) # (bn, nc * ks[0] * ks[1], L) + # stitch crops together + decoded = fold(o) + decoded = decoded / normalization # norm is shape (1, 1, h, w) + return decoded + else: + if isinstance(self.first_stage_model, VQModelInterface): + return self.first_stage_model.decode(z, force_not_quantize=predict_cids or force_not_quantize) + else: + return self.first_stage_model.decode(z) + + else: + if isinstance(self.first_stage_model, VQModelInterface): + return self.first_stage_model.decode(z, force_not_quantize=predict_cids or force_not_quantize) + else: + return self.first_stage_model.decode(z) + + # same as above but without decorator + def differentiable_decode_first_stage(self, z, predict_cids=False, force_not_quantize=False): + if predict_cids: + if z.dim() == 4: + z = torch.argmax(z.exp(), dim=1).long() + z = self.first_stage_model.quantize.get_codebook_entry(z, shape=None) + z = rearrange(z, 'b h w c -> b c h w').contiguous() + + z = 1. / self.scale_factor * z + + if hasattr(self, "split_input_params"): + if self.split_input_params["patch_distributed_vq"]: + ks = self.split_input_params["ks"] # eg. (128, 128) + stride = self.split_input_params["stride"] # eg. (64, 64) + uf = self.split_input_params["vqf"] + bs, nc, h, w = z.shape + if ks[0] > h or ks[1] > w: + ks = (min(ks[0], h), min(ks[1], w)) + print("reducing Kernel") + + if stride[0] > h or stride[1] > w: + stride = (min(stride[0], h), min(stride[1], w)) + print("reducing stride") + + fold, unfold, normalization, weighting = self.get_fold_unfold(z, ks, stride, uf=uf) + + z = unfold(z) # (bn, nc * prod(**ks), L) + # 1. Reshape to img shape + z = z.view((z.shape[0], -1, ks[0], ks[1], z.shape[-1])) # (bn, nc, ks[0], ks[1], L ) + + # 2. apply model loop over last dim + if isinstance(self.first_stage_model, VQModelInterface): + output_list = [self.first_stage_model.decode(z[:, :, :, :, i], + force_not_quantize=predict_cids or force_not_quantize) + for i in range(z.shape[-1])] + else: + + output_list = [self.first_stage_model.decode(z[:, :, :, :, i]) + for i in range(z.shape[-1])] + + o = torch.stack(output_list, axis=-1) # # (bn, nc, ks[0], ks[1], L) + o = o * weighting + # Reverse 1. reshape to img shape + o = o.view((o.shape[0], -1, o.shape[-1])) # (bn, nc * ks[0] * ks[1], L) + # stitch crops together + decoded = fold(o) + decoded = decoded / normalization # norm is shape (1, 1, h, w) + return decoded + else: + if isinstance(self.first_stage_model, VQModelInterface): + return self.first_stage_model.decode(z, force_not_quantize=predict_cids or force_not_quantize) + else: + return self.first_stage_model.decode(z) + + else: + if isinstance(self.first_stage_model, VQModelInterface): + return self.first_stage_model.decode(z, force_not_quantize=predict_cids or force_not_quantize) + else: + return self.first_stage_model.decode(z) + + @torch.no_grad() + def encode_first_stage(self, x): + if hasattr(self, "split_input_params"): + if self.split_input_params["patch_distributed_vq"]: + ks = self.split_input_params["ks"] # eg. (128, 128) + stride = self.split_input_params["stride"] # eg. 
(64, 64) + df = self.split_input_params["vqf"] + self.split_input_params['original_image_size'] = x.shape[-2:] + bs, nc, h, w = x.shape + if ks[0] > h or ks[1] > w: + ks = (min(ks[0], h), min(ks[1], w)) + print("reducing Kernel") + + if stride[0] > h or stride[1] > w: + stride = (min(stride[0], h), min(stride[1], w)) + print("reducing stride") + + fold, unfold, normalization, weighting = self.get_fold_unfold(x, ks, stride, df=df) + z = unfold(x) # (bn, nc * prod(**ks), L) + # Reshape to img shape + z = z.view((z.shape[0], -1, ks[0], ks[1], z.shape[-1])) # (bn, nc, ks[0], ks[1], L ) + + output_list = [self.first_stage_model.encode(z[:, :, :, :, i]) + for i in range(z.shape[-1])] + + o = torch.stack(output_list, axis=-1) + o = o * weighting + + # Reverse reshape to img shape + o = o.view((o.shape[0], -1, o.shape[-1])) # (bn, nc * ks[0] * ks[1], L) + # stitch crops together + decoded = fold(o) + decoded = decoded / normalization + return decoded + + else: + return self.first_stage_model.encode(x) + else: + return self.first_stage_model.encode(x) + + def shared_step(self, batch, **kwargs): + x, c = self.get_input(batch, self.first_stage_key) + loss = self(x, c) + return loss + + def forward(self, x, c, *args, **kwargs): + t = torch.randint(0, self.num_timesteps, (x.shape[0],), device=self.device).long() + if self.model.conditioning_key is not None: + assert c is not None + if self.cond_stage_trainable: + c = self.get_learned_conditioning(c) + if self.shorten_cond_schedule: # TODO: drop this option + tc = self.cond_ids[t].to(self.device) + c = self.q_sample(x_start=c, t=tc, noise=torch.randn_like(c.float())) + return self.p_losses(x, c, t, *args, **kwargs) + + def _rescale_annotations(self, bboxes, crop_coordinates): # TODO: move to dataset + def rescale_bbox(bbox): + x0 = clamp((bbox[0] - crop_coordinates[0]) / crop_coordinates[2]) + y0 = clamp((bbox[1] - crop_coordinates[1]) / crop_coordinates[3]) + w = min(bbox[2] / crop_coordinates[2], 1 - x0) + h = min(bbox[3] / crop_coordinates[3], 1 - y0) + return x0, y0, w, h + + return [rescale_bbox(b) for b in bboxes] + + def apply_model(self, x_noisy, t, cond, return_ids=False): + + if isinstance(cond, dict): + # hybrid case, cond is expected to be a dict + pass + else: + if not isinstance(cond, list): + cond = [cond] + key = 'c_concat' if self.model.conditioning_key == 'concat' else 'c_crossattn' + cond = {key: cond} + + if hasattr(self, "split_input_params"): + assert len(cond) == 1 # todo can only deal with one conditioning atm + assert not return_ids + ks = self.split_input_params["ks"] # eg. (128, 128) + stride = self.split_input_params["stride"] # eg.
(64, 64) + + h, w = x_noisy.shape[-2:] + + fold, unfold, normalization, weighting = self.get_fold_unfold(x_noisy, ks, stride) + + z = unfold(x_noisy) # (bn, nc * prod(**ks), L) + # Reshape to img shape + z = z.view((z.shape[0], -1, ks[0], ks[1], z.shape[-1])) # (bn, nc, ks[0], ks[1], L ) + z_list = [z[:, :, :, :, i] for i in range(z.shape[-1])] + + if self.cond_stage_key in ["image", "LR_image", "segmentation", + 'bbox_img'] and self.model.conditioning_key: # todo check for completeness + c_key = next(iter(cond.keys())) # get key + c = next(iter(cond.values())) # get value + assert (len(c) == 1) # todo extend to list with more than one elem + c = c[0] # get element + + c = unfold(c) + c = c.view((c.shape[0], -1, ks[0], ks[1], c.shape[-1])) # (bn, nc, ks[0], ks[1], L ) + + cond_list = [{c_key: [c[:, :, :, :, i]]} for i in range(c.shape[-1])] + + elif self.cond_stage_key == 'coordinates_bbox': + assert 'original_image_size' in self.split_input_params, 'BoundingBoxRescaling is missing original_image_size' + + # assuming padding of unfold is always 0 and its dilation is always 1 + n_patches_per_row = int((w - ks[0]) / stride[0] + 1) + full_img_h, full_img_w = self.split_input_params['original_image_size'] + # as we are operating on latents, we need the factor from the original image size to the + # spatial latent size to properly rescale the crops for regenerating the bbox annotations + num_downs = self.first_stage_model.encoder.num_resolutions - 1 + rescale_latent = 2 ** (num_downs) + + # get top left positions of patches as expected by the bbox tokenizer, therefore we + # need to rescale the tl patch coordinates to be in between (0,1) + tl_patch_coordinates = [(rescale_latent * stride[0] * (patch_nr % n_patches_per_row) / full_img_w, + rescale_latent * stride[1] * (patch_nr // n_patches_per_row) / full_img_h) + for patch_nr in range(z.shape[-1])] + + # patch_limits are tl_coord, width and height coordinates as (x_tl, y_tl, h, w) + patch_limits = [(x_tl, y_tl, + rescale_latent * ks[0] / full_img_w, + rescale_latent * ks[1] / full_img_h) for x_tl, y_tl in tl_patch_coordinates] + # patch_values = [(np.arange(x_tl,min(x_tl+ks, 1.)),np.arange(y_tl,min(y_tl+ks, 1.))) for x_tl, y_tl in tl_patch_coordinates] + + # tokenize crop coordinates for the bounding boxes of the respective patches + patch_limits_tknzd = [torch.LongTensor(self.bbox_tokenizer._crop_encoder(bbox))[None].to(self.device) + for bbox in patch_limits] # list of length l with tensors of shape (1, 2) + print(patch_limits_tknzd[0].shape) + # cut tknzd crop position from conditioning + assert isinstance(cond, dict), 'cond must be dict to be fed into model' + cut_cond = cond['c_crossattn'][0][..., :-2].to(self.device) + print(cut_cond.shape) + + adapted_cond = torch.stack([torch.cat([cut_cond, p], dim=1) for p in patch_limits_tknzd]) + adapted_cond = rearrange(adapted_cond, 'l b n -> (l b) n') + print(adapted_cond.shape) + adapted_cond = self.get_learned_conditioning(adapted_cond) + print(adapted_cond.shape) + adapted_cond = rearrange(adapted_cond, '(l b) n d -> l b n d', l=z.shape[-1]) + print(adapted_cond.shape) + + cond_list = [{'c_crossattn': [e]} for e in adapted_cond] + + else: + cond_list = [cond for i in range(z.shape[-1])] # Todo make this more efficient + + # apply model by loop over crops + output_list = [self.model(z_list[i], t, **cond_list[i]) for i in range(z.shape[-1])] + assert not isinstance(output_list[0], + tuple) # todo can't deal with multiple model outputs, check this never happens + + o = torch.stack(output_list,
axis=-1) + o = o * weighting + # Reverse reshape to img shape + o = o.view((o.shape[0], -1, o.shape[-1])) # (bn, nc * ks[0] * ks[1], L) + # stitch crops together + x_recon = fold(o) / normalization + + else: + x_recon = self.model(x_noisy, t, **cond) + + if isinstance(x_recon, tuple) and not return_ids: + return x_recon[0] + else: + return x_recon + + def _predict_eps_from_xstart(self, x_t, t, pred_xstart): + return (extract_into_tensor(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t - pred_xstart) / \ + extract_into_tensor(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape) + + def _prior_bpd(self, x_start): + """ + Get the prior KL term for the variational lower-bound, measured in + bits-per-dim. + This term can't be optimized, as it only depends on the encoder. + :param x_start: the [N x C x ...] tensor of inputs. + :return: a batch of [N] KL values (in bits), one per batch element. + """ + batch_size = x_start.shape[0] + t = torch.tensor([self.num_timesteps - 1] * batch_size, device=x_start.device) + qt_mean, _, qt_log_variance = self.q_mean_variance(x_start, t) + kl_prior = normal_kl(mean1=qt_mean, logvar1=qt_log_variance, mean2=0.0, logvar2=0.0) + return mean_flat(kl_prior) / np.log(2.0) + + def p_losses(self, x_start, cond, t, noise=None): + noise = default(noise, lambda: torch.randn_like(x_start)) + x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise) + model_output = self.apply_model(x_noisy, t, cond) + + loss_dict = {} + prefix = 'train' if self.training else 'val' + + if self.parameterization == "x0": + target = x_start + elif self.parameterization == "eps": + target = noise + else: + raise NotImplementedError() + + loss_simple = self.get_loss(model_output, target, mean=False).mean([1, 2, 3]) + loss_dict.update({f'{prefix}/loss_simple': loss_simple.mean()}) + + logvar_t = self.logvar[t].to(self.device) + loss = loss_simple / torch.exp(logvar_t) + logvar_t + # loss = loss_simple / torch.exp(self.logvar) + self.logvar + if self.learn_logvar: + loss_dict.update({f'{prefix}/loss_gamma': loss.mean()}) + loss_dict.update({'logvar': self.logvar.data.mean()}) + + loss = self.l_simple_weight * loss.mean() + + loss_vlb = self.get_loss(model_output, target, mean=False).mean(dim=(1, 2, 3)) + loss_vlb = (self.lvlb_weights[t] * loss_vlb).mean() + loss_dict.update({f'{prefix}/loss_vlb': loss_vlb}) + loss += (self.original_elbo_weight * loss_vlb) + loss_dict.update({f'{prefix}/loss': loss}) + + return loss, loss_dict + + def p_mean_variance(self, x, c, t, clip_denoised: bool, return_codebook_ids=False, quantize_denoised=False, + return_x0=False, score_corrector=None, corrector_kwargs=None): + t_in = t + model_out = self.apply_model(x, t_in, c, return_ids=return_codebook_ids) + + if score_corrector is not None: + assert self.parameterization == "eps" + model_out = score_corrector.modify_score(self, model_out, x, t, c, **corrector_kwargs) + + if return_codebook_ids: + model_out, logits = model_out + + if self.parameterization == "eps": + x_recon = self.predict_start_from_noise(x, t=t, noise=model_out) + elif self.parameterization == "x0": + x_recon = model_out + else: + raise NotImplementedError() + + if clip_denoised: + x_recon.clamp_(-1., 1.) 
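+ # keep the denoised estimate inside the valid data range [-1, 1] before forming the posterior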
+ if quantize_denoised: + x_recon, _, [_, _, indices] = self.first_stage_model.quantize(x_recon) + model_mean, posterior_variance, posterior_log_variance = self.q_posterior(x_start=x_recon, x_t=x, t=t) + if return_codebook_ids: + return model_mean, posterior_variance, posterior_log_variance, logits + elif return_x0: + return model_mean, posterior_variance, posterior_log_variance, x_recon + else: + return model_mean, posterior_variance, posterior_log_variance + + @torch.no_grad() + def p_sample(self, x, c, t, clip_denoised=False, repeat_noise=False, + return_codebook_ids=False, quantize_denoised=False, return_x0=False, + temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None): + b, *_, device = *x.shape, x.device + outputs = self.p_mean_variance(x=x, c=c, t=t, clip_denoised=clip_denoised, + return_codebook_ids=return_codebook_ids, + quantize_denoised=quantize_denoised, + return_x0=return_x0, + score_corrector=score_corrector, corrector_kwargs=corrector_kwargs) + if return_codebook_ids: + raise DeprecationWarning("Support dropped.") + model_mean, _, model_log_variance, logits = outputs + elif return_x0: + model_mean, _, model_log_variance, x0 = outputs + else: + model_mean, _, model_log_variance = outputs + + noise = noise_like(x.shape, device, repeat_noise) * temperature + if noise_dropout > 0.: + noise = torch.nn.functional.dropout(noise, p=noise_dropout) + # no noise when t == 0 + nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1))) + + if return_codebook_ids: + return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise, logits.argmax(dim=1) + if return_x0: + return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise, x0 + else: + return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise + + @torch.no_grad() + def progressive_denoising(self, cond, shape, verbose=True, callback=None, quantize_denoised=False, + img_callback=None, mask=None, x0=None, temperature=1., noise_dropout=0., + score_corrector=None, corrector_kwargs=None, batch_size=None, x_T=None, start_T=None, + log_every_t=None): + if not log_every_t: + log_every_t = self.log_every_t + timesteps = self.num_timesteps + if batch_size is not None: + b = batch_size if batch_size is not None else shape[0] + shape = [batch_size] + list(shape) + else: + b = batch_size = shape[0] + if x_T is None: + img = torch.randn(shape, device=self.device) + else: + img = x_T + intermediates = [] + if cond is not None: + if isinstance(cond, dict): + cond = {key: cond[key][:batch_size] if not isinstance(cond[key], list) else + list(map(lambda x: x[:batch_size], cond[key])) for key in cond} + else: + cond = [c[:batch_size] for c in cond] if isinstance(cond, list) else cond[:batch_size] + + if start_T is not None: + timesteps = min(timesteps, start_T) + iterator = tqdm(reversed(range(0, timesteps)), desc='Progressive Generation', + total=timesteps) if verbose else reversed( + range(0, timesteps)) + if type(temperature) == float: + temperature = [temperature] * timesteps + + for i in iterator: + ts = torch.full((b,), i, device=self.device, dtype=torch.long) + if self.shorten_cond_schedule: + assert self.model.conditioning_key != 'hybrid' + tc = self.cond_ids[ts].to(cond.device) + cond = self.q_sample(x_start=cond, t=tc, noise=torch.randn_like(cond)) + + img, x0_partial = self.p_sample(img, cond, ts, + clip_denoised=self.clip_denoised, + quantize_denoised=quantize_denoised, return_x0=True, + temperature=temperature[i], noise_dropout=noise_dropout, + 
score_corrector=score_corrector, corrector_kwargs=corrector_kwargs) + if mask is not None: + assert x0 is not None + img_orig = self.q_sample(x0, ts) + img = img_orig * mask + (1. - mask) * img + + if i % log_every_t == 0 or i == timesteps - 1: + intermediates.append(x0_partial) + if callback: callback(i) + if img_callback: img_callback(img, i) + return img, intermediates + + @torch.no_grad() + def p_sample_loop(self, cond, shape, return_intermediates=False, + x_T=None, verbose=True, callback=None, timesteps=None, quantize_denoised=False, + mask=None, x0=None, img_callback=None, start_T=None, + log_every_t=None): + + if not log_every_t: + log_every_t = self.log_every_t + device = self.betas.device + b = shape[0] + if x_T is None: + img = torch.randn(shape, device=device) + else: + img = x_T + + intermediates = [img] + if timesteps is None: + timesteps = self.num_timesteps + + if start_T is not None: + timesteps = min(timesteps, start_T) + iterator = tqdm(reversed(range(0, timesteps)), desc='Sampling t', total=timesteps) if verbose else reversed( + range(0, timesteps)) + + if mask is not None: + assert x0 is not None + assert x0.shape[2:3] == mask.shape[2:3] # spatial size has to match + + for i in iterator: + ts = torch.full((b,), i, device=device, dtype=torch.long) + if self.shorten_cond_schedule: + assert self.model.conditioning_key != 'hybrid' + tc = self.cond_ids[ts].to(cond.device) + cond = self.q_sample(x_start=cond, t=tc, noise=torch.randn_like(cond)) + + img = self.p_sample(img, cond, ts, + clip_denoised=self.clip_denoised, + quantize_denoised=quantize_denoised) + if mask is not None: + img_orig = self.q_sample(x0, ts) + img = img_orig * mask + (1. - mask) * img + + if i % log_every_t == 0 or i == timesteps - 1: + intermediates.append(img) + if callback: callback(i) + if img_callback: img_callback(img, i) + + if return_intermediates: + return img, intermediates + return img + + @torch.no_grad() + def sample(self, cond, batch_size=16, return_intermediates=False, x_T=None, + verbose=True, timesteps=None, quantize_denoised=False, + mask=None, x0=None, shape=None,**kwargs): + if shape is None: + shape = (batch_size, self.channels, self.image_size, self.image_size) + if cond is not None: + if isinstance(cond, dict): + cond = {key: cond[key][:batch_size] if not isinstance(cond[key], list) else + list(map(lambda x: x[:batch_size], cond[key])) for key in cond} + else: + cond = [c[:batch_size] for c in cond] if isinstance(cond, list) else cond[:batch_size] + return self.p_sample_loop(cond, + shape, + return_intermediates=return_intermediates, x_T=x_T, + verbose=verbose, timesteps=timesteps, quantize_denoised=quantize_denoised, + mask=mask, x0=x0) + + @torch.no_grad() + def sample_log(self,cond,batch_size,ddim, ddim_steps,**kwargs): + + if ddim: + ddim_sampler = DDIMSampler(self) + shape = (self.channels, self.image_size, self.image_size) + samples, intermediates =ddim_sampler.sample(ddim_steps,batch_size, + shape,cond,verbose=False,**kwargs) + + else: + samples, intermediates = self.sample(cond=cond, batch_size=batch_size, + return_intermediates=True,**kwargs) + + return samples, intermediates + + + @torch.no_grad() + def log_images(self, batch, N=8, n_row=4, sample=True, ddim_steps=200, ddim_eta=1., return_keys=None, + quantize_denoised=True, inpaint=True, plot_denoise_rows=False, plot_progressive_rows=True, + plot_diffusion_rows=True, **kwargs): + + use_ddim = ddim_steps is not None + + log = dict() + z, c, x, xrec, xc = self.get_input(batch, self.first_stage_key, + 
return_first_stage_outputs=True, + force_c_encode=True, + return_original_cond=True, + bs=N) + N = min(x.shape[0], N) + n_row = min(x.shape[0], n_row) + log["inputs"] = x + log["reconstruction"] = xrec + if self.model.conditioning_key is not None: + if hasattr(self.cond_stage_model, "decode"): + xc = self.cond_stage_model.decode(c) + log["conditioning"] = xc + elif self.cond_stage_key in ["caption"]: + xc = log_txt_as_img((x.shape[2], x.shape[3]), batch["caption"]) + log["conditioning"] = xc + elif self.cond_stage_key == 'class_label': + xc = log_txt_as_img((x.shape[2], x.shape[3]), batch["human_label"]) + log['conditioning'] = xc + elif isimage(xc): + log["conditioning"] = xc + if ismap(xc): + log["original_conditioning"] = self.to_rgb(xc) + + if plot_diffusion_rows: + # get diffusion row + diffusion_row = list() + z_start = z[:n_row] + for t in range(self.num_timesteps): + if t % self.log_every_t == 0 or t == self.num_timesteps - 1: + t = repeat(torch.tensor([t]), '1 -> b', b=n_row) + t = t.to(self.device).long() + noise = torch.randn_like(z_start) + z_noisy = self.q_sample(x_start=z_start, t=t, noise=noise) + diffusion_row.append(self.decode_first_stage(z_noisy)) + + diffusion_row = torch.stack(diffusion_row) # n_log_step, n_row, C, H, W + diffusion_grid = rearrange(diffusion_row, 'n b c h w -> b n c h w') + diffusion_grid = rearrange(diffusion_grid, 'b n c h w -> (b n) c h w') + diffusion_grid = make_grid(diffusion_grid, nrow=diffusion_row.shape[0]) + log["diffusion_row"] = diffusion_grid + + if sample: + # get denoise row + with self.ema_scope("Plotting"): + samples, z_denoise_row = self.sample_log(cond=c,batch_size=N,ddim=use_ddim, + ddim_steps=ddim_steps,eta=ddim_eta) + # samples, z_denoise_row = self.sample(cond=c, batch_size=N, return_intermediates=True) + x_samples = self.decode_first_stage(samples) + log["samples"] = x_samples + if plot_denoise_rows: + denoise_grid = self._get_denoise_row_from_list(z_denoise_row) + log["denoise_row"] = denoise_grid + + if quantize_denoised and not isinstance(self.first_stage_model, AutoencoderKL) and not isinstance( + self.first_stage_model, IdentityFirstStage): + # also display when quantizing x0 while sampling + with self.ema_scope("Plotting Quantized Denoised"): + samples, z_denoise_row = self.sample_log(cond=c,batch_size=N,ddim=use_ddim, + ddim_steps=ddim_steps,eta=ddim_eta, + quantize_denoised=True) + # samples, z_denoise_row = self.sample(cond=c, batch_size=N, return_intermediates=True, + # quantize_denoised=True) + x_samples = self.decode_first_stage(samples.to(self.device)) + log["samples_x0_quantized"] = x_samples + + if inpaint: + # make a simple center square + b, h, w = z.shape[0], z.shape[2], z.shape[3] + mask = torch.ones(N, h, w).to(self.device) + # zeros will be filled in + mask[:, h // 4:3 * h // 4, w // 4:3 * w // 4] = 0. + mask = mask[:, None, ...] 
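+ # add a channel dimension so the mask broadcasts over the latent channels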
+ with self.ema_scope("Plotting Inpaint"): + + samples, _ = self.sample_log(cond=c,batch_size=N,ddim=use_ddim, eta=ddim_eta, + ddim_steps=ddim_steps, x0=z[:N], mask=mask) + x_samples = self.decode_first_stage(samples.to(self.device)) + log["samples_inpainting"] = x_samples + log["mask"] = mask + + # outpaint + with self.ema_scope("Plotting Outpaint"): + samples, _ = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,eta=ddim_eta, + ddim_steps=ddim_steps, x0=z[:N], mask=mask) + x_samples = self.decode_first_stage(samples.to(self.device)) + log["samples_outpainting"] = x_samples + + if plot_progressive_rows: + with self.ema_scope("Plotting Progressives"): + img, progressives = self.progressive_denoising(c, + shape=(self.channels, self.image_size, self.image_size), + batch_size=N) + prog_row = self._get_denoise_row_from_list(progressives, desc="Progressive Generation") + log["progressive_row"] = prog_row + + if return_keys: + if np.intersect1d(list(log.keys()), return_keys).shape[0] == 0: + return log + else: + return {key: log[key] for key in return_keys} + return log + + def configure_optimizers(self): + lr = self.learning_rate + params = list(self.model.parameters()) + if self.cond_stage_trainable: + print(f"{self.__class__.__name__}: Also optimizing conditioner params!") + params = params + list(self.cond_stage_model.parameters()) + if self.learn_logvar: + print('Diffusion model optimizing logvar') + params.append(self.logvar) + opt = torch.optim.AdamW(params, lr=lr) + if self.use_scheduler: + assert 'target' in self.scheduler_config + scheduler = instantiate_from_config(self.scheduler_config) + + print("Setting up LambdaLR scheduler...") + scheduler = [ + { + 'scheduler': LambdaLR(opt, lr_lambda=scheduler.schedule), + 'interval': 'step', + 'frequency': 1 + }] + return [opt], scheduler + return opt + + @torch.no_grad() + def to_rgb(self, x): + x = x.float() + if not hasattr(self, "colorize"): + self.colorize = torch.randn(3, x.shape[1], 1, 1).to(x) + x = nn.functional.conv2d(x, weight=self.colorize) + x = 2. * (x - x.min()) / (x.max() - x.min()) - 1. 
+ return x + + +class DiffusionWrapper(pl.LightningModule): + def __init__(self, diff_model_config, conditioning_key): + super().__init__() + self.diffusion_model = instantiate_from_config(diff_model_config) + self.conditioning_key = conditioning_key + assert self.conditioning_key in [None, 'concat', 'crossattn', 'hybrid', 'adm'] + + def forward(self, x, t, c_concat: list = None, c_crossattn: list = None): + if self.conditioning_key is None: + out = self.diffusion_model(x, t) + elif self.conditioning_key == 'concat': + xc = torch.cat([x] + c_concat, dim=1) + out = self.diffusion_model(xc, t) + elif self.conditioning_key == 'crossattn': + cc = torch.cat(c_crossattn, 1) + out = self.diffusion_model(x, t, context=cc) + elif self.conditioning_key == 'hybrid': + xc = torch.cat([x] + c_concat, dim=1) + cc = torch.cat(c_crossattn, 1) + out = self.diffusion_model(xc, t, context=cc) + elif self.conditioning_key == 'adm': + cc = c_crossattn[0] + out = self.diffusion_model(x, t, y=cc) + else: + raise NotImplementedError() + + return out + + +class Layout2ImgDiffusion(LatentDiffusion): + # TODO: move all layout-specific hacks to this class + def __init__(self, cond_stage_key, *args, **kwargs): + assert cond_stage_key == 'coordinates_bbox', 'Layout2ImgDiffusion only for cond_stage_key="coordinates_bbox"' + super().__init__(cond_stage_key=cond_stage_key, *args, **kwargs) + + def log_images(self, batch, N=8, *args, **kwargs): + logs = super().log_images(batch=batch, N=N, *args, **kwargs) + + key = 'train' if self.training else 'validation' + dset = self.trainer.datamodule.datasets[key] + mapper = dset.conditional_builders[self.cond_stage_key] + + bbox_imgs = [] + map_fn = lambda catno: dset.get_textual_label(dset.get_category_id(catno)) + for tknzd_bbox in batch[self.cond_stage_key][:N]: + bboximg = mapper.plot(tknzd_bbox.detach().cpu(), map_fn, (256, 256)) + bbox_imgs.append(bboximg) + + cond_img = torch.stack(bbox_imgs, dim=0) + logs['bbox_image'] = cond_img + return logs diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d7b48d925467c2f34119d6325960445bd03885d3 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/__pycache__/attention.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/__pycache__/attention.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8e5f3dd096b0950a6c6f629f42791a85fa5665f8 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/__pycache__/attention.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/__pycache__/ema.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/__pycache__/ema.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c0c47c92164cb90ebcb0cae1f30b6fda9ea94994 Binary files /dev/null 
and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/__pycache__/ema.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/attention.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/attention.py new file mode 100644 index 0000000000000000000000000000000000000000..7bfc684f7bd038c00ca26f52df01654e03220734 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/attention.py @@ -0,0 +1,261 @@ +from inspect import isfunction +import math +import torch +import torch.nn.functional as F +from torch import nn, einsum +from einops import rearrange, repeat + +from .diffusionmodules.util import checkpoint + + +def exists(val): + return val is not None + + +def uniq(arr): + return{el: True for el in arr}.keys() + + +def default(val, d): + if exists(val): + return val + return d() if isfunction(d) else d + + +def max_neg_value(t): + return -torch.finfo(t.dtype).max + + +def init_(tensor): + dim = tensor.shape[-1] + std = 1 / math.sqrt(dim) + tensor.uniform_(-std, std) + return tensor + + +# feedforward +class GEGLU(nn.Module): + def __init__(self, dim_in, dim_out): + super().__init__() + self.proj = nn.Linear(dim_in, dim_out * 2) + + def forward(self, x): + x, gate = self.proj(x).chunk(2, dim=-1) + return x * F.gelu(gate) + + +class FeedForward(nn.Module): + def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.): + super().__init__() + inner_dim = int(dim * mult) + dim_out = default(dim_out, dim) + project_in = nn.Sequential( + nn.Linear(dim, inner_dim), + nn.GELU() + ) if not glu else GEGLU(dim, inner_dim) + + self.net = nn.Sequential( + project_in, + nn.Dropout(dropout), + nn.Linear(inner_dim, dim_out) + ) + + def forward(self, x): + return self.net(x) + + +def zero_module(module): + """ + Zero out the parameters of a module and return it. 
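+ Typically applied to the output projection of a block so its residual branch starts at zero.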
+ """ + for p in module.parameters(): + p.detach().zero_() + return module + + +def Normalize(in_channels): + return torch.nn.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True) + + +class LinearAttention(nn.Module): + def __init__(self, dim, heads=4, dim_head=32): + super().__init__() + self.heads = heads + hidden_dim = dim_head * heads + self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias = False) + self.to_out = nn.Conv2d(hidden_dim, dim, 1) + + def forward(self, x): + b, c, h, w = x.shape + qkv = self.to_qkv(x) + q, k, v = rearrange(qkv, 'b (qkv heads c) h w -> qkv b heads c (h w)', heads = self.heads, qkv=3) + k = k.softmax(dim=-1) + context = torch.einsum('bhdn,bhen->bhde', k, v) + out = torch.einsum('bhde,bhdn->bhen', context, q) + out = rearrange(out, 'b heads c (h w) -> b (heads c) h w', heads=self.heads, h=h, w=w) + return self.to_out(out) + + +class SpatialSelfAttention(nn.Module): + def __init__(self, in_channels): + super().__init__() + self.in_channels = in_channels + + self.norm = Normalize(in_channels) + self.q = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.k = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.v = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.proj_out = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + + def forward(self, x): + h_ = x + h_ = self.norm(h_) + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + # compute attention + b,c,h,w = q.shape + q = rearrange(q, 'b c h w -> b (h w) c') + k = rearrange(k, 'b c h w -> b c (h w)') + w_ = torch.einsum('bij,bjk->bik', q, k) + + w_ = w_ * (int(c)**(-0.5)) + w_ = torch.nn.functional.softmax(w_, dim=2) + + # attend to values + v = rearrange(v, 'b c h w -> b c (h w)') + w_ = rearrange(w_, 'b i j -> b j i') + h_ = torch.einsum('bij,bjk->bik', v, w_) + h_ = rearrange(h_, 'b c (h w) -> b c h w', h=h) + h_ = self.proj_out(h_) + + return x+h_ + + +class CrossAttention(nn.Module): + def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0.): + super().__init__() + inner_dim = dim_head * heads + context_dim = default(context_dim, query_dim) + + self.scale = dim_head ** -0.5 + self.heads = heads + + self.to_q = nn.Linear(query_dim, inner_dim, bias=False) + self.to_k = nn.Linear(context_dim, inner_dim, bias=False) + self.to_v = nn.Linear(context_dim, inner_dim, bias=False) + + self.to_out = nn.Sequential( + nn.Linear(inner_dim, query_dim), + nn.Dropout(dropout) + ) + + def forward(self, x, context=None, mask=None): + h = self.heads + + q = self.to_q(x) + context = default(context, x) + k = self.to_k(context) + v = self.to_v(context) + + q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> (b h) n d', h=h), (q, k, v)) + + sim = einsum('b i d, b j d -> b i j', q, k) * self.scale + + if exists(mask): + mask = rearrange(mask, 'b ... 
-> b (...)') + max_neg_value = -torch.finfo(sim.dtype).max + mask = repeat(mask, 'b j -> (b h) () j', h=h) + sim.masked_fill_(~mask, max_neg_value) + + # attention, what we cannot get enough of + attn = sim.softmax(dim=-1) + + out = einsum('b i j, b j d -> b i d', attn, v) + out = rearrange(out, '(b h) n d -> b n (h d)', h=h) + return self.to_out(out) + + +class BasicTransformerBlock(nn.Module): + def __init__(self, dim, n_heads, d_head, dropout=0., context_dim=None, gated_ff=True, checkpoint=True): + super().__init__() + self.attn1 = CrossAttention(query_dim=dim, heads=n_heads, dim_head=d_head, dropout=dropout) # is a self-attention + self.ff = FeedForward(dim, dropout=dropout, glu=gated_ff) + self.attn2 = CrossAttention(query_dim=dim, context_dim=context_dim, + heads=n_heads, dim_head=d_head, dropout=dropout) # is self-attn if context is none + self.norm1 = nn.LayerNorm(dim) + self.norm2 = nn.LayerNorm(dim) + self.norm3 = nn.LayerNorm(dim) + self.checkpoint = checkpoint + + def forward(self, x, context=None): + return checkpoint(self._forward, (x, context), self.parameters(), self.checkpoint) + + def _forward(self, x, context=None): + x = self.attn1(self.norm1(x)) + x + x = self.attn2(self.norm2(x), context=context) + x + x = self.ff(self.norm3(x)) + x + return x + + +class SpatialTransformer(nn.Module): + """ + Transformer block for image-like data. + First, project the input (aka embedding) + and reshape to b, t, d. + Then apply standard transformer action. + Finally, reshape to image + """ + def __init__(self, in_channels, n_heads, d_head, + depth=1, dropout=0., context_dim=None): + super().__init__() + self.in_channels = in_channels + inner_dim = n_heads * d_head + self.norm = Normalize(in_channels) + + self.proj_in = nn.Conv2d(in_channels, + inner_dim, + kernel_size=1, + stride=1, + padding=0) + + self.transformer_blocks = nn.ModuleList( + [BasicTransformerBlock(inner_dim, n_heads, d_head, dropout=dropout, context_dim=context_dim) + for d in range(depth)] + ) + + self.proj_out = zero_module(nn.Conv2d(inner_dim, + in_channels, + kernel_size=1, + stride=1, + padding=0)) + + def forward(self, x, context=None): + # note: if no context is given, cross-attention defaults to self-attention + b, c, h, w = x.shape + x_in = x + x = self.norm(x) + x = self.proj_in(x) + x = rearrange(x, 'b c h w -> b (h w) c') + for block in self.transformer_blocks: + x = block(x, context=context) + x = rearrange(x, 'b (h w) c -> b c h w', h=h, w=w) + x = self.proj_out(x) + return x + x_in \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e08b2c715bae89d5785c712918cc6d878a57df39 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/__pycache__/model.cpython-310.pyc 
b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/__pycache__/model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1153eef241be49aae9daf1bda881887ed3f81246 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/__pycache__/model.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/__pycache__/openaimodel.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/__pycache__/openaimodel.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6bb4377d42fda8c550aad1fa8ea2e57ee2534346 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/__pycache__/openaimodel.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/__pycache__/util.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/__pycache__/util.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a343bb4332b79aefe28b87f4110ca8b824386662 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/__pycache__/util.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/model.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/model.py new file mode 100644 index 0000000000000000000000000000000000000000..2362bfcb1d7415e060a58b7f50091ce1a449486c --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/model.py @@ -0,0 +1,835 @@ +# pytorch_diffusion + derived encoder decoder +import math +import torch +import torch.nn as nn +import numpy as np +from einops import rearrange + +from ...util import instantiate_from_config +from ..attention import LinearAttention + + +def get_timestep_embedding(timesteps, embedding_dim): + """ + This matches the implementation in Denoising Diffusion Probabilistic Models: + From Fairseq. + Build sinusoidal embeddings. + This matches the implementation in tensor2tensor, but differs slightly + from the description in Section 3.5 of "Attention Is All You Need". 
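+ Returns a float tensor of shape (len(timesteps), embedding_dim).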
+ """ + assert len(timesteps.shape) == 1 + + half_dim = embedding_dim // 2 + emb = math.log(10000) / (half_dim - 1) + emb = torch.exp(torch.arange(half_dim, dtype=torch.float32) * -emb) + emb = emb.to(device=timesteps.device) + emb = timesteps.float()[:, None] * emb[None, :] + emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1) + if embedding_dim % 2 == 1: # zero pad + emb = torch.nn.functional.pad(emb, (0,1,0,0)) + return emb + + +def nonlinearity(x): + # swish + return x*torch.sigmoid(x) + + +def Normalize(in_channels, num_groups=32): + return torch.nn.GroupNorm(num_groups=num_groups, num_channels=in_channels, eps=1e-6, affine=True) + + +class Upsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + self.conv = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x): + x = torch.nn.functional.interpolate(x, scale_factor=2.0, mode="nearest") + if self.with_conv: + x = self.conv(x) + return x + + +class Downsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + # no asymmetric padding in torch conv, must do it ourselves + self.conv = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=3, + stride=2, + padding=0) + + def forward(self, x): + if self.with_conv: + pad = (0,1,0,1) + x = torch.nn.functional.pad(x, pad, mode="constant", value=0) + x = self.conv(x) + else: + x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2) + return x + + +class ResnetBlock(nn.Module): + def __init__(self, *, in_channels, out_channels=None, conv_shortcut=False, + dropout, temb_channels=512): + super().__init__() + self.in_channels = in_channels + out_channels = in_channels if out_channels is None else out_channels + self.out_channels = out_channels + self.use_conv_shortcut = conv_shortcut + + self.norm1 = Normalize(in_channels) + self.conv1 = torch.nn.Conv2d(in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + if temb_channels > 0: + self.temb_proj = torch.nn.Linear(temb_channels, + out_channels) + self.norm2 = Normalize(out_channels) + self.dropout = torch.nn.Dropout(dropout) + self.conv2 = torch.nn.Conv2d(out_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + self.conv_shortcut = torch.nn.Conv2d(in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + else: + self.nin_shortcut = torch.nn.Conv2d(in_channels, + out_channels, + kernel_size=1, + stride=1, + padding=0) + + def forward(self, x, temb): + h = x + h = self.norm1(h) + h = nonlinearity(h) + h = self.conv1(h) + + if temb is not None: + h = h + self.temb_proj(nonlinearity(temb))[:,:,None,None] + + h = self.norm2(h) + h = nonlinearity(h) + h = self.dropout(h) + h = self.conv2(h) + + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + x = self.conv_shortcut(x) + else: + x = self.nin_shortcut(x) + + return x+h + + +class LinAttnBlock(LinearAttention): + """to match AttnBlock usage""" + def __init__(self, in_channels): + super().__init__(dim=in_channels, heads=1, dim_head=in_channels) + + +class AttnBlock(nn.Module): + def __init__(self, in_channels): + super().__init__() + self.in_channels = in_channels + + self.norm = Normalize(in_channels) + self.q = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.k = 
torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.v = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.proj_out = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + + + def forward(self, x): + h_ = x + h_ = self.norm(h_) + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + # compute attention + b,c,h,w = q.shape + q = q.reshape(b,c,h*w) + q = q.permute(0,2,1) # b,hw,c + k = k.reshape(b,c,h*w) # b,c,hw + w_ = torch.bmm(q,k) # b,hw,hw w[b,i,j]=sum_c q[b,i,c]k[b,c,j] + w_ = w_ * (int(c)**(-0.5)) + w_ = torch.nn.functional.softmax(w_, dim=2) + + # attend to values + v = v.reshape(b,c,h*w) + w_ = w_.permute(0,2,1) # b,hw,hw (first hw of k, second of q) + h_ = torch.bmm(v,w_) # b, c,hw (hw of q) h_[b,c,j] = sum_i v[b,c,i] w_[b,i,j] + h_ = h_.reshape(b,c,h,w) + + h_ = self.proj_out(h_) + + return x+h_ + + +def make_attn(in_channels, attn_type="vanilla"): + assert attn_type in ["vanilla", "linear", "none"], f'attn_type {attn_type} unknown' + print(f"making attention of type '{attn_type}' with {in_channels} in_channels") + if attn_type == "vanilla": + return AttnBlock(in_channels) + elif attn_type == "none": + return nn.Identity(in_channels) + else: + return LinAttnBlock(in_channels) + + +class Model(nn.Module): + def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, + resolution, use_timestep=True, use_linear_attn=False, attn_type="vanilla"): + super().__init__() + if use_linear_attn: attn_type = "linear" + self.ch = ch + self.temb_ch = self.ch*4 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + + self.use_timestep = use_timestep + if self.use_timestep: + # timestep embedding + self.temb = nn.Module() + self.temb.dense = nn.ModuleList([ + torch.nn.Linear(self.ch, + self.temb_ch), + torch.nn.Linear(self.temb_ch, + self.temb_ch), + ]) + + # downsampling + self.conv_in = torch.nn.Conv2d(in_channels, + self.ch, + kernel_size=3, + stride=1, + padding=1) + + curr_res = resolution + in_ch_mult = (1,)+tuple(ch_mult) + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch*in_ch_mult[i_level] + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions-1: + down.downsample = Downsample(block_in, resamp_with_conv) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch*ch_mult[i_level] + skip_in = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks+1): + 
if i_block == self.num_res_blocks: + skip_in = ch*in_ch_mult[i_level] + block.append(ResnetBlock(in_channels=block_in+skip_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d(block_in, + out_ch, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x, t=None, context=None): + #assert x.shape[2] == x.shape[3] == self.resolution + if context is not None: + # assume aligned context, cat along channel axis + x = torch.cat((x, context), dim=1) + if self.use_timestep: + # timestep embedding + assert t is not None + temb = get_timestep_embedding(t, self.ch) + temb = self.temb.dense[0](temb) + temb = nonlinearity(temb) + temb = self.temb.dense[1](temb) + else: + temb = None + + # downsampling + hs = [self.conv_in(x)] + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](hs[-1], temb) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + hs.append(h) + if i_level != self.num_resolutions-1: + hs.append(self.down[i_level].downsample(hs[-1])) + + # middle + h = hs[-1] + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks+1): + h = self.up[i_level].block[i_block]( + torch.cat([h, hs.pop()], dim=1), temb) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + def get_last_layer(self): + return self.conv_out.weight + + +class Encoder(nn.Module): + def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, + resolution, z_channels, double_z=True, use_linear_attn=False, attn_type="vanilla", + **ignore_kwargs): + super().__init__() + if use_linear_attn: attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + + # downsampling + self.conv_in = torch.nn.Conv2d(in_channels, + self.ch, + kernel_size=3, + stride=1, + padding=1) + + curr_res = resolution + in_ch_mult = (1,)+tuple(ch_mult) + self.in_ch_mult = in_ch_mult + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch*in_ch_mult[i_level] + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions-1: + down.downsample = Downsample(block_in, resamp_with_conv) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + 
self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d(block_in, + 2*z_channels if double_z else z_channels, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x): + # timestep embedding + temb = None + + # downsampling + hs = [self.conv_in(x)] + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](hs[-1], temb) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + hs.append(h) + if i_level != self.num_resolutions-1: + hs.append(self.down[i_level].downsample(hs[-1])) + + # middle + h = hs[-1] + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + +class Decoder(nn.Module): + def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, + resolution, z_channels, give_pre_end=False, tanh_out=False, use_linear_attn=False, + attn_type="vanilla", **ignorekwargs): + super().__init__() + if use_linear_attn: attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + self.give_pre_end = give_pre_end + self.tanh_out = tanh_out + + # compute in_ch_mult, block_in and curr_res at lowest res + in_ch_mult = (1,)+tuple(ch_mult) + block_in = ch*ch_mult[self.num_resolutions-1] + curr_res = resolution // 2**(self.num_resolutions-1) + self.z_shape = (1,z_channels,curr_res,curr_res) + print("Working with z of shape {} = {} dimensions.".format( + self.z_shape, np.prod(self.z_shape))) + + # z to block_in + self.conv_in = torch.nn.Conv2d(z_channels, + block_in, + kernel_size=3, + stride=1, + padding=1) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks+1): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d(block_in, + out_ch, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, z): + #assert z.shape[1:] == self.z_shape[1:] + self.last_z_shape = z.shape + + # timestep embedding + temb = None + + # z to block_in + h = 
self.conv_in(z) + + # middle + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks+1): + h = self.up[i_level].block[i_block](h, temb) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + if self.give_pre_end: + return h + + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + if self.tanh_out: + h = torch.tanh(h) + return h + + +class SimpleDecoder(nn.Module): + def __init__(self, in_channels, out_channels, *args, **kwargs): + super().__init__() + self.model = nn.ModuleList([nn.Conv2d(in_channels, in_channels, 1), + ResnetBlock(in_channels=in_channels, + out_channels=2 * in_channels, + temb_channels=0, dropout=0.0), + ResnetBlock(in_channels=2 * in_channels, + out_channels=4 * in_channels, + temb_channels=0, dropout=0.0), + ResnetBlock(in_channels=4 * in_channels, + out_channels=2 * in_channels, + temb_channels=0, dropout=0.0), + nn.Conv2d(2*in_channels, in_channels, 1), + Upsample(in_channels, with_conv=True)]) + # end + self.norm_out = Normalize(in_channels) + self.conv_out = torch.nn.Conv2d(in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x): + for i, layer in enumerate(self.model): + if i in [1,2,3]: + x = layer(x, None) + else: + x = layer(x) + + h = self.norm_out(x) + h = nonlinearity(h) + x = self.conv_out(h) + return x + + +class UpsampleDecoder(nn.Module): + def __init__(self, in_channels, out_channels, ch, num_res_blocks, resolution, + ch_mult=(2,2), dropout=0.0): + super().__init__() + # upsampling + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + block_in = in_channels + curr_res = resolution // 2 ** (self.num_resolutions - 1) + self.res_blocks = nn.ModuleList() + self.upsample_blocks = nn.ModuleList() + for i_level in range(self.num_resolutions): + res_block = [] + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks + 1): + res_block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + self.res_blocks.append(nn.ModuleList(res_block)) + if i_level != self.num_resolutions - 1: + self.upsample_blocks.append(Upsample(block_in, True)) + curr_res = curr_res * 2 + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d(block_in, + out_channels, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x): + # upsampling + h = x + for k, i_level in enumerate(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks + 1): + h = self.res_blocks[i_level][i_block](h, None) + if i_level != self.num_resolutions - 1: + h = self.upsample_blocks[k](h) + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + +class LatentRescaler(nn.Module): + def __init__(self, factor, in_channels, mid_channels, out_channels, depth=2): + super().__init__() + # residual block, interpolate, residual block + self.factor = factor + self.conv_in = nn.Conv2d(in_channels, + mid_channels, + kernel_size=3, + stride=1, + padding=1) + self.res_block1 = nn.ModuleList([ResnetBlock(in_channels=mid_channels, + out_channels=mid_channels, + temb_channels=0, + dropout=0.0) for _ in range(depth)]) + self.attn = AttnBlock(mid_channels) + self.res_block2 = nn.ModuleList([ResnetBlock(in_channels=mid_channels, 
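The `Decoder` above starts from `curr_res = resolution // 2**(num_resolutions-1)` (the latent resolution printed as `z_shape`) and doubles once per level above the lowest, landing back exactly on the input resolution. A sketch with assumed numbers:

```python
resolution, num_resolutions = 256, 4                  # illustrative config
curr_res = resolution // 2 ** (num_resolutions - 1)   # 32, as in self.z_shape
for i_level in reversed(range(num_resolutions)):
    if i_level != 0:                                  # Upsample exists on all but level 0
        curr_res *= 2
assert curr_res == resolution
```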
+ out_channels=mid_channels, + temb_channels=0, + dropout=0.0) for _ in range(depth)]) + + self.conv_out = nn.Conv2d(mid_channels, + out_channels, + kernel_size=1, + ) + + def forward(self, x): + x = self.conv_in(x) + for block in self.res_block1: + x = block(x, None) + x = torch.nn.functional.interpolate(x, size=(int(round(x.shape[2]*self.factor)), int(round(x.shape[3]*self.factor)))) + x = self.attn(x) + for block in self.res_block2: + x = block(x, None) + x = self.conv_out(x) + return x + + +class MergedRescaleEncoder(nn.Module): + def __init__(self, in_channels, ch, resolution, out_ch, num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, + ch_mult=(1,2,4,8), rescale_factor=1.0, rescale_module_depth=1): + super().__init__() + intermediate_chn = ch * ch_mult[-1] + self.encoder = Encoder(in_channels=in_channels, num_res_blocks=num_res_blocks, ch=ch, ch_mult=ch_mult, + z_channels=intermediate_chn, double_z=False, resolution=resolution, + attn_resolutions=attn_resolutions, dropout=dropout, resamp_with_conv=resamp_with_conv, + out_ch=None) + self.rescaler = LatentRescaler(factor=rescale_factor, in_channels=intermediate_chn, + mid_channels=intermediate_chn, out_channels=out_ch, depth=rescale_module_depth) + + def forward(self, x): + x = self.encoder(x) + x = self.rescaler(x) + return x + + +class MergedRescaleDecoder(nn.Module): + def __init__(self, z_channels, out_ch, resolution, num_res_blocks, attn_resolutions, ch, ch_mult=(1,2,4,8), + dropout=0.0, resamp_with_conv=True, rescale_factor=1.0, rescale_module_depth=1): + super().__init__() + tmp_chn = z_channels*ch_mult[-1] + self.decoder = Decoder(out_ch=out_ch, z_channels=tmp_chn, attn_resolutions=attn_resolutions, dropout=dropout, + resamp_with_conv=resamp_with_conv, in_channels=None, num_res_blocks=num_res_blocks, + ch_mult=ch_mult, resolution=resolution, ch=ch) + self.rescaler = LatentRescaler(factor=rescale_factor, in_channels=z_channels, mid_channels=tmp_chn, + out_channels=tmp_chn, depth=rescale_module_depth) + + def forward(self, x): + x = self.rescaler(x) + x = self.decoder(x) + return x + + +class Upsampler(nn.Module): + def __init__(self, in_size, out_size, in_channels, out_channels, ch_mult=2): + super().__init__() + assert out_size >= in_size + num_blocks = int(np.log2(out_size//in_size))+1 + factor_up = 1.+ (out_size % in_size) + print(f"Building {self.__class__.__name__} with in_size: {in_size} --> out_size {out_size} and factor {factor_up}") + self.rescaler = LatentRescaler(factor=factor_up, in_channels=in_channels, mid_channels=2*in_channels, + out_channels=in_channels) + self.decoder = Decoder(out_ch=out_channels, resolution=out_size, z_channels=in_channels, num_res_blocks=2, + attn_resolutions=[], in_channels=None, ch=in_channels, + ch_mult=[ch_mult for _ in range(num_blocks)]) + + def forward(self, x): + x = self.rescaler(x) + x = self.decoder(x) + return x + + +class Resize(nn.Module): + def __init__(self, in_channels=None, learned=False, mode="bilinear"): + super().__init__() + self.with_conv = learned + self.mode = mode + if self.with_conv: + print(f"Note: {self.__class__.__name__} uses learned downsampling and will ignore the fixed {mode} mode") + raise NotImplementedError() + assert in_channels is not None + # no asymmetric padding in torch conv, must do it ourselves + self.conv = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=4, + stride=2, + padding=1) + + def forward(self, x, scale_factor=1.0): + if scale_factor==1.0: + return x + else: + x = torch.nn.functional.interpolate(x,
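`Upsampler` above derives its decoder depth from the size ratio; with assumed, exactly divisible sizes the arithmetic works out as follows (note `factor_up` only deviates from 1.0 when `out_size` is not a multiple of `in_size`):

```python
import numpy as np

in_size, out_size = 64, 256                          # assumed power-of-two ratio
num_blocks = int(np.log2(out_size // in_size)) + 1   # 3 decoder levels
factor_up = 1. + (out_size % in_size)                # 1.0 for an exact ratio
```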
mode=self.mode, align_corners=False, scale_factor=scale_factor) + return x + +class FirstStagePostProcessor(nn.Module): + + def __init__(self, ch_mult:list, in_channels, + pretrained_model:nn.Module=None, + reshape=False, + n_channels=None, + dropout=0., + pretrained_config=None): + super().__init__() + if pretrained_config is None: + assert pretrained_model is not None, 'Either "pretrained_model" or "pretrained_config" must not be None' + self.pretrained_model = pretrained_model + else: + assert pretrained_config is not None, 'Either "pretrained_model" or "pretrained_config" must not be None' + self.instantiate_pretrained(pretrained_config) + + self.do_reshape = reshape + + if n_channels is None: + n_channels = self.pretrained_model.encoder.ch + + self.proj_norm = Normalize(in_channels,num_groups=in_channels//2) + self.proj = nn.Conv2d(in_channels,n_channels,kernel_size=3, + stride=1,padding=1) + + blocks = [] + downs = [] + ch_in = n_channels + for m in ch_mult: + blocks.append(ResnetBlock(in_channels=ch_in,out_channels=m*n_channels,dropout=dropout)) + ch_in = m * n_channels + downs.append(Downsample(ch_in, with_conv=False)) + + self.model = nn.ModuleList(blocks) + self.downsampler = nn.ModuleList(downs) + + + def instantiate_pretrained(self, config): + model = instantiate_from_config(config) + self.pretrained_model = model.eval() + # self.pretrained_model.train = False + for param in self.pretrained_model.parameters(): + param.requires_grad = False + + + @torch.no_grad() + def encode_with_pretrained(self,x): + c = self.pretrained_model.encode(x) + if isinstance(c, DiagonalGaussianDistribution): + c = c.mode() + return c + + def forward(self,x): + z_fs = self.encode_with_pretrained(x) + z = self.proj_norm(z_fs) + z = self.proj(z) + z = nonlinearity(z) + + for submodel, downmodel in zip(self.model,self.downsampler): + z = submodel(z,temb=None) + z = downmodel(z) + + if self.do_reshape: + z = rearrange(z,'b c h w -> b (h w) c') + return z + diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/openaimodel.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/openaimodel.py new file mode 100644 index 0000000000000000000000000000000000000000..4a6297633839a8f6056a639b04764f104f5f7d4e --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/openaimodel.py @@ -0,0 +1,961 @@ +from abc import abstractmethod +from functools import partial +import math +from typing import Iterable + +import numpy as np +import torch as th +import torch.nn as nn +import torch.nn.functional as F + +from ..diffusionmodules.util import ( + checkpoint, + conv_nd, + linear, + avg_pool_nd, + zero_module, + normalization, + timestep_embedding, +) +from ..attention import SpatialTransformer + + +# dummy replace +def convert_module_to_f16(x): + pass + +def convert_module_to_f32(x): + pass + + +## go +class AttentionPool2d(nn.Module): + """ + Adapted from CLIP: https://github.com/openai/CLIP/blob/main/clip/model.py + """ + + def __init__( + self, + spacial_dim: int, + embed_dim: int, + num_heads_channels: int, + output_dim: int = None, + ): + super().__init__() + self.positional_embedding = nn.Parameter(th.randn(embed_dim, spacial_dim ** 2 + 1) / embed_dim ** 0.5) + self.qkv_proj = conv_nd(1, embed_dim, 3 * embed_dim, 1) + self.c_proj = conv_nd(1, embed_dim, output_dim or embed_dim, 1) + self.num_heads = embed_dim // num_heads_channels + self.attention = QKVAttention(self.num_heads) + + def forward(self, x): 
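When constructed with `reshape=True`, `FirstStagePostProcessor.forward` above flattens the spatial grid into a token axis via `einops.rearrange`. An illustrative shape check with assumed sizes:

```python
import torch
from einops import rearrange

z = torch.randn(2, 16, 8, 8)                  # assumed (b, c, h, w)
tokens = rearrange(z, 'b c h w -> b (h w) c')
assert tokens.shape == (2, 64, 16)            # 64 spatial tokens of width 16
```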
+ b, c, *_spatial = x.shape + x = x.reshape(b, c, -1) # NC(HW) + x = th.cat([x.mean(dim=-1, keepdim=True), x], dim=-1) # NC(HW+1) + x = x + self.positional_embedding[None, :, :].to(x.dtype) # NC(HW+1) + x = self.qkv_proj(x) + x = self.attention(x) + x = self.c_proj(x) + return x[:, :, 0] + + +class TimestepBlock(nn.Module): + """ + Any module where forward() takes timestep embeddings as a second argument. + """ + + @abstractmethod + def forward(self, x, emb): + """ + Apply the module to `x` given `emb` timestep embeddings. + """ + + +class TimestepEmbedSequential(nn.Sequential, TimestepBlock): + """ + A sequential module that passes timestep embeddings to the children that + support it as an extra input. + """ + + def forward(self, x, emb, context=None): + for layer in self: + if isinstance(layer, TimestepBlock): + x = layer(x, emb) + elif isinstance(layer, SpatialTransformer): + x = layer(x, context) + else: + x = layer(x) + return x + + +class Upsample(nn.Module): + """ + An upsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + upsampling occurs in the inner-two dimensions. + """ + + def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + if use_conv: + self.conv = conv_nd(dims, self.channels, self.out_channels, 3, padding=padding) + + def forward(self, x): + assert x.shape[1] == self.channels + if self.dims == 3: + x = F.interpolate( + x, (x.shape[2], x.shape[3] * 2, x.shape[4] * 2), mode="nearest" + ) + else: + x = F.interpolate(x, scale_factor=2, mode="nearest") + if self.use_conv: + x = self.conv(x) + return x + +class TransposedUpsample(nn.Module): + 'Learned 2x upsampling without padding' + def __init__(self, channels, out_channels=None, ks=5): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + + self.up = nn.ConvTranspose2d(self.channels,self.out_channels,kernel_size=ks,stride=2) + + def forward(self,x): + return self.up(x) + + +class Downsample(nn.Module): + """ + A downsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + downsampling occurs in the inner-two dimensions. + """ + + def __init__(self, channels, use_conv, dims=2, out_channels=None,padding=1): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + stride = 2 if dims != 3 else (1, 2, 2) + if use_conv: + self.op = conv_nd( + dims, self.channels, self.out_channels, 3, stride=stride, padding=padding + ) + else: + assert self.channels == self.out_channels + self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride) + + def forward(self, x): + assert x.shape[1] == self.channels + return self.op(x) + + +class ResBlock(TimestepBlock): + """ + A residual block that can optionally change the number of channels. + :param channels: the number of input channels. + :param emb_channels: the number of timestep embedding channels. + :param dropout: the rate of dropout. + :param out_channels: if specified, the number of out channels. 
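The `Upsample`/`Downsample` pair above always moves by a factor of two in the 2D case: nearest-neighbour interpolation on the way up, a stride-2 convolution or average pool on the way down. A minimal round-trip sketch with assumed sizes (the `use_conv=False` path):

```python
import torch
import torch.nn.functional as F

x = torch.randn(1, 4, 16, 16)
up = F.interpolate(x, scale_factor=2, mode="nearest")   # (1, 4, 32, 32)
down = F.avg_pool2d(up, kernel_size=2, stride=2)        # what avg_pool_nd builds
assert down.shape == x.shape
```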
+ :param use_conv: if True and out_channels is specified, use a spatial + convolution instead of a smaller 1x1 convolution to change the + channels in the skip connection. + :param dims: determines if the signal is 1D, 2D, or 3D. + :param use_checkpoint: if True, use gradient checkpointing on this module. + :param up: if True, use this block for upsampling. + :param down: if True, use this block for downsampling. + """ + + def __init__( + self, + channels, + emb_channels, + dropout, + out_channels=None, + use_conv=False, + use_scale_shift_norm=False, + dims=2, + use_checkpoint=False, + up=False, + down=False, + ): + super().__init__() + self.channels = channels + self.emb_channels = emb_channels + self.dropout = dropout + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.use_checkpoint = use_checkpoint + self.use_scale_shift_norm = use_scale_shift_norm + + self.in_layers = nn.Sequential( + normalization(channels), + nn.SiLU(), + conv_nd(dims, channels, self.out_channels, 3, padding=1), + ) + + self.updown = up or down + + if up: + self.h_upd = Upsample(channels, False, dims) + self.x_upd = Upsample(channels, False, dims) + elif down: + self.h_upd = Downsample(channels, False, dims) + self.x_upd = Downsample(channels, False, dims) + else: + self.h_upd = self.x_upd = nn.Identity() + + self.emb_layers = nn.Sequential( + nn.SiLU(), + linear( + emb_channels, + 2 * self.out_channels if use_scale_shift_norm else self.out_channels, + ), + ) + self.out_layers = nn.Sequential( + normalization(self.out_channels), + nn.SiLU(), + nn.Dropout(p=dropout), + zero_module( + conv_nd(dims, self.out_channels, self.out_channels, 3, padding=1) + ), + ) + + if self.out_channels == channels: + self.skip_connection = nn.Identity() + elif use_conv: + self.skip_connection = conv_nd( + dims, channels, self.out_channels, 3, padding=1 + ) + else: + self.skip_connection = conv_nd(dims, channels, self.out_channels, 1) + + def forward(self, x, emb): + """ + Apply the block to a Tensor, conditioned on a timestep embedding. + :param x: an [N x C x ...] Tensor of features. + :param emb: an [N x emb_channels] Tensor of timestep embeddings. + :return: an [N x C x ...] Tensor of outputs. + """ + return checkpoint( + self._forward, (x, emb), self.parameters(), self.use_checkpoint + ) + + + def _forward(self, x, emb): + if self.updown: + in_rest, in_conv = self.in_layers[:-1], self.in_layers[-1] + h = in_rest(x) + h = self.h_upd(h) + x = self.x_upd(x) + h = in_conv(h) + else: + h = self.in_layers(x) + emb_out = self.emb_layers(emb).type(h.dtype) + while len(emb_out.shape) < len(h.shape): + emb_out = emb_out[..., None] + if self.use_scale_shift_norm: + out_norm, out_rest = self.out_layers[0], self.out_layers[1:] + scale, shift = th.chunk(emb_out, 2, dim=1) + h = out_norm(h) * (1 + scale) + shift + h = out_rest(h) + else: + h = h + emb_out + h = self.out_layers(h) + return self.skip_connection(x) + h + + +class AttentionBlock(nn.Module): + """ + An attention block that allows spatial positions to attend to each other. + Originally ported from here, but adapted to the N-d case. + https://github.com/hojonathanho/diffusion/blob/1e0dceb3b3495bbe19116a5e1b3596cd0706c543/diffusion_tf/models/unet.py#L66. 
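The `use_scale_shift_norm` branch of `ResBlock._forward` above is FiLM-style conditioning: `emb_layers` emits `2 * out_channels` values that are chunked into a per-channel scale and shift applied after normalization. A shape-level sketch with assumed sizes:

```python
import torch

h = torch.randn(2, 8, 4, 4)                    # normalized features (assumed sizes)
emb_out = torch.randn(2, 16, 1, 1)             # 2*C channels, broadcast over space
scale, shift = torch.chunk(emb_out, 2, dim=1)  # (2, 8, 1, 1) each
h = h * (1 + scale) + shift                    # what out_norm(h)*(1+scale)+shift does
```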
+ """ + + def __init__( + self, + channels, + num_heads=1, + num_head_channels=-1, + use_checkpoint=False, + use_new_attention_order=False, + ): + super().__init__() + self.channels = channels + if num_head_channels == -1: + self.num_heads = num_heads + else: + assert ( + channels % num_head_channels == 0 + ), f"q,k,v channels {channels} is not divisible by num_head_channels {num_head_channels}" + self.num_heads = channels // num_head_channels + self.use_checkpoint = use_checkpoint + self.norm = normalization(channels) + self.qkv = conv_nd(1, channels, channels * 3, 1) + if use_new_attention_order: + # split qkv before split heads + self.attention = QKVAttention(self.num_heads) + else: + # split heads before split qkv + self.attention = QKVAttentionLegacy(self.num_heads) + + self.proj_out = zero_module(conv_nd(1, channels, channels, 1)) + + def forward(self, x): + return checkpoint(self._forward, (x,), self.parameters(), True) # TODO: check checkpoint usage, is True # TODO: fix the .half call!!! + #return pt_checkpoint(self._forward, x) # pytorch + + def _forward(self, x): + b, c, *spatial = x.shape + x = x.reshape(b, c, -1) + qkv = self.qkv(self.norm(x)) + h = self.attention(qkv) + h = self.proj_out(h) + return (x + h).reshape(b, c, *spatial) + + +def count_flops_attn(model, _x, y): + """ + A counter for the `thop` package to count the operations in an + attention operation. + Meant to be used like: + macs, params = thop.profile( + model, + inputs=(inputs, timestamps), + custom_ops={QKVAttention: QKVAttention.count_flops}, + ) + """ + b, c, *spatial = y[0].shape + num_spatial = int(np.prod(spatial)) + # We perform two matmuls with the same number of ops. + # The first computes the weight matrix, the second computes + # the combination of the value vectors. + matmul_ops = 2 * b * (num_spatial ** 2) * c + model.total_ops += th.DoubleTensor([matmul_ops]) + + +class QKVAttentionLegacy(nn.Module): + """ + A module which performs QKV attention. Matches legacy QKVAttention + input/ouput heads shaping + """ + + def __init__(self, n_heads): + super().__init__() + self.n_heads = n_heads + + def forward(self, qkv): + """ + Apply QKV attention. + :param qkv: an [N x (H * 3 * C) x T] tensor of Qs, Ks, and Vs. + :return: an [N x (H * C) x T] tensor after attention. + """ + bs, width, length = qkv.shape + assert width % (3 * self.n_heads) == 0 + ch = width // (3 * self.n_heads) + q, k, v = qkv.reshape(bs * self.n_heads, ch * 3, length).split(ch, dim=1) + scale = 1 / math.sqrt(math.sqrt(ch)) + weight = th.einsum( + "bct,bcs->bts", q * scale, k * scale + ) # More stable with f16 than dividing afterwards + weight = th.softmax(weight.float(), dim=-1).type(weight.dtype) + a = th.einsum("bts,bcs->bct", weight, v) + return a.reshape(bs, -1, length) + + @staticmethod + def count_flops(model, _x, y): + return count_flops_attn(model, _x, y) + + +class QKVAttention(nn.Module): + """ + A module which performs QKV attention and splits in a different order. + """ + + def __init__(self, n_heads): + super().__init__() + self.n_heads = n_heads + + def forward(self, qkv): + """ + Apply QKV attention. + :param qkv: an [N x (3 * H * C) x T] tensor of Qs, Ks, and Vs. + :return: an [N x (H * C) x T] tensor after attention. 
+ """ + bs, width, length = qkv.shape + assert width % (3 * self.n_heads) == 0 + ch = width // (3 * self.n_heads) + q, k, v = qkv.chunk(3, dim=1) + scale = 1 / math.sqrt(math.sqrt(ch)) + weight = th.einsum( + "bct,bcs->bts", + (q * scale).view(bs * self.n_heads, ch, length), + (k * scale).view(bs * self.n_heads, ch, length), + ) # More stable with f16 than dividing afterwards + weight = th.softmax(weight.float(), dim=-1).type(weight.dtype) + a = th.einsum("bts,bcs->bct", weight, v.reshape(bs * self.n_heads, ch, length)) + return a.reshape(bs, -1, length) + + @staticmethod + def count_flops(model, _x, y): + return count_flops_attn(model, _x, y) + + +class UNetModel(nn.Module): + """ + The full UNet model with attention and timestep embedding. + :param in_channels: channels in the input Tensor. + :param model_channels: base channel count for the model. + :param out_channels: channels in the output Tensor. + :param num_res_blocks: number of residual blocks per downsample. + :param attention_resolutions: a collection of downsample rates at which + attention will take place. May be a set, list, or tuple. + For example, if this contains 4, then at 4x downsampling, attention + will be used. + :param dropout: the dropout probability. + :param channel_mult: channel multiplier for each level of the UNet. + :param conv_resample: if True, use learned convolutions for upsampling and + downsampling. + :param dims: determines if the signal is 1D, 2D, or 3D. + :param num_classes: if specified (as an int), then this model will be + class-conditional with `num_classes` classes. + :param use_checkpoint: use gradient checkpointing to reduce memory usage. + :param num_heads: the number of attention heads in each attention layer. + :param num_heads_channels: if specified, ignore num_heads and instead use + a fixed channel width per attention head. + :param num_heads_upsample: works with num_heads to set a different number + of heads for upsampling. Deprecated. + :param use_scale_shift_norm: use a FiLM-like conditioning mechanism. + :param resblock_updown: use residual blocks for up/downsampling. + :param use_new_attention_order: use a different attention pattern for potentially + increased efficiency. + """ + + def __init__( + self, + image_size, + in_channels, + model_channels, + out_channels, + num_res_blocks, + attention_resolutions, + dropout=0, + channel_mult=(1, 2, 4, 8), + conv_resample=True, + dims=2, + num_classes=None, + use_checkpoint=False, + use_fp16=False, + num_heads=-1, + num_head_channels=-1, + num_heads_upsample=-1, + use_scale_shift_norm=False, + resblock_updown=False, + use_new_attention_order=False, + use_spatial_transformer=False, # custom transformer support + transformer_depth=1, # custom transformer support + context_dim=None, # custom transformer support + n_embed=None, # custom support for prediction of discrete ids into codebook of first stage vq model + legacy=True, + ): + super().__init__() + if use_spatial_transformer: + assert context_dim is not None, 'Fool!! You forgot to include the dimension of your cross-attention conditioning...' + + if context_dim is not None: + assert use_spatial_transformer, 'Fool!! You forgot to use the spatial transformer for your cross-attention conditioning...' 
+ from omegaconf.listconfig import ListConfig + if type(context_dim) == ListConfig: + context_dim = list(context_dim) + + if num_heads_upsample == -1: + num_heads_upsample = num_heads + + if num_heads == -1: + assert num_head_channels != -1, 'Either num_heads or num_head_channels has to be set' + + if num_head_channels == -1: + assert num_heads != -1, 'Either num_heads or num_head_channels has to be set' + + self.image_size = image_size + self.in_channels = in_channels + self.model_channels = model_channels + self.out_channels = out_channels + self.num_res_blocks = num_res_blocks + self.attention_resolutions = attention_resolutions + self.dropout = dropout + self.channel_mult = channel_mult + self.conv_resample = conv_resample + self.num_classes = num_classes + self.use_checkpoint = use_checkpoint + self.dtype = th.float16 if use_fp16 else th.float32 + self.num_heads = num_heads + self.num_head_channels = num_head_channels + self.num_heads_upsample = num_heads_upsample + self.predict_codebook_ids = n_embed is not None + + time_embed_dim = model_channels * 4 + self.time_embed = nn.Sequential( + linear(model_channels, time_embed_dim), + nn.SiLU(), + linear(time_embed_dim, time_embed_dim), + ) + + if self.num_classes is not None: + self.label_emb = nn.Embedding(num_classes, time_embed_dim) + + self.input_blocks = nn.ModuleList( + [ + TimestepEmbedSequential( + conv_nd(dims, in_channels, model_channels, 3, padding=1) + ) + ] + ) + self._feature_size = model_channels + input_block_chans = [model_channels] + ch = model_channels + ds = 1 + for level, mult in enumerate(channel_mult): + for _ in range(num_res_blocks): + layers = [ + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=mult * model_channels, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + ] + ch = mult * model_channels + if ds in attention_resolutions: + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + #num_heads = 1 + dim_head = ch // num_heads if use_spatial_transformer else num_head_channels + layers.append( + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads, + num_head_channels=dim_head, + use_new_attention_order=use_new_attention_order, + ) if not use_spatial_transformer else SpatialTransformer( + ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim + ) + ) + self.input_blocks.append(TimestepEmbedSequential(*layers)) + self._feature_size += ch + input_block_chans.append(ch) + if level != len(channel_mult) - 1: + out_ch = ch + self.input_blocks.append( + TimestepEmbedSequential( + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + down=True, + ) + if resblock_updown + else Downsample( + ch, conv_resample, dims=dims, out_channels=out_ch + ) + ) + ) + ch = out_ch + input_block_chans.append(ch) + ds *= 2 + self._feature_size += ch + + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + #num_heads = 1 + dim_head = ch // num_heads if use_spatial_transformer else num_head_channels + self.middle_block = TimestepEmbedSequential( + ResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ), + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + 
num_heads=num_heads, + num_head_channels=dim_head, + use_new_attention_order=use_new_attention_order, + ) if not use_spatial_transformer else SpatialTransformer( + ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim + ), + ResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ), + ) + self._feature_size += ch + + self.output_blocks = nn.ModuleList([]) + for level, mult in list(enumerate(channel_mult))[::-1]: + for i in range(num_res_blocks + 1): + ich = input_block_chans.pop() + layers = [ + ResBlock( + ch + ich, + time_embed_dim, + dropout, + out_channels=model_channels * mult, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + ] + ch = model_channels * mult + if ds in attention_resolutions: + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + #num_heads = 1 + dim_head = ch // num_heads if use_spatial_transformer else num_head_channels + layers.append( + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads_upsample, + num_head_channels=dim_head, + use_new_attention_order=use_new_attention_order, + ) if not use_spatial_transformer else SpatialTransformer( + ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim + ) + ) + if level and i == num_res_blocks: + out_ch = ch + layers.append( + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + up=True, + ) + if resblock_updown + else Upsample(ch, conv_resample, dims=dims, out_channels=out_ch) + ) + ds //= 2 + self.output_blocks.append(TimestepEmbedSequential(*layers)) + self._feature_size += ch + + self.out = nn.Sequential( + normalization(ch), + nn.SiLU(), + zero_module(conv_nd(dims, model_channels, out_channels, 3, padding=1)), + ) + if self.predict_codebook_ids: + self.id_predictor = nn.Sequential( + normalization(ch), + conv_nd(dims, model_channels, n_embed, 1), + #nn.LogSoftmax(dim=1) # change to cross_entropy and produce non-normalized logits + ) + + def convert_to_fp16(self): + """ + Convert the torso of the model to float16. + """ + self.input_blocks.apply(convert_module_to_f16) + self.middle_block.apply(convert_module_to_f16) + self.output_blocks.apply(convert_module_to_f16) + + def convert_to_fp32(self): + """ + Convert the torso of the model to float32. + """ + self.input_blocks.apply(convert_module_to_f32) + self.middle_block.apply(convert_module_to_f32) + self.output_blocks.apply(convert_module_to_f32) + + def forward(self, x, timesteps=None, context=None, y=None,**kwargs): + """ + Apply the model to an input batch. + :param x: an [N x C x ...] Tensor of inputs. + :param timesteps: a 1-D batch of timesteps. + :param context: conditioning plugged in via crossattn + :param y: an [N] Tensor of labels, if class-conditional. + :return: an [N x C x ...] Tensor of outputs. 
+ """ + assert (y is not None) == ( + self.num_classes is not None + ), "must specify y if and only if the model is class-conditional" + hs = [] + t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False) + emb = self.time_embed(t_emb) + + if self.num_classes is not None: + assert y.shape == (x.shape[0],) + emb = emb + self.label_emb(y) + + h = x.type(self.dtype) + for module in self.input_blocks: + h = module(h, emb, context) + hs.append(h) + h = self.middle_block(h, emb, context) + for module in self.output_blocks: + h = th.cat([h, hs.pop()], dim=1) + h = module(h, emb, context) + h = h.type(x.dtype) + if self.predict_codebook_ids: + return self.id_predictor(h) + else: + return self.out(h) + + +class EncoderUNetModel(nn.Module): + """ + The half UNet model with attention and timestep embedding. + For usage, see UNet. + """ + + def __init__( + self, + image_size, + in_channels, + model_channels, + out_channels, + num_res_blocks, + attention_resolutions, + dropout=0, + channel_mult=(1, 2, 4, 8), + conv_resample=True, + dims=2, + use_checkpoint=False, + use_fp16=False, + num_heads=1, + num_head_channels=-1, + num_heads_upsample=-1, + use_scale_shift_norm=False, + resblock_updown=False, + use_new_attention_order=False, + pool="adaptive", + *args, + **kwargs + ): + super().__init__() + + if num_heads_upsample == -1: + num_heads_upsample = num_heads + + self.in_channels = in_channels + self.model_channels = model_channels + self.out_channels = out_channels + self.num_res_blocks = num_res_blocks + self.attention_resolutions = attention_resolutions + self.dropout = dropout + self.channel_mult = channel_mult + self.conv_resample = conv_resample + self.use_checkpoint = use_checkpoint + self.dtype = th.float16 if use_fp16 else th.float32 + self.num_heads = num_heads + self.num_head_channels = num_head_channels + self.num_heads_upsample = num_heads_upsample + + time_embed_dim = model_channels * 4 + self.time_embed = nn.Sequential( + linear(model_channels, time_embed_dim), + nn.SiLU(), + linear(time_embed_dim, time_embed_dim), + ) + + self.input_blocks = nn.ModuleList( + [ + TimestepEmbedSequential( + conv_nd(dims, in_channels, model_channels, 3, padding=1) + ) + ] + ) + self._feature_size = model_channels + input_block_chans = [model_channels] + ch = model_channels + ds = 1 + for level, mult in enumerate(channel_mult): + for _ in range(num_res_blocks): + layers = [ + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=mult * model_channels, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + ] + ch = mult * model_channels + if ds in attention_resolutions: + layers.append( + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads, + num_head_channels=num_head_channels, + use_new_attention_order=use_new_attention_order, + ) + ) + self.input_blocks.append(TimestepEmbedSequential(*layers)) + self._feature_size += ch + input_block_chans.append(ch) + if level != len(channel_mult) - 1: + out_ch = ch + self.input_blocks.append( + TimestepEmbedSequential( + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + down=True, + ) + if resblock_updown + else Downsample( + ch, conv_resample, dims=dims, out_channels=out_ch + ) + ) + ) + ch = out_ch + input_block_chans.append(ch) + ds *= 2 + self._feature_size += ch + + self.middle_block = TimestepEmbedSequential( + ResBlock( + ch, + time_embed_dim, + dropout, + 
dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ), + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads, + num_head_channels=num_head_channels, + use_new_attention_order=use_new_attention_order, + ), + ResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ), + ) + self._feature_size += ch + self.pool = pool + if pool == "adaptive": + self.out = nn.Sequential( + normalization(ch), + nn.SiLU(), + nn.AdaptiveAvgPool2d((1, 1)), + zero_module(conv_nd(dims, ch, out_channels, 1)), + nn.Flatten(), + ) + elif pool == "attention": + assert num_head_channels != -1 + self.out = nn.Sequential( + normalization(ch), + nn.SiLU(), + AttentionPool2d( + (image_size // ds), ch, num_head_channels, out_channels + ), + ) + elif pool == "spatial": + self.out = nn.Sequential( + nn.Linear(self._feature_size, 2048), + nn.ReLU(), + nn.Linear(2048, self.out_channels), + ) + elif pool == "spatial_v2": + self.out = nn.Sequential( + nn.Linear(self._feature_size, 2048), + normalization(2048), + nn.SiLU(), + nn.Linear(2048, self.out_channels), + ) + else: + raise NotImplementedError(f"Unexpected {pool} pooling") + + def convert_to_fp16(self): + """ + Convert the torso of the model to float16. + """ + self.input_blocks.apply(convert_module_to_f16) + self.middle_block.apply(convert_module_to_f16) + + def convert_to_fp32(self): + """ + Convert the torso of the model to float32. + """ + self.input_blocks.apply(convert_module_to_f32) + self.middle_block.apply(convert_module_to_f32) + + def forward(self, x, timesteps): + """ + Apply the model to an input batch. + :param x: an [N x C x ...] Tensor of inputs. + :param timesteps: a 1-D batch of timesteps. + :return: an [N x K] Tensor of outputs. + """ + emb = self.time_embed(timestep_embedding(timesteps, self.model_channels)) + + results = [] + h = x.type(self.dtype) + for module in self.input_blocks: + h = module(h, emb) + if self.pool.startswith("spatial"): + results.append(h.type(x.dtype).mean(dim=(2, 3))) + h = self.middle_block(h, emb) + if self.pool.startswith("spatial"): + results.append(h.type(x.dtype).mean(dim=(2, 3))) + h = th.cat(results, axis=-1) + return self.out(h) + else: + h = h.type(x.dtype) + return self.out(h) + diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/util.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/util.py new file mode 100644 index 0000000000000000000000000000000000000000..466e2859e6fe844c1ca98d380bb4d565479f0f60 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/diffusionmodules/util.py @@ -0,0 +1,265 @@ +# adopted from +# https://github.com/openai/improved-diffusion/blob/main/improved_diffusion/gaussian_diffusion.py +# and +# https://github.com/lucidrains/denoising-diffusion-pytorch/blob/7706bdfc6f527f58d33f84b7b522e61e6e3164b3/denoising_diffusion_pytorch/denoising_diffusion_pytorch.py +# and +# https://github.com/openai/guided-diffusion/blob/0ba878e517b276c45d1195eb29f6f5f72659a05b/guided_diffusion/nn.py +# +# thanks! 
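The "spatial" pooling head of `EncoderUNetModel` above mean-pools each saved feature map over its spatial dimensions and concatenates the results along the channel axis before the linear layers. Illustrative shapes:

```python
import torch

feats = [torch.randn(2, c, 8, 8) for c in (32, 64, 64)]   # assumed feature maps
results = [h.mean(dim=(2, 3)) for h in feats]             # (2, 32), (2, 64), (2, 64)
pooled = torch.cat(results, dim=-1)                       # fed to self.out
assert pooled.shape == (2, 160)
```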
+ +import math +import torch +import torch.nn as nn +import numpy as np +from einops import repeat + +from ...util import instantiate_from_config + + +def make_beta_schedule(schedule, n_timestep, linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3): + if schedule == "linear": + betas = ( + torch.linspace(linear_start ** 0.5, linear_end ** 0.5, n_timestep, dtype=torch.float64) ** 2 + ) + + elif schedule == "cosine": + timesteps = ( + torch.arange(n_timestep + 1, dtype=torch.float64) / n_timestep + cosine_s + ) + alphas = timesteps / (1 + cosine_s) * np.pi / 2 + alphas = torch.cos(alphas).pow(2) + alphas = alphas / alphas[0] + betas = 1 - alphas[1:] / alphas[:-1] + betas = np.clip(betas, a_min=0, a_max=0.999) + + elif schedule == "sqrt_linear": + betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64) + elif schedule == "sqrt": + betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64) ** 0.5 + else: + raise ValueError(f"schedule '{schedule}' unknown.") + return betas.numpy() + + +def make_ddim_timesteps(ddim_discr_method, num_ddim_timesteps, num_ddpm_timesteps, verbose=True): + if ddim_discr_method == 'uniform': + c = num_ddpm_timesteps // num_ddim_timesteps + ddim_timesteps = np.asarray(list(range(0, num_ddpm_timesteps, c))) + elif ddim_discr_method == 'quad': + ddim_timesteps = ((np.linspace(0, np.sqrt(num_ddpm_timesteps * .8), num_ddim_timesteps)) ** 2).astype(int) + else: + raise NotImplementedError(f'There is no ddim discretization method called "{ddim_discr_method}"') + + # assert ddim_timesteps.shape[0] == num_ddim_timesteps + # add one to get the final alpha values right (the ones from first scale to data during sampling) + steps_out = ddim_timesteps + 1 + if verbose: + print(f'Selected timesteps for ddim sampler: {steps_out}') + return steps_out + + +def make_ddim_sampling_parameters(alphacums, ddim_timesteps, eta, verbose=True): + # select alphas for computing the variance schedule + alphas = alphacums[ddim_timesteps] + alphas_prev = np.asarray([alphacums[0]] + alphacums[ddim_timesteps[:-1]].tolist()) + + # according to the formula provided in https://arxiv.org/abs/2010.02502 + sigmas = eta * np.sqrt((1 - alphas_prev) / (1 - alphas) * (1 - alphas / alphas_prev)) + if verbose: + print(f'Selected alphas for ddim sampler: a_t: {alphas}; a_(t-1): {alphas_prev}') + print(f'For the chosen value of eta, which is {eta}, ' + f'this results in the following sigma_t schedule for ddim sampler {sigmas}') + return sigmas, alphas, alphas_prev + + +def betas_for_alpha_bar(num_diffusion_timesteps, alpha_bar, max_beta=0.999): + """ + Create a beta schedule that discretizes the given alpha_t_bar function, + which defines the cumulative product of (1-beta) over time from t = [0,1]. + :param num_diffusion_timesteps: the number of betas to produce. + :param alpha_bar: a lambda that takes an argument t from 0 to 1 and + produces the cumulative product of (1-beta) up to that + part of the diffusion process. + :param max_beta: the maximum beta to use; use values lower than 1 to + prevent singularities.
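A toy run of `make_ddim_sampling_parameters` above makes the eta knob concrete: `eta = 0` zeroes every `sigma_t`, which is the deterministic DDIM sampler. All values below are assumed for illustration:

```python
import numpy as np

alphacums = np.linspace(0.99, 0.01, 100)            # toy cumulative alphas
ddim_timesteps = np.arange(0, 100, 25) + 1          # the +1 from make_ddim_timesteps
alphas = alphacums[ddim_timesteps]
alphas_prev = np.asarray([alphacums[0]] + alphacums[ddim_timesteps[:-1]].tolist())
eta = 0.0
sigmas = eta * np.sqrt((1 - alphas_prev) / (1 - alphas) * (1 - alphas / alphas_prev))
assert np.allclose(sigmas, 0.0)
```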
+ """ + betas = [] + for i in range(num_diffusion_timesteps): + t1 = i / num_diffusion_timesteps + t2 = (i + 1) / num_diffusion_timesteps + betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta)) + return np.array(betas) + + +def extract_into_tensor(a, t, x_shape): + b, *_ = t.shape + out = a.gather(-1, t) + return out.reshape(b, *((1,) * (len(x_shape) - 1))) + + +def checkpoint(func, inputs, params, flag): + """ + Evaluate a function without caching intermediate activations, allowing for + reduced memory at the expense of extra compute in the backward pass. + :param func: the function to evaluate. + :param inputs: the argument sequence to pass to `func`. + :param params: a sequence of parameters `func` depends on but does not + explicitly take as arguments. + :param flag: if False, disable gradient checkpointing. + """ + if flag: + args = tuple(inputs) + tuple(params) + return CheckpointFunction.apply(func, len(inputs), *args) + else: + return func(*inputs) + + +class CheckpointFunction(torch.autograd.Function): + @staticmethod + def forward(ctx, run_function, length, *args): + ctx.run_function = run_function + ctx.input_tensors = list(args[:length]) + ctx.input_params = list(args[length:]) + + with torch.no_grad(): + output_tensors = ctx.run_function(*ctx.input_tensors) + return output_tensors + + @staticmethod + def backward(ctx, *output_grads): + ctx.input_tensors = [x.detach().requires_grad_(True) for x in ctx.input_tensors] + with torch.enable_grad(): + # Fixes a bug where the first op in run_function modifies the + # Tensor storage in place, which is not allowed for detach()'d + # Tensors. + shallow_copies = [x.view_as(x) for x in ctx.input_tensors] + output_tensors = ctx.run_function(*shallow_copies) + input_grads = torch.autograd.grad( + output_tensors, + ctx.input_tensors + ctx.input_params, + output_grads, + allow_unused=True, + ) + del ctx.input_tensors + del ctx.input_params + del output_tensors + return (None, None) + input_grads + + +def timestep_embedding(timesteps, dim, max_period=10000, repeat_only=False): + """ + Create sinusoidal timestep embeddings. + :param timesteps: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an [N x dim] Tensor of positional embeddings. + """ + if not repeat_only: + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32) / half + ).to(device=timesteps.device) + args = timesteps[:, None].float() * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1) + else: + embedding = repeat(timesteps, 'b -> b d', d=dim) + return embedding + + +def zero_module(module): + """ + Zero out the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().zero_() + return module + + +def scale_module(module, scale): + """ + Scale the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().mul_(scale) + return module + + +def mean_flat(tensor): + """ + Take the mean over all non-batch dimensions. + """ + return tensor.mean(dim=list(range(1, len(tensor.shape)))) + + +def normalization(channels): + """ + Make a standard normalization layer. + :param channels: number of input channels. + :return: an nn.Module for normalization. 
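The sinusoidal embedding built by `timestep_embedding` above, restated for an even `dim` with assumed toy values:

```python
import math
import torch

timesteps = torch.tensor([0., 10., 100.])
dim, max_period = 8, 10000
half = dim // 2
freqs = torch.exp(-math.log(max_period) * torch.arange(half, dtype=torch.float32) / half)
args = timesteps[:, None] * freqs[None]
emb = torch.cat([torch.cos(args), torch.sin(args)], dim=-1)
assert emb.shape == (3, dim)
```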
+ """ + return GroupNorm32(32, channels) + + +# PyTorch 1.7 has SiLU, but we support PyTorch 1.5. +class SiLU(nn.Module): + def forward(self, x): + return x * torch.sigmoid(x) + + +class GroupNorm32(nn.GroupNorm): + def forward(self, x): + return super().forward(x.float()).type(x.dtype) + +def conv_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D convolution module. + """ + if dims == 1: + return nn.Conv1d(*args, **kwargs) + elif dims == 2: + return nn.Conv2d(*args, **kwargs) + elif dims == 3: + return nn.Conv3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +def linear(*args, **kwargs): + """ + Create a linear module. + """ + return nn.Linear(*args, **kwargs) + + +def avg_pool_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D average pooling module. + """ + if dims == 1: + return nn.AvgPool1d(*args, **kwargs) + elif dims == 2: + return nn.AvgPool2d(*args, **kwargs) + elif dims == 3: + return nn.AvgPool3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +class HybridConditioner(nn.Module): + + def __init__(self, c_concat_config, c_crossattn_config): + super().__init__() + self.concat_conditioner = instantiate_from_config(c_concat_config) + self.crossattn_conditioner = instantiate_from_config(c_crossattn_config) + + def forward(self, c_concat, c_crossattn): + c_concat = self.concat_conditioner(c_concat) + c_crossattn = self.crossattn_conditioner(c_crossattn) + return {'c_concat': [c_concat], 'c_crossattn': [c_crossattn]} + + +def noise_like(shape, device, repeat=False): + repeat_noise = lambda: torch.randn((1, *shape[1:]), device=device).repeat(shape[0], *((1,) * (len(shape) - 1))) + noise = lambda: torch.randn(shape, device=device) + return repeat_noise() if repeat else noise() \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/distributions/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/distributions/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/distributions/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/distributions/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..07f93e6676fad0fbb10ab1e6cc3177d34b4dd16a Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/distributions/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/distributions/__pycache__/distributions.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/distributions/__pycache__/distributions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c030e350097e05d8894a57ed0cb1557cbfc22fbd Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/distributions/__pycache__/distributions.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/distributions/distributions.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/distributions/distributions.py new file mode 100644 index 0000000000000000000000000000000000000000..f2b8ef901130efc171aa69742ca0244d94d3f2e9 --- /dev/null +++ 
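`GroupNorm32` above simply runs GroupNorm in float32 and casts back, which keeps the normalization statistics stable when `convert_to_fp16` has put the surrounding activations in half precision. Spelled out once, with assumed sizes:

```python
import torch

gn = torch.nn.GroupNorm(32, 64)        # what normalization(64) builds
x = torch.randn(1, 64, 8, 8).half()
y = gn(x.float()).type(x.dtype)        # GroupNorm32.forward, written out
assert y.dtype == torch.float16
```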
b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/distributions/distributions.py @@ -0,0 +1,92 @@ +import torch +import numpy as np + + +class AbstractDistribution: + def sample(self): + raise NotImplementedError() + + def mode(self): + raise NotImplementedError() + + +class DiracDistribution(AbstractDistribution): + def __init__(self, value): + self.value = value + + def sample(self): + return self.value + + def mode(self): + return self.value + + +class DiagonalGaussianDistribution(object): + def __init__(self, parameters, deterministic=False): + self.parameters = parameters + self.mean, self.logvar = torch.chunk(parameters, 2, dim=1) + self.logvar = torch.clamp(self.logvar, -30.0, 20.0) + self.deterministic = deterministic + self.std = torch.exp(0.5 * self.logvar) + self.var = torch.exp(self.logvar) + if self.deterministic: + self.var = self.std = torch.zeros_like(self.mean).to(device=self.parameters.device) + + def sample(self): + x = self.mean + self.std * torch.randn(self.mean.shape).to(device=self.parameters.device) + return x + + def kl(self, other=None): + if self.deterministic: + return torch.Tensor([0.]) + else: + if other is None: + return 0.5 * torch.sum(torch.pow(self.mean, 2) + + self.var - 1.0 - self.logvar, + dim=[1, 2, 3]) + else: + return 0.5 * torch.sum( + torch.pow(self.mean - other.mean, 2) / other.var + + self.var / other.var - 1.0 - self.logvar + other.logvar, + dim=[1, 2, 3]) + + def nll(self, sample, dims=[1,2,3]): + if self.deterministic: + return torch.Tensor([0.]) + logtwopi = np.log(2.0 * np.pi) + return 0.5 * torch.sum( + logtwopi + self.logvar + torch.pow(sample - self.mean, 2) / self.var, + dim=dims) + + def mode(self): + return self.mean + + +def normal_kl(mean1, logvar1, mean2, logvar2): + """ + source: https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/losses.py#L12 + Compute the KL divergence between two gaussians. + Shapes are automatically broadcasted, so batches can be compared to + scalars, among other use cases. + """ + tensor = None + for obj in (mean1, logvar1, mean2, logvar2): + if isinstance(obj, torch.Tensor): + tensor = obj + break + assert tensor is not None, "at least one argument must be a Tensor" + + # Force variances to be Tensors. Broadcasting helps convert scalars to + # Tensors, but it does not work for torch.exp(). 
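`DiagonalGaussianDistribution` above is the reparameterisation-trick workhorse of the autoencoder: `sample` draws `mean + std * eps`, and `kl()` without an argument is the closed-form KL against a standard normal. A sanity check at the standard normal itself, with assumed shapes:

```python
import torch

mean = torch.zeros(2, 4, 8, 8)
logvar = torch.zeros(2, 4, 8, 8)                # unit variance
std = torch.exp(0.5 * logvar)
x = mean + std * torch.randn_like(mean)         # what sample() returns

kl = 0.5 * torch.sum(mean.pow(2) + logvar.exp() - 1.0 - logvar, dim=[1, 2, 3])
assert torch.allclose(kl, torch.zeros(2))       # KL(N(0,1) || N(0,1)) = 0
```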
+ logvar1, logvar2 = [ + x if isinstance(x, torch.Tensor) else torch.tensor(x).to(tensor) + for x in (logvar1, logvar2) + ] + + return 0.5 * ( + -1.0 + + logvar2 + - logvar1 + + torch.exp(logvar1 - logvar2) + + ((mean1 - mean2) ** 2) * torch.exp(-logvar2) + ) diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/ema.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/ema.py new file mode 100644 index 0000000000000000000000000000000000000000..c8c75af43565f6e140287644aaaefa97dd6e67c5 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/modules/ema.py @@ -0,0 +1,76 @@ +import torch +from torch import nn + + +class LitEma(nn.Module): + def __init__(self, model, decay=0.9999, use_num_upates=True): + super().__init__() + if decay < 0.0 or decay > 1.0: + raise ValueError('Decay must be between 0 and 1') + + self.m_name2s_name = {} + self.register_buffer('decay', torch.tensor(decay, dtype=torch.float32)) + self.register_buffer('num_updates', torch.tensor(0,dtype=torch.int) if use_num_upates + else torch.tensor(-1,dtype=torch.int)) + + for name, p in model.named_parameters(): + if p.requires_grad: + #remove as '.'-character is not allowed in buffers + s_name = name.replace('.','') + self.m_name2s_name.update({name:s_name}) + self.register_buffer(s_name,p.clone().detach().data) + + self.collected_params = [] + + def forward(self,model): + decay = self.decay + + if self.num_updates >= 0: + self.num_updates += 1 + decay = min(self.decay,(1 + self.num_updates) / (10 + self.num_updates)) + + one_minus_decay = 1.0 - decay + + with torch.no_grad(): + m_param = dict(model.named_parameters()) + shadow_params = dict(self.named_buffers()) + + for key in m_param: + if m_param[key].requires_grad: + sname = self.m_name2s_name[key] + shadow_params[sname] = shadow_params[sname].type_as(m_param[key]) + shadow_params[sname].sub_(one_minus_decay * (shadow_params[sname] - m_param[key])) + else: + assert not key in self.m_name2s_name + + def copy_to(self, model): + m_param = dict(model.named_parameters()) + shadow_params = dict(self.named_buffers()) + for key in m_param: + if m_param[key].requires_grad: + m_param[key].data.copy_(shadow_params[self.m_name2s_name[key]].data) + else: + assert not key in self.m_name2s_name + + def store(self, parameters): + """ + Save the current parameters for restoring later. + Args: + parameters: Iterable of `torch.nn.Parameter`; the parameters to be + temporarily stored. + """ + self.collected_params = [param.clone() for param in parameters] + + def restore(self, parameters): + """ + Restore the parameters stored with the `store` method. + Useful to validate the model with EMA parameters without affecting the + original optimization process. Store the parameters before the + `copy_to` method. After validation (or model saving), use this to + restore the former parameters. + Args: + parameters: Iterable of `torch.nn.Parameter`; the parameters to be + updated with the stored parameters. 
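The update in `LitEma.forward` above, reduced to a single scalar to show the warm-up behaviour of the effective decay (assumed values):

```python
decay, num_updates = 0.9999, 5
decay = min(decay, (1 + num_updates) / (10 + num_updates))   # 0.4 early in training
shadow, param = 1.0, 0.0
shadow -= (1 - decay) * (shadow - param)                     # shadow -> 0.4
assert abs(shadow - 0.4) < 1e-12
```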
+ """ + for c_param, param in zip(self.collected_params, parameters): + param.data.copy_(c_param.data) diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/util.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/util.py new file mode 100644 index 0000000000000000000000000000000000000000..51839cb1478d9fecb293277dc83d2693e3d26de4 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/ldm/util.py @@ -0,0 +1,86 @@ +import importlib + +import torch +import numpy as np + +from inspect import isfunction +from PIL import Image, ImageDraw, ImageFont + + +def log_txt_as_img(wh, xc, size=10): + # wh a tuple of (width, height) + # xc a list of captions to plot + b = len(xc) + txts = list() + for bi in range(b): + txt = Image.new("RGB", wh, color="white") + draw = ImageDraw.Draw(txt) + font = ImageFont.truetype('data/DejaVuSans.ttf', size=size) + nc = int(40 * (wh[0] / 256)) + lines = "\n".join(xc[bi][start:start + nc] for start in range(0, len(xc[bi]), nc)) + + try: + draw.text((0, 0), lines, fill="black", font=font) + except UnicodeEncodeError: + print("Can't encode string for logging. Skipping.") + + txt = np.array(txt).transpose(2, 0, 1) / 127.5 - 1.0 + txts.append(txt) + txts = np.stack(txts) + txts = torch.tensor(txts) + return txts + + +def ismap(x): + if not isinstance(x, torch.Tensor): + return False + return (len(x.shape) == 4) and (x.shape[1] > 3) + + +def isimage(x): + if not isinstance(x,torch.Tensor): + return False + return (len(x.shape) == 4) and (x.shape[1] == 3 or x.shape[1] == 1) + + +def exists(x): + return x is not None + + +def default(val, d): + if exists(val): + return val + return d() if isfunction(d) else d + + +def mean_flat(tensor): + """ + https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/nn.py#L86 + Take the mean over all non-batch dimensions.
+ """ + return tensor.mean(dim=list(range(1, len(tensor.shape)))) + + +def count_params(model, verbose=False): + total_params = sum(p.numel() for p in model.parameters()) + if verbose: + print(f"{model.__class__.__name__} has {total_params*1.e-6:.2f} M params.") + return total_params + + +def instantiate_from_config(config): + if not "target" in config: + if config == '__is_first_stage__': + return None + elif config == "__is_unconditional__": + return None + raise KeyError("Expected key `target` to instantiate.") + return get_obj_from_str(config["target"])(**config.get("params", dict())) + + +def get_obj_from_str(string, reload=False): + module, cls = string.rsplit(".", 1) + if reload: + module_imp = importlib.import_module(module) + importlib.reload(module_imp) + return getattr(importlib.import_module(module, package=None), cls) \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..515080725b7d153f598781834b36a3a7df8de9ad Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/modules/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/modules/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/modules/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/modules/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ee34f66a3c0e5b2e2a933c946c77bcc7c383751a Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/modules/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/modules/vqvae/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/modules/vqvae/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/modules/vqvae/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/modules/vqvae/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..339212a8d978989b21757e118f256f627b41cd44 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/modules/vqvae/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/modules/vqvae/__pycache__/quantize.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/modules/vqvae/__pycache__/quantize.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d89462c95eaf01c59a638d8b674ed362ea24dc6b Binary files /dev/null and 
b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/modules/vqvae/__pycache__/quantize.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/modules/vqvae/quantize.py b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/modules/vqvae/quantize.py new file mode 100644 index 0000000000000000000000000000000000000000..d75544e41fa01bce49dd822b1037963d62f79b51 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/ldsrlib/taming/modules/vqvae/quantize.py @@ -0,0 +1,445 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +import numpy as np +from torch import einsum +from einops import rearrange + + +class VectorQuantizer(nn.Module): + """ + see https://github.com/MishaLaskin/vqvae/blob/d761a999e2267766400dc646d82d3ac3657771d4/models/quantizer.py + ____________________________________________ + Discretization bottleneck part of the VQ-VAE. + Inputs: + - n_e : number of embeddings + - e_dim : dimension of embedding + - beta : commitment cost used in loss term, beta * ||z_e(x)-sg[e]||^2 + _____________________________________________ + """ + + # NOTE: this class contains a bug regarding beta; see VectorQuantizer2 for + # a fix and use legacy=False to apply that fix. VectorQuantizer2 can be + # used wherever VectorQuantizer has been used before and is additionally + # more efficient. + def __init__(self, n_e, e_dim, beta): + super(VectorQuantizer, self).__init__() + self.n_e = n_e + self.e_dim = e_dim + self.beta = beta + + self.embedding = nn.Embedding(self.n_e, self.e_dim) + self.embedding.weight.data.uniform_(-1.0 / self.n_e, 1.0 / self.n_e) + + def forward(self, z): + """ + Inputs the output of the encoder network z and maps it to a discrete + one-hot vector that is the index of the closest embedding vector e_j + z (continuous) -> z_q (discrete) + z.shape = (batch, channel, height, width) + quantization pipeline: + 1. get encoder input (B,C,H,W) + 2. flatten input to (B*H*W,C) + """ + # reshape z -> (batch, height, width, channel) and flatten + z = z.permute(0, 2, 3, 1).contiguous() + z_flattened = z.view(-1, self.e_dim) + # distances from z to embeddings e_j (z - e)^2 = z^2 + e^2 - 2 e * z + + d = torch.sum(z_flattened ** 2, dim=1, keepdim=True) + \ + torch.sum(self.embedding.weight**2, dim=1) - 2 * \ + torch.matmul(z_flattened, self.embedding.weight.t()) + + ## could possible replace this here + # #\start... + # find closest encodings + min_encoding_indices = torch.argmin(d, dim=1).unsqueeze(1) + + min_encodings = torch.zeros( + min_encoding_indices.shape[0], self.n_e).to(z) + min_encodings.scatter_(1, min_encoding_indices, 1) + + # dtype min encodings: torch.float32 + # min_encodings shape: torch.Size([2048, 512]) + # min_encoding_indices.shape: torch.Size([2048, 1]) + + # get quantized latent vectors + z_q = torch.matmul(min_encodings, self.embedding.weight).view(z.shape) + #.........\end + + # with: + # .........\start + #min_encoding_indices = torch.argmin(d, dim=1) + #z_q = self.embedding(min_encoding_indices) + # ......\end......... 
(TODO) + + # compute loss for embedding + loss = torch.mean((z_q.detach()-z)**2) + self.beta * \ + torch.mean((z_q - z.detach()) ** 2) + + # preserve gradients + z_q = z + (z_q - z).detach() + + # perplexity + e_mean = torch.mean(min_encodings, dim=0) + perplexity = torch.exp(-torch.sum(e_mean * torch.log(e_mean + 1e-10))) + + # reshape back to match original input shape + z_q = z_q.permute(0, 3, 1, 2).contiguous() + + return z_q, loss, (perplexity, min_encodings, min_encoding_indices) + + def get_codebook_entry(self, indices, shape): + # shape specifying (batch, height, width, channel) + # TODO: check for more easy handling with nn.Embedding + min_encodings = torch.zeros(indices.shape[0], self.n_e).to(indices) + min_encodings.scatter_(1, indices[:,None], 1) + + # get quantized latent vectors + z_q = torch.matmul(min_encodings.float(), self.embedding.weight) + + if shape is not None: + z_q = z_q.view(shape) + + # reshape back to match original input shape + z_q = z_q.permute(0, 3, 1, 2).contiguous() + + return z_q + + +class GumbelQuantize(nn.Module): + """ + credit to @karpathy: https://github.com/karpathy/deep-vector-quantization/blob/main/model.py (thanks!) + Gumbel Softmax trick quantizer + Categorical Reparameterization with Gumbel-Softmax, Jang et al. 2016 + https://arxiv.org/abs/1611.01144 + """ + def __init__(self, num_hiddens, embedding_dim, n_embed, straight_through=True, + kl_weight=5e-4, temp_init=1.0, use_vqinterface=True, + remap=None, unknown_index="random"): + super().__init__() + + self.embedding_dim = embedding_dim + self.n_embed = n_embed + + self.straight_through = straight_through + self.temperature = temp_init + self.kl_weight = kl_weight + + self.proj = nn.Conv2d(num_hiddens, n_embed, 1) + self.embed = nn.Embedding(n_embed, embedding_dim) + + self.use_vqinterface = use_vqinterface + + self.remap = remap + if self.remap is not None: + self.register_buffer("used", torch.tensor(np.load(self.remap))) + self.re_embed = self.used.shape[0] + self.unknown_index = unknown_index # "random" or "extra" or integer + if self.unknown_index == "extra": + self.unknown_index = self.re_embed + self.re_embed = self.re_embed+1 + print(f"Remapping {self.n_embed} indices to {self.re_embed} indices. " + f"Using {self.unknown_index} for unknown indices.") + else: + self.re_embed = n_embed + + def remap_to_used(self, inds): + ishape = inds.shape + assert len(ishape)>1 + inds = inds.reshape(ishape[0],-1) + used = self.used.to(inds) + match = (inds[:,:,None]==used[None,None,...]).long() + new = match.argmax(-1) + unknown = match.sum(2)<1 + if self.unknown_index == "random": + new[unknown]=torch.randint(0,self.re_embed,size=new[unknown].shape).to(device=new.device) + else: + new[unknown] = self.unknown_index + return new.reshape(ishape) + + def unmap_to_all(self, inds): + ishape = inds.shape + assert len(ishape)>1 + inds = inds.reshape(ishape[0],-1) + used = self.used.to(inds) + if self.re_embed > self.used.shape[0]: # extra token + inds[inds>=self.used.shape[0]] = 0 # simply set to zero + back=torch.gather(used[None,:][inds.shape[0]*[0],:], 1, inds) + return back.reshape(ishape) + + def forward(self, z, temp=None, return_logits=False): + # force hard = True when we are in eval mode, as we must quantize. 
actually, always true seems to work + hard = self.straight_through if self.training else True + temp = self.temperature if temp is None else temp + + logits = self.proj(z) + if self.remap is not None: + # continue only with used logits + full_zeros = torch.zeros_like(logits) + logits = logits[:,self.used,...] + + soft_one_hot = F.gumbel_softmax(logits, tau=temp, dim=1, hard=hard) + if self.remap is not None: + # go back to all entries but unused set to zero + full_zeros[:,self.used,...] = soft_one_hot + soft_one_hot = full_zeros + z_q = einsum('b n h w, n d -> b d h w', soft_one_hot, self.embed.weight) + + # + kl divergence to the prior loss + qy = F.softmax(logits, dim=1) + diff = self.kl_weight * torch.sum(qy * torch.log(qy * self.n_embed + 1e-10), dim=1).mean() + + ind = soft_one_hot.argmax(dim=1) + if self.remap is not None: + ind = self.remap_to_used(ind) + if self.use_vqinterface: + if return_logits: + return z_q, diff, (None, None, ind), logits + return z_q, diff, (None, None, ind) + return z_q, diff, ind + + def get_codebook_entry(self, indices, shape): + b, h, w, c = shape + assert b*h*w == indices.shape[0] + indices = rearrange(indices, '(b h w) -> b h w', b=b, h=h, w=w) + if self.remap is not None: + indices = self.unmap_to_all(indices) + one_hot = F.one_hot(indices, num_classes=self.n_embed).permute(0, 3, 1, 2).float() + z_q = einsum('b n h w, n d -> b d h w', one_hot, self.embed.weight) + return z_q + + +class VectorQuantizer2(nn.Module): + """ + Improved version over VectorQuantizer, can be used as a drop-in replacement. Mostly + avoids costly matrix multiplications and allows for post-hoc remapping of indices. + """ + # NOTE: due to a bug the beta term was applied to the wrong term. for + # backwards compatibility we use the buggy version by default, but you can + # specify legacy=False to fix it. + def __init__(self, n_e, e_dim, beta, remap=None, unknown_index="random", + sane_index_shape=False, legacy=True): + super().__init__() + self.n_e = n_e + self.e_dim = e_dim + self.beta = beta + self.legacy = legacy + + self.embedding = nn.Embedding(self.n_e, self.e_dim) + self.embedding.weight.data.uniform_(-1.0 / self.n_e, 1.0 / self.n_e) + + self.remap = remap + if self.remap is not None: + self.register_buffer("used", torch.tensor(np.load(self.remap))) + self.re_embed = self.used.shape[0] + self.unknown_index = unknown_index # "random" or "extra" or integer + if self.unknown_index == "extra": + self.unknown_index = self.re_embed + self.re_embed = self.re_embed+1 + print(f"Remapping {self.n_e} indices to {self.re_embed} indices. 
" + f"Using {self.unknown_index} for unknown indices.") + else: + self.re_embed = n_e + + self.sane_index_shape = sane_index_shape + + def remap_to_used(self, inds): + ishape = inds.shape + assert len(ishape)>1 + inds = inds.reshape(ishape[0],-1) + used = self.used.to(inds) + match = (inds[:,:,None]==used[None,None,...]).long() + new = match.argmax(-1) + unknown = match.sum(2)<1 + if self.unknown_index == "random": + new[unknown]=torch.randint(0,self.re_embed,size=new[unknown].shape).to(device=new.device) + else: + new[unknown] = self.unknown_index + return new.reshape(ishape) + + def unmap_to_all(self, inds): + ishape = inds.shape + assert len(ishape)>1 + inds = inds.reshape(ishape[0],-1) + used = self.used.to(inds) + if self.re_embed > self.used.shape[0]: # extra token + inds[inds>=self.used.shape[0]] = 0 # simply set to zero + back=torch.gather(used[None,:][inds.shape[0]*[0],:], 1, inds) + return back.reshape(ishape) + + def forward(self, z, temp=None, rescale_logits=False, return_logits=False): + assert temp is None or temp==1.0, "Only for interface compatible with Gumbel" + assert rescale_logits==False, "Only for interface compatible with Gumbel" + assert return_logits==False, "Only for interface compatible with Gumbel" + # reshape z -> (batch, height, width, channel) and flatten + z = rearrange(z, 'b c h w -> b h w c').contiguous() + z_flattened = z.view(-1, self.e_dim) + # distances from z to embeddings e_j (z - e)^2 = z^2 + e^2 - 2 e * z + + d = torch.sum(z_flattened ** 2, dim=1, keepdim=True) + \ + torch.sum(self.embedding.weight**2, dim=1) - 2 * \ + torch.einsum('bd,dn->bn', z_flattened, rearrange(self.embedding.weight, 'n d -> d n')) + + min_encoding_indices = torch.argmin(d, dim=1) + z_q = self.embedding(min_encoding_indices).view(z.shape) + perplexity = None + min_encodings = None + + # compute loss for embedding + if not self.legacy: + loss = self.beta * torch.mean((z_q.detach()-z)**2) + \ + torch.mean((z_q - z.detach()) ** 2) + else: + loss = torch.mean((z_q.detach()-z)**2) + self.beta * \ + torch.mean((z_q - z.detach()) ** 2) + + # preserve gradients + z_q = z + (z_q - z).detach() + + # reshape back to match original input shape + z_q = rearrange(z_q, 'b h w c -> b c h w').contiguous() + + if self.remap is not None: + min_encoding_indices = min_encoding_indices.reshape(z.shape[0],-1) # add batch axis + min_encoding_indices = self.remap_to_used(min_encoding_indices) + min_encoding_indices = min_encoding_indices.reshape(-1,1) # flatten + + if self.sane_index_shape: + min_encoding_indices = min_encoding_indices.reshape( + z_q.shape[0], z_q.shape[2], z_q.shape[3]) + + return z_q, loss, (perplexity, min_encodings, min_encoding_indices) + + def get_codebook_entry(self, indices, shape): + # shape specifying (batch, height, width, channel) + if self.remap is not None: + indices = indices.reshape(shape[0],-1) # add batch axis + indices = self.unmap_to_all(indices) + indices = indices.reshape(-1) # flatten again + + # get quantized latent vectors + z_q = self.embedding(indices) + + if shape is not None: + z_q = z_q.view(shape) + # reshape back to match original input shape + z_q = z_q.permute(0, 3, 1, 2).contiguous() + + return z_q + +class EmbeddingEMA(nn.Module): + def __init__(self, num_tokens, codebook_dim, decay=0.99, eps=1e-5): + super().__init__() + self.decay = decay + self.eps = eps + weight = torch.randn(num_tokens, codebook_dim) + self.weight = nn.Parameter(weight, requires_grad = False) + self.cluster_size = nn.Parameter(torch.zeros(num_tokens), requires_grad = False) + 
self.embed_avg = nn.Parameter(weight.clone(), requires_grad = False) + self.update = True + + def forward(self, embed_id): + return F.embedding(embed_id, self.weight) + + def cluster_size_ema_update(self, new_cluster_size): + self.cluster_size.data.mul_(self.decay).add_(new_cluster_size, alpha=1 - self.decay) + + def embed_avg_ema_update(self, new_embed_avg): + self.embed_avg.data.mul_(self.decay).add_(new_embed_avg, alpha=1 - self.decay) + + def weight_update(self, num_tokens): + n = self.cluster_size.sum() + smoothed_cluster_size = ( + (self.cluster_size + self.eps) / (n + num_tokens * self.eps) * n + ) + #normalize embedding average with smoothed cluster size + embed_normalized = self.embed_avg / smoothed_cluster_size.unsqueeze(1) + self.weight.data.copy_(embed_normalized) + + +class EMAVectorQuantizer(nn.Module): + def __init__(self, n_embed, embedding_dim, beta, decay=0.99, eps=1e-5, + remap=None, unknown_index="random"): + super().__init__() + # alias the constructor arguments to the names used throughout this class + self.codebook_dim = embedding_dim + self.num_tokens = n_embed + self.n_embed = n_embed + self.beta = beta + self.embedding = EmbeddingEMA(self.num_tokens, self.codebook_dim, decay, eps) + + self.remap = remap + if self.remap is not None: + self.register_buffer("used", torch.tensor(np.load(self.remap))) + self.re_embed = self.used.shape[0] + self.unknown_index = unknown_index # "random" or "extra" or integer + if self.unknown_index == "extra": + self.unknown_index = self.re_embed + self.re_embed = self.re_embed+1 + print(f"Remapping {self.n_embed} indices to {self.re_embed} indices. " + f"Using {self.unknown_index} for unknown indices.") + else: + self.re_embed = n_embed + + def remap_to_used(self, inds): + ishape = inds.shape + assert len(ishape)>1 + inds = inds.reshape(ishape[0],-1) + used = self.used.to(inds) + match = (inds[:,:,None]==used[None,None,...]).long() + new = match.argmax(-1) + unknown = match.sum(2)<1 + if self.unknown_index == "random": + new[unknown]=torch.randint(0,self.re_embed,size=new[unknown].shape).to(device=new.device) + else: + new[unknown] = self.unknown_index + return new.reshape(ishape) + + def unmap_to_all(self, inds): + ishape = inds.shape + assert len(ishape)>1 + inds = inds.reshape(ishape[0],-1) + used = self.used.to(inds) + if self.re_embed > self.used.shape[0]: # extra token + inds[inds>=self.used.shape[0]] = 0 # simply set to zero + back=torch.gather(used[None,:][inds.shape[0]*[0],:], 1, inds) + return back.reshape(ishape) + + def forward(self, z): + # reshape z -> (batch, height, width, channel) and flatten + #z, 'b c h w -> b h w c' + z = rearrange(z, 'b c h w -> b h w c') + z_flattened = z.reshape(-1, self.codebook_dim) + + # distances from z to embeddings e_j (z - e)^2 = z^2 + e^2 - 2 e * z + d = z_flattened.pow(2).sum(dim=1, keepdim=True) + \ + self.embedding.weight.pow(2).sum(dim=1) - 2 * \ + torch.einsum('bd,nd->bn', z_flattened, self.embedding.weight) # 'n d -> d n' + + + encoding_indices = torch.argmin(d, dim=1) + + z_q = self.embedding(encoding_indices).view(z.shape) + encodings = F.one_hot(encoding_indices, self.num_tokens).type(z.dtype) + avg_probs = torch.mean(encodings, dim=0) + perplexity = torch.exp(-torch.sum(avg_probs * torch.log(avg_probs + 1e-10))) + + if self.training and self.embedding.update: + #EMA cluster size + encodings_sum = encodings.sum(0) + self.embedding.cluster_size_ema_update(encodings_sum) + #EMA embedding average + embed_sum = encodings.transpose(0,1) @ z_flattened + self.embedding.embed_avg_ema_update(embed_sum) + #normalize embed_avg and update weight +
self.embedding.weight_update(self.num_tokens) + + # compute loss for embedding + loss = self.beta * F.mse_loss(z_q.detach(), z) + + # preserve gradients + z_q = z + (z_q - z).detach() + + # reshape back to match original input shape + #z_q, 'b h w c -> b c h w' + z_q = rearrange(z_q, 'b h w c -> b c h w') + return z_q, loss, (perplexity, encodings, encoding_indices) diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/logo-dark.svg b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/logo-dark.svg new file mode 100644 index 0000000000000000000000000000000000000000..2c867bfee606f32165c52145139632c700e3ccfd --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/logo-dark.svg @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/logo.svg b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/logo.svg new file mode 100644 index 0000000000000000000000000000000000000000..95e053cd6b4cc6bbc8061a7a7a4adaf0eb6ac0f1 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/logo.svg @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/requirements.txt b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..9bd90a1ff6d9c089c8659a849e1c4ca93f8d3bdd --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/requirements.txt @@ -0,0 +1,7 @@ +torchvision +torch +einops +omegaconf +tqdm +pytorch-lightning +torchmetrics==0.11.4 \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/workflow.png b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/workflow.png new file mode 100644 index 0000000000000000000000000000000000000000..96c250603587e2e3c4b64f8b3d0a7ee5dee13bec --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/workflow.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0d9c4aeb730c5df70d843cc2ee7bb38aa61c1faf0db34a633d7134f4939481ca +size 2937116 diff --git a/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/workflow2.png b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/workflow2.png new file mode 100644 index 0000000000000000000000000000000000000000..7c7c7da75c6dbe1f5275f2ae34142a33f0c84aee --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Flowty-LDSR/workflow2.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f4b604148d7d6e76484e5d1e5f612771b6a04d0a119ba2ce5916bae00ce650de +size 1809828 diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/.gitignore b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..bd1c29a185723f8b498db7d22d2aef6d3f98688c --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/.gitignore @@ -0,0 +1,2 @@ +**/__pycache__ +.DS_Store diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/LICENSE b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f288702d2fa16d3cdf0035b15a9fcbc552cd88e7 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. 
+ + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. 
Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. 
For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. 
You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. 
+ + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. 
+ + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. 
+ + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. 
+ + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + <program> Copyright (C) <year> <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box".
+#### Guidance Suggestions
+
+- For sampling, normal Flux guidance works (~3.5)
+- For unsampling, use 0
+
+#### Common Issues
+
+- Overlaid images -- try changing your start step and/or eta. A start step that is too late won't be able to influence the image generation properly
+- Not following edits -- try fewer steps (change start/end step) or a lower eta
+- Make sure the step counts on the Forward (unsampling) and Reverse (sampling) samplers match (28 each is recommended)
+
+## Other Inversion Techniques
+
+### Inverse Noise (unsampling via DDIM)
+
+![unsampling_example](https://github.com/user-attachments/assets/9c604a31-5cc9-49c2-9a08-98e7872591c2)
+
+### Inject Inversed Noise
+
+See the example workflow for how to use this one. It's similar to inverse noise/unsampling, but has better adherence to the input image.
+
+![inject_inversed_noise_example](https://github.com/user-attachments/assets/ee052855-12c6-47f7-8178-b4acfb2ca6b9)
+![inject_unsampled_noise_cowboy](https://github.com/user-attachments/assets/4d92c591-e04d-4123-a432-d859a32e5f46)
+
+## Acknowledgements
+
+[RF-Inversion](https://rf-inversion.github.io/)
+
+```
+@article{rout2024rfinversion,
+  title={Semantic Image Inversion and Editing using Rectified Stochastic Differential Equations},
+  author={Litu Rout and Yujia Chen and Nataniel Ruiz and Constantine Caramanis and Sanjay Shakkottai and Wen-Sheng Chu},
+  journal={arXiv preprint arXiv:2410.10792},
+  year={2024}
+}
+```
diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..facfb46d2683bc26e264ce5c1ad9e03f73c094c3
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/__init__.py
@@ -0,0 +1,39 @@
+from .nodes.flip_sigmas_node import InFluxFlipSigmasNode
+from .nodes.influx_model_pred_node import InFluxModelSamplingPredNode, OutFluxModelSamplingPredNode
+from .nodes.flux_deguidance_node import FluxDeGuidance
+from .nodes.inverse_sampler_node import FluxInverseSamplerNode
+from .nodes.apply_ref_flux import ApplyRefFluxNode, ConfigureRefFluxNode
+from .nodes.mix_noise_node import FluxNoiseMixerNode
+from .nodes.rectified_sampler_nodes import FluxForwardODESamplerNode, FluxReverseODESamplerNode
+
+
+NODE_CLASS_MAPPINGS = {
+    "InFluxFlipSigmas": InFluxFlipSigmasNode,
+    "InFluxModelSamplingPred": InFluxModelSamplingPredNode,
+    "OutFluxModelSamplingPred": OutFluxModelSamplingPredNode,
+    "FluxDeGuidance": FluxDeGuidance,
+    "FluxInverseSampler": FluxInverseSamplerNode,
+    "ApplyRefFlux": ApplyRefFluxNode,
+    "ConfigureRefFlux": ConfigureRefFluxNode,
+    "FluxNoiseMixer": FluxNoiseMixerNode,
+    "FluxForwardODESampler": FluxForwardODESamplerNode,
+    "FluxReverseODESampler": FluxReverseODESamplerNode,
+    # "AddFluxFlow": AddFluxFlowNode,
+    # "ApplyFluxRaveAttention": ApplyFluxRaveAttentionNode,
+}
+
+NODE_DISPLAY_NAME_MAPPINGS = {
+    "InFluxFlipSigmas": "Flip Flux Sigmas",
+    "InFluxModelSamplingPred": "Inverse Flux Model Pred",
+    "OutFluxModelSamplingPred": "Outverse Flux Model Pred",
+    "FluxDeGuidance": "Flux DeGuidance",
+    "FluxInverseSampler": "Flux Inverse Sampler",
+    "ApplyRefFlux": "Apply Ref Flux Model",
+    "ConfigureRefFlux": "Configure Ref for Flux",
+    "FluxNoiseMixer": "Flux Mix Noise",
+    "FluxForwardODESampler": "Flux Forward ODE Sampler",
+    "FluxReverseODESampler": "Flux Reverse ODE Sampler",
+    # "AddFluxFlow": "Add Flux Flow",
+    # "ApplyFluxRaveAttention": "Apply Flux Rave Attn",
+}
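These two dictionaries follow ComfyUI's standard custom-node registration convention: on startup, ComfyUI imports the package's `__init__.py` and reads `NODE_CLASS_MAPPINGS` (internal name to implementing class) and `NODE_DISPLAY_NAME_MAPPINGS` (internal name to UI label). For orientation, a class that such a mapping points at follows ComfyUI's node interface; the sketch below is a hypothetical example, not one of this package's real nodes (those live under `./nodes/`):

```python
class FluxSigmaPassthrough:
    """Hypothetical example node: accepts sigmas and returns them unchanged."""

    @classmethod
    def INPUT_TYPES(cls):
        # Declares the input sockets/widgets ComfyUI renders for this node
        return {"required": {"sigmas": ("SIGMAS",)}}

    RETURN_TYPES = ("SIGMAS",)   # output socket types
    FUNCTION = "execute"         # name of the method ComfyUI calls on execution
    CATEGORY = "fluxtapoz"       # menu placement in the UI

    def execute(self, sigmas):
        return (sigmas,)         # outputs are always returned as a tuple


# Registration would mirror the dictionaries above:
# NODE_CLASS_MAPPINGS["FluxSigmaPassthrough"] = FluxSigmaPassthrough
# NODE_DISPLAY_NAME_MAPPINGS["FluxSigmaPassthrough"] = "Flux Sigma Passthrough"
```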
diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a9be28d82e571da6c760c35e1613a03fbddec0a3
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/__pycache__/__init__.cpython-310.pyc differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/example_workflows/example_rf_inversion_stylization.json b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/example_workflows/example_rf_inversion_stylization.json
new file mode 100644
index 0000000000000000000000000000000000000000..1a564a089adf69fcd568602f3854c4a9ebc34e2a
--- /dev/null
+++ 
b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/example_workflows/example_rf_inversion_stylization.json @@ -0,0 +1,2462 @@ +{ + "last_node_id": 165, + "last_link_id": 330, + "nodes": [ + { + "id": 22, + "type": "BasicGuider", + "pos": { + "0": 517.8887329101562, + "1": 193.40916442871094 + }, + "size": { + "0": 222.3482666015625, + "1": 46 + }, + "flags": {}, + "order": 43, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 323, + "slot_index": 0 + }, + { + "name": "conditioning", + "type": "CONDITIONING", + "link": 253, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "GUIDER", + "type": "GUIDER", + "links": [ + 30 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "BasicGuider" + }, + "widgets_values": [] + }, + { + "id": 46, + "type": "BasicGuider", + "pos": { + "0": -164.695556640625, + "1": 196.65341186523438 + }, + "size": { + "0": 161.1999969482422, + "1": 46 + }, + "flags": {}, + "order": 40, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 314, + "slot_index": 0 + }, + { + "name": "conditioning", + "type": "CONDITIONING", + "link": 195, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "GUIDER", + "type": "GUIDER", + "links": [ + 124 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "BasicGuider" + }, + "widgets_values": [] + }, + { + "id": 66, + "type": "SetNode", + "pos": { + "0": -956.0843505859375, + "1": 192.0753173828125 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 24, + "mode": 0, + "inputs": [ + { + "name": "MODEL", + "type": "MODEL", + "link": 293 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_MODEL", + "properties": { + "previousName": "MODEL" + }, + "widgets_values": [ + "MODEL" + ] + }, + { + "id": 40, + "type": "ImageScale", + "pos": { + "0": -1291.8131103515625, + "1": 625.7698364257812 + }, + "size": { + "0": 210, + "1": 130 + }, + "flags": {}, + "order": 34, + "mode": 0, + "inputs": [ + { + "name": "image", + "type": "IMAGE", + "link": 267 + }, + { + "name": "width", + "type": "INT", + "link": 181, + "widget": { + "name": "width" + } + }, + { + "name": "height", + "type": "INT", + "link": 182, + "widget": { + "name": "height" + } + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 180 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "ImageScale" + }, + "widgets_values": [ + "nearest-exact", + 1024, + 1024, + "center" + ] + }, + { + "id": 13, + "type": "SamplerCustomAdvanced", + "pos": { + "0": 507.888671875, + "1": 293.40911865234375 + }, + "size": { + "0": 236.8000030517578, + "1": 106 + }, + "flags": {}, + "order": 46, + "mode": 0, + "inputs": [ + { + "name": "noise", + "type": "NOISE", + "link": 127, + "slot_index": 0 + }, + { + "name": "guider", + "type": "GUIDER", + "link": 30, + "slot_index": 1 + }, + { + "name": "sampler", + "type": "SAMPLER", + "link": 317, + "slot_index": 2 + }, + { + "name": "sigmas", + "type": "SIGMAS", + "link": 237, + "slot_index": 3 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 223, + "slot_index": 4 + } + ], + "outputs": [ + { + "name": "output", + "type": "LATENT", + "links": [ + 24 + ], + "slot_index": 0, + "shape": 3 + }, + { + "name": "denoised_output", + "type": "LATENT", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "SamplerCustomAdvanced" + }, + "widgets_values": [] + }, + { + "id": 50, + 
"type": "DisableNoise", + "pos": { + "0": -180.695556640625, + "1": 449.6534423828125 + }, + "size": { + "0": 140, + "1": 26 + }, + "flags": { + "collapsed": true + }, + "order": 0, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "NOISE", + "type": "NOISE", + "links": [ + 128 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "DisableNoise" + }, + "widgets_values": [] + }, + { + "id": 67, + "type": "SetNode", + "pos": { + "0": -960.2341918945312, + "1": 299.4647521972656 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 25, + "mode": 0, + "inputs": [ + { + "name": "CLIP", + "type": "CLIP", + "link": 294 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_CLIP", + "properties": { + "previousName": "CLIP" + }, + "widgets_values": [ + "CLIP" + ] + }, + { + "id": 65, + "type": "SetNode", + "pos": { + "0": -958.2341918945312, + "1": 407.4647521972656 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 26, + "mode": 0, + "inputs": [ + { + "name": "VAE", + "type": "VAE", + "link": 295 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_VAE", + "properties": { + "previousName": "VAE" + }, + "widgets_values": [ + "VAE" + ] + }, + { + "id": 85, + "type": "SetNode", + "pos": { + "0": -970.2341918945312, + "1": 622.4645385742188 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 39, + "mode": 0, + "inputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "link": 180 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_IMG", + "properties": { + "previousName": "IMG" + }, + "widgets_values": [ + "IMG" + ] + }, + { + "id": 76, + "type": "SetNode", + "pos": { + "0": -982.2343139648438, + "1": 810.4645385742188 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 32, + "mode": 0, + "inputs": [ + { + "name": "INT", + "type": "INT", + "link": 174 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_WIDTH", + "properties": { + "previousName": "WIDTH" + }, + "widgets_values": [ + "WIDTH" + ] + }, + { + "id": 79, + "type": "SetNode", + "pos": { + "0": -989.234375, + "1": 929.4645385742188 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 33, + "mode": 0, + "inputs": [ + { + "name": "INT", + "type": "INT", + "link": 175 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_HEIGHT", + "properties": { + "previousName": "HEIGHT" + }, + "widgets_values": [ + "HEIGHT" + ] + }, + { + "id": 106, + "type": "GetNode", + "pos": { + "0": 357.3566589355469, + "1": -1461.5296630859375 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 1, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 217 + ], + "slot_index": 0 + } + ], + "title": "Get_VAE", + "properties": {}, + "widgets_values": [ + "VAE" + ] + }, + { + "id": 105, + "type": "SaveImage", + "pos": { + "0": 486, + "1": -1469 + }, + "size": { + "0": 985.3012084960938, + "1": 1060.3828125 + }, + "flags": {}, + "order": 47, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 218 + } + ], + "outputs": [], + "properties": {}, + "widgets_values": [ + "ok" + ] + }, + { + "id": 81, + "type": "GetNode", + "pos": { + "0": -173.71441650390625, + "1": 1051.699462890625 + }, + "size": { + "0": 210, + "1": 58 + 
}, + "flags": { + "collapsed": true + }, + "order": 2, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 176 + ], + "slot_index": 0 + } + ], + "title": "Get_HEIGHT", + "properties": {}, + "widgets_values": [ + "HEIGHT" + ] + }, + { + "id": 82, + "type": "GetNode", + "pos": { + "0": -168.71441650390625, + "1": 1009.69970703125 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 3, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 177 + ], + "slot_index": 0 + } + ], + "title": "Get_WIDTH", + "properties": {}, + "widgets_values": [ + "WIDTH" + ] + }, + { + "id": 104, + "type": "VAEDecode", + "pos": { + "0": 351.3566589355469, + "1": -1553.5296630859375 + }, + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 45, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 219 + }, + { + "name": "vae", + "type": "VAE", + "link": 217 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 218 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + }, + "widgets_values": [] + }, + { + "id": 48, + "type": "DisableNoise", + "pos": { + "0": 568, + "1": 447 + }, + "size": { + "0": 140, + "1": 26 + }, + "flags": { + "collapsed": true + }, + "order": 4, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "NOISE", + "type": "NOISE", + "links": [ + 127 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "DisableNoise" + }, + "widgets_values": [] + }, + { + "id": 70, + "type": "GetNode", + "pos": { + "0": 192, + "1": 573 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 5, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 198 + ], + "slot_index": 0 + } + ], + "title": "Get_CLIP", + "properties": {}, + "widgets_values": [ + "CLIP" + ] + }, + { + "id": 121, + "type": "Note", + "pos": { + "0": -570.1758422851562, + "1": 741.79296875 + }, + "size": { + "0": 268.1962585449219, + "1": 234.70770263671875 + }, + "flags": {}, + "order": 6, + "mode": 0, + "inputs": [], + "outputs": [], + "properties": {}, + "widgets_values": [ + "More steps during unsampling gives more detail and less likely to get blurry results" + ], + "color": "#432", + "bgcolor": "#653" + }, + { + "id": 152, + "type": "UNETLoader", + "pos": { + "0": -1372, + "1": 156 + }, + "size": { + "0": 315, + "1": 82 + }, + "flags": {}, + "order": 7, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 293 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "UNETLoader" + }, + "widgets_values": [ + "flux1-dev-fp8-e4m3fn.safetensors", + "fp8_e4m3fn" + ] + }, + { + "id": 154, + "type": "DualCLIPLoader", + "pos": { + "0": -1356, + "1": 276 + }, + "size": { + "0": 315, + "1": 106 + }, + "flags": {}, + "order": 8, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 294 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "DualCLIPLoader" + }, + "widgets_values": [ + "t5xxl_fp16.safetensors", + "clip_l.safetensors", + "flux" + ] + }, + { + "id": 155, + "type": "VAELoader", + "pos": { + "0": -1385, + "1": 439 + }, + "size": { + "0": 315, + "1": 58 + }, + "flags": {}, + "order": 9, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 
295 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAELoader" + }, + "widgets_values": [ + "ae.safetensors" + ] + }, + { + "id": 60, + "type": "FluxDeGuidance", + "pos": { + "0": -667.695556640625, + "1": 195.65341186523438 + }, + "size": { + "0": 211.60000610351562, + "1": 58 + }, + "flags": {}, + "order": 37, + "mode": 0, + "inputs": [ + { + "name": "conditioning", + "type": "CONDITIONING", + "link": 152 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 195 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "FluxDeGuidance" + }, + "widgets_values": [ + 0 + ] + }, + { + "id": 62, + "type": "FluxDeGuidance", + "pos": { + "0": 159, + "1": 204 + }, + "size": { + "0": 211.60000610351562, + "1": 58 + }, + "flags": {}, + "order": 35, + "mode": 0, + "inputs": [ + { + "name": "conditioning", + "type": "CONDITIONING", + "link": 159 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 253 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "FluxDeGuidance" + }, + "widgets_values": [ + 3.5 + ] + }, + { + "id": 51, + "type": "FlipSigmas", + "pos": { + "0": -155, + "1": 610 + }, + "size": { + "0": 140, + "1": 26 + }, + "flags": { + "collapsed": true + }, + "order": 36, + "mode": 0, + "inputs": [ + { + "name": "sigmas", + "type": "SIGMAS", + "link": 130 + } + ], + "outputs": [ + { + "name": "SIGMAS", + "type": "SIGMAS", + "links": [ + 131 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "FlipSigmas" + }, + "widgets_values": [] + }, + { + "id": 52, + "type": "InFluxModelSamplingPred", + "pos": { + "0": -198, + "1": 809 + }, + "size": { + "0": 210, + "1": 122 + }, + "flags": {}, + "order": 28, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 315 + }, + { + "name": "width", + "type": "INT", + "link": 177, + "widget": { + "name": "width" + } + }, + { + "name": "height", + "type": "INT", + "link": 176, + "widget": { + "name": "height" + } + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 314 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "InFluxModelSamplingPred" + }, + "widgets_values": [ + 1.15, + 0.5, + 1024, + 1024 + ] + }, + { + "id": 73, + "type": "GetNode", + "pos": { + "0": -169, + "1": 969 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": false + }, + "order": 10, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 313, + 315 + ], + "slot_index": 0 + } + ], + "title": "Get_MODEL", + "properties": {}, + "widgets_values": [ + "MODEL" + ] + }, + { + "id": 69, + "type": "GetNode", + "pos": { + "0": -584, + "1": 684 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 11, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 165 + ], + "slot_index": 0 + } + ], + "title": "Get_VAE", + "properties": {}, + "widgets_values": [ + "VAE" + ] + }, + { + "id": 86, + "type": "GetNode", + "pos": { + "0": -584, + "1": 631 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 12, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 183 + ], + "slot_index": 0 + } + ], + "title": "Get_IMG", + "properties": {}, + "widgets_values": [ + "IMG" + ] + }, + { + "id": 41, 
+ "type": "VAEEncode", + "pos": { + "0": -599, + "1": 540 + }, + "size": { + "0": 210, + "1": 46 + }, + "flags": { + "collapsed": false + }, + "order": 29, + "mode": 0, + "inputs": [ + { + "name": "pixels", + "type": "IMAGE", + "link": 183 + }, + { + "name": "vae", + "type": "VAE", + "link": 165 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 132, + 316 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "VAEEncode" + }, + "widgets_values": [] + }, + { + "id": 68, + "type": "GetNode", + "pos": { + "0": -599, + "1": 494 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 13, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 164 + ], + "slot_index": 0 + } + ], + "title": "Get_CLIP", + "properties": {}, + "widgets_values": [ + "CLIP" + ] + }, + { + "id": 45, + "type": "SamplerCustomAdvanced", + "pos": { + "0": -217.695556640625, + "1": 291.6534423828125 + }, + "size": { + "0": 236.8000030517578, + "1": 107.92439270019531 + }, + "flags": {}, + "order": 44, + "mode": 0, + "inputs": [ + { + "name": "noise", + "type": "NOISE", + "link": 128, + "slot_index": 0 + }, + { + "name": "guider", + "type": "GUIDER", + "link": 124, + "slot_index": 1 + }, + { + "name": "sampler", + "type": "SAMPLER", + "link": 222, + "slot_index": 2 + }, + { + "name": "sigmas", + "type": "SIGMAS", + "link": 131, + "slot_index": 3 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 132, + "slot_index": 4 + } + ], + "outputs": [ + { + "name": "output", + "type": "LATENT", + "links": [ + 219, + 223 + ], + "slot_index": 0, + "shape": 3 + }, + { + "name": "denoised_output", + "type": "LATENT", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "SamplerCustomAdvanced" + }, + "widgets_values": [] + }, + { + "id": 108, + "type": "FluxForwardODESampler", + "pos": { + "0": -201, + "1": 500 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 14, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "SAMPLER", + "type": "SAMPLER", + "links": [ + 222 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "FluxForwardODESampler" + }, + "widgets_values": [ + 0.5 + ] + }, + { + "id": 49, + "type": "BasicScheduler", + "pos": { + "0": -200, + "1": 659 + }, + "size": { + "0": 210, + "1": 106 + }, + "flags": {}, + "order": 27, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 313, + "slot_index": 0 + } + ], + "outputs": [ + { + "name": "SIGMAS", + "type": "SIGMAS", + "links": [ + 130 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "BasicScheduler" + }, + "widgets_values": [ + "simple", + 28, + 1 + ] + }, + { + "id": 8, + "type": "VAEDecode", + "pos": { + "0": 887, + "1": 90 + }, + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 48, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 24 + }, + { + "name": "vae", + "type": "VAE", + "link": 167 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 287, + 303 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + }, + "widgets_values": [] + }, + { + "id": 71, + "type": "GetNode", + "pos": { + "0": 901, + "1": 186 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 15, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "VAE", + "type": "VAE", + 
"links": [ + 167 + ], + "slot_index": 0 + } + ], + "title": "Get_VAE", + "properties": {}, + "widgets_values": [ + "VAE" + ] + }, + { + "id": 132, + "type": "GetNode", + "pos": { + "0": 1678, + "1": -24 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 16, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 280 + ], + "slot_index": 0 + } + ], + "title": "Get_IMG", + "properties": {}, + "widgets_values": [ + "IMG" + ] + }, + { + "id": 83, + "type": "GetNode", + "pos": { + "0": 562, + "1": 1076 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 17, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 179 + ], + "slot_index": 0 + } + ], + "title": "Get_HEIGHT", + "properties": {}, + "widgets_values": [ + "HEIGHT" + ] + }, + { + "id": 84, + "type": "GetNode", + "pos": { + "0": 566, + "1": 1032 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 18, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 178 + ], + "slot_index": 0 + } + ], + "title": "Get_WIDTH", + "properties": {}, + "widgets_values": [ + "WIDTH" + ] + }, + { + "id": 17, + "type": "BasicScheduler", + "pos": { + "0": 536, + "1": 491 + }, + "size": { + "0": 210, + "1": 106 + }, + "flags": {}, + "order": 42, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 318, + "slot_index": 0 + } + ], + "outputs": [ + { + "name": "SIGMAS", + "type": "SIGMAS", + "links": [ + 237 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "BasicScheduler" + }, + "widgets_values": [ + "simple", + 28, + 1 + ] + }, + { + "id": 72, + "type": "GetNode", + "pos": { + "0": 136, + "1": 954 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": false + }, + "order": 19, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 320 + ], + "slot_index": 0 + } + ], + "title": "Get_MODEL", + "properties": {}, + "widgets_values": [ + "MODEL" + ] + }, + { + "id": 55, + "type": "OutFluxModelSamplingPred", + "pos": { + "0": 527, + "1": 837 + }, + "size": { + "0": 210, + "1": 146 + }, + "flags": {}, + "order": 38, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 322 + }, + { + "name": "width", + "type": "INT", + "link": 178, + "widget": { + "name": "width" + } + }, + { + "name": "height", + "type": "INT", + "link": 179, + "widget": { + "name": "height" + } + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 305, + 318, + 323 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "OutFluxModelSamplingPred" + }, + "widgets_values": [ + 1.15, + 0.5, + 1024, + 1024, + true + ] + }, + { + "id": 78, + "type": "INTConstant", + "pos": { + "0": -1282.2342529296875, + "1": 808.4645385742188 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 20, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "value", + "type": "INT", + "links": [ + 174, + 181 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "INTConstant" + }, + "widgets_values": [ + 1024 + ], + "color": "#1b4669", + "bgcolor": "#29699c" + }, + { + "id": 80, + "type": "INTConstant", + "pos": { + "0": -1276.2342529296875, + "1": 917.4645385742188 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": 
{}, + "order": 21, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "value", + "type": "INT", + "links": [ + 175, + 182 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "INTConstant" + }, + "widgets_values": [ + 1024 + ], + "color": "#1b4669", + "bgcolor": "#29699c" + }, + { + "id": 131, + "type": "LoadImage", + "pos": { + "0": -2166, + "1": 198.05648803710938 + }, + "size": { + "0": 707.2029418945312, + "1": 917.039306640625 + }, + "flags": {}, + "order": 22, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 267 + ], + "slot_index": 0 + }, + { + "name": "MASK", + "type": "MASK", + "links": null + } + ], + "properties": { + "Node name for S&R": "LoadImage" + }, + "widgets_values": [ + "melting_gold.png", + "image" + ] + }, + { + "id": 38, + "type": "CLIPTextEncode", + "pos": { + "0": -663, + "1": 293 + }, + "size": { + "0": 383.0185852050781, + "1": 148.05877685546875 + }, + "flags": { + "collapsed": false + }, + "order": 30, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 164 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 152 + ], + "slot_index": 0 + } + ], + "title": "CLIP Text Encode (Positive Prompt)", + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "3d melting gold render" + ], + "color": "#232", + "bgcolor": "#353" + }, + { + "id": 160, + "type": "LoraLoaderModelOnly", + "pos": { + "0": 149, + "1": 822 + }, + "size": { + "0": 315, + "1": 82 + }, + "flags": {}, + "order": 31, + "mode": 4, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 320 + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 322 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "LoraLoaderModelOnly" + }, + "widgets_values": [ + "Flux_1_Dev_LoRA_AestheticAnime.safetensors", + 1 + ] + }, + { + "id": 157, + "type": "PreviewImage", + "pos": { + "0": 1107, + "1": 346 + }, + "size": [ + 749.0810936896919, + 804.6389407543991 + ], + "flags": {}, + "order": 50, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 303 + } + ], + "outputs": [], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 6, + "type": "CLIPTextEncode", + "pos": { + "0": 99.80657196044922, + "1": 311.393798828125 + }, + "size": { + "0": 348.5190124511719, + "1": 193.99783325195312 + }, + "flags": {}, + "order": 23, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 198 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 159 + ], + "slot_index": 0 + } + ], + "title": "CLIP Text Encode (Positive Prompt)", + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "viking on a warship in the style of 3d melting gold render" + ], + "color": "#232", + "bgcolor": "#353" + }, + { + "id": 109, + "type": "FluxReverseODESampler", + "pos": { + "0": 533, + "1": 643 + }, + "size": { + "0": 210, + "1": 150 + }, + "flags": {}, + "order": 41, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 305 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 316 + } + ], + "outputs": [ + { + "name": "SAMPLER", + "type": "SAMPLER", + "links": [ + 317 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "FluxReverseODESampler" + }, + "widgets_values": [ + 0.9, + 0, + 9, + "linear_decrease" + ] + }, + 
{ + "id": 145, + "type": "ImageConcatMulti", + "pos": { + "0": 1663, + "1": 100 + }, + "size": { + "0": 210, + "1": 170 + }, + "flags": {}, + "order": 49, + "mode": 0, + "inputs": [ + { + "name": "image_1", + "type": "IMAGE", + "link": 280 + }, + { + "name": "image_2", + "type": "IMAGE", + "link": 287 + }, + { + "name": "image_3", + "type": "IMAGE", + "link": null + } + ], + "outputs": [ + { + "name": "images", + "type": "IMAGE", + "links": [ + 309 + ], + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 2, + "right", + false, + null + ] + }, + { + "id": 158, + "type": "PreviewImage", + "pos": { + "0": 1900, + "1": 372 + }, + "size": [ + 1267.3341782100497, + 668.9405804496673 + ], + "flags": {}, + "order": 51, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 309 + } + ], + "outputs": [], + "properties": { + "Node name for S&R": "PreviewImage" + } + } + ], + "links": [ + [ + 24, + 13, + 0, + 8, + 0, + "LATENT" + ], + [ + 30, + 22, + 0, + 13, + 1, + "GUIDER" + ], + [ + 124, + 46, + 0, + 45, + 1, + "GUIDER" + ], + [ + 127, + 48, + 0, + 13, + 0, + "NOISE" + ], + [ + 128, + 50, + 0, + 45, + 0, + "NOISE" + ], + [ + 130, + 49, + 0, + 51, + 0, + "SIGMAS" + ], + [ + 131, + 51, + 0, + 45, + 3, + "SIGMAS" + ], + [ + 132, + 41, + 0, + 45, + 4, + "LATENT" + ], + [ + 152, + 38, + 0, + 60, + 0, + "CONDITIONING" + ], + [ + 159, + 6, + 0, + 62, + 0, + "CONDITIONING" + ], + [ + 164, + 68, + 0, + 38, + 0, + "CLIP" + ], + [ + 165, + 69, + 0, + 41, + 1, + "VAE" + ], + [ + 167, + 71, + 0, + 8, + 1, + "VAE" + ], + [ + 174, + 78, + 0, + 76, + 0, + "*" + ], + [ + 175, + 80, + 0, + 79, + 0, + "*" + ], + [ + 176, + 81, + 0, + 52, + 2, + "INT" + ], + [ + 177, + 82, + 0, + 52, + 1, + "INT" + ], + [ + 178, + 84, + 0, + 55, + 1, + "INT" + ], + [ + 179, + 83, + 0, + 55, + 2, + "INT" + ], + [ + 180, + 40, + 0, + 85, + 0, + "*" + ], + [ + 181, + 78, + 0, + 40, + 1, + "INT" + ], + [ + 182, + 80, + 0, + 40, + 2, + "INT" + ], + [ + 183, + 86, + 0, + 41, + 0, + "IMAGE" + ], + [ + 195, + 60, + 0, + 46, + 1, + "CONDITIONING" + ], + [ + 198, + 70, + 0, + 6, + 0, + "CLIP" + ], + [ + 217, + 106, + 0, + 104, + 1, + "VAE" + ], + [ + 218, + 104, + 0, + 105, + 0, + "IMAGE" + ], + [ + 219, + 45, + 0, + 104, + 0, + "LATENT" + ], + [ + 222, + 108, + 0, + 45, + 2, + "SAMPLER" + ], + [ + 223, + 45, + 0, + 13, + 4, + "LATENT" + ], + [ + 237, + 17, + 0, + 13, + 3, + "SIGMAS" + ], + [ + 253, + 62, + 0, + 22, + 1, + "CONDITIONING" + ], + [ + 267, + 131, + 0, + 40, + 0, + "IMAGE" + ], + [ + 280, + 132, + 0, + 145, + 0, + "IMAGE" + ], + [ + 287, + 8, + 0, + 145, + 1, + "IMAGE" + ], + [ + 293, + 152, + 0, + 66, + 0, + "MODEL" + ], + [ + 294, + 154, + 0, + 67, + 0, + "CLIP" + ], + [ + 295, + 155, + 0, + 65, + 0, + "VAE" + ], + [ + 303, + 8, + 0, + 157, + 0, + "IMAGE" + ], + [ + 305, + 55, + 0, + 109, + 0, + "MODEL" + ], + [ + 309, + 145, + 0, + 158, + 0, + "IMAGE" + ], + [ + 313, + 73, + 0, + 49, + 0, + "MODEL" + ], + [ + 314, + 52, + 0, + 46, + 0, + "MODEL" + ], + [ + 315, + 73, + 0, + 52, + 0, + "MODEL" + ], + [ + 316, + 41, + 0, + 109, + 1, + "LATENT" + ], + [ + 317, + 109, + 0, + 13, + 2, + "SAMPLER" + ], + [ + 318, + 55, + 0, + 17, + 0, + "MODEL" + ], + [ + 320, + 72, + 0, + 160, + 0, + "MODEL" + ], + [ + 322, + 160, + 0, + 55, + 0, + "MODEL" + ], + [ + 323, + 55, + 0, + 22, + 0, + "MODEL" + ] + ], + "groups": [ + { + "title": "Setup", + "bounding": [ + -2176, + 118, + 1440, + 1007 + ], + "color": "#3f789e", + "font_size": 24, + "flags": {} + }, + { + "title": "Sampling", + "bounding": [ + 80, 
+ 119, + 676, + 981 + ], + "color": "#3f789e", + "font_size": 24, + "flags": {} + }, + { + "title": "Unsampling", + "bounding": [ + -677, + 122, + 707, + 942 + ], + "color": "#3f789e", + "font_size": 24, + "flags": {} + } + ], + "config": {}, + "extra": { + "ds": { + "scale": 0.7400249944258369, + "offset": [ + 2424.024247095835, + 417.37758989110347 + ] + }, + "groupNodes": {} + }, + "version": 0.4 +} \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/example_workflows/example_rf_inversion_updated.json b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/example_workflows/example_rf_inversion_updated.json new file mode 100644 index 0000000000000000000000000000000000000000..53a72aa2d39989a376f5451c646de9519dabe5cb --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/example_workflows/example_rf_inversion_updated.json @@ -0,0 +1,2419 @@ +{ + "last_node_id": 159, + "last_link_id": 319, + "nodes": [ + { + "id": 22, + "type": "BasicGuider", + "pos": { + "0": 517.8887329101562, + "1": 193.40916442871094 + }, + "size": { + "0": 222.3482666015625, + "1": 46 + }, + "flags": {}, + "order": 41, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 306, + "slot_index": 0 + }, + { + "name": "conditioning", + "type": "CONDITIONING", + "link": 253, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "GUIDER", + "type": "GUIDER", + "links": [ + 30 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "BasicGuider" + }, + "widgets_values": [] + }, + { + "id": 46, + "type": "BasicGuider", + "pos": { + "0": -164.695556640625, + "1": 196.65341186523438 + }, + "size": { + "0": 161.1999969482422, + "1": 46 + }, + "flags": {}, + "order": 42, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 314, + "slot_index": 0 + }, + { + "name": "conditioning", + "type": "CONDITIONING", + "link": 195, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "GUIDER", + "type": "GUIDER", + "links": [ + 124 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "BasicGuider" + }, + "widgets_values": [] + }, + { + "id": 66, + "type": "SetNode", + "pos": { + "0": -956.0843505859375, + "1": 192.0753173828125 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 27, + "mode": 0, + "inputs": [ + { + "name": "MODEL", + "type": "MODEL", + "link": 293 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_MODEL", + "properties": { + "previousName": "MODEL" + }, + "widgets_values": [ + "MODEL" + ] + }, + { + "id": 40, + "type": "ImageScale", + "pos": { + "0": -1291.8131103515625, + "1": 625.7698364257812 + }, + "size": { + "0": 210, + "1": 130 + }, + "flags": {}, + "order": 26, + "mode": 0, + "inputs": [ + { + "name": "image", + "type": "IMAGE", + "link": 267 + }, + { + "name": "width", + "type": "INT", + "link": 181, + "widget": { + "name": "width" + } + }, + { + "name": "height", + "type": "INT", + "link": 182, + "widget": { + "name": "height" + } + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 180 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "ImageScale" + }, + "widgets_values": [ + "nearest-exact", + 1024, + 1024, + "center" + ] + }, + { + "id": 13, + "type": "SamplerCustomAdvanced", + "pos": { + "0": 507.888671875, + "1": 293.40911865234375 + }, + "size": { + "0": 236.8000030517578, + "1": 106 + }, + "flags": {}, + "order": 45, + "mode": 0, + 
"inputs": [ + { + "name": "noise", + "type": "NOISE", + "link": 127, + "slot_index": 0 + }, + { + "name": "guider", + "type": "GUIDER", + "link": 30, + "slot_index": 1 + }, + { + "name": "sampler", + "type": "SAMPLER", + "link": 317, + "slot_index": 2 + }, + { + "name": "sigmas", + "type": "SIGMAS", + "link": 237, + "slot_index": 3 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 223, + "slot_index": 4 + } + ], + "outputs": [ + { + "name": "output", + "type": "LATENT", + "links": [ + 24 + ], + "slot_index": 0, + "shape": 3 + }, + { + "name": "denoised_output", + "type": "LATENT", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "SamplerCustomAdvanced" + }, + "widgets_values": [] + }, + { + "id": 50, + "type": "DisableNoise", + "pos": { + "0": -180.695556640625, + "1": 449.6534423828125 + }, + "size": { + "0": 140, + "1": 26 + }, + "flags": { + "collapsed": true + }, + "order": 0, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "NOISE", + "type": "NOISE", + "links": [ + 128 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "DisableNoise" + }, + "widgets_values": [] + }, + { + "id": 67, + "type": "SetNode", + "pos": { + "0": -960.2341918945312, + "1": 299.4647521972656 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 28, + "mode": 0, + "inputs": [ + { + "name": "CLIP", + "type": "CLIP", + "link": 294 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_CLIP", + "properties": { + "previousName": "CLIP" + }, + "widgets_values": [ + "CLIP" + ] + }, + { + "id": 65, + "type": "SetNode", + "pos": { + "0": -958.2341918945312, + "1": 407.4647521972656 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 29, + "mode": 0, + "inputs": [ + { + "name": "VAE", + "type": "VAE", + "link": 295 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_VAE", + "properties": { + "previousName": "VAE" + }, + "widgets_values": [ + "VAE" + ] + }, + { + "id": 85, + "type": "SetNode", + "pos": { + "0": -970.2341918945312, + "1": 622.4645385742188 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 36, + "mode": 0, + "inputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "link": 180 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_IMG", + "properties": { + "previousName": "IMG" + }, + "widgets_values": [ + "IMG" + ] + }, + { + "id": 76, + "type": "SetNode", + "pos": { + "0": -982.2343139648438, + "1": 810.4645385742188 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 24, + "mode": 0, + "inputs": [ + { + "name": "INT", + "type": "INT", + "link": 174 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_WIDTH", + "properties": { + "previousName": "WIDTH" + }, + "widgets_values": [ + "WIDTH" + ] + }, + { + "id": 79, + "type": "SetNode", + "pos": { + "0": -989.234375, + "1": 929.4645385742188 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 25, + "mode": 0, + "inputs": [ + { + "name": "INT", + "type": "INT", + "link": 175 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_HEIGHT", + "properties": { + "previousName": "HEIGHT" + }, + "widgets_values": [ + "HEIGHT" + ] + }, + { + "id": 106, + "type": "GetNode", + "pos": { + "0": 357.3566589355469, + "1": -1461.5296630859375 + }, + "size": { + "0": 210, + "1": 
58 + }, + "flags": { + "collapsed": true + }, + "order": 1, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 217 + ], + "slot_index": 0 + } + ], + "title": "Get_VAE", + "properties": {}, + "widgets_values": [ + "VAE" + ] + }, + { + "id": 105, + "type": "SaveImage", + "pos": { + "0": 486, + "1": -1469 + }, + "size": { + "0": 985.3012084960938, + "1": 1060.3828125 + }, + "flags": {}, + "order": 46, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 218 + } + ], + "outputs": [], + "properties": {}, + "widgets_values": [ + "ok" + ] + }, + { + "id": 104, + "type": "VAEDecode", + "pos": { + "0": 351.3566589355469, + "1": -1553.5296630859375 + }, + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 44, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 219 + }, + { + "name": "vae", + "type": "VAE", + "link": 217 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 218 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + }, + "widgets_values": [] + }, + { + "id": 48, + "type": "DisableNoise", + "pos": { + "0": 568, + "1": 447 + }, + "size": { + "0": 140, + "1": 26 + }, + "flags": { + "collapsed": true + }, + "order": 2, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "NOISE", + "type": "NOISE", + "links": [ + 127 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "DisableNoise" + }, + "widgets_values": [] + }, + { + "id": 70, + "type": "GetNode", + "pos": { + "0": 192, + "1": 573 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 3, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 198 + ], + "slot_index": 0 + } + ], + "title": "Get_CLIP", + "properties": {}, + "widgets_values": [ + "CLIP" + ] + }, + { + "id": 78, + "type": "INTConstant", + "pos": { + "0": -1282.2342529296875, + "1": 808.4645385742188 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 4, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "value", + "type": "INT", + "links": [ + 174, + 181 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "INTConstant" + }, + "widgets_values": [ + 1024 + ], + "color": "#1b4669", + "bgcolor": "#29699c" + }, + { + "id": 80, + "type": "INTConstant", + "pos": { + "0": -1276.2342529296875, + "1": 917.4645385742188 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 5, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "value", + "type": "INT", + "links": [ + 175, + 182 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "INTConstant" + }, + "widgets_values": [ + 1024 + ], + "color": "#1b4669", + "bgcolor": "#29699c" + }, + { + "id": 131, + "type": "LoadImage", + "pos": { + "0": -2166, + "1": 198.05648803710938 + }, + "size": { + "0": 707.2029418945312, + "1": 917.039306640625 + }, + "flags": {}, + "order": 6, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 267 + ], + "slot_index": 0 + }, + { + "name": "MASK", + "type": "MASK", + "links": null + } + ], + "properties": { + "Node name for S&R": "LoadImage" + }, + "widgets_values": [ + "Screenshot 2024-10-15 at 7.43.05 PM.png", + "image" + ] + }, + { + "id": 152, + "type": "UNETLoader", + "pos": { + "0": -1372, + "1": 156 + }, + "size": { + "0": 315, + "1": 82 + }, + 
"flags": {}, + "order": 7, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 293 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "UNETLoader" + }, + "widgets_values": [ + "flux1-dev-fp8-e4m3fn.safetensors", + "fp8_e4m3fn" + ] + }, + { + "id": 154, + "type": "DualCLIPLoader", + "pos": { + "0": -1356, + "1": 276 + }, + "size": { + "0": 315, + "1": 106 + }, + "flags": {}, + "order": 8, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 294 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "DualCLIPLoader" + }, + "widgets_values": [ + "t5xxl_fp16.safetensors", + "clip_l.safetensors", + "flux" + ] + }, + { + "id": 155, + "type": "VAELoader", + "pos": { + "0": -1385, + "1": 439 + }, + "size": { + "0": 315, + "1": 58 + }, + "flags": {}, + "order": 9, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 295 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAELoader" + }, + "widgets_values": [ + "ae.safetensors" + ] + }, + { + "id": 60, + "type": "FluxDeGuidance", + "pos": { + "0": -667.695556640625, + "1": 195.65341186523438 + }, + "size": { + "0": 211.60000610351562, + "1": 58 + }, + "flags": {}, + "order": 37, + "mode": 0, + "inputs": [ + { + "name": "conditioning", + "type": "CONDITIONING", + "link": 152 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 195 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "FluxDeGuidance" + }, + "widgets_values": [ + 0 + ] + }, + { + "id": 62, + "type": "FluxDeGuidance", + "pos": { + "0": 159, + "1": 204 + }, + "size": { + "0": 211.60000610351562, + "1": 58 + }, + "flags": {}, + "order": 35, + "mode": 0, + "inputs": [ + { + "name": "conditioning", + "type": "CONDITIONING", + "link": 159 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 253 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "FluxDeGuidance" + }, + "widgets_values": [ + 3.5 + ] + }, + { + "id": 6, + "type": "CLIPTextEncode", + "pos": { + "0": 99.80657196044922, + "1": 311.393798828125 + }, + "size": { + "0": 348.5190124511719, + "1": 193.99783325195312 + }, + "flags": {}, + "order": 23, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 198 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 159 + ], + "slot_index": 0 + } + ], + "title": "CLIP Text Encode (Positive Prompt)", + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "face of a boy in angry cartoon style " + ], + "color": "#232", + "bgcolor": "#353" + }, + { + "id": 69, + "type": "GetNode", + "pos": { + "0": -584, + "1": 684 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 10, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 165 + ], + "slot_index": 0 + } + ], + "title": "Get_VAE", + "properties": {}, + "widgets_values": [ + "VAE" + ] + }, + { + "id": 86, + "type": "GetNode", + "pos": { + "0": -584, + "1": 631 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 11, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 183 + ], + "slot_index": 0 + } + ], + "title": "Get_IMG", + 
"properties": {}, + "widgets_values": [ + "IMG" + ] + }, + { + "id": 41, + "type": "VAEEncode", + "pos": { + "0": -599, + "1": 540 + }, + "size": { + "0": 210, + "1": 46 + }, + "flags": { + "collapsed": false + }, + "order": 30, + "mode": 0, + "inputs": [ + { + "name": "pixels", + "type": "IMAGE", + "link": 183 + }, + { + "name": "vae", + "type": "VAE", + "link": 165 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 132, + 316 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "VAEEncode" + }, + "widgets_values": [] + }, + { + "id": 68, + "type": "GetNode", + "pos": { + "0": -599, + "1": 494 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 12, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 164 + ], + "slot_index": 0 + } + ], + "title": "Get_CLIP", + "properties": {}, + "widgets_values": [ + "CLIP" + ] + }, + { + "id": 38, + "type": "CLIPTextEncode", + "pos": { + "0": -663, + "1": 293 + }, + "size": { + "0": 383.0185852050781, + "1": 148.05877685546875 + }, + "flags": { + "collapsed": false + }, + "order": 31, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 164 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 152 + ], + "slot_index": 0 + } + ], + "title": "CLIP Text Encode (Positive Prompt)", + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ], + "color": "#232", + "bgcolor": "#353" + }, + { + "id": 45, + "type": "SamplerCustomAdvanced", + "pos": { + "0": -217.695556640625, + "1": 291.6534423828125 + }, + "size": { + "0": 236.8000030517578, + "1": 107.92439270019531 + }, + "flags": {}, + "order": 43, + "mode": 0, + "inputs": [ + { + "name": "noise", + "type": "NOISE", + "link": 128, + "slot_index": 0 + }, + { + "name": "guider", + "type": "GUIDER", + "link": 124, + "slot_index": 1 + }, + { + "name": "sampler", + "type": "SAMPLER", + "link": 222, + "slot_index": 2 + }, + { + "name": "sigmas", + "type": "SIGMAS", + "link": 131, + "slot_index": 3 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 132, + "slot_index": 4 + } + ], + "outputs": [ + { + "name": "output", + "type": "LATENT", + "links": [ + 219, + 223 + ], + "slot_index": 0, + "shape": 3 + }, + { + "name": "denoised_output", + "type": "LATENT", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "SamplerCustomAdvanced" + }, + "widgets_values": [] + }, + { + "id": 157, + "type": "PreviewImage", + "pos": { + "0": 1107, + "1": 346 + }, + "size": { + "0": 749.0811157226562, + "1": 804.638916015625 + }, + "flags": {}, + "order": 49, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 303 + } + ], + "outputs": [], + "properties": { + "Node name for S&R": "PreviewImage" + }, + "widgets_values": [] + }, + { + "id": 8, + "type": "VAEDecode", + "pos": { + "0": 887, + "1": 90 + }, + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 47, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 24 + }, + { + "name": "vae", + "type": "VAE", + "link": 167 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 287, + 303 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + }, + "widgets_values": [] + }, + { + "id": 71, + "type": "GetNode", + "pos": { + "0": 901, + "1": 186 + }, + "size": { + "0": 210, + "1": 
58 + }, + "flags": { + "collapsed": true + }, + "order": 13, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 167 + ], + "slot_index": 0 + } + ], + "title": "Get_VAE", + "properties": {}, + "widgets_values": [ + "VAE" + ] + }, + { + "id": 72, + "type": "GetNode", + "pos": { + "0": 136, + "1": 954 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": false + }, + "order": 14, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 304 + ], + "slot_index": 0 + } + ], + "title": "Get_MODEL", + "properties": {}, + "widgets_values": [ + "MODEL" + ] + }, + { + "id": 145, + "type": "ImageConcatMulti", + "pos": { + "0": 1663, + "1": 100 + }, + "size": { + "0": 210, + "1": 170 + }, + "flags": {}, + "order": 48, + "mode": 0, + "inputs": [ + { + "name": "image_1", + "type": "IMAGE", + "link": 280 + }, + { + "name": "image_2", + "type": "IMAGE", + "link": 287 + }, + { + "name": "image_3", + "type": "IMAGE", + "link": null + } + ], + "outputs": [ + { + "name": "images", + "type": "IMAGE", + "links": [ + 309 + ], + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 2, + "right", + false, + null + ] + }, + { + "id": 132, + "type": "GetNode", + "pos": { + "0": 1678, + "1": -24 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 15, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 280 + ], + "slot_index": 0 + } + ], + "title": "Get_IMG", + "properties": {}, + "widgets_values": [ + "IMG" + ] + }, + { + "id": 158, + "type": "PreviewImage", + "pos": { + "0": 1900, + "1": 372 + }, + "size": { + "0": 1267.334228515625, + "1": 668.9405517578125 + }, + "flags": {}, + "order": 50, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 309 + } + ], + "outputs": [], + "properties": { + "Node name for S&R": "PreviewImage" + }, + "widgets_values": [] + }, + { + "id": 83, + "type": "GetNode", + "pos": { + "0": 562, + "1": 1076 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 16, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 179 + ], + "slot_index": 0 + } + ], + "title": "Get_HEIGHT", + "properties": {}, + "widgets_values": [ + "HEIGHT" + ] + }, + { + "id": 84, + "type": "GetNode", + "pos": { + "0": 566, + "1": 1032 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 17, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 178 + ], + "slot_index": 0 + } + ], + "title": "Get_WIDTH", + "properties": {}, + "widgets_values": [ + "WIDTH" + ] + }, + { + "id": 55, + "type": "OutFluxModelSamplingPred", + "pos": { + "0": 527, + "1": 837 + }, + "size": { + "0": 210, + "1": 146 + }, + "flags": {}, + "order": 32, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 304 + }, + { + "name": "width", + "type": "INT", + "link": 178, + "widget": { + "name": "width" + } + }, + { + "name": "height", + "type": "INT", + "link": 179, + "widget": { + "name": "height" + } + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 305, + 306, + 318 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "OutFluxModelSamplingPred" + }, + "widgets_values": [ + 1.15, + 0.5, + 1024, + 1024, + true + ] + }, + { + "id": 109, + "type": 
"FluxReverseODESampler", + "pos": { + "0": 533, + "1": 643 + }, + "size": { + "0": 210, + "1": 150 + }, + "flags": {}, + "order": 38, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 305 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 316 + } + ], + "outputs": [ + { + "name": "SAMPLER", + "type": "SAMPLER", + "links": [ + 317 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "FluxReverseODESampler" + }, + "widgets_values": [ + 0.9, + 0, + 9, + "constant" + ] + }, + { + "id": 81, + "type": "GetNode", + "pos": { + "0": -174, + "1": 1116 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 18, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 176 + ], + "slot_index": 0 + } + ], + "title": "Get_HEIGHT", + "properties": {}, + "widgets_values": [ + "HEIGHT" + ] + }, + { + "id": 82, + "type": "GetNode", + "pos": { + "0": -169, + "1": 1071 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 19, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 177 + ], + "slot_index": 0 + } + ], + "title": "Get_WIDTH", + "properties": {}, + "widgets_values": [ + "WIDTH" + ] + }, + { + "id": 73, + "type": "GetNode", + "pos": { + "0": -172, + "1": 1025 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 20, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 313, + 315 + ], + "slot_index": 0 + } + ], + "title": "Get_MODEL", + "properties": {}, + "widgets_values": [ + "MODEL" + ] + }, + { + "id": 52, + "type": "InFluxModelSamplingPred", + "pos": { + "0": -203, + "1": 867 + }, + "size": { + "0": 210, + "1": 122 + }, + "flags": {}, + "order": 34, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 315 + }, + { + "name": "width", + "type": "INT", + "link": 177, + "widget": { + "name": "width" + } + }, + { + "name": "height", + "type": "INT", + "link": 176, + "widget": { + "name": "height" + } + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 314 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "InFluxModelSamplingPred" + }, + "widgets_values": [ + 1.15, + 0.5, + 1024, + 1024 + ] + }, + { + "id": 49, + "type": "BasicScheduler", + "pos": { + "0": -207, + "1": 715 + }, + "size": { + "0": 210, + "1": 106 + }, + "flags": {}, + "order": 33, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 313, + "slot_index": 0 + } + ], + "outputs": [ + { + "name": "SIGMAS", + "type": "SIGMAS", + "links": [ + 130 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "BasicScheduler" + }, + "widgets_values": [ + "simple", + 28, + 1 + ] + }, + { + "id": 51, + "type": "FlipSigmas", + "pos": { + "0": -163, + "1": 673 + }, + "size": { + "0": 140, + "1": 26 + }, + "flags": { + "collapsed": true + }, + "order": 40, + "mode": 0, + "inputs": [ + { + "name": "sigmas", + "type": "SIGMAS", + "link": 130 + } + ], + "outputs": [ + { + "name": "SIGMAS", + "type": "SIGMAS", + "links": [ + 131 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "FlipSigmas" + }, + "widgets_values": [] + }, + { + "id": 121, + "type": "Note", + "pos": { + "0": -570.1758422851562, + "1": 741.79296875 + }, + "size": [ + 268.0312991580322, + 284.2141108280541 + 
], + "flags": {}, + "order": 21, + "mode": 0, + "inputs": [], + "outputs": [], + "properties": {}, + "widgets_values": [ + "Notes:\n\n* Unsampling steps should equal Sampling steps\n\n* Gamma should be 0.5 for good results but feel free to play around. At 0.0 it will not convert any data from the given image into the noise.\n\n* Gudiance should be 0 and empty prompt for doing basic inverse noise\n\n* For doing stylization you can include a prompt and play around with guidance\n" + ], + "color": "#432", + "bgcolor": "#653" + }, + { + "id": 108, + "type": "FluxForwardODESampler", + "pos": { + "0": -253, + "1": 504 + }, + "size": [ + 279.9415419092593, + 106 + ], + "flags": {}, + "order": 22, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "SAMPLER", + "type": "SAMPLER", + "links": [ + 222 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "FluxForwardODESampler" + }, + "widgets_values": [ + 0.5, + 0, + "fixed" + ] + }, + { + "id": 17, + "type": "BasicScheduler", + "pos": { + "0": 536, + "1": 491 + }, + "size": { + "0": 210, + "1": 106 + }, + "flags": {}, + "order": 39, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 318, + "slot_index": 0 + } + ], + "outputs": [ + { + "name": "SIGMAS", + "type": "SIGMAS", + "links": [ + 237 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "BasicScheduler" + }, + "widgets_values": [ + "simple", + 28, + 1 + ] + } + ], + "links": [ + [ + 24, + 13, + 0, + 8, + 0, + "LATENT" + ], + [ + 30, + 22, + 0, + 13, + 1, + "GUIDER" + ], + [ + 124, + 46, + 0, + 45, + 1, + "GUIDER" + ], + [ + 127, + 48, + 0, + 13, + 0, + "NOISE" + ], + [ + 128, + 50, + 0, + 45, + 0, + "NOISE" + ], + [ + 130, + 49, + 0, + 51, + 0, + "SIGMAS" + ], + [ + 131, + 51, + 0, + 45, + 3, + "SIGMAS" + ], + [ + 132, + 41, + 0, + 45, + 4, + "LATENT" + ], + [ + 152, + 38, + 0, + 60, + 0, + "CONDITIONING" + ], + [ + 159, + 6, + 0, + 62, + 0, + "CONDITIONING" + ], + [ + 164, + 68, + 0, + 38, + 0, + "CLIP" + ], + [ + 165, + 69, + 0, + 41, + 1, + "VAE" + ], + [ + 167, + 71, + 0, + 8, + 1, + "VAE" + ], + [ + 174, + 78, + 0, + 76, + 0, + "*" + ], + [ + 175, + 80, + 0, + 79, + 0, + "*" + ], + [ + 176, + 81, + 0, + 52, + 2, + "INT" + ], + [ + 177, + 82, + 0, + 52, + 1, + "INT" + ], + [ + 178, + 84, + 0, + 55, + 1, + "INT" + ], + [ + 179, + 83, + 0, + 55, + 2, + "INT" + ], + [ + 180, + 40, + 0, + 85, + 0, + "*" + ], + [ + 181, + 78, + 0, + 40, + 1, + "INT" + ], + [ + 182, + 80, + 0, + 40, + 2, + "INT" + ], + [ + 183, + 86, + 0, + 41, + 0, + "IMAGE" + ], + [ + 195, + 60, + 0, + 46, + 1, + "CONDITIONING" + ], + [ + 198, + 70, + 0, + 6, + 0, + "CLIP" + ], + [ + 217, + 106, + 0, + 104, + 1, + "VAE" + ], + [ + 218, + 104, + 0, + 105, + 0, + "IMAGE" + ], + [ + 219, + 45, + 0, + 104, + 0, + "LATENT" + ], + [ + 222, + 108, + 0, + 45, + 2, + "SAMPLER" + ], + [ + 223, + 45, + 0, + 13, + 4, + "LATENT" + ], + [ + 237, + 17, + 0, + 13, + 3, + "SIGMAS" + ], + [ + 253, + 62, + 0, + 22, + 1, + "CONDITIONING" + ], + [ + 267, + 131, + 0, + 40, + 0, + "IMAGE" + ], + [ + 280, + 132, + 0, + 145, + 0, + "IMAGE" + ], + [ + 287, + 8, + 0, + 145, + 1, + "IMAGE" + ], + [ + 293, + 152, + 0, + 66, + 0, + "MODEL" + ], + [ + 294, + 154, + 0, + 67, + 0, + "CLIP" + ], + [ + 295, + 155, + 0, + 65, + 0, + "VAE" + ], + [ + 303, + 8, + 0, + 157, + 0, + "IMAGE" + ], + [ + 304, + 72, + 0, + 55, + 0, + "MODEL" + ], + [ + 305, + 55, + 0, + 109, + 0, + "MODEL" + ], + [ + 306, + 55, + 0, + 22, + 0, + "MODEL" + ], + [ + 309, + 145, + 0, + 158, + 0, + 
"IMAGE" + ], + [ + 313, + 73, + 0, + 49, + 0, + "MODEL" + ], + [ + 314, + 52, + 0, + 46, + 0, + "MODEL" + ], + [ + 315, + 73, + 0, + 52, + 0, + "MODEL" + ], + [ + 316, + 41, + 0, + 109, + 1, + "LATENT" + ], + [ + 317, + 109, + 0, + 13, + 2, + "SAMPLER" + ], + [ + 318, + 55, + 0, + 17, + 0, + "MODEL" + ] + ], + "groups": [ + { + "title": "Setup", + "bounding": [ + -2176, + 118, + 1440, + 1007 + ], + "color": "#3f789e", + "font_size": 24, + "flags": {} + }, + { + "title": "Sampling", + "bounding": [ + 80, + 119, + 676, + 981 + ], + "color": "#3f789e", + "font_size": 24, + "flags": {} + }, + { + "title": "Unsampling", + "bounding": [ + -677, + 122, + 725, + 999 + ], + "color": "#3f789e", + "font_size": 24, + "flags": {} + } + ], + "config": {}, + "extra": { + "ds": { + "scale": 0.7400249944258193, + "offset": [ + 1351.189735546649, + -22.905168482288904 + ] + }, + "groupNodes": {} + }, + "version": 0.4 +} \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/example_workflows/example_rf_inversion_workflow.json b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/example_workflows/example_rf_inversion_workflow.json new file mode 100644 index 0000000000000000000000000000000000000000..f38419fb5b138c10d5111b268e3b1f14ee7caf1b --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/example_workflows/example_rf_inversion_workflow.json @@ -0,0 +1,2363 @@ +{ + "last_node_id": 122, + "last_link_id": 253, + "nodes": [ + { + "id": 22, + "type": "BasicGuider", + "pos": { + "0": 517.8887329101562, + "1": 193.40916442871094 + }, + "size": { + "0": 222.3482666015625, + "1": 46 + }, + "flags": {}, + "order": 40, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 146, + "slot_index": 0 + }, + { + "name": "conditioning", + "type": "CONDITIONING", + "link": 253, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "GUIDER", + "type": "GUIDER", + "links": [ + 30 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "BasicGuider" + }, + "widgets_values": [] + }, + { + "id": 46, + "type": "BasicGuider", + "pos": { + "0": -165.98114013671875, + "1": 190.95367431640625 + }, + "size": { + "0": 161.1999969482422, + "1": 46 + }, + "flags": {}, + "order": 38, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 136, + "slot_index": 0 + }, + { + "name": "conditioning", + "type": "CONDITIONING", + "link": 195, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "GUIDER", + "type": "GUIDER", + "links": [ + 124 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "BasicGuider" + }, + "widgets_values": [] + }, + { + "id": 66, + "type": "SetNode", + "pos": { + "0": -956.0843505859375, + "1": 192.01882934570312 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 21, + "mode": 0, + "inputs": [ + { + "name": "MODEL", + "type": "MODEL", + "link": 171 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_MODEL", + "properties": { + "previousName": "MODEL" + }, + "widgets_values": [ + "MODEL" + ] + }, + { + "id": 40, + "type": "ImageScale", + "pos": { + "0": -1291.8131103515625, + "1": 625.7133178710938 + }, + "size": { + "0": 210, + "1": 130 + }, + "flags": {}, + "order": 30, + "mode": 0, + "inputs": [ + { + "name": "image", + "type": "IMAGE", + "link": 118 + }, + { + "name": "width", + "type": "INT", + "link": 181, + "widget": { + "name": "width" + } + }, + { + "name": "height", + "type": "INT", + "link": 182, + 
"widget": { + "name": "height" + } + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 180 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "ImageScale" + }, + "widgets_values": [ + "nearest-exact", + 1024, + 1024, + "center" + ] + }, + { + "id": 75, + "type": "CheckpointLoaderSimple", + "pos": { + "0": -1272.0841064453125, + "1": 253.01895141601562 + }, + "size": { + "0": 210, + "1": 98 + }, + "flags": {}, + "order": 0, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 171 + ], + "slot_index": 0, + "shape": 3 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 172 + ], + "slot_index": 1, + "shape": 3 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 173 + ], + "slot_index": 2, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "flux1-dev-fp8.safetensors" + ] + }, + { + "id": 13, + "type": "SamplerCustomAdvanced", + "pos": { + "0": 507.888671875, + "1": 293.40911865234375 + }, + "size": { + "0": 236.8000030517578, + "1": 106 + }, + "flags": {}, + "order": 43, + "mode": 0, + "inputs": [ + { + "name": "noise", + "type": "NOISE", + "link": 127, + "slot_index": 0 + }, + { + "name": "guider", + "type": "GUIDER", + "link": 30, + "slot_index": 1 + }, + { + "name": "sampler", + "type": "SAMPLER", + "link": 224, + "slot_index": 2 + }, + { + "name": "sigmas", + "type": "SIGMAS", + "link": 237, + "slot_index": 3 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 223, + "slot_index": 4 + } + ], + "outputs": [ + { + "name": "output", + "type": "LATENT", + "links": [ + 24 + ], + "slot_index": 0, + "shape": 3 + }, + { + "name": "denoised_output", + "type": "LATENT", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "SamplerCustomAdvanced" + }, + "widgets_values": [] + }, + { + "id": 50, + "type": "DisableNoise", + "pos": { + "0": -181.98114013671875, + "1": 443.9537048339844 + }, + "size": { + "0": 140, + "1": 26 + }, + "flags": { + "collapsed": true + }, + "order": 1, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "NOISE", + "type": "NOISE", + "links": [ + 128 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "DisableNoise" + }, + "widgets_values": [] + }, + { + "id": 68, + "type": "GetNode", + "pos": { + "0": -599.98095703125, + "1": 365.9537048339844 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 2, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 164 + ], + "slot_index": 0 + } + ], + "title": "Get_CLIP", + "properties": {}, + "widgets_values": [ + "CLIP" + ] + }, + { + "id": 86, + "type": "GetNode", + "pos": { + "0": -591.98095703125, + "1": 555.95361328125 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 3, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 183 + ], + "slot_index": 0 + } + ], + "title": "Get_IMG", + "properties": {}, + "widgets_values": [ + "IMG" + ] + }, + { + "id": 69, + "type": "GetNode", + "pos": { + "0": -584.98095703125, + "1": 621.95361328125 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 4, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 165 + ], + "slot_index": 0 + } + ], + "title": "Get_VAE", + 
"properties": {}, + "widgets_values": [ + "VAE" + ] + }, + { + "id": 67, + "type": "SetNode", + "pos": { + "0": -960.2341918945312, + "1": 299.40826416015625 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 22, + "mode": 0, + "inputs": [ + { + "name": "CLIP", + "type": "CLIP", + "link": 172 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_CLIP", + "properties": { + "previousName": "CLIP" + }, + "widgets_values": [ + "CLIP" + ] + }, + { + "id": 65, + "type": "SetNode", + "pos": { + "0": -958.2341918945312, + "1": 407.40826416015625 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 23, + "mode": 0, + "inputs": [ + { + "name": "VAE", + "type": "VAE", + "link": 173 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_VAE", + "properties": { + "previousName": "VAE" + }, + "widgets_values": [ + "VAE" + ] + }, + { + "id": 85, + "type": "SetNode", + "pos": { + "0": -970.2341918945312, + "1": 622.4080810546875 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 35, + "mode": 0, + "inputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "link": 180 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_IMG", + "properties": { + "previousName": "IMG" + }, + "widgets_values": [ + "IMG" + ] + }, + { + "id": 76, + "type": "SetNode", + "pos": { + "0": -982.2343139648438, + "1": 810.4080810546875 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 27, + "mode": 0, + "inputs": [ + { + "name": "INT", + "type": "INT", + "link": 174 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_WIDTH", + "properties": { + "previousName": "WIDTH" + }, + "widgets_values": [ + "WIDTH" + ] + }, + { + "id": 79, + "type": "SetNode", + "pos": { + "0": -989.234375, + "1": 929.4080810546875 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 28, + "mode": 0, + "inputs": [ + { + "name": "INT", + "type": "INT", + "link": 175 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_HEIGHT", + "properties": { + "previousName": "HEIGHT" + }, + "widgets_values": [ + "HEIGHT" + ] + }, + { + "id": 71, + "type": "GetNode", + "pos": { + "0": 893, + "1": 286 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 5, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 167 + ], + "slot_index": 0 + } + ], + "title": "Get_VAE", + "properties": {}, + "widgets_values": [ + "VAE" + ] + }, + { + "id": 62, + "type": "FluxDeGuidance", + "pos": { + "0": 159.80670166015625, + "1": 204.3938446044922 + }, + "size": { + "0": 211.60000610351562, + "1": 58 + }, + "flags": {}, + "order": 34, + "mode": 0, + "inputs": [ + { + "name": "conditioning", + "type": "CONDITIONING", + "link": 159 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 253 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "FluxDeGuidance" + }, + "widgets_values": [ + 3.5 + ] + }, + { + "id": 106, + "type": "GetNode", + "pos": { + "0": 357.3566589355469, + "1": -1461.5296630859375 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 6, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 217 + ], + "slot_index": 0 + } + ], + 
"title": "Get_VAE", + "properties": {}, + "widgets_values": [ + "VAE" + ] + }, + { + "id": 105, + "type": "SaveImage", + "pos": { + "0": 486, + "1": -1469 + }, + "size": { + "0": 985.3012084960938, + "1": 1060.3828125 + }, + "flags": {}, + "order": 44, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 218 + } + ], + "outputs": [], + "properties": {}, + "widgets_values": [ + "ok" + ] + }, + { + "id": 81, + "type": "GetNode", + "pos": { + "0": -175, + "1": 1046 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 7, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 176 + ], + "slot_index": 0 + } + ], + "title": "Get_HEIGHT", + "properties": {}, + "widgets_values": [ + "HEIGHT" + ] + }, + { + "id": 82, + "type": "GetNode", + "pos": { + "0": -170, + "1": 1004 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 8, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 177 + ], + "slot_index": 0 + } + ], + "title": "Get_WIDTH", + "properties": {}, + "widgets_values": [ + "WIDTH" + ] + }, + { + "id": 73, + "type": "GetNode", + "pos": { + "0": -170, + "1": 963 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 9, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 169 + ], + "slot_index": 0 + } + ], + "title": "Get_MODEL", + "properties": {}, + "widgets_values": [ + "MODEL" + ] + }, + { + "id": 52, + "type": "InFluxModelSamplingPred", + "pos": { + "0": -203, + "1": 798 + }, + "size": { + "0": 210, + "1": 122 + }, + "flags": {}, + "order": 26, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 169 + }, + { + "name": "width", + "type": "INT", + "link": 177, + "widget": { + "name": "width" + } + }, + { + "name": "height", + "type": "INT", + "link": 176, + "widget": { + "name": "height" + } + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 135, + 136 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "InFluxModelSamplingPred" + }, + "widgets_values": [ + 1.15, + 0.5, + 1024, + 1024 + ] + }, + { + "id": 51, + "type": "FlipSigmas", + "pos": { + "0": -157, + "1": 615 + }, + "size": { + "0": 140, + "1": 26 + }, + "flags": { + "collapsed": true + }, + "order": 39, + "mode": 0, + "inputs": [ + { + "name": "sigmas", + "type": "SIGMAS", + "link": 130 + } + ], + "outputs": [ + { + "name": "SIGMAS", + "type": "SIGMAS", + "links": [ + 131 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "FlipSigmas" + }, + "widgets_values": [] + }, + { + "id": 45, + "type": "SamplerCustomAdvanced", + "pos": { + "0": -218.98114013671875, + "1": 285.9537048339844 + }, + "size": { + "0": 236.8000030517578, + "1": 107.92439270019531 + }, + "flags": {}, + "order": 41, + "mode": 0, + "inputs": [ + { + "name": "noise", + "type": "NOISE", + "link": 128, + "slot_index": 0 + }, + { + "name": "guider", + "type": "GUIDER", + "link": 124, + "slot_index": 1 + }, + { + "name": "sampler", + "type": "SAMPLER", + "link": 222, + "slot_index": 2 + }, + { + "name": "sigmas", + "type": "SIGMAS", + "link": 131, + "slot_index": 3 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 132, + "slot_index": 4 + } + ], + "outputs": [ + { + "name": "output", + "type": "LATENT", + "links": [ + 219, + 223 + ], + "slot_index": 0, + 
"shape": 3 + }, + { + "name": "denoised_output", + "type": "LATENT", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "SamplerCustomAdvanced" + }, + "widgets_values": [] + }, + { + "id": 87, + "type": "GetNode", + "pos": { + "0": 1348, + "1": 125 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": false + }, + "order": 10, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 185 + ], + "slot_index": 0 + } + ], + "title": "Get_IMG", + "properties": {}, + "widgets_values": [ + "IMG" + ] + }, + { + "id": 104, + "type": "VAEDecode", + "pos": { + "0": 351.3566589355469, + "1": -1553.5296630859375 + }, + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 42, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 219 + }, + { + "name": "vae", + "type": "VAE", + "link": 217 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 218 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + }, + "widgets_values": [] + }, + { + "id": 108, + "type": "FluxForwardODESampler", + "pos": { + "0": -202, + "1": 494 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 11, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "SAMPLER", + "type": "SAMPLER", + "links": [ + 222 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "FluxForwardODESampler" + }, + "widgets_values": [ + 0.5 + ] + }, + { + "id": 38, + "type": "CLIPTextEncode", + "pos": { + "0": -665, + "1": 298 + }, + "size": { + "0": 383.0185852050781, + "1": 148.05877685546875 + }, + "flags": { + "collapsed": true + }, + "order": 24, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 164 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 152 + ], + "slot_index": 0 + } + ], + "title": "CLIP Text Encode (Positive Prompt)", + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ], + "color": "#232", + "bgcolor": "#353" + }, + { + "id": 48, + "type": "DisableNoise", + "pos": { + "0": 568, + "1": 447 + }, + "size": { + "0": 140, + "1": 26 + }, + "flags": { + "collapsed": true + }, + "order": 12, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "NOISE", + "type": "NOISE", + "links": [ + 127 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "DisableNoise" + }, + "widgets_values": [] + }, + { + "id": 78, + "type": "INTConstant", + "pos": { + "0": -1282.2342529296875, + "1": 808.4080810546875 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 13, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "value", + "type": "INT", + "links": [ + 174, + 181 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "INTConstant" + }, + "widgets_values": [ + 1024 + ], + "color": "#1b4669", + "bgcolor": "#29699c" + }, + { + "id": 80, + "type": "INTConstant", + "pos": { + "0": -1276.2342529296875, + "1": 917.4080810546875 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 14, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "value", + "type": "INT", + "links": [ + 175, + 182 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "INTConstant" + }, + "widgets_values": [ + 1024 + ], + "color": "#1b4669", + "bgcolor": "#29699c" + }, + { + "id": 17, + "type": "BasicScheduler", + 
"pos": { + "0": 523, + "1": 480 + }, + "size": { + "0": 210, + "1": 106 + }, + "flags": {}, + "order": 36, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 145, + "slot_index": 0 + } + ], + "outputs": [ + { + "name": "SIGMAS", + "type": "SIGMAS", + "links": [ + 237 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "BasicScheduler" + }, + "widgets_values": [ + "simple", + 28, + 1 + ] + }, + { + "id": 70, + "type": "GetNode", + "pos": { + "0": 192, + "1": 573 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 15, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 198 + ], + "slot_index": 0 + } + ], + "title": "Get_CLIP", + "properties": {}, + "widgets_values": [ + "CLIP" + ] + }, + { + "id": 83, + "type": "GetNode", + "pos": { + "0": 567, + "1": 1042 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 16, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 179 + ], + "slot_index": 0 + } + ], + "title": "Get_HEIGHT", + "properties": {}, + "widgets_values": [ + "HEIGHT" + ] + }, + { + "id": 84, + "type": "GetNode", + "pos": { + "0": 570, + "1": 983 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 17, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 178 + ], + "slot_index": 0 + } + ], + "title": "Get_WIDTH", + "properties": {}, + "widgets_values": [ + "WIDTH" + ] + }, + { + "id": 55, + "type": "OutFluxModelSamplingPred", + "pos": { + "0": 527, + "1": 797 + }, + "size": { + "0": 210, + "1": 146 + }, + "flags": {}, + "order": 31, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 252 + }, + { + "name": "width", + "type": "INT", + "link": 178, + "widget": { + "name": "width" + } + }, + { + "name": "height", + "type": "INT", + "link": 179, + "widget": { + "name": "height" + } + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 145, + 146, + 225 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "OutFluxModelSamplingPred" + }, + "widgets_values": [ + 1.15, + 0.5, + 1024, + 1024, + true + ] + }, + { + "id": 49, + "type": "BasicScheduler", + "pos": { + "0": -201, + "1": 653 + }, + "size": { + "0": 210, + "1": 106 + }, + "flags": {}, + "order": 33, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 135, + "slot_index": 0 + } + ], + "outputs": [ + { + "name": "SIGMAS", + "type": "SIGMAS", + "links": [ + 130 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "BasicScheduler" + }, + "widgets_values": [ + "simple", + 100, + 1 + ] + }, + { + "id": 9, + "type": "SaveImage", + "pos": { + "0": 887, + "1": 364 + }, + "size": { + "0": 985.3012084960938, + "1": 1060.3828125 + }, + "flags": {}, + "order": 46, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 9 + } + ], + "outputs": [], + "properties": {}, + "widgets_values": [ + "a" + ] + }, + { + "id": 8, + "type": "VAEDecode", + "pos": { + "0": 887, + "1": 194 + }, + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 45, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 24 + }, + { + "name": "vae", + "type": "VAE", + "link": 167 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ 
+ 9, + 186 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + }, + "widgets_values": [] + }, + { + "id": 88, + "type": "ImageConcanate", + "pos": { + "0": 1666, + "1": 183 + }, + "size": { + "0": 315, + "1": 102 + }, + "flags": {}, + "order": 47, + "mode": 0, + "inputs": [ + { + "name": "image1", + "type": "IMAGE", + "link": 185 + }, + { + "name": "image2", + "type": "IMAGE", + "link": 186 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 250 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "ImageConcanate" + }, + "widgets_values": [ + "right", + true + ] + }, + { + "id": 120, + "type": "PreviewImage", + "pos": { + "0": 1958, + "1": 395 + }, + "size": [ + 210, + 246 + ], + "flags": {}, + "order": 48, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 250 + } + ], + "outputs": [], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 41, + "type": "VAEEncode", + "pos": { + "0": -635, + "1": 451 + }, + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 25, + "mode": 0, + "inputs": [ + { + "name": "pixels", + "type": "IMAGE", + "link": 183 + }, + { + "name": "vae", + "type": "VAE", + "link": 165 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 132, + 251 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "VAEEncode" + }, + "widgets_values": [] + }, + { + "id": 39, + "type": "LoadImage", + "pos": { + "0": -2187, + "1": 231 + }, + "size": { + "0": 868.442626953125, + "1": 998.3667602539062 + }, + "flags": {}, + "order": 18, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 118 + ], + "slot_index": 0, + "shape": 3 + }, + { + "name": "MASK", + "type": "MASK", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "LoadImage" + }, + "widgets_values": [ + "Screenshot 2024-10-15 at 7.43.05 PM.png", + "image" + ] + }, + { + "id": 6, + "type": "CLIPTextEncode", + "pos": { + "0": 99.80657196044922, + "1": 311.393798828125 + }, + "size": { + "0": 348.5190124511719, + "1": 193.99783325195312 + }, + "flags": {}, + "order": 29, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 198 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 159 + ], + "slot_index": 0 + } + ], + "title": "CLIP Text Encode (Positive Prompt)", + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "a cartoon cgi boy with huge eyes and an enormous smile, he is very happy" + ], + "color": "#232", + "bgcolor": "#353" + }, + { + "id": 72, + "type": "GetNode", + "pos": { + "0": 138, + "1": 1039 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 19, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 252 + ], + "slot_index": 0 + } + ], + "title": "Get_MODEL", + "properties": {}, + "widgets_values": [ + "MODEL" + ] + }, + { + "id": 121, + "type": "Note", + "pos": { + "0": -571.4612426757812, + "1": 736.0932006835938 + }, + "size": [ + 268.19626567957255, + 234.70770705582265 + ], + "flags": {}, + "order": 20, + "mode": 0, + "inputs": [], + "outputs": [], + "properties": {}, + "widgets_values": [ + "More steps during unsampling give more detail and make blurry results less likely" + ], + "color": "#432", + "bgcolor": "#653" + }, + { + "id":
109, + "type": "FluxReverseODESampler", + "pos": { + "0": 526, + "1": 633 + }, + "size": { + "0": 210, + "1": 126 + }, + "flags": {}, + "order": 37, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 225 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 251 + } + ], + "outputs": [ + { + "name": "SAMPLER", + "type": "SAMPLER", + "links": [ + 224 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "FluxReverseODESampler" + }, + "widgets_values": [ + 0.7000000000000001, + 0, + 7 + ] + }, + { + "id": 60, + "type": "FluxDeGuidance", + "pos": { + "0": -668.98095703125, + "1": 189.95367431640625 + }, + "size": { + "0": 211.60000610351562, + "1": 58 + }, + "flags": {}, + "order": 32, + "mode": 0, + "inputs": [ + { + "name": "conditioning", + "type": "CONDITIONING", + "link": 152 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 195 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "FluxDeGuidance" + }, + "widgets_values": [ + 1 + ] + } + ], + "links": [ + [ + 9, + 8, + 0, + 9, + 0, + "IMAGE" + ], + [ + 24, + 13, + 0, + 8, + 0, + "LATENT" + ], + [ + 30, + 22, + 0, + 13, + 1, + "GUIDER" + ], + [ + 118, + 39, + 0, + 40, + 0, + "IMAGE" + ], + [ + 124, + 46, + 0, + 45, + 1, + "GUIDER" + ], + [ + 127, + 48, + 0, + 13, + 0, + "NOISE" + ], + [ + 128, + 50, + 0, + 45, + 0, + "NOISE" + ], + [ + 130, + 49, + 0, + 51, + 0, + "SIGMAS" + ], + [ + 131, + 51, + 0, + 45, + 3, + "SIGMAS" + ], + [ + 132, + 41, + 0, + 45, + 4, + "LATENT" + ], + [ + 135, + 52, + 0, + 49, + 0, + "MODEL" + ], + [ + 136, + 52, + 0, + 46, + 0, + "MODEL" + ], + [ + 145, + 55, + 0, + 17, + 0, + "MODEL" + ], + [ + 146, + 55, + 0, + 22, + 0, + "MODEL" + ], + [ + 152, + 38, + 0, + 60, + 0, + "CONDITIONING" + ], + [ + 159, + 6, + 0, + 62, + 0, + "CONDITIONING" + ], + [ + 164, + 68, + 0, + 38, + 0, + "CLIP" + ], + [ + 165, + 69, + 0, + 41, + 1, + "VAE" + ], + [ + 167, + 71, + 0, + 8, + 1, + "VAE" + ], + [ + 169, + 73, + 0, + 52, + 0, + "MODEL" + ], + [ + 171, + 75, + 0, + 66, + 0, + "*" + ], + [ + 172, + 75, + 1, + 67, + 0, + "CLIP" + ], + [ + 173, + 75, + 2, + 65, + 0, + "VAE" + ], + [ + 174, + 78, + 0, + 76, + 0, + "*" + ], + [ + 175, + 80, + 0, + 79, + 0, + "*" + ], + [ + 176, + 81, + 0, + 52, + 2, + "INT" + ], + [ + 177, + 82, + 0, + 52, + 1, + "INT" + ], + [ + 178, + 84, + 0, + 55, + 1, + "INT" + ], + [ + 179, + 83, + 0, + 55, + 2, + "INT" + ], + [ + 180, + 40, + 0, + 85, + 0, + "*" + ], + [ + 181, + 78, + 0, + 40, + 1, + "INT" + ], + [ + 182, + 80, + 0, + 40, + 2, + "INT" + ], + [ + 183, + 86, + 0, + 41, + 0, + "IMAGE" + ], + [ + 185, + 87, + 0, + 88, + 0, + "IMAGE" + ], + [ + 186, + 8, + 0, + 88, + 1, + "IMAGE" + ], + [ + 195, + 60, + 0, + 46, + 1, + "CONDITIONING" + ], + [ + 198, + 70, + 0, + 6, + 0, + "CLIP" + ], + [ + 217, + 106, + 0, + 104, + 1, + "VAE" + ], + [ + 218, + 104, + 0, + 105, + 0, + "IMAGE" + ], + [ + 219, + 45, + 0, + 104, + 0, + "LATENT" + ], + [ + 222, + 108, + 0, + 45, + 2, + "SAMPLER" + ], + [ + 223, + 45, + 0, + 13, + 4, + "LATENT" + ], + [ + 224, + 109, + 0, + 13, + 2, + "SAMPLER" + ], + [ + 225, + 55, + 0, + 109, + 0, + "MODEL" + ], + [ + 237, + 17, + 0, + 13, + 3, + "SIGMAS" + ], + [ + 250, + 88, + 0, + 120, + 0, + "IMAGE" + ], + [ + 251, + 41, + 0, + 109, + 1, + "LATENT" + ], + [ + 252, + 72, + 0, + 55, + 0, + "MODEL" + ], + [ + 253, + 62, + 0, + 22, + 1, + "CONDITIONING" + ] + ], + "groups": [ + { + "title": "Unsampling", + "bounding": [ + -678, + 116, + 707, + 942 + 
], + "color": "#3f789e", + "font_size": 24, + "flags": {} + }, + { + "title": "Sampling", + "bounding": [ + 80, + 119, + 677, + 933 + ], + "color": "#3f789e", + "font_size": 24, + "flags": {} + }, + { + "title": "Setup", + "bounding": [ + -2197, + 118, + 1461, + 1121 + ], + "color": "#3f789e", + "font_size": 24, + "flags": {} + } + ], + "config": {}, + "extra": { + "ds": { + "scale": 0.21435888100002873, + "offset": [ + 2219.766473616704, + 1064.2695838292261 + ] + }, + "groupNodes": {} + }, + "version": 0.4 +} \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/example_workflows/example_unsample_inject_workflow.json b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/example_workflows/example_unsample_inject_workflow.json new file mode 100644 index 0000000000000000000000000000000000000000..ae2583fe727701803dec611c0aadacb701c51634 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/example_workflows/example_unsample_inject_workflow.json @@ -0,0 +1,2373 @@ +{ + "last_node_id": 93, + "last_link_id": 201, + "nodes": [ + { + "id": 22, + "type": "BasicGuider", + "pos": { + "0": 517.8887329101562, + "1": 193.40916442871094 + }, + "size": { + "0": 222.3482666015625, + "1": 46 + }, + "flags": {}, + "order": 39, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 146, + "slot_index": 0 + }, + { + "name": "conditioning", + "type": "CONDITIONING", + "link": 194, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "GUIDER", + "type": "GUIDER", + "links": [ + 30 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "BasicGuider" + }, + "widgets_values": [] + }, + { + "id": 46, + "type": "BasicGuider", + "pos": { + "0": -165.98114013671875, + "1": 190.95367431640625 + }, + "size": { + "0": 161.1999969482422, + "1": 46 + }, + "flags": {}, + "order": 38, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 136, + "slot_index": 0 + }, + { + "name": "conditioning", + "type": "CONDITIONING", + "link": 195, + "slot_index": 1 + } + ], + "outputs": [ + { + "name": "GUIDER", + "type": "GUIDER", + "links": [ + 124 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "BasicGuider" + }, + "widgets_values": [] + }, + { + "id": 66, + "type": "SetNode", + "pos": { + "0": -956.0843505859375, + "1": 192.01882934570312 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 21, + "mode": 0, + "inputs": [ + { + "name": "MODEL", + "type": "MODEL", + "link": 171 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_MODEL", + "properties": { + "previousName": "MODEL" + }, + "widgets_values": [ + "MODEL" + ] + }, + { + "id": 40, + "type": "ImageScale", + "pos": { + "0": -1291.8131103515625, + "1": 625.7133178710938 + }, + "size": { + "0": 210, + "1": 130 + }, + "flags": {}, + "order": 31, + "mode": 0, + "inputs": [ + { + "name": "image", + "type": "IMAGE", + "link": 118 + }, + { + "name": "width", + "type": "INT", + "link": 181, + "widget": { + "name": "width" + } + }, + { + "name": "height", + "type": "INT", + "link": 182, + "widget": { + "name": "height" + } + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 180 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "ImageScale" + }, + "widgets_values": [ + "nearest-exact", + 1024, + 1024, + "center" + ] + }, + { + "id": 75, + "type": "CheckpointLoaderSimple", + "pos": { + "0": -1272.0841064453125, 
+ "1": 253.01895141601562 + }, + "size": { + "0": 210, + "1": 98 + }, + "flags": {}, + "order": 0, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 171 + ], + "slot_index": 0, + "shape": 3 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 172 + ], + "slot_index": 1, + "shape": 3 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 173 + ], + "slot_index": 2, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "flux1-dev-fp8.safetensors" + ] + }, + { + "id": 45, + "type": "SamplerCustomAdvanced", + "pos": { + "0": -218.98114013671875, + "1": 285.9537048339844 + }, + "size": { + "0": 236.8000030517578, + "1": 107.92439270019531 + }, + "flags": {}, + "order": 40, + "mode": 0, + "inputs": [ + { + "name": "noise", + "type": "NOISE", + "link": 128, + "slot_index": 0 + }, + { + "name": "guider", + "type": "GUIDER", + "link": 124, + "slot_index": 1 + }, + { + "name": "sampler", + "type": "SAMPLER", + "link": 161, + "slot_index": 2 + }, + { + "name": "sigmas", + "type": "SIGMAS", + "link": 131, + "slot_index": 3 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 132, + "slot_index": 4 + } + ], + "outputs": [ + { + "name": "output", + "type": "LATENT", + "links": [ + 188 + ], + "slot_index": 0, + "shape": 3 + }, + { + "name": "denoised_output", + "type": "LATENT", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "SamplerCustomAdvanced" + }, + "widgets_values": [] + }, + { + "id": 13, + "type": "SamplerCustomAdvanced", + "pos": { + "0": 507.888671875, + "1": 293.40911865234375 + }, + "size": { + "0": 236.8000030517578, + "1": 106 + }, + "flags": {}, + "order": 43, + "mode": 0, + "inputs": [ + { + "name": "noise", + "type": "NOISE", + "link": 127, + "slot_index": 0 + }, + { + "name": "guider", + "type": "GUIDER", + "link": 30, + "slot_index": 1 + }, + { + "name": "sampler", + "type": "SAMPLER", + "link": 19, + "slot_index": 2 + }, + { + "name": "sigmas", + "type": "SIGMAS", + "link": 20, + "slot_index": 3 + }, + { + "name": "latent_image", + "type": "LATENT", + "link": 193, + "slot_index": 4 + } + ], + "outputs": [ + { + "name": "output", + "type": "LATENT", + "links": [ + 24 + ], + "slot_index": 0, + "shape": 3 + }, + { + "name": "denoised_output", + "type": "LATENT", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "SamplerCustomAdvanced" + }, + "widgets_values": [] + }, + { + "id": 50, + "type": "DisableNoise", + "pos": { + "0": -181.98114013671875, + "1": 443.9537048339844 + }, + "size": { + "0": 140, + "1": 26 + }, + "flags": { + "collapsed": true + }, + "order": 1, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "NOISE", + "type": "NOISE", + "links": [ + 128 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "DisableNoise" + }, + "widgets_values": [] + }, + { + "id": 64, + "type": "FluxInverseSampler", + "pos": { + "0": -202.98114013671875, + "1": 494.9537048339844 + }, + "size": { + "0": 210, + "1": 26 + }, + "flags": { + "collapsed": true + }, + "order": 2, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "SAMPLER", + "type": "SAMPLER", + "links": [ + 161 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "FluxInverseSampler" + }, + "widgets_values": [] + }, + { + "id": 51, + "type": "FlipSigmas", + "pos": { + "0": -176.9811248779297, + "1": 557.95361328125 + }, + "size": { + "0": 140, + "1": 26 + }, 
+ "flags": { + "collapsed": true + }, + "order": 37, + "mode": 0, + "inputs": [ + { + "name": "sigmas", + "type": "SIGMAS", + "link": 130 + } + ], + "outputs": [ + { + "name": "SIGMAS", + "type": "SIGMAS", + "links": [ + 131 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "FlipSigmas" + }, + "widgets_values": [] + }, + { + "id": 49, + "type": "BasicScheduler", + "pos": { + "0": -209.9811553955078, + "1": 606.95361328125 + }, + "size": { + "0": 210, + "1": 106 + }, + "flags": {}, + "order": 32, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 135, + "slot_index": 0 + } + ], + "outputs": [ + { + "name": "SIGMAS", + "type": "SIGMAS", + "links": [ + 130 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "BasicScheduler" + }, + "widgets_values": [ + "simple", + 60, + 1 + ] + }, + { + "id": 52, + "type": "InFluxModelSamplingPred", + "pos": { + "0": -202.98114013671875, + "1": 760.95361328125 + }, + "size": { + "0": 210, + "1": 122 + }, + "flags": {}, + "order": 24, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 169 + }, + { + "name": "width", + "type": "INT", + "link": 177, + "widget": { + "name": "width" + } + }, + { + "name": "height", + "type": "INT", + "link": 176, + "widget": { + "name": "height" + } + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 135, + 136 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "InFluxModelSamplingPred" + }, + "widgets_values": [ + 1.15, + 0.5, + 1024, + 1024 + ] + }, + { + "id": 73, + "type": "GetNode", + "pos": { + "0": -162.98114013671875, + "1": 927.95361328125 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 3, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 169 + ], + "slot_index": 0 + } + ], + "title": "Get_MODEL", + "properties": {}, + "widgets_values": [ + "MODEL" + ] + }, + { + "id": 82, + "type": "GetNode", + "pos": { + "0": -152.98114013671875, + "1": 989.95361328125 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 4, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 177 + ], + "slot_index": 0 + } + ], + "title": "Get_WIDTH", + "properties": {}, + "widgets_values": [ + "WIDTH" + ] + }, + { + "id": 81, + "type": "GetNode", + "pos": { + "0": -149.98114013671875, + "1": 1047.953857421875 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 5, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 176 + ], + "slot_index": 0 + } + ], + "title": "Get_HEIGHT", + "properties": {}, + "widgets_values": [ + "HEIGHT" + ] + }, + { + "id": 60, + "type": "FluxDeGuidance", + "pos": { + "0": -668.98095703125, + "1": 189.95367431640625 + }, + "size": { + "0": 211.60000610351562, + "1": 58 + }, + "flags": {}, + "order": 33, + "mode": 0, + "inputs": [ + { + "name": "conditioning", + "type": "CONDITIONING", + "link": 152 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 195 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "FluxDeGuidance" + }, + "widgets_values": [ + 0 + ] + }, + { + "id": 38, + "type": "CLIPTextEncode", + "pos": { + "0": -664.98095703125, + "1": 297.9537048339844 + }, + "size": { + "0": 
383.0185852050781, + "1": 148.05877685546875 + }, + "flags": { + "collapsed": true + }, + "order": 25, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 164 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 152 + ], + "slot_index": 0 + } + ], + "title": "CLIP Text Encode (Positive Prompt)", + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ], + "color": "#232", + "bgcolor": "#353" + }, + { + "id": 68, + "type": "GetNode", + "pos": { + "0": -599.98095703125, + "1": 365.9537048339844 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 6, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 164 + ], + "slot_index": 0 + } + ], + "title": "Get_CLIP", + "properties": {}, + "widgets_values": [ + "CLIP" + ] + }, + { + "id": 41, + "type": "VAEEncode", + "pos": { + "0": -634.98095703125, + "1": 450.9537048339844 + }, + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 26, + "mode": 0, + "inputs": [ + { + "name": "pixels", + "type": "IMAGE", + "link": 183 + }, + { + "name": "vae", + "type": "VAE", + "link": 165 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 132, + 189 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "VAEEncode" + }, + "widgets_values": [] + }, + { + "id": 86, + "type": "GetNode", + "pos": { + "0": -591.98095703125, + "1": 555.95361328125 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 7, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 183 + ], + "slot_index": 0 + } + ], + "title": "Get_IMG", + "properties": {}, + "widgets_values": [ + "IMG" + ] + }, + { + "id": 69, + "type": "GetNode", + "pos": { + "0": -584.98095703125, + "1": 621.95361328125 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 8, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 165 + ], + "slot_index": 0 + } + ], + "title": "Get_VAE", + "properties": {}, + "widgets_values": [ + "VAE" + ] + }, + { + "id": 48, + "type": "DisableNoise", + "pos": { + "0": 579.8067016601562, + "1": 457.393798828125 + }, + "size": { + "0": 140, + "1": 26 + }, + "flags": { + "collapsed": true + }, + "order": 9, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "NOISE", + "type": "NOISE", + "links": [ + 127 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "DisableNoise" + }, + "widgets_values": [] + }, + { + "id": 16, + "type": "KSamplerSelect", + "pos": { + "0": 535.8068237304688, + "1": 502.393798828125 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 10, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "SAMPLER", + "type": "SAMPLER", + "links": [ + 19 + ], + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "KSamplerSelect" + }, + "widgets_values": [ + "dpmpp_2m" + ] + }, + { + "id": 55, + "type": "OutFluxModelSamplingPred", + "pos": { + "0": 528.8067626953125, + "1": 769.3939208984375 + }, + "size": { + "0": 210, + "1": 122 + }, + "flags": {}, + "order": 30, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 201 + }, + { + "name": "width", + "type": "INT", + "link": 178, + "widget": { + "name": "width" + } + }, + { + "name": "height", + "type": "INT", + 
"link": 179, + "widget": { + "name": "height" + } + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 145, + 146 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "OutFluxModelSamplingPred" + }, + "widgets_values": [ + 1.15, + 0.5, + 1024, + 1024 + ] + }, + { + "id": 84, + "type": "GetNode", + "pos": { + "0": 582.8067016601562, + "1": 991.3939208984375 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 11, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 178 + ], + "slot_index": 0 + } + ], + "title": "Get_WIDTH", + "properties": {}, + "widgets_values": [ + "WIDTH" + ] + }, + { + "id": 83, + "type": "GetNode", + "pos": { + "0": 583.8067016601562, + "1": 1042.39404296875 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 12, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 179 + ], + "slot_index": 0 + } + ], + "title": "Get_HEIGHT", + "properties": {}, + "widgets_values": [ + "HEIGHT" + ] + }, + { + "id": 67, + "type": "SetNode", + "pos": { + "0": -960.2341918945312, + "1": 299.40826416015625 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 22, + "mode": 0, + "inputs": [ + { + "name": "CLIP", + "type": "CLIP", + "link": 172 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_CLIP", + "properties": { + "previousName": "CLIP" + }, + "widgets_values": [ + "CLIP" + ] + }, + { + "id": 65, + "type": "SetNode", + "pos": { + "0": -958.2341918945312, + "1": 407.40826416015625 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 23, + "mode": 0, + "inputs": [ + { + "name": "VAE", + "type": "VAE", + "link": 173 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_VAE", + "properties": { + "previousName": "VAE" + }, + "widgets_values": [ + "VAE" + ] + }, + { + "id": 85, + "type": "SetNode", + "pos": { + "0": -970.2341918945312, + "1": 622.4080810546875 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 36, + "mode": 0, + "inputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "link": 180 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_IMG", + "properties": { + "previousName": "IMG" + }, + "widgets_values": [ + "IMG" + ] + }, + { + "id": 78, + "type": "INTConstant", + "pos": { + "0": -1282.2342529296875, + "1": 808.4080810546875 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 13, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "value", + "type": "INT", + "links": [ + 174, + 181 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "INTConstant" + }, + "widgets_values": [ + 1344 + ], + "color": "#1b4669", + "bgcolor": "#29699c" + }, + { + "id": 76, + "type": "SetNode", + "pos": { + "0": -982.2343139648438, + "1": 810.4080810546875 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 27, + "mode": 0, + "inputs": [ + { + "name": "INT", + "type": "INT", + "link": 174 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_WIDTH", + "properties": { + "previousName": "WIDTH" + }, + "widgets_values": [ + "WIDTH" + ] + }, + { + "id": 80, + "type": "INTConstant", + "pos": { + "0": -1276.2342529296875, + "1": 917.4080810546875 + }, + "size": { + "0": 
210, + "1": 58 + }, + "flags": {}, + "order": 14, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "value", + "type": "INT", + "links": [ + 175, + 182 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "INTConstant" + }, + "widgets_values": [ + 768 + ], + "color": "#1b4669", + "bgcolor": "#29699c" + }, + { + "id": 79, + "type": "SetNode", + "pos": { + "0": -989.234375, + "1": 929.4080810546875 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": {}, + "order": 28, + "mode": 0, + "inputs": [ + { + "name": "INT", + "type": "INT", + "link": 175 + } + ], + "outputs": [ + { + "name": "*", + "type": "*", + "links": null + } + ], + "title": "Set_HEIGHT", + "properties": { + "previousName": "HEIGHT" + }, + "widgets_values": [ + "HEIGHT" + ] + }, + { + "id": 62, + "type": "FluxDeGuidance", + "pos": { + "0": 159.80670166015625, + "1": 204.3938446044922 + }, + "size": { + "0": 211.60000610351562, + "1": 58 + }, + "flags": {}, + "order": 34, + "mode": 0, + "inputs": [ + { + "name": "conditioning", + "type": "CONDITIONING", + "link": 159 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 194 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "FluxDeGuidance" + }, + "widgets_values": [ + 4 + ] + }, + { + "id": 70, + "type": "GetNode", + "pos": { + "0": 162.80670166015625, + "1": 561.3939208984375 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": false + }, + "order": 15, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 198 + ], + "slot_index": 0 + } + ], + "title": "Get_CLIP", + "properties": {}, + "widgets_values": [ + "CLIP" + ] + }, + { + "id": 72, + "type": "GetNode", + "pos": { + "0": 142.80665588378906, + "1": 1043.378662109375 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 16, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 201 + ], + "slot_index": 0 + } + ], + "title": "Get_MODEL", + "properties": {}, + "widgets_values": [ + "MODEL" + ] + }, + { + "id": 53, + "type": "VAEDecode", + "pos": { + "0": 568, + "1": -324 + }, + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 42, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 192 + }, + { + "name": "vae", + "type": "VAE", + "link": 170 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 141 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + }, + "widgets_values": [] + }, + { + "id": 74, + "type": "GetNode", + "pos": { + "0": 583, + "1": -228 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 17, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 170 + ], + "slot_index": 0 + } + ], + "title": "Get_VAE", + "properties": {}, + "widgets_values": [ + "VAE" + ] + }, + { + "id": 8, + "type": "VAEDecode", + "pos": { + "0": 887, + "1": 194 + }, + "size": { + "0": 210, + "1": 46 + }, + "flags": {}, + "order": 45, + "mode": 0, + "inputs": [ + { + "name": "samples", + "type": "LATENT", + "link": 24 + }, + { + "name": "vae", + "type": "VAE", + "link": 167 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 9, + 186 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + }, + "widgets_values": [] + }, + { + "id": 71, 
+ "type": "GetNode", + "pos": { + "0": 893, + "1": 286 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 18, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 167 + ], + "slot_index": 0 + } + ], + "title": "Get_VAE", + "properties": {}, + "widgets_values": [ + "VAE" + ] + }, + { + "id": 9, + "type": "SaveImage", + "pos": { + "0": 886, + "1": 363 + }, + "size": { + "0": 985.3012084960938, + "1": 1060.3828125 + }, + "flags": {}, + "order": 46, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 9 + } + ], + "outputs": [], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 87, + "type": "GetNode", + "pos": { + "0": 1544, + "1": 188 + }, + "size": { + "0": 210, + "1": 58 + }, + "flags": { + "collapsed": true + }, + "order": 19, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 185 + ], + "slot_index": 0 + } + ], + "title": "Get_IMG", + "properties": {}, + "widgets_values": [ + "IMG" + ] + }, + { + "id": 88, + "type": "ImageConcanate", + "pos": { + "0": 1666, + "1": 183 + }, + "size": { + "0": 315, + "1": 102 + }, + "flags": {}, + "order": 47, + "mode": 0, + "inputs": [ + { + "name": "image1", + "type": "IMAGE", + "link": 185 + }, + { + "name": "image2", + "type": "IMAGE", + "link": 186 + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 187 + ], + "slot_index": 0, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "ImageConcanate" + }, + "widgets_values": [ + "right", + true + ] + }, + { + "id": 54, + "type": "SaveImage", + "pos": { + "0": 1142, + "1": -1335 + }, + "size": { + "0": 985.3012084960938, + "1": 1060.3828125 + }, + "flags": {}, + "order": 44, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 141 + } + ], + "outputs": [], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 39, + "type": "LoadImage", + "pos": { + "0": -2187, + "1": 231 + }, + "size": { + "0": 868.442626953125, + "1": 998.3667602539062 + }, + "flags": {}, + "order": 20, + "mode": 0, + "inputs": [], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 118 + ], + "slot_index": 0, + "shape": 3 + }, + { + "name": "MASK", + "type": "MASK", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "LoadImage" + }, + "widgets_values": [ + "ddddd.jpg", + "image" + ] + }, + { + "id": 90, + "type": "FluxNoiseMixer", + "pos": { + "0": 176.8066864013672, + "1": 670.3939208984375 + }, + "size": { + "0": 210, + "1": 174 + }, + "flags": {}, + "order": 41, + "mode": 0, + "inputs": [ + { + "name": "latent", + "type": "LATENT", + "link": 189 + }, + { + "name": "noise", + "type": "LATENT", + "link": 188 + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 192, + 193 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "FluxNoiseMixer" + }, + "widgets_values": [ + 0.99, + 0, + "mix", + "add", + false + ] + }, + { + "id": 6, + "type": "CLIPTextEncode", + "pos": { + "0": 99.80657196044922, + "1": 311.393798828125 + }, + "size": { + "0": 348.0340881347656, + "1": 182.8946990966797 + }, + "flags": {}, + "order": 29, + "mode": 0, + "inputs": [ + { + "name": "clip", + "type": "CLIP", + "link": 198 + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 159 + ], + "slot_index": 0 + } + ], + "title": "CLIP Text Encode (Positive 
Prompt)", + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "An anime scene of a man in a suit running to catch a train with a man on the back. The man running is holding a large, vintage-style suitcase, and the scene is filled with dust. The environment appears dry and somewhat rural, with train tracks visible and a soft golden light. " + ], + "color": "#232", + "bgcolor": "#353" + }, + { + "id": 17, + "type": "BasicScheduler", + "pos": { + "0": 536.806884765625, + "1": 611.3939208984375 + }, + "size": { + "0": 210, + "1": 106 + }, + "flags": {}, + "order": 35, + "mode": 0, + "inputs": [ + { + "name": "model", + "type": "MODEL", + "link": 145, + "slot_index": 0 + } + ], + "outputs": [ + { + "name": "SIGMAS", + "type": "SIGMAS", + "links": [ + 20 + ], + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "BasicScheduler" + }, + "widgets_values": [ + "simple", + 20, + 0.9 + ] + }, + { + "id": 89, + "type": "SaveImage", + "pos": { + "0": 2055, + "1": -2 + }, + "size": { + "0": 2062.944580078125, + "1": 1798.65966796875 + }, + "flags": {}, + "order": 48, + "mode": 0, + "inputs": [ + { + "name": "images", + "type": "IMAGE", + "link": 187 + } + ], + "outputs": [], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + } + ], + "links": [ + [ + 9, + 8, + 0, + 9, + 0, + "IMAGE" + ], + [ + 19, + 16, + 0, + 13, + 2, + "SAMPLER" + ], + [ + 20, + 17, + 0, + 13, + 3, + "SIGMAS" + ], + [ + 24, + 13, + 0, + 8, + 0, + "LATENT" + ], + [ + 30, + 22, + 0, + 13, + 1, + "GUIDER" + ], + [ + 118, + 39, + 0, + 40, + 0, + "IMAGE" + ], + [ + 124, + 46, + 0, + 45, + 1, + "GUIDER" + ], + [ + 127, + 48, + 0, + 13, + 0, + "NOISE" + ], + [ + 128, + 50, + 0, + 45, + 0, + "NOISE" + ], + [ + 130, + 49, + 0, + 51, + 0, + "SIGMAS" + ], + [ + 131, + 51, + 0, + 45, + 3, + "SIGMAS" + ], + [ + 132, + 41, + 0, + 45, + 4, + "LATENT" + ], + [ + 135, + 52, + 0, + 49, + 0, + "MODEL" + ], + [ + 136, + 52, + 0, + 46, + 0, + "MODEL" + ], + [ + 141, + 53, + 0, + 54, + 0, + "IMAGE" + ], + [ + 145, + 55, + 0, + 17, + 0, + "MODEL" + ], + [ + 146, + 55, + 0, + 22, + 0, + "MODEL" + ], + [ + 152, + 38, + 0, + 60, + 0, + "CONDITIONING" + ], + [ + 159, + 6, + 0, + 62, + 0, + "CONDITIONING" + ], + [ + 161, + 64, + 0, + 45, + 2, + "SAMPLER" + ], + [ + 164, + 68, + 0, + 38, + 0, + "CLIP" + ], + [ + 165, + 69, + 0, + 41, + 1, + "VAE" + ], + [ + 167, + 71, + 0, + 8, + 1, + "VAE" + ], + [ + 169, + 73, + 0, + 52, + 0, + "MODEL" + ], + [ + 170, + 74, + 0, + 53, + 1, + "VAE" + ], + [ + 171, + 75, + 0, + 66, + 0, + "*" + ], + [ + 172, + 75, + 1, + 67, + 0, + "CLIP" + ], + [ + 173, + 75, + 2, + 65, + 0, + "VAE" + ], + [ + 174, + 78, + 0, + 76, + 0, + "*" + ], + [ + 175, + 80, + 0, + 79, + 0, + "*" + ], + [ + 176, + 81, + 0, + 52, + 2, + "INT" + ], + [ + 177, + 82, + 0, + 52, + 1, + "INT" + ], + [ + 178, + 84, + 0, + 55, + 1, + "INT" + ], + [ + 179, + 83, + 0, + 55, + 2, + "INT" + ], + [ + 180, + 40, + 0, + 85, + 0, + "*" + ], + [ + 181, + 78, + 0, + 40, + 1, + "INT" + ], + [ + 182, + 80, + 0, + 40, + 2, + "INT" + ], + [ + 183, + 86, + 0, + 41, + 0, + "IMAGE" + ], + [ + 185, + 87, + 0, + 88, + 0, + "IMAGE" + ], + [ + 186, + 8, + 0, + 88, + 1, + "IMAGE" + ], + [ + 187, + 88, + 0, + 89, + 0, + "IMAGE" + ], + [ + 188, + 45, + 0, + 90, + 1, + "LATENT" + ], + [ + 189, + 41, + 0, + 90, + 0, + "LATENT" + ], + [ + 192, + 90, + 0, + 53, + 0, + "LATENT" + ], + [ + 193, + 90, + 0, + 13, + 4, + "LATENT" + ], + [ + 194, + 62, + 0, + 22, + 1, + "CONDITIONING" + ], + [ + 195, + 60, + 0, + 46, + 1, + "CONDITIONING" + ], + 
[ + 198, + 70, + 0, + 6, + 0, + "CLIP" + ], + [ + 201, + 72, + 0, + 55, + 0, + "MODEL" + ] + ], + "groups": [ + { + "title": "Setup", + "bounding": [ + -2197, + 118, + 1461, + 1121 + ], + "color": "#3f789e", + "font_size": 24, + "flags": {} + }, + { + "title": "Sampling", + "bounding": [ + 80, + 119, + 677, + 933 + ], + "color": "#3f789e", + "font_size": 24, + "flags": {} + }, + { + "title": "Unsampling", + "bounding": [ + -678, + 116, + 707, + 942 + ], + "color": "#3f789e", + "font_size": 24, + "flags": {} + } + ], + "config": {}, + "extra": { + "ds": { + "scale": 0.5559917313493434, + "offset": [ + 235.38462966291002, + -84.00688553137525 + ] + }, + "groupNodes": {} + }, + "version": 0.4 +} \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/flux/__pycache__/layers.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/flux/__pycache__/layers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5d9a07e47b0308a4fef82871e9585d368229c5ad Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/flux/__pycache__/layers.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/flux/__pycache__/model.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/flux/__pycache__/model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8895410142e2ddf7aeb9a5bfff85198bb93bb8ef Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/flux/__pycache__/model.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/flux/layers.py b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/flux/layers.py new file mode 100644 index 0000000000000000000000000000000000000000..4c6144954749e7a145d7bfc4ac6caa315723328d --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/flux/layers.py @@ -0,0 +1,165 @@ +from typing import List +import torch +import torch.nn.functional as F +from einops import rearrange +from torch import Tensor + +from comfy.ldm.flux.math import apply_rope +from comfy.ldm.flux.layers import SingleStreamBlock as OriginalSingleStreamBlock, DoubleStreamBlock as OriginalDoubleStreamBlock + + +import torch +from einops import rearrange +from torch import Tensor +from comfy.ldm.modules.attention import optimized_attention + +# from ..utils.rave_rope_attention import rave_rope_attention +# from ..utils.rave_attention import rave_attention +# from ..utils.joint_attention import joint_attention + + +def attention(q: Tensor, k: Tensor, v: Tensor, pe: Tensor, skip_rope: bool= False) -> Tensor: + if not skip_rope: + q, k = apply_rope(q, k, pe) + heads = q.shape[1] + x = optimized_attention(q, k, v, heads, skip_reshape=True) + return x + + +def ref_attention2(q: Tensor, k:Tensor, v:Tensor, pe: Tensor, ref_pes: List[Tensor], transformer_options, timesteps) -> Tensor: + # rope tgt + timesteps = timesteps.item() + q_tgt, k_tgt = apply_rope(q[[0]], k[[0]], pe) + strength = transformer_options['REF_OPTIONS']['strength'] + + # rope refs + q_ref = q[[-1],:,256:,:] + k_ref = k[[-1],:,256:,:] + + _, k_ref1 = apply_rope(q_ref, k_ref, ref_pes[0]) + # k_ref1 = k_ref1[[-1], :, 256:, :] * strength + + # if 0.85 <= timesteps: + + # k_ref1[:, :, :, 16+0 :16+0 +28+8-1] *= 0.1 + # k_ref1[:, :, :, 16+56:16+56+28+8-1] *= 0.1 + + # elif 0.6 <= timesteps < 0.85: + + # k_ref1[:, :, :, 16+0 :16+0 +28-20-1] *= 0.1 + # k_ref1[:, :, :, 16+56:16+56+28-20-1] *= 0.1 + + # _, k_ref2 = apply_rope(q_ref, k_ref, ref_pes[1]) + # k_ref2 = k_ref2[[-1], :, 256:, :] + v_ref 
= v[[-1],:, 256:, :] + + k_tgt = torch.cat([k_tgt, k_ref1], dim=2) + v_tgt = torch.cat([v[[0]], v_ref], dim=2) + tgt_out = attention(q_tgt, k_tgt, v_tgt, pe, skip_rope=True) + + ref_out = attention(q[[-1]], k[[-1]], v[[-1]], pe) + + return torch.cat([tgt_out, ref_out], dim=0) + + +def ref_attention(q: Tensor, k:Tensor, v:Tensor, pe: Tensor, ref_pes: List[Tensor], transformer_options, timesteps) -> Tensor: + # rope tgt + timesteps = timesteps.item() + strength = transformer_options['REF_OPTIONS']['strength'] + + # rope refs + q_ref1 = torch.cat([q[[0]], q[[-1], :,256:,:]], dim=2) + k_ref1 = torch.cat([k[[0]], k[[-1], :,256:,:]*strength], dim=2) + v_ref = torch.cat([v[[0]], v[[-1], :,256:,:]], dim=2) + + _, k_ref1 = apply_rope(q_ref1, k_ref1, ref_pes[1]) + + tgt_out = attention(q[[0]], k_ref1, v_ref, pe, skip_rope=True) + + ref_out = attention(q[[-1]], k[[-1]], v[[-1]], pe) + + return torch.cat([tgt_out, ref_out], dim=0) + + +class DoubleStreamBlock(OriginalDoubleStreamBlock): + def forward(self, img: Tensor, txt: Tensor, vec: Tensor, pe: Tensor, ref_pes: None | List[Tensor], timestep, transformer_options={}): + img_mod1, img_mod2 = self.img_mod(vec) + txt_mod1, txt_mod2 = self.txt_mod(vec) + + # prepare image for attention + img_modulated = self.img_norm1(img) + img_modulated = (1 + img_mod1.scale) * img_modulated + img_mod1.shift + img_qkv = self.img_attn.qkv(img_modulated) + img_q, img_k, img_v = rearrange(img_qkv, "B L (K H D) -> K B H L D", K=3, H=self.num_heads) + img_q, img_k = self.img_attn.norm(img_q, img_k, img_v) + + # prepare txt for attention + txt_modulated = self.txt_norm1(txt) + txt_modulated = (1 + txt_mod1.scale) * txt_modulated + txt_mod1.shift + txt_qkv = self.txt_attn.qkv(txt_modulated) + txt_q, txt_k, txt_v = rearrange(txt_qkv, "B L (K H D) -> K B H L D", K=3, H=self.num_heads) + txt_q, txt_k = self.txt_attn.norm(txt_q, txt_k, txt_v) + + + # run actual attention + q = torch.cat((txt_q, img_q), dim=2) + k = torch.cat((txt_k, img_k), dim=2) + v = torch.cat((txt_v, img_v), dim=2) + ref_options = transformer_options.get('REF_OPTIONS', None) + if ref_options is not None and ref_pes is not None: + attn = ref_attention(q, k, v, pe, ref_pes, transformer_options, timestep[0]) + else: + attn = attention(q, k, v, pe=pe) + + txt_attn, img_attn = attn[:, : txt.shape[1]], attn[:, txt.shape[1] :] + txt_attn = txt_attn[0:1].repeat(img_attn.shape[0], 1, 1) + + # img_attn.shape [16, 2304, 3072] + + # calculate the img bloks + img = img + img_mod1.gate * self.img_attn.proj(img_attn) + img = img + img_mod2.gate * self.img_mlp((1 + img_mod2.scale) * self.img_norm2(img) + img_mod2.shift) + + # calculate the txt bloks + txt = txt + txt_mod1.gate * self.txt_attn.proj(txt_attn) + txt = txt + txt_mod2.gate * self.txt_mlp((1 + txt_mod2.scale) * self.txt_norm2(txt) + txt_mod2.shift) + return img, txt + + +class SingleStreamBlock(OriginalSingleStreamBlock): + def forward(self, x: Tensor, vec: Tensor, pe: Tensor, ref_pes, timestep, transformer_options={}) -> Tensor: + mod, _ = self.modulation(vec) + x_mod = (1 + mod.scale) * self.pre_norm(x) + mod.shift + qkv, mlp = torch.split(self.linear1(x_mod), [3 * self.hidden_size, self.mlp_hidden_dim], dim=-1) + + q, k, v = rearrange(qkv, "B L (K H D) -> K B H L D", K=3, H=self.num_heads) + q, k = self.norm(q, k, v) + + ref_options = transformer_options.get('REF_OPTIONS', None) + if ref_options is not None and ref_pes is not None: + attn = ref_attention(q, k, v, pe, ref_pes, transformer_options, timestep[0]) + else: + attn = attention(q, k, v, pe=pe) + # 
txt_attn, img_attn = attn[:, :256], attn[:, 256:] + + # txt_attn = temporal_attention(txt_attn, self.num_heads, transformer_options) + # attn[:, :256] = txt_attn + + # img_attn = interframe_attention(img_attn, self.num_heads, transformer_options) + # attn[:, 256:] = img_attn + + # compute activation in mlp stream, cat again and run second linear layer + output = self.linear2(torch.cat((attn, self.mlp_act(mlp)), 2)) + return x + mod.gate * output + + +def inject_blocks(diffusion_model): + for i, block in enumerate(diffusion_model.double_blocks): + block.__class__ = DoubleStreamBlock + block.idx = i + + for i, block in enumerate(diffusion_model.single_blocks): + block.__class__ = SingleStreamBlock + block.idx = i + + return diffusion_model \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/flux/model.py b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/flux/model.py new file mode 100644 index 0000000000000000000000000000000000000000..00af8506239ca4c2972a9b894092b80946de2758 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/flux/model.py @@ -0,0 +1,147 @@ +#Original code can be found on: https://github.com/black-forest-labs/flux +from typing import List +import torch +from torch import Tensor, nn + +from comfy.ldm.flux.layers import timestep_embedding +from comfy.ldm.flux.model import Flux as OriginalFlux + +from einops import rearrange, repeat +import comfy.ldm.common_dit + +from ..utils.noise_utils import add_noise_flux, add_noise + + +class Flux(OriginalFlux): + def forward_orig( + self, + img: Tensor, + img_ids: Tensor, + txt: Tensor, + txt_ids: Tensor, + timesteps: Tensor, + y: Tensor, + guidance: Tensor = None, + control=None, + transformer_options = {}, + ref_img_ids: List[Tensor] | None = None, + ) -> Tensor: + if img.ndim != 3 or txt.ndim != 3: + raise ValueError("Input img and txt tensors must have 3 dimensions.") + + # running on sequences img + img = self.img_in(img) + vec = self.time_in(timestep_embedding(timesteps, 256).to(img.dtype)) + if self.params.guidance_embed: + if guidance is None: + raise ValueError("Didn't get guidance strength for guidance distilled model.") + vec = vec + self.guidance_in(timestep_embedding(guidance, 256).to(img.dtype)) + + vec = vec + self.vector_in(y) + txt = self.txt_in(txt) + + ref_pes = None + if ref_img_ids is not None: + ids = torch.cat((txt_ids[:-1], img_ids), dim=1) + pe = self.pe_embedder(ids) + ref_pe1 = self.pe_embedder(ref_img_ids[0]) + ref_id2 = torch.cat((txt_ids[-1:], img_ids, ref_img_ids[1]), dim=1) + ref_pe2 = self.pe_embedder(ref_id2) + ref_pes = [ref_pe1, ref_pe2] + else: + ids = torch.cat((txt_ids, img_ids), dim=1) + pe = self.pe_embedder(ids) + + for i, block in enumerate(self.double_blocks): + + img, txt = block(img=img, txt=txt, vec=vec, pe=pe, ref_pes=ref_pes, timestep=timesteps, transformer_options=transformer_options) + + if control is not None: # Controlnet + control_i = control.get("input") + if i < len(control_i): + add = control_i[i] + if add is not None: + img += add + + img = torch.cat((txt, img), 1) + for i, block in enumerate(self.single_blocks): + img = block(img, vec=vec, pe=pe, ref_pes=ref_pes, timestep=timesteps, transformer_options=transformer_options) + + if control is not None: # Controlnet + control_o = control.get("output") + if i < len(control_o): + add = control_o[i] + if add is not None: + img[:, txt.shape[1] :, ...] += add + + img = img[:, txt.shape[1] :, ...] 
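+        # note: the single-stream blocks ran on the concatenated [txt, img] sequence,
+        # so the slice above keeps only the image tokens for the final projection layer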
+ + img = self.final_layer(img, vec) # (N, T, patch_size ** 2 * out_channels) + return img + + def _get_img_ids(self, x, bs, h_len, w_len, h_start, h_end, w_start, w_end): + img_ids = torch.zeros((h_len, w_len, 3), device=x.device, dtype=x.dtype) + img_ids[..., 1] = img_ids[..., 1] + torch.linspace(h_start, h_end - 1, steps=h_len, device=x.device, dtype=x.dtype)[:, None] + img_ids[..., 2] = img_ids[..., 2] + torch.linspace(w_start, w_end - 1, steps=w_len, device=x.device, dtype=x.dtype)[None, :] + img_ids = repeat(img_ids, "h w c -> b (h w) c", b=bs) + return img_ids + + def forward(self, x, timestep, context, y, guidance, control=None, transformer_options={}, **kwargs): + bs, c, h, w = x.shape + transformer_options['original_shape'] = x.shape + patch_size = 2 + x = comfy.ldm.common_dit.pad_to_patch_size(x, (patch_size, patch_size)) + + h_len = ((h + (patch_size // 2)) // patch_size) + w_len = ((w + (patch_size // 2)) // patch_size) + + img = rearrange(x, "b c (h ph) (w pw) -> b (h w) (c ph pw)", ph=patch_size, pw=patch_size) + + txt_ids = torch.zeros((bs, context.shape[1], 3), device=x.device, dtype=x.dtype) + img_ids_orig = self._get_img_ids(x, bs, h_len, w_len, 0, h_len, 0, w_len) + + ref_options = transformer_options.get('REF_OPTIONS', None) + + perform_ref = False + if ref_options is not None: + ref_start_percent = ref_options.get('start_percent', 0) + ref_end_percent = ref_options.get('end_percent', -1) + sigma_percents = ref_options.get('sigma_to_percent', {}) + step_percent = sigma_percents[timestep[0].item()] + perform_ref = ref_start_percent <= step_percent < ref_end_percent + sigma_to_step = ref_options.get('sigma_to_step', {}) + + ref_img_ids = None + if perform_ref: + # ref + ref_latent = ref_options['ref_latent'] + ref_latent = ref_latent.to(x.device) + sigma = ref_options.get('sigmas', [])[sigma_to_step[timestep[0].item()]].to(x.device) + ref_latent = add_noise_flux(ref_latent, torch.randn_like(ref_latent), sigma) + # noise = torch.randn_like(ref_latent) + # ref_latent = add_noise(ref_latent, noise, sigma) + ref_latent = rearrange(ref_latent, "b c (h ph) (w pw) -> b (h w) (c ph pw)", ph=patch_size, pw=patch_size) + img = torch.cat([img, ref_latent], dim=0) # this won't work with cfg + # horizontal translation + diff = 2 + ref_img_id1 = self._get_img_ids(x, bs, h_len, w_len, diff+h_len, diff+2*h_len, w_len, diff+2*w_len) + ref_img_id2 = self._get_img_ids(x, bs, h_len, w_len, 0, h_len, 0, 2*w_len) + ref_img_ids = [ref_img_id1, ref_img_id2] + + timestep = timestep.repeat(2) + txt_ids = txt_ids.repeat(2, 1, 1) + context = context.repeat(2, 1, 1) + y = y.repeat(2, 1) + guidance = guidance.repeat(2) + + out = self.forward_orig(img, img_ids_orig, context, txt_ids, timestep, y, guidance, control, transformer_options=transformer_options, ref_img_ids=ref_img_ids) + + if perform_ref: + out = out[-1:] + return rearrange(out, "b (h w) (c ph pw) -> b c (h ph) (w pw)", h=h_len, w=w_len, ph=2, pw=2)[:,:,:h,:w] + + +def inject_flux(diffusion_model: OriginalFlux): + diffusion_model.__class__ = Flux + diffusion_model.is_ref = True + return diffusion_model \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/apply_ref_flux.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/apply_ref_flux.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..770370268bdd2285b80cd7be28cb9043cbfb50a4 Binary files /dev/null and 
b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/apply_ref_flux.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/flip_sigmas_node.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/flip_sigmas_node.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2bb1c150c84a5c5092946587fa61198459241ead Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/flip_sigmas_node.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/flux_deguidance_node.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/flux_deguidance_node.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9c618ae231cadb9361d3b90da03a0716ed517a7a Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/flux_deguidance_node.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/influx_model_pred_node.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/influx_model_pred_node.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ba50d7ecc20abf9a5d7b4957040ded9bc08dbbd7 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/influx_model_pred_node.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/inverse_sampler_node.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/inverse_sampler_node.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..befa66458a5cf350c5579f17b1c09844ee281ed9 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/inverse_sampler_node.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/mix_noise_node.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/mix_noise_node.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..36b1ea161253252beb89a2ca5e8a88a3793029d6 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/mix_noise_node.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/rectified_sampler_nodes.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/rectified_sampler_nodes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ad87fccafc6b5d724e075afcd15570b101aee67c Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/__pycache__/rectified_sampler_nodes.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/add_flow_flux_node.py b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/add_flow_flux_node.py new file mode 100644 index 0000000000000000000000000000000000000000..df944b4ed447380298e58bd9b8e75008b6f59e83 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/add_flow_flux_node.py @@ -0,0 +1,29 @@ +import comfy.sd +import comfy.model_sampling +import comfy.latent_formats +import nodes + + +class AddFluxFlowNode: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "flow": ("FLOW",), + }} + + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "advanced/model" + + def patch(self, model, flow): + m = model.clone() + 
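+        # mutate the clone's options below (not the input model's), so the original model is left untouched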
+        model_options = {**m.model_options}
+        m.model_options = model_options
+        transformer_options = {**m.model_options.get('transformer_options', {})}
+        m.model_options['transformer_options'] = transformer_options
+
+        transformer_options['FLOW'] = flow
+
+        return (m, )
+
diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/apply_rave_node.py b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/apply_rave_node.py
new file mode 100644
index 0000000000000000000000000000000000000000..9a541c2dfc275096c88d9673cd94b9e139730546
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/apply_rave_node.py
@@ -0,0 +1,41 @@
+
+default_attn = {
+    'inputs': [True] * 10,
+    'input_idxs': list(range(10)),
+    'middle_0': True,
+    'outputs': [True] * 12,
+    'output_idxs': list(range(12))
+}
+
+
+class ApplyFluxRaveAttentionNode:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required":
+                {
+                    "model": ("MODEL",),
+                    "grid_size": ("INT", {"default": 3, "min": 1, "max": 10}),
+                    "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}),
+                },
+                "optional": {
+                    "attn_override": ("ATTN_OVERRIDE",)
+                }
+        }
+
+    RETURN_TYPES = ("MODEL",)
+    FUNCTION = "apply"
+
+    CATEGORY = "attention"
+
+    def apply(self, model, grid_size, seed, attn_override=default_attn):
+        model = model.clone()
+
+        transformer_options = {**model.model_options.get('transformer_options', {})}
+        model.model_options['transformer_options'] = transformer_options
+
+        transformer_options['RAVE'] = {
+            "grid_size": grid_size,
+            "seed": seed,
+        }
+
+        return (model, )
\ No newline at end of file
diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/apply_ref_flux.py b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/apply_ref_flux.py
new file mode 100644
index 0000000000000000000000000000000000000000..ece426f464bd4a4acdeaacb5c9f0e38dd5cfb1d7
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/apply_ref_flux.py
@@ -0,0 +1,65 @@
+from ..flux.model import inject_flux
+from ..flux.layers import inject_blocks
+
+
+class ApplyRefFluxNode:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {
+            "model": ("MODEL",),
+        }}
+    RETURN_TYPES = ("MODEL",)
+    FUNCTION = "apply"
+
+    CATEGORY = "reference"
+
+    def apply(self, model):
+        # if hasattr(model.model.diffusion_model, 'is_ref') and model.model.diffusion_model.is_ref:
+        #     return (model,)
+        inject_flux(model.model.diffusion_model)
+        inject_blocks(model.model.diffusion_model)
+        return (model,)
+
+
+class ConfigureRefFluxNode:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {
+            "model": ("MODEL",),
+            "latent": ("LATENT",),
+            "start_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.01}),
+            "end_percent": ("FLOAT", {"default": 0.3, "min": 0.0, "max": 1.0, "step": 0.01}),
+            "strength": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.001}),
+            "sigmas": ("SIGMAS",)
+        }}
+    RETURN_TYPES = ("MODEL",)
+    FUNCTION = "apply"
+
+    CATEGORY = "reference"
+
+    def apply(self,
+              model,
+              latent,
+              start_percent,
+              end_percent,
+              strength,
+              sigmas):
+        model = model.clone()
+        sigma_to_percent = { sigma.item(): idx/len(sigmas) for idx, sigma in enumerate(sigmas)}
+        sigma_to_step = { sigma.item(): idx for idx, sigma in enumerate(sigmas)}
+
+        transformer_options = model.model_options.get('transformer_options', {})
+        transformer_options = { **transformer_options }
+        process_latent_in = model.get_model_object("process_latent_in")
+        transformer_options['REF_OPTIONS'] = {
+            'ref_latent': process_latent_in(latent['samples']),
+            'start_percent': start_percent,
+            'end_percent': end_percent,
+            'sigma_to_percent': sigma_to_percent,
+            'sigma_to_step': sigma_to_step,
+            'strength': strength,
+            'sigmas': sigmas,
+        }
+        model.model_options['transformer_options'] = transformer_options
+
+        return (model,)
diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/flip_sigmas_node.py b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/flip_sigmas_node.py
new file mode 100644
index 0000000000000000000000000000000000000000..a6a6d3c8ff90bddcca55fbc62c5b11c62fa3a040
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/flip_sigmas_node.py
@@ -0,0 +1,17 @@
+
+class InFluxFlipSigmasNode:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": { "sigmas": ("SIGMAS",),
+                }}
+
+    RETURN_TYPES = ("SIGMAS",)
+    FUNCTION = "flip"
+
+    CATEGORY = "flux"
+
+    def flip(self, sigmas):
+        sigmas = sigmas.flip(0)  # Tensor.flip requires a dims argument; reverse along the step axis
+        if sigmas[0] == 0:
+            sigmas[0] = 1e-3  # avoid starting the flipped schedule at exactly zero
+        return (sigmas, )
diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/flux_deguidance_node.py b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/flux_deguidance_node.py
new file mode 100644
index 0000000000000000000000000000000000000000..948dfc5f6a9b503a05e30920293371e179a58e5a
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/flux_deguidance_node.py
@@ -0,0 +1,18 @@
+import node_helpers
+
+class FluxDeGuidance:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {
+            "conditioning": ("CONDITIONING", ),
+            "guidance": ("FLOAT", {"default": 3.5, "min": -100.0, "max": 100.0, "step": 0.1}),
+        }}
+
+    RETURN_TYPES = ("CONDITIONING",)
+    FUNCTION = "append"
+
+    CATEGORY = "advanced/conditioning/flux"
+
+    def append(self, conditioning, guidance):
+        c = node_helpers.conditioning_set_values(conditioning, {"guidance": guidance})
+        return (c, )
\ No newline at end of file
diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/influx_model_pred_node.py b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/influx_model_pred_node.py
new file mode 100644
index 0000000000000000000000000000000000000000..524ced9d82eafd258017fb6e5675622c3e067af0
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/influx_model_pred_node.py
@@ -0,0 +1,125 @@
+import comfy.sd
+import comfy.model_sampling
+import comfy.latent_formats
+import nodes
+
+
+class InverseCONST:
+    def calculate_input(self, sigma, noise):
+        return noise
+
+    def calculate_denoised(self, sigma, model_output, model_input):
+        sigma = sigma.view(sigma.shape[:1] + (1,) * (model_output.ndim - 1))
+        return model_output
+
+    def noise_scaling(self, sigma, noise, latent_image, max_denoise=False):
+        return latent_image
+
+    def inverse_noise_scaling(self, sigma, latent):
+        return latent
+
+
+class InFluxModelSamplingPredNode:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": { "model": ("MODEL",),
+                              "max_shift": ("FLOAT", {"default": 1.15, "min": 0.0, "max": 100.0, "step":0.01}),
+                              "base_shift": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 100.0, "step":0.01}),
+                              "width": ("INT", {"default": 1024, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}),
+                              "height": ("INT", {"default": 1024, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}),
+                            }}
+
+    RETURN_TYPES = ("MODEL",)
+    FUNCTION = "patch"
+
+    CATEGORY = "advanced/model"
+
+    def patch(self, model, max_shift, base_shift, width, height):
+        m = model.clone()
+
+        x1 = 256
+        x2 = 4096
+        mm = (max_shift - base_shift) / (x2 - x1)
+        b = base_shift - mm * x1
+        shift = (width * height / (8 * 8 * 2 * 2)) * mm + b
+
+        sampling_base =
comfy.model_sampling.ModelSamplingFlux + sampling_type = InverseCONST + + class ModelSamplingAdvanced(sampling_base, sampling_type): + pass + + model_sampling = ModelSamplingAdvanced(model.model.model_config) + model_sampling.set_parameters(shift=shift) + m.add_object_patch("model_sampling", model_sampling) + return (m, ) + + +class OutCONST: + def calculate_input(self, sigma, noise): + return noise + + def calculate_denoised(self, sigma, model_output, model_input): + sigma = sigma.view(sigma.shape[:1] + (1,) * (model_output.ndim - 1)) + return model_input - model_output * sigma + + def noise_scaling(self, sigma, noise, latent_image, max_denoise=False): + return latent_image + + def inverse_noise_scaling(self, sigma, latent): + return latent / (1.0 - sigma) + + +class ReverseCONST: + def calculate_input(self, sigma, noise): + return noise + + def calculate_denoised(self, sigma, model_output, model_input): + sigma = sigma.view(sigma.shape[:1] + (1,) * (model_output.ndim - 1)) + return model_output # model_input - model_output * sigma + + def noise_scaling(self, sigma, noise, latent_image, max_denoise=False): + return latent_image + + def inverse_noise_scaling(self, sigma, latent): + return latent / (1.0 - sigma) + + +class OutFluxModelSamplingPredNode: + @classmethod + def INPUT_TYPES(s): + return {"required": { "model": ("MODEL",), + "max_shift": ("FLOAT", {"default": 1.15, "min": 0.0, "max": 100.0, "step":0.01}), + "base_shift": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 100.0, "step":0.01}), + "width": ("INT", {"default": 1024, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 1024, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}), + "reverse_ode": ("BOOLEAN", {"default": False}), + }} + + RETURN_TYPES = ("MODEL",) + FUNCTION = "patch" + + CATEGORY = "advanced/model" + + def patch(self, model, max_shift, base_shift, width, height, reverse_ode=False): + m = model.clone() + + x1 = 256 + x2 = 4096 + mm = (max_shift - base_shift) / (x2 - x1) + b = base_shift - mm * x1 + shift = (width * height / (8 * 8 * 2 * 2)) * mm + b + + sampling_base = comfy.model_sampling.ModelSamplingFlux + if reverse_ode: + sampling_type = ReverseCONST + else: + sampling_type = OutCONST + + class ModelSamplingAdvanced(sampling_base, sampling_type): + pass + + model_sampling = ModelSamplingAdvanced(model.model.model_config) + model_sampling.set_parameters(shift=shift) + m.add_object_patch("model_sampling", model_sampling) + return (m, ) diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/inverse_sampler_node.py b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/inverse_sampler_node.py new file mode 100644 index 0000000000000000000000000000000000000000..205f9af051c29c92d506dc5a2d58a57e1320e383 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/inverse_sampler_node.py @@ -0,0 +1,37 @@ +import torch +from tqdm import trange + +from comfy.samplers import KSAMPLER + + +@torch.no_grad() +def sample_inverse(model, x, sigmas, extra_args=None, callback=None, disable=None, s_churn=0., s_tmin=0., s_tmax=float('inf'), s_noise=1.): + extra_args = {} if extra_args is None else extra_args + s_in = x.new_ones([x.shape[0]]) + for i in trange(len(sigmas) - 1, disable=disable): + sigma_hat = sigmas[i] + + denoised = model(x, sigma_hat * s_in, **extra_args) + if callback is not None: + callback({'x': x, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigma_hat, 'denoised': denoised}) + dt = sigmas[i + 1] - sigma_hat + x = x + denoised * dt + return x + + +class 
FluxInverseSamplerNode: + @classmethod + def INPUT_TYPES(s): + return {"required": { + }, "optional": { + }} + RETURN_TYPES = ("SAMPLER",) + FUNCTION = "build" + + CATEGORY = "flux" + + def build(self): + + sampler = KSAMPLER(sample_inverse) + + return (sampler, torch.Tensor([0])) diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/mix_noise_node.py b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/mix_noise_node.py new file mode 100644 index 0000000000000000000000000000000000000000..527871d7e67058b2013c75935e28a6e4c3164213 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/mix_noise_node.py @@ -0,0 +1,50 @@ +import torch + + +def mix(latent_image, noise_image, mix_percent): + return ((1 - mix_percent) * latent_image + mix_percent * + noise_image) / ((mix_percent**2 + (1-mix_percent)**2) ** 0.5) + + + + +class FluxNoiseMixerNode: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "latent": ("LATENT", ), + "noise": ("LATENT", ), + "mix_percent": ("FLOAT", {"default": 0.5, "min": 0, "max": 1.0, "step": 0.01}), + "random_noise": ("FLOAT", {"default": 0.0, "min": 0, "max": 100.0, "step": 0.01}), + "mix_type": (['mix', 'add'],), + "random_mix_type": (['mix', 'add'],), + "take_diff": ("BOOLEAN", ), + }} + + RETURN_TYPES = ("LATENT",) + FUNCTION = "append" + + CATEGORY = "fluxtapoz" + + def append(self, latent, noise, mix_percent, random_noise, mix_type, random_mix_type, take_diff): + latent_image = latent.copy() + noise = noise['samples'] + latent = latent_image['samples'] + + random_noise_latent = torch.randn_like(noise) + if random_mix_type == 'mix': + noise = mix(noise, random_noise_latent, random_noise) + # noise = (noise * (1-random_noise) + random_noise_latent * (random_noise)) + elif random_mix_type == 'add': + noise += random_noise_latent * random_noise + + if mix_type == 'mix': + new_latent = mix(latent, noise, mix_percent) + # new_latent = (latent * (1-mix_percent) + noise * (mix_percent)) + elif mix_type == 'add': + new_latent = latent + noise * mix_percent + + if take_diff: + new_latent = new_latent - latent * mix_percent + latent_image['samples'] = new_latent + return (latent_image, ) \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/rectified_sampler_nodes.py b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/rectified_sampler_nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..bddc71a33583851074ebf91983e468dd301ef4e9 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/nodes/rectified_sampler_nodes.py @@ -0,0 +1,136 @@ +import torch +from tqdm import trange + +from comfy.samplers import KSAMPLER + + +def get_sample_forward(gamma, seed): + # Controlled Forward ODE (Algorithm 1) + generator = torch.Generator() + generator.manual_seed(seed) + + @torch.no_grad() + def sample_forward(model, y0, sigmas, extra_args=None, callback=None, disable=None): + extra_args = {} if extra_args is None else extra_args + Y = y0.clone() + y1 = torch.randn(Y.shape, generator=generator).to(y0.device) + N = len(sigmas)-1 + s_in = y0.new_ones([y0.shape[0]]) + for i in trange(N, disable=disable): + t_i = model.inner_model.inner_model.model_sampling.timestep(sigmas[i]) + + # 6. Unconditional Vector field uti(Yti) = u(Yti, ti, Φ(“”); φ) + unconditional_vector_field = model(Y, s_in * sigmas[i], **extra_args) # this implementation takes sigma instead of timestep + + # 7.Conditional Vector field uti(Yti|y1) = (y1−Yti)/1−ti + conditional_vector_field = (y1-Y)/(1-t_i) + + # 8. 
Controlled Vector field ûti(Yti) = uti(Yti) + γ (uti(Yti|y1) − uti(Yti))
+            controlled_vector_field = unconditional_vector_field + gamma * (conditional_vector_field - unconditional_vector_field)
+
+            # 9. Next state Yti+1 = Yti + ûti(Yti) (σ(ti+1) − σ(ti))
+            Y = Y + controlled_vector_field * (sigmas[i+1] - sigmas[i])
+
+            if callback is not None:
+                callback({'x': Y, 'denoised': Y, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i]})
+
+        return Y
+
+    return sample_forward
+
+
+def generate_eta_values(steps, start_time, end_time, eta, eta_trend):
+    eta_values = [0] * steps
+    span = max(end_time - start_time - 1, 1)  # guard against division by zero for one-step windows
+
+    if eta_trend == 'constant':
+        for i in range(start_time, end_time):
+            eta_values[i] = eta
+    elif eta_trend == 'linear_increase':
+        for i in range(start_time, end_time):
+            progress = (i - start_time) / span
+            eta_values[i] = eta * progress
+    elif eta_trend == 'linear_decrease':
+        for i in range(start_time, end_time):
+            progress = 1 - (i - start_time) / span
+            eta_values[i] = eta * progress
+
+    return eta_values
+
+
+def get_sample_reverse(latent_image, eta, start_time, end_time, eta_trend):
+    # Controlled Reverse ODE (Algorithm 2)
+    @torch.no_grad()
+    def sample_reverse(model, y1, sigmas, extra_args=None, callback=None, disable=None):
+        extra_args = {} if extra_args is None else extra_args
+        X = y1.clone()
+        N = len(sigmas)-1
+        y0 = latent_image.clone().to(y1.device)
+        s_in = y0.new_ones([y0.shape[0]])
+        eta_values = generate_eta_values(N, start_time, end_time, eta, eta_trend)
+        for i in trange(N, disable=disable):
+            # t_i = 1-model.inner_model.inner_model.model_sampling.timestep(sigmas[i])  # TODO: figure out which one to use
+            t_i = i/N  # Empirically better results
+            sigma = sigmas[i]
+
+            # 5. Unconditional Vector field uti(Xti) = -u(Xti, 1-ti, Φ(“prompt”); φ)
+            unconditional_vector_field = -model(X, sigma*s_in, **extra_args)  # this implementation takes sigma instead of timestep
+
+            # 6. Conditional Vector field uti(Xti|y0) = (y0−Xti)/(1−ti)
+            conditional_vector_field = (y0-X)/(1-t_i)
+
+            # 7. Controlled Vector field ûti(Xti) = uti(Xti) + η (uti(Xti|y0) − uti(Xti))
+            controlled_vector_field = unconditional_vector_field + eta_values[i] * (conditional_vector_field - unconditional_vector_field)
+
+            # 8. Next state Xti+1 = Xti + ûti(Xti) (σ(ti) − σ(ti+1))
+            X = X + controlled_vector_field * (sigmas[i] - sigmas[i+1])
+
+            if callback is not None:
+                callback({'x': X, 'denoised': X, 'i': i, 'sigma': sigmas[i], 'sigma_hat': sigmas[i]})
+
+        return X
+
+    return sample_reverse
+
+
+class FluxForwardODESamplerNode:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {
+            "gamma": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 100.0, "step": 0.01}),
+        }, "optional": {
+            "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff }),
+        }}
+    RETURN_TYPES = ("SAMPLER",)
+    FUNCTION = "build"
+
+    CATEGORY = "flux"
+
+    def build(self, gamma, seed=0):
+        sampler = KSAMPLER(get_sample_forward(gamma, seed))
+
+        return (sampler, )
+
+
+class FluxReverseODESamplerNode:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {
+            "model": ("MODEL",),
+            "latent_image": ("LATENT",),
+            "eta": ("FLOAT", {"default": 0.8, "min": 0.0, "max": 100.0, "step": 0.01}),
+            "start_step": ("INT", {"default": 0, "min": 0, "max": 1000, "step": 1}),
+            "end_step": ("INT", {"default": 5, "min": 0, "max": 1000, "step": 1}),
+        }, "optional": {
+            "eta_trend": (['constant', 'linear_increase', 'linear_decrease'],)
+        }}
+    RETURN_TYPES = ("SAMPLER",)
+    FUNCTION = "build"
+
+    CATEGORY = "flux"
+
+    def build(self, model, latent_image, eta, start_step, end_step, eta_trend='constant'):
+        process_latent_in = model.get_model_object("process_latent_in")
+        latent_image = process_latent_in(latent_image['samples'])
+        sampler = KSAMPLER(get_sample_reverse(latent_image, eta, start_step, end_step, eta_trend))
+
+        return (sampler, )
diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/pyproject.toml b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/pyproject.toml
new file mode 100644
index 0000000000000000000000000000000000000000..aabd4d97147b23941f16072da389e5aeddaaf2ae
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/pyproject.toml
@@ -0,0 +1,15 @@
+[project]
+name = "comfyui-fluxtapoz"
+description = "ComfyUI nodes for image editing with Flux, such as RF-Inversion and more"
+version = "1.0.0"
+license = {file = "LICENSE"}
+dependencies = ["einshape"]
+
+[project.urls]
+Repository = "https://github.com/logtd/ComfyUI-Fluxtapoz"
+# Used by Comfy Registry https://comfyregistry.org
+
+[tool.comfy]
+PublisherId = "logtd"
+DisplayName = "ComfyUI-Fluxtapoz"
+Icon = ""
diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/requirements.txt b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..91e485a62b8488a5552304b0e42fadcee042d919
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/requirements.txt
@@ -0,0 +1 @@
+einshape
\ No newline at end of file
diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/utils/__pycache__/noise_utils.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/utils/__pycache__/noise_utils.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4b71f2fc37883a4bd4ec0515ce04d626d7dc09b6
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/utils/__pycache__/noise_utils.cpython-310.pyc differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/utils/module_utils.py b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/utils/module_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..a22b0f93b6ab3a430a412ca98e7282ec6d9454dd
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/utils/module_utils.py
@@ -0,0 +1,7 @@
+def isinstance_str(x:
object, cls_name: str): + for _cls in x.__class__.__mro__: + if _cls.__name__ == cls_name: + return True + + return False + diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/utils/noise_utils.py b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/utils/noise_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..75d140127aef18ce3192951bd420eebb509f4fa9 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/utils/noise_utils.py @@ -0,0 +1,26 @@ +import torch + + +def get_alphacumprod(sigma): + return 1 / ((sigma * sigma) + 1) + + +def add_noise(src_latent, noise, sigma): + alphas_cumprod = get_alphacumprod(sigma) + + sqrt_alpha_prod = alphas_cumprod ** 0.5 + sqrt_alpha_prod = sqrt_alpha_prod.flatten() + while len(sqrt_alpha_prod.shape) < len(src_latent.shape): + sqrt_alpha_prod = sqrt_alpha_prod.unsqueeze(-1) + + sqrt_one_minus_alpha_prod = (1 - alphas_cumprod) ** 0.5 + sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.flatten() + while len(sqrt_one_minus_alpha_prod.shape) < len(src_latent.shape): + sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.unsqueeze(-1) + + noisy_samples = sqrt_alpha_prod * src_latent + sqrt_one_minus_alpha_prod * noise + return noisy_samples + + +def add_noise_flux(src_latent, noise, sigma): + return sigma * noise + (1.0 - sigma) * src_latent \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/utils/rave_attention.py b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/utils/rave_attention.py new file mode 100644 index 0000000000000000000000000000000000000000..4a8ee60a0610627affc1da0380c4c088a61eb940 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/utils/rave_attention.py @@ -0,0 +1,117 @@ +import random +import torch +from einops import rearrange + +from comfy.ldm.modules.attention import optimized_attention + +from .rave_utils import grid_to_list, list_to_grid, shuffle_indices, shuffle_tensors2 + + +def padding_count(n_frames, grid_frame_count): + remainder = n_frames % grid_frame_count + if remainder == 0: + return 0 + else: + difference = grid_frame_count - remainder + return difference + + +def unpatchify(x, h, w, p=2): + x = rearrange(x, 'b (h w) (p q d) -> b (h p) (w q) d', h=h, p=p, q=p) + return rearrange(x, 'b h w d -> b (h w) d') + + +def patchify(x, h, w, p=2): + return rearrange(x, 'b (p h q w) d -> b (h w) (p q d)', p=p, q=p, h=h, w=w) + + +def rave_attention(q, k, v, extra_options, n_heads): + # get h,w + batch_size, sequence_length, dim = q.shape + shape = extra_options['original_shape'] + oh, ow = shape[-2:] + ratio = oh/ow + d = sequence_length + w = int((d/ratio)**(0.5)) + h = int(d/w) + + rave_opts = extra_options.get('RAVE', {}) + grid_size = rave_opts.get('grid_size', 2) + seed = rave_opts.get('seed', 1) + len_conds = len(extra_options['cond_or_uncond']) + n_frames = batch_size // len_conds + original_n_frames = n_frames + + grid_frame_count = grid_size * grid_size + n_padding_frames = padding_count(n_frames, grid_frame_count) + if n_padding_frames > 0: + random.seed(seed) + cond_qs = [] + cond_ks = [] + cond_vs = [] + padding_frames = [random.randint( + 0, n_frames-1) for _ in range(n_padding_frames)] + for cond_idx in range(len_conds): + start, end = cond_idx*n_frames, (cond_idx+1)*n_frames + cond_q = q[start:end] + cond_q = torch.cat([cond_q, cond_q[padding_frames]]) + cond_qs.append(cond_q) + cond_k = k[start:end] + cond_k = torch.cat([cond_k, cond_k[padding_frames]]) + cond_ks.append(cond_k) + cond_v = v[start:end] + cond_v = torch.cat([cond_v, 
cond_v[padding_frames]]) + cond_vs.append(cond_v) + + q = torch.cat(cond_qs) + k = torch.cat(cond_ks) + v = torch.cat(cond_vs) + + n_frames = n_frames + n_padding_frames + + q = rearrange(q, 'b (h w) c -> b h w c', h=h, w=w) + k = rearrange(k, 'b (h w) c -> b h w c', h=h, w=w) + v = rearrange(v, 'b (h w) c -> b h w c', h=h, w=w) + + target_indexes = shuffle_indices(n_frames, seed=seed) + + original_indexes = list(range(n_frames)) + qs = [] + ks = [] + vs = [] + + for i in range(len_conds): + start, end = i*n_frames, (i+1)*n_frames + q[start:end] = shuffle_tensors2( + q[start:end], original_indexes, target_indexes) + qs.append(list_to_grid(q[start:end], grid_size)) + k[start:end] = shuffle_tensors2( + k[start:end], original_indexes, target_indexes) + ks.append(list_to_grid(k[start:end], grid_size)) + v[start:end] = shuffle_tensors2( + v[start:end], original_indexes, target_indexes) + vs.append(list_to_grid(v[start:end], grid_size)) + + q = torch.cat(qs) + k = torch.cat(ks) + v = torch.cat(vs) + + q = rearrange(q, 'b h w c -> b (h w) c') + k = rearrange(k, 'b h w c -> b (h w) c') + v = rearrange(v, 'b h w c -> b (h w) c') + out = optimized_attention(q, k, v, n_heads, None) + + gh, gw = grid_size*h, grid_size*w + out = rearrange(out, 'b (h w) c -> b h w c', h=gh, w=gw) + out = grid_to_list(out, grid_size) + out = rearrange(out, 'b h w c -> b (h w) c') + + outs = [] + for i in range(len_conds): + start, end = i*n_frames, (i+1)*n_frames + cond_out = shuffle_tensors2( + out[start:end], target_indexes, original_indexes) + cond_out = cond_out[:original_n_frames] + outs.append(cond_out) + + return torch.cat(outs) diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/utils/rave_rope_attention.py b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/utils/rave_rope_attention.py new file mode 100644 index 0000000000000000000000000000000000000000..e123ee1c755d339f81f34126c2625013898d3a0b --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/utils/rave_rope_attention.py @@ -0,0 +1,130 @@ +import random +import torch +from torch import Tensor +from einops import rearrange + +from comfy.ldm.flux.math import apply_rope +from comfy.ldm.modules.attention import optimized_attention + +from .rave_utils import grid_to_list, list_to_grid, shuffle_indices, shuffle_tensors2 + + +def padding_count(n_frames, grid_frame_count): + remainder = n_frames % grid_frame_count + if remainder == 0: + return 0 + else: + difference = grid_frame_count - remainder + return difference + + +def attention(q: torch.Tensor, k: Tensor, v: Tensor, pe: Tensor,) -> Tensor: + q, k = apply_rope(q, k, pe) + heads = q.shape[1] + x = optimized_attention(q, k, v, heads, skip_reshape=True) + return x + + +def rave_rope_attention(q, k, v, txt_q, txt_k, txt_v, pe, extra_options, n_heads, txt_shape): + q = rearrange(q, 'b n r d -> b r (n d)') + k = rearrange(k, 'b n r d -> b r (n d)') + v = rearrange(v, 'b n r d -> b r (n d)') + rave_opts = extra_options.get('RAVE', {}) + grid_size = rave_opts.get('grid_size', 2) + seed = rave_opts.get('seed', 1) + batch_size, sequence_length, dim = q.shape + len_conds = len(extra_options['cond_or_uncond']) + n_frames = batch_size // len_conds + original_n_frames = n_frames + + grid_frame_count = grid_size * grid_size + n_padding_frames = padding_count(n_frames, grid_frame_count) + if n_padding_frames > 0: + random.seed(seed) + cond_qs = [] + cond_ks = [] + cond_vs = [] + padding_frames = [random.randint( + 0, n_frames-1) for _ in range(n_padding_frames)] + for cond_idx in range(len_conds): + start, end = 
cond_idx*n_frames, (cond_idx+1)*n_frames + cond_q = q[start:end] + cond_q = torch.cat([cond_q, cond_q[padding_frames]]) + cond_qs.append(cond_q) + cond_k = k[start:end] + cond_k = torch.cat([cond_k, cond_k[padding_frames]]) + cond_ks.append(cond_k) + cond_v = v[start:end] + cond_v = torch.cat([cond_v, cond_v[padding_frames]]) + cond_vs.append(cond_v) + + q = torch.cat(cond_qs) + k = torch.cat(cond_ks) + v = torch.cat(cond_vs) + + n_frames = n_frames + n_padding_frames + + # get h,w + shape = extra_options['original_shape'] + oh, ow = shape[-2:] + ratio = oh/ow + d = sequence_length + w = int((d/ratio)**(0.5)) + h = int(d/w) + + q = rearrange(q, 'b (h w) c -> b h w c', h=h, w=w) + k = rearrange(k, 'b (h w) c -> b h w c', h=h, w=w) + v = rearrange(v, 'b (h w) c -> b h w c', h=h, w=w) + + target_indexes = shuffle_indices(n_frames, seed=seed) + + original_indexes = list(range(n_frames)) + qs = [] + ks = [] + vs = [] + + for i in range(len_conds): + start, end = i*n_frames, (i+1)*n_frames + q[start:end] = shuffle_tensors2( + q[start:end], original_indexes, target_indexes) + qs.append(list_to_grid(q[start:end], grid_size)) + k[start:end] = shuffle_tensors2( + k[start:end], original_indexes, target_indexes) + ks.append(list_to_grid(k[start:end], grid_size)) + v[start:end] = shuffle_tensors2( + v[start:end], original_indexes, target_indexes) + vs.append(list_to_grid(v[start:end], grid_size)) + + q = torch.cat(qs) + k = torch.cat(ks) + v = torch.cat(vs) + + q = rearrange(q, 'b h w (n d) -> b n (h w) d', n=n_heads) + k = rearrange(k, 'b h w (n d) -> b n (h w) d', n=n_heads) + v = rearrange(v, 'b h w (n d) -> b n (h w) d', n=n_heads) + # out = optimized_attention(q, k, v, n_heads, None) + txt_q = txt_q[:len(q)] + txt_k = txt_k[:len(q)] + txt_v = txt_v[:len(q)] + q = torch.cat((txt_q, q), dim=2) + k = torch.cat((txt_k, k), dim=2) + v = torch.cat((txt_v, v), dim=2) + out = attention(q, k, v, pe) + + txt_attn, out = out[:, :txt_shape], out[:, txt_shape:] + txt_attn = txt_attn.repeat(grid_size*grid_size, 1, 1) + + gh, gw = grid_size*h, grid_size*w + out = rearrange(out, 'b (h w) c -> b h w c', h=gh, w=gw) + out = grid_to_list(out, grid_size) + out = rearrange(out, 'b h w c -> b (h w) c') + + outs = [] + for i in range(len_conds): + start, end = i*n_frames, (i+1)*n_frames + cond_out = shuffle_tensors2( + out[start:end], target_indexes, original_indexes) + cond_out = cond_out[:original_n_frames] + outs.append(cond_out) + + return txt_attn, torch.cat(outs) diff --git a/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/utils/rave_utils.py b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/utils/rave_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..0a0197f61f6dd53f9cceb1ad10079d23d518f815 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Fluxtapoz/utils/rave_utils.py @@ -0,0 +1,57 @@ +import torch +import random + + +def shuffle_indices(size, seed=None): + if seed is not None: + random.seed(seed) + indices = list(range(size)) + random.shuffle(indices) + return indices + + +def shuffle_tensors2(tensor, current_indices, target_indices): + tensor_dict = {current_idx: t for current_idx, + t in zip(current_indices, tensor)} + shuffled_tensors = [tensor_dict[current_idx] + for current_idx in target_indices] + return torch.stack(shuffled_tensors) + + +def grid_to_list(tensor, grid_size): + frame_count = len(tensor) * grid_size * grid_size + flattened_list = [flatten_grid(grid.unsqueeze( + 0), [grid_size, grid_size]) for grid in tensor] + list_tensor = torch.cat(flattened_list, dim=-2) + return 
torch.cat(torch.chunk(list_tensor, frame_count, dim=-2), dim=0) + + +def list_to_grid(tensor, grid_size): + grid_frame_count = grid_size * grid_size + grid_count = len(tensor) // grid_frame_count + flat_grids = [torch.cat([a for a in tensor[i * grid_frame_count:(i + 1) + * grid_frame_count]], dim=-2).unsqueeze(0) for i in range(grid_count)] + unflattened_grids = [unflatten_grid( + flat_grid, [grid_size, grid_size]) for flat_grid in flat_grids] + return torch.cat(unflattened_grids, dim=0) + + +def flatten_grid(x, grid_shape): + B, H, W, C = x.size() + hs, ws = grid_shape + img_h = H // hs + flattened = torch.cat(torch.split(x, img_h, dim=1), dim=2) + return flattened + + +def unflatten_grid(x, grid_shape): + ''' + x: B x C x H x W + ''' + B, H, W, C = x.size() + hs, ws = grid_shape + img_w = W // (ws) + + unflattened = torch.cat(torch.split(x, img_w, dim=2), dim=1) + + return unflattened \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/.gitignore b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..c16001cc49079aaeba0e41bb39dcbf6c4ac3733d --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/.gitignore @@ -0,0 +1,3 @@ +ckpts +__pycache__ +test_result \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/All_in_one_v1_3.png b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/All_in_one_v1_3.png new file mode 100644 index 0000000000000000000000000000000000000000..364c54b9001c4a450dc434f9fd310c59a2a98af2 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/All_in_one_v1_3.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:90735b644e0c35634642b65f2a8041a9a4da380d27b9bcc4d3bbef47869bd92a +size 1462273 diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/LICENSE b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..2a8000ad9540222a0f8f50ac7fb8b04fa8dd0cd3 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 Fannovel16 + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
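A quick way to sanity-check the `rave_utils` grid helpers above is a round-trip test: `list_to_grid` tiles consecutive frames into `grid_size` x `grid_size` mosaics, and `grid_to_list` undoes it exactly, provided the frame count is a multiple of `grid_size**2` (the RAVE attention code pads with duplicated frames when it is not). A minimal sketch, with the import path assumed:

```python
import torch

from rave_utils import list_to_grid, grid_to_list  # import path assumed; adjust to the package layout

frames = torch.randn(8, 32, 48, 64)  # (N, H, W, C): 8 channels-last frames, as the helpers expect
grid = list_to_grid(frames, 2)       # -> (2, 64, 96, 64): two 2x2 mosaics of tiled frames
back = grid_to_list(grid, 2)         # -> (8, 32, 48, 64): original frames in the original order
assert torch.equal(frames, back)
```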
\ No newline at end of file
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/README.md b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..f5d0f3c06b520f1f7a9724e67cc780d8709e2cbb
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/README.md
@@ -0,0 +1,194 @@
+# ComfyUI Frame Interpolation (ComfyUI VFI) (WIP)
+
+A custom node set for Video Frame Interpolation in ComfyUI.
+**UPDATE** Memory management is improved. This extension now takes less RAM and VRAM than before.
+
+**UPDATE 2** VFI nodes now accept scheduled multiplier values
+
+![](./interpolation_schedule.png)
+![](./test_vfi_schedule.gif)
+
+## Nodes
+* KSampler Gradually Adding More Denoise (efficient)
+* GMFSS Fortuna VFI
+* IFRNet VFI
+* IFUnet VFI
+* M2M VFI
+* RIFE VFI (4.0 - 4.9) (Note that option `fast_mode` won't do anything from v4.5+ as `contextnet` is removed)
+* FILM VFI
+* Sepconv VFI
+* AMT VFI
+* Make Interpolation State List
+* STMFNet VFI (requires at least 4 frames, can only do 2x interpolation for now)
+* FLAVR VFI (same conditions as STMFNet)
+
+## Install
+### ComfyUI Manager
+The incompatibility issue with it is now fixed.
+
+Follow this guide to install this extension:
+
+https://github.com/ltdrdata/ComfyUI-Manager#how-to-use
+### Command-line
+#### Windows
+Run install.bat
+
+For Windows users, if you are having trouble with cupy, please run `install.bat` instead of `install-cupy.py` or `python install.py`.
+#### Linux
+Open your shell app and start venv if it is used for ComfyUI. Run:
+```
+python install.py
+```
+## Support for non-CUDA devices (experimental)
+If you don't have an NVIDIA card, you can try the `taichi` ops backend powered by [Taichi Lang](https://www.taichi-lang.org/)
+
+On Windows, you can install it by running `install.bat`; on Linux, run `pip install taichi`
+
+Then change the value of `ops_backend` from `cupy` to `taichi` in `config.yaml`
+
+If a `NotImplementedError` appears, a VFI node in the workflow isn't supported by taichi
+
+## Usage
+All VFI nodes can be accessed in **category** `ComfyUI-Frame-Interpolation/VFI` if the installation is successful, and they require an `IMAGE` containing frames (at least 2, or at least 4 for STMF-Net/FLAVR).
+
+Regarding STMFNet and FLAVR, if you only have two or three frames, you should use: Load Images -> Other VFI node (FILM is recommended in this case) with `multiplier=4` -> STMFNet VFI/FLAVR VFI
+
+`clear_cache_after_n_frames` is used to avoid out-of-memory. Decreasing it makes the chance lower but also increases processing time (a rough sketch of this behaviour appears just before the citation list below).
+
+It is recommended to use LoadImages (LoadImagesFromDirectory) from [ComfyUI-Advanced-ControlNet](https://github.com/Kosinkadink/ComfyUI-Advanced-ControlNet/) and [ComfyUI-VideoHelperSuite](https://github.com/Kosinkadink/ComfyUI-VideoHelperSuite) alongside this extension.
+
+## Example
+### Simple workflow
+Workflow metadata isn't embedded.
+Download these two images [anime0.png](./demo_frames/anime0.png) and [anime1.png](./demo_frames/anime1.png) and put them into a folder like `E:\test`, as in this image.
+![](./example.png)
+
+### Complex workflow
+It's used with AnimateDiff (can load workflow metadata)
+![](All_in_one_v1_3.png)
+
+## Credit
+Big thanks to styler00dollar for making [VSGAN-tensorrt-docker](https://github.com/styler00dollar/VSGAN-tensorrt-docker). About 99% of the code in this repo comes from it.
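+
+As referenced under Usage, here is a rough sketch of the idea behind `clear_cache_after_n_frames`. It is illustrative only, not this extension's actual loop, and `interpolate_pair` is a hypothetical stand-in for whichever VFI model is loaded:
+
+```
+import torch
+
+def interpolate_all(frames, interpolate_pair, clear_cache_after_n_frames=10):
+    """Insert one in-between frame per pair, periodically freeing cached VRAM."""
+    out, since_clear = [], 0
+    for a, b in zip(frames[:-1], frames[1:]):
+        out.append(a)
+        out.append(interpolate_pair(a, b))  # one model forward per frame pair
+        since_clear += 1
+        if since_clear >= clear_cache_after_n_frames:
+            torch.cuda.empty_cache()  # lower the OOM chance, at the cost of speed
+            since_clear = 0
+    out.append(frames[-1])
+    return out
+```
+A smaller value frees cached memory more often, which is why decreasing it lowers the out-of-memory chance but increases processing time.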
+ +Citation for each VFI node: +### GMFSS Fortuna +The All-In-One GMFSS: Dedicated for Anime Video Frame Interpolation + +https://github.com/98mxr/GMFSS_Fortuna + +### IFRNet +```bibtex +@InProceedings{Kong_2022_CVPR, + author = {Kong, Lingtong and Jiang, Boyuan and Luo, Donghao and Chu, Wenqing and Huang, Xiaoming and Tai, Ying and Wang, Chengjie and Yang, Jie}, + title = {IFRNet: Intermediate Feature Refine Network for Efficient Frame Interpolation}, + booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)}, + year = {2022} +} +``` + +### IFUnet +RIFE with IFUNet, FusionNet and RefineNet + +https://github.com/98mxr/IFUNet +### M2M +```bibtex +@InProceedings{hu2022m2m, + title={Many-to-many Splatting for Efficient Video Frame Interpolation}, + author={Hu, Ping and Niklaus, Simon and Sclaroff, Stan and Saenko, Kate}, + journal={CVPR}, + year={2022} + } +``` + +### RIFE +```bibtex +@inproceedings{huang2022rife, + title={Real-Time Intermediate Flow Estimation for Video Frame Interpolation}, + author={Huang, Zhewei and Zhang, Tianyuan and Heng, Wen and Shi, Boxin and Zhou, Shuchang}, + booktitle={Proceedings of the European Conference on Computer Vision (ECCV)}, + year={2022} +} +``` + +### FILM +[Frame interpolation in PyTorch](https://github.com/dajes/frame-interpolation-pytorch) + +```bibtex +@inproceedings{reda2022film, + title = {FILM: Frame Interpolation for Large Motion}, + author = {Fitsum Reda and Janne Kontkanen and Eric Tabellion and Deqing Sun and Caroline Pantofaru and Brian Curless}, + booktitle = {European Conference on Computer Vision (ECCV)}, + year = {2022} +} +``` + +```bibtex +@misc{film-tf, + title = {Tensorflow 2 Implementation of "FILM: Frame Interpolation for Large Motion"}, + author = {Fitsum Reda and Janne Kontkanen and Eric Tabellion and Deqing Sun and Caroline Pantofaru and Brian Curless}, + year = {2022}, + publisher = {GitHub}, + journal = {GitHub repository}, + howpublished = {\url{https://github.com/google-research/frame-interpolation}} +} +``` + +### Sepconv +```bibtex +[1] @inproceedings{Niklaus_WACV_2021, + author = {Simon Niklaus and Long Mai and Oliver Wang}, + title = {Revisiting Adaptive Convolutions for Video Frame Interpolation}, + booktitle = {IEEE Winter Conference on Applications of Computer Vision}, + year = {2021} + } +``` + +```bibtex +[2] @inproceedings{Niklaus_ICCV_2017, + author = {Simon Niklaus and Long Mai and Feng Liu}, + title = {Video Frame Interpolation via Adaptive Separable Convolution}, + booktitle = {IEEE International Conference on Computer Vision}, + year = {2017} + } +``` + +```bibtex +[3] @inproceedings{Niklaus_CVPR_2017, + author = {Simon Niklaus and Long Mai and Feng Liu}, + title = {Video Frame Interpolation via Adaptive Convolution}, + booktitle = {IEEE Conference on Computer Vision and Pattern Recognition}, + year = {2017} + } +``` + +### AMT + ```bibtex + @inproceedings{licvpr23amt, + title={AMT: All-Pairs Multi-Field Transforms for Efficient Frame Interpolation}, + author={Li, Zhen and Zhu, Zuo-Liang and Han, Ling-Hao and Hou, Qibin and Guo, Chun-Le and Cheng, Ming-Ming}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, + year={2023} + } + ``` + +### ST-MFNet +```bibtex +@InProceedings{Danier_2022_CVPR, + author = {Danier, Duolikun and Zhang, Fan and Bull, David}, + title = {ST-MFNet: A Spatio-Temporal Multi-Flow Network for Frame Interpolation}, + booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition 
(CVPR)}, + month = {June}, + year = {2022}, + pages = {3521-3531} +} +``` + +### FLAVR +```bibtex +@article{kalluri2021flavr, + title={FLAVR: Flow-Agnostic Video Representations for Fast Frame Interpolation}, + author={Kalluri, Tarun and Pathak, Deepak and Chandraker, Manmohan and Tran, Du}, + booktitle={arxiv}, + year={2021} +} +``` diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b362e2c1dbec391c923bda205c46da3a235d63ee --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/__init__.py @@ -0,0 +1,42 @@ +import os +import sys +sys.path.insert(0, os.path.abspath(os.path.dirname(__file__))) + +from .other_nodes import Gradually_More_Denoise_KSampler + +#Some models are commented out because the code is not completed +#from vfi_models.eisai import EISAI_VFI +from vfi_models.gmfss_fortuna import GMFSS_Fortuna_VFI +from vfi_models.ifrnet import IFRNet_VFI +from vfi_models.ifunet import IFUnet_VFI +from vfi_models.m2m import M2M_VFI +from vfi_models.rife import RIFE_VFI +from vfi_models.sepconv import SepconvVFI +from vfi_models.amt import AMT_VFI +from vfi_models.film import FILM_VFI +from vfi_models.stmfnet import STMFNet_VFI +from vfi_models.flavr import FLAVR_VFI +from vfi_models.cain import CAIN_VFI +from vfi_utils import MakeInterpolationStateList, FloatToInt + +NODE_CLASS_MAPPINGS = { + "KSampler Gradually Adding More Denoise (efficient)": Gradually_More_Denoise_KSampler, +# "EISAI VFI": EISAI_VFI, + "GMFSS Fortuna VFI": GMFSS_Fortuna_VFI, + "IFRNet VFI": IFRNet_VFI, + "IFUnet VFI": IFUnet_VFI, + "M2M VFI": M2M_VFI, + "RIFE VFI": RIFE_VFI, + "Sepconv VFI": SepconvVFI, + "AMT VFI": AMT_VFI, + "FILM VFI": FILM_VFI, + "Make Interpolation State List": MakeInterpolationStateList, + "STMFNet VFI": STMFNet_VFI, + "FLAVR VFI": FLAVR_VFI, + "CAIN VFI": CAIN_VFI, + "VFI FloatToInt": FloatToInt +} + +NODE_DISPLAY_NAME_MAPPINGS = { + "RIFE VFI": "RIFE VFI (recommend rife47 and rife49)" +} \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7302966b078599fcc1e28cf1d2971cd422e88059 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/__pycache__/other_nodes.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/__pycache__/other_nodes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..30fb8cf3a6a262001c070e176ea005a550577b3e Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/__pycache__/other_nodes.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/__pycache__/vfi_utils.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/__pycache__/vfi_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..944f96d8f1384889ae09e41e386ba06b3b006d7d Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/__pycache__/vfi_utils.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/config.yaml 
b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/config.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..b99d4a76ab08cc1157e349745770dd1f37c9ffca
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/config.yaml
@@ -0,0 +1,3 @@
+#Please don't delete this file; just edit it when necessary.
+ckpts_path: "./ckpts"
+ops_backend: "cupy" #Either "taichi" or "cupy"
\ No newline at end of file
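For reference, a minimal sketch of the same `config.yaml` switched to the non-CUDA backend (only the two keys shown in the diff above exist):

```yaml
ckpts_path: "./ckpts"    # where model checkpoints are downloaded
ops_backend: "taichi"    # "cupy" (NVIDIA/CUDA) or "taichi" (experimental, non-CUDA)
```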
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/anime0.png b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/anime0.png
new file mode 100644
index 0000000000000000000000000000000000000000..14ced4fca7312f54170865de3eada135d3e1de6e
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/anime0.png differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/anime1.png b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/anime1.png
new file mode 100644
index 0000000000000000000000000000000000000000..1e0c70c7cb802e0e505e8339164b738c4822a1ad
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/anime1.png differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/bocchi0.jpg b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/bocchi0.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..8585b7ecca56e40c6eb8f923b00f1d4e048dc965
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/bocchi0.jpg differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/bocchi1.jpg b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/bocchi1.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..27890f7fd25013b40073e3444c20e963247b8e84
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/bocchi1.jpg differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/real0.png b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/real0.png
new file mode 100644
index 0000000000000000000000000000000000000000..863cfb642782237734eb87916e1f24b25d2bc28b
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/real0.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4792023ccf17c8231c6eb5ee40de528d515e2f8c419b3949985411a122a4de4f
+size 1230238
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/real1.png b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/real1.png
new file mode 100644
index 0000000000000000000000000000000000000000..beabe73d95c2114baa3b2d4a4d88ef0dbfd6adb3
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/real1.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:37c8e6ec527c81895e5a66ea49cdd18b85045f9fed6fdfb75b45f438649235bf
+size 1213845
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/rick/00003.png b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/rick/00003.png
new file mode 100644
index 0000000000000000000000000000000000000000..181e260efa842a0789204ed099e7501b2faacaf5
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/rick/00003.png differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/rick/00004.png b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/rick/00004.png
new file mode 100644
index 0000000000000000000000000000000000000000..80ebc6fe8f3ea1d2a6c32892f494611470ab76c6
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/rick/00004.png differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/rick/00005.png b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/rick/00005.png
new file mode 100644
index 0000000000000000000000000000000000000000..a63737d651d79c2835229450358a5ef887c9b686
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/rick/00005.png differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/violet0.png b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/violet0.png
new file mode 100644
index 0000000000000000000000000000000000000000..e2aee63e5d74778c69182b0b0118e0e2033637bc
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/violet0.png differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/violet1.png b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/violet1.png
new file mode 100644
index 0000000000000000000000000000000000000000..0582b712a1333ff2c9dcc6bad23dae99b6be2a9b
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/violet1.png differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/example.png b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/example.png
new file mode 100644
index 0000000000000000000000000000000000000000..dbd370165d90ccbe4afd63181758fdcd4418ce6f
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/example.png differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/install-taichi.bat b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/install-taichi.bat
new file mode 100644
index 0000000000000000000000000000000000000000..d601f71c20e8ea2d768ee710a277666f2bd68643
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/install-taichi.bat
@@ -0,0 +1,12 @@
+@echo off
+echo Installing Taichi lang backend...
+set "python_exec=..\..\..\python_embeded\python.exe"
+
+if exist "%python_exec%" (
+    %python_exec% -s -m pip install taichi
+) else (
+    echo Installing with system Python
+    pip install taichi
+)
+
+pause
\ No newline at end of file
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/install.bat b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/install.bat
new file mode 100644
index 0000000000000000000000000000000000000000..84e0f7eb536a2c9bfc3313a377086b9cf4aa508f
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/install.bat
@@ -0,0 +1,16 @@
+@echo off
+
+set "requirements_txt=%~dp0\requirements-no-cupy.txt"
+set "python_exec=..\..\..\python_embeded\python.exe"
+
+echo Installing ComfyUI Frame Interpolation...
+
+if exist "%python_exec%" (
+    echo Installing with ComfyUI Portable
+    %python_exec% -s install.py
+) else (
+    echo Installing with system Python
+    python install.py
+)
+
+pause
\ No newline at end of file
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/install.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/install.py
new file mode 100644
index 0000000000000000000000000000000000000000..ecbe35da8bd39829134a6ee7759251e0a203d25b
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/install.py
@@ -0,0 +1,59 @@
+import os
+from pathlib import Path
+import sys
+import platform
+
+def get_cuda_ver_from_dir(cuda_home):
+    nvrtc = filter(lambda lib_file: "nvrtc-builtins" in lib_file, os.listdir(cuda_home))
+    nvrtc = list(nvrtc)
+    if len(nvrtc) == 0:
+        return
+    nvrtc = nvrtc[0]
+    if ('102' in nvrtc) or ('10.2' in nvrtc):
+        return '102'
+    if '110' in nvrtc or ('11.0' in nvrtc):
+        return '110'
+    if '111' in nvrtc or ('11.1' in nvrtc):
+        return '111'
+    if '11' in nvrtc:
+        return '11x'
+    if '12' in nvrtc:
+        return '12x'
+
+s_param = '-s' if "python_embeded" in sys.executable else ''
+
+def get_cuda_home_path():
+    if "CUDA_HOME" in os.environ:
+        return os.environ["CUDA_HOME"]
+    import torch
+    torch_lib_path = Path(torch.__file__).parent / "lib"
+    torch_lib_path = str(torch_lib_path.resolve())
+    if os.path.exists(torch_lib_path):
+        nvrtc = filter(lambda lib_file: "nvrtc-builtins" in lib_file, os.listdir(torch_lib_path))
+        nvrtc = list(nvrtc)
+        return torch_lib_path if len(nvrtc) > 0 else None
+
+def install_cupy():
+    cuda_home = get_cuda_home_path()
+    try:
+        if cuda_home is not None:
+            os.environ["CUDA_HOME"] = cuda_home
+            os.environ["CUDA_PATH"] = cuda_home
+        import cupy
+        print("CuPy is already installed.")
+    except Exception:  # cupy missing or broken; (re)install it
+        print("Uninstalling cupy if it exists...")
+        os.system(f'"{sys.executable}" {s_param} -m pip uninstall -y cupy-wheel cupy-cuda102 cupy-cuda110 cupy-cuda111 cupy-cuda11x cupy-cuda12x')
+        print("Installing cupy...")
+        cuda_ver = get_cuda_ver_from_dir(cuda_home) if cuda_home is not None else None
+        cupy_package = f"cupy-cuda{cuda_ver}" if cuda_ver is not None else "cupy-wheel"
+        os.system(f'"{sys.executable}" {s_param} -m pip install {cupy_package}')
+
+with open(Path(__file__).parent / "requirements-no-cupy.txt", 'r') as f:
+    for package in f.readlines():
+        package = package.strip()
+        print(f"Installing {package}...")
+        os.system(f'"{sys.executable}" {s_param} -m pip install {package}')
+
+print("Checking cupy...")
+install_cupy()
\ No newline at end of file
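The cupy selection in install.py above keys off the `nvrtc-builtins` library it finds in `CUDA_HOME` (or torch's bundled `lib` directory). A hedged sketch of the mapping, with purely illustrative filenames (actual names vary by platform and CUDA build):

```python
# Illustrative nvrtc-builtins filenames and the cupy package that
# get_cuda_ver_from_dir's substring rules would pick for them.
examples = {
    "nvrtc-builtins64_102.dll": "cupy-cuda102",   # '102' matched
    "libnvrtc-builtins.so.11.1": "cupy-cuda111",  # '11.1' matched
    "nvrtc-builtins64_118.dll": "cupy-cuda11x",   # generic '11' fallback
    "libnvrtc-builtins.so.12.2": "cupy-cuda12x",  # generic '12' fallback
}
# When no nvrtc-builtins file is found at all, install.py falls back
# to the universal "cupy-wheel" package.
```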
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/interpolation_schedule.png b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/interpolation_schedule.png
new file mode 100644
index 0000000000000000000000000000000000000000..ff92add09e66b8f967a72cc78efca4b1bca073d6
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/interpolation_schedule.png differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/other_nodes.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/other_nodes.py
new file mode 100644
index 0000000000000000000000000000000000000000..75b76fe109424a389eb4802dc4aa8c59c53329ec
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/other_nodes.py
@@ -0,0 +1,88 @@
+import latent_preview
+import comfy
+import einops
+import torch
+
+def common_ksampler(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent, denoise=1.0, disable_noise=False, start_step=None, last_step=None, force_full_denoise=False):
+    device = comfy.model_management.get_torch_device()
+    latent_image = latent["samples"]
+
+    if disable_noise:
+        noise = torch.zeros(latent_image.size(), dtype=latent_image.dtype, layout=latent_image.layout, device="cpu")
+    else:
+        batch_inds = latent["batch_index"] if "batch_index" in latent else None
+        noise = comfy.sample.prepare_noise(latent_image, seed, batch_inds)
+
+    noise_mask = None
+    if "noise_mask" in latent:
+        noise_mask = latent["noise_mask"]
+
+    preview_format = "JPEG"
+    if preview_format not in ["JPEG", "PNG"]:
+        preview_format = "JPEG"
+
+    previewer = latent_preview.get_previewer(device, model.model.latent_format)
+
+    pbar = comfy.utils.ProgressBar(steps)
+    def callback(step, x0, x, total_steps):
+        preview_bytes = None
+        if previewer:
+            preview_bytes = previewer.decode_latent_to_preview_image(preview_format, x0)
+        pbar.update_absolute(step + 1, total_steps, preview_bytes)
+
+    samples = comfy.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, latent_image,
+                                  denoise=denoise, disable_noise=disable_noise, start_step=start_step, last_step=last_step,
+                                  force_full_denoise=force_full_denoise, noise_mask=noise_mask, callback=callback, seed=seed)
+    out = latent.copy()
+    out["samples"] = samples
+    return (out, )
+
+class Gradually_More_Denoise_KSampler:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required":
+                    {"model": ("MODEL",),
+                    "positive": ("CONDITIONING", ),
+                    "negative": ("CONDITIONING", ),
+                    "latent_image": ("LATENT", ),
+
+                    "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}),
+                    "steps": ("INT", {"default": 20, "min": 1, "max": 10000}),
+                    "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}),
+                    "sampler_name": (comfy.samplers.KSampler.SAMPLERS, ),
+                    "scheduler": (comfy.samplers.KSampler.SCHEDULERS, ),
+
+                    "start_denoise": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.01}),
+                    "denoise_increment": ("FLOAT", {"default": 0.1, "min": 0.0, "max": 1.0, "step": 0.1}),
+                    "denoise_increment_steps": ("INT", {"default": 20, "min": 1, "max": 10000})
+                    },
+                "optional": { "optional_vae": ("VAE",) }
+                }
+
+    RETURN_TYPES = ("MODEL", "CONDITIONING", "CONDITIONING", "LATENT", "VAE", )
+    RETURN_NAMES = ("MODEL", "CONDITIONING+", "CONDITIONING-", "LATENT", "VAE", )
+    OUTPUT_NODE = True
+    FUNCTION = "sample"
+    CATEGORY = "ComfyUI-Frame-Interpolation/others"
+
+    def sample(self, model, positive, negative, latent_image,
+               seed, steps, cfg, sampler_name, scheduler, start_denoise, denoise_increment, denoise_increment_steps, optional_vae=None):  # optional inputs may be left unconnected
+        if start_denoise + denoise_increment * denoise_increment_steps > 1.0:
+            raise Exception(f"Max denoise strength can't exceed 1.0 (start_denoise={start_denoise}, denoise_increment={denoise_increment}, denoise_increment_steps={denoise_increment_steps})")
+
+        copied_latent = latent_image.copy()
+        out_samples = []
+
+        for latent_sample in copied_latent["samples"]:
+            latent = {"samples": einops.rearrange(latent_sample, "c h w -> 1 c h w")}
+            #Latent's shape is NCHW
+            gradually_denoising_samples = [
+                common_ksampler(
+                    model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent, denoise=start_denoise + denoise_increment * i
+                )[0]["samples"]
+                for i in range(denoise_increment_steps)
+            ]
+            out_samples.extend(gradually_denoising_samples)
+
+        copied_latent["samples"] = torch.cat(out_samples, dim=0)
+        return (model, positive, negative, copied_latent, optional_vae)
\ No newline at end of file
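The loop in other_nodes.py above runs one KSampler pass per increment step with a linearly growing denoise value. A minimal pure-Python sketch of that schedule and its validation (illustrative only, no ComfyUI imports):

```python
def denoise_schedule(start_denoise: float, denoise_increment: float, steps: int) -> list:
    # Mirrors the node's guard: start + increment * steps must stay <= 1.0.
    if start_denoise + denoise_increment * steps > 1.0:
        raise ValueError("max denoise strength can't exceed 1.0")
    # The i-th sampling pass uses denoise = start_denoise + denoise_increment * i.
    return [start_denoise + denoise_increment * i for i in range(steps)]

print(denoise_schedule(0.2, 0.1, 5))  # ~[0.2, 0.3, 0.4, 0.5, 0.6]
```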
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/requirements-no-cupy.txt b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/requirements-no-cupy.txt
new file mode 100644
index 0000000000000000000000000000000000000000..c490ca515d47b6623861b051fd822d24bc2ecd7e
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/requirements-no-cupy.txt
@@ -0,0 +1,9 @@
+torch
+numpy
+einops
+opencv-contrib-python
+kornia
+scipy
+Pillow
+torchvision
+tqdm
\ No newline at end of file
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/requirements-with-cupy.txt b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/requirements-with-cupy.txt
new file mode 100644
index 0000000000000000000000000000000000000000..bdfeb47253006b2897a2dd3ff1e7c91ccb41e1b2
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/requirements-with-cupy.txt
@@ -0,0 +1,10 @@
+torch
+numpy
+einops
+opencv-contrib-python
+kornia
+scipy
+Pillow
+torchvision
+tqdm
+cupy-wheel
\ No newline at end of file
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/test.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/test.py
new file mode 100644
index 0000000000000000000000000000000000000000..3c06ae20dc974344ed091dce2e410666a9fdd56f
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/test.py
@@ -0,0 +1,39 @@
+import os
+import sys
+sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
+
+import shutil
+import torch
+import torch.nn.functional as F
+import PIL
+import torchvision.transforms.functional as transform
+from vfi_utils import load_file_from_github_release
+from vfi_models import gmfss_fortuna, ifrnet, ifunet, m2m, rife, sepconv, amt, xvfi, cain, flavr
+import numpy as np
+
+frame_0 = torch.from_numpy(np.array(PIL.Image.open("demo_frames/anime0.png").convert("RGB")).astype(np.float32) / 255.0).unsqueeze(0)
+frame_1 = torch.from_numpy(np.array(PIL.Image.open("demo_frames/anime1.png").convert("RGB")).astype(np.float32) / 255.0).unsqueeze(0)
+
+
+if os.path.exists("test_result"):
+    shutil.rmtree("test_result")
+
+vfi_node_class = gmfss_fortuna.GMFSS_Fortuna_VFI()
+for i, ckpt_name in enumerate(vfi_node_class.INPUT_TYPES()["required"]["ckpt_name"][0][:2]):
+    result = vfi_node_class.vfi(ckpt_name, torch.cat([
+        frame_0,
+        frame_1,
+        frame_0,
+        frame_1
+    ], dim=0).cuda(), multiplier=4, batch_size=2)[0]
+    print(result.shape)
+    print(f"Generated {result.size(0)} frames")
+    frames = [PIL.Image.fromarray(np.clip((frame * 255).numpy(), 0, 255).astype(np.uint8)) for frame in result]
+    print(result[0].shape)
+    os.makedirs(f"test_result/video{i}", exist_ok=True)
+    for j, frame in enumerate(frames):
+        frame.save(f"test_result/video{i}/{j}.jpg")
+    frames[0].save(f"test_result/video{i}.gif", save_all=True, append_images=frames[1:], optimize=True, duration=1000//3, loop=0)  # GIF frame duration is in milliseconds (~3 fps)
+    if sys.platform == "win32":  # os.startfile exists only on Windows
+        os.startfile(f"test_result{os.path.sep}video{i}.gif")
+#torchvision.io.video.write_video("test.mp4", einops.rearrange(result, "n c h w -> n h w c").cpu(), fps=1)
\ No newline at end of file
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/test_vfi_schedule.gif b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/test_vfi_schedule.gif
new file mode 100644
index 0000000000000000000000000000000000000000..52fabecc38ae3050e723e8b4eb213fd41ad36195
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/test_vfi_schedule.gif
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:931fcd4c2cc84b457cbc1b1c3b8745a2bf292ff7dc43d4f733a2c510ad90353d
+size 8409697
diff --git
a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/amt/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/amt/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..20740c2a4d9d669a625db6f729ef11b18c449e9a --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/amt/__init__.py @@ -0,0 +1,87 @@ +import pathlib +import torch +from torch.utils.data import DataLoader +import pathlib +from vfi_utils import load_file_from_direct_url, preprocess_frames, postprocess_frames, generic_frame_loop, InterpolationStateList +import typing +from comfy.model_management import get_torch_device +from .amt_arch import AMT_S, AMT_L, AMT_G, InputPadder + +#https://github.com/MCG-NKU/AMT/tree/main/cfgs +CKPT_CONFIGS = { + "amt-s.pth": { + "network": AMT_S, + "params": { "corr_radius": 3, "corr_lvls": 4, "num_flows": 3 } + }, + "amt-l.pth": { + "network": AMT_L, + "params": { "corr_radius": 3, "corr_lvls": 4, "num_flows": 5 } + }, + "amt-g.pth": { + "network": AMT_G, + "params": { "corr_radius": 3, "corr_lvls": 4, "num_flows": 5 } + }, + "gopro_amt-s.pth": { + "network": AMT_S, + "params": { "corr_radius": 3, "corr_lvls": 4, "num_flows": 3 } + } +} + + +MODEL_TYPE = pathlib.Path(__file__).parent.name + +class AMT_VFI: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "ckpt_name": (list(CKPT_CONFIGS.keys()), ), + "frames": ("IMAGE", ), + "clear_cache_after_n_frames": ("INT", {"default": 1, "min": 1, "max": 100}), + "multiplier": ("INT", {"default": 2, "min": 2, "max": 1000}) + }, + "optional": { + "optional_interpolation_states": ("INTERPOLATION_STATES", ) + } + } + + RETURN_TYPES = ("IMAGE", ) + FUNCTION = "vfi" + CATEGORY = "ComfyUI-Frame-Interpolation/VFI" + + def vfi( + self, + ckpt_name: typing.AnyStr, + frames: torch.Tensor, + clear_cache_after_n_frames: typing.SupportsInt = 1, + multiplier: typing.SupportsInt = 2, + optional_interpolation_states: InterpolationStateList = None, + **kwargs + ): + model_path = load_file_from_direct_url(MODEL_TYPE, f"https://huggingface.co/lalala125/AMT/resolve/main/{ckpt_name}") + ckpt_config = CKPT_CONFIGS[ckpt_name] + + interpolation_model = ckpt_config["network"](**ckpt_config["params"]) + interpolation_model.load_state_dict(torch.load(model_path)["state_dict"]) + interpolation_model.eval().to(get_torch_device()) + + frames = preprocess_frames(frames) + padder = InputPadder(frames.shape, 16) + frames = padder.pad(frames) + + def return_middle_frame(frame_0, frame_1, timestep, model): + return model( + frame_0, + frame_1, + embt=torch.FloatTensor([timestep] * frame_0.shape[0]).view(frame_0.shape[0], 1, 1, 1).to(get_torch_device()), + scale_factor=1.0, + eval=True + )["imgt_pred"] + + args = [interpolation_model] + out = generic_frame_loop(type(self).__name__, frames, clear_cache_after_n_frames, multiplier, return_middle_frame, *args, + interpolation_states=optional_interpolation_states, dtype=torch.float32) + out = padder.unpad(out) + out = postprocess_frames(out) + return (out,) + diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/amt/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/amt/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c1904f02aa0161bd80ffe3cb65e7cf987e29406c Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/amt/__pycache__/__init__.cpython-310.pyc differ diff --git 
a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/amt/__pycache__/amt_arch.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/amt/__pycache__/amt_arch.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..09cc06a5f5bdb3e325e236ec7642f000f198833a
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/amt/__pycache__/amt_arch.cpython-310.pyc differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/amt/amt_arch.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/amt/amt_arch.py
new file mode 100644
index 0000000000000000000000000000000000000000..448c4d56193a833d3a1c89ea3e8475258f11958a
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/amt/amt_arch.py
@@ -0,0 +1,1590 @@
+"""
+https://github.com/MCG-NKU/AMT/blob/main/utils/dist_utils.py
+https://github.com/MCG-NKU/AMT/blob/main/utils/flow_utils.py
+https://github.com/MCG-NKU/AMT/blob/main/utils/utils.py
+https://github.com/MCG-NKU/AMT/blob/main/networks/blocks/feat_enc.py
+https://github.com/MCG-NKU/AMT/blob/main/networks/blocks/ifrnet.py
+https://github.com/MCG-NKU/AMT/blob/main/networks/blocks/multi_flow.py
+https://github.com/MCG-NKU/AMT/blob/main/networks/blocks/raft.py
+https://github.com/MCG-NKU/AMT/blob/main/networks/AMT-S.py
+https://github.com/MCG-NKU/AMT/blob/main/networks/AMT-L.py
+https://github.com/MCG-NKU/AMT/blob/main/networks/AMT-G.py
+"""
+#Removed imageio by removing readImage and writeImage.
+#The model receives image tensors from other ComfyUI nodes, so they are unnecessary.
+
+import torch
+import torch.nn as nn
+import numpy as np
+from PIL import ImageFile
+import torch.nn.functional as F
+ImageFile.LOAD_TRUNCATED_IMAGES = True
+import re
+import sys
+import random
+
+def warp(img, flow):
+    B, _, H, W = flow.shape
+    xx = torch.linspace(-1.0, 1.0, W).view(1, 1, 1, W).expand(B, -1, H, -1)
+    yy = torch.linspace(-1.0, 1.0, H).view(1, 1, H, 1).expand(B, -1, -1, W)
+    grid = torch.cat([xx, yy], 1).to(img)
+    flow_ = torch.cat([flow[:, 0:1, :, :] / ((W - 1.0) / 2.0), flow[:, 1:2, :, :] / ((H - 1.0) / 2.0)], 1)
+    grid_ = (grid + flow_).permute(0, 2, 3, 1)
+    output = F.grid_sample(input=img, grid=grid_, mode='bilinear', padding_mode='border', align_corners=True)
+    return output
+
+
+def make_colorwheel():
+    """
+    Generates a color wheel for optical flow visualization as presented in:
+    Baker et al. "A Database and Evaluation Methodology for Optical Flow" (ICCV, 2007)
+    URL: http://vision.middlebury.edu/flow/flowEval-iccv07.pdf
+    Code follows the original C++ source code of Daniel Scharstein.
+    Code follows the Matlab source code of Deqing Sun.
+ Returns: + np.ndarray: Color wheel + """ + + RY = 15 + YG = 6 + GC = 4 + CB = 11 + BM = 13 + MR = 6 + + ncols = RY + YG + GC + CB + BM + MR + colorwheel = np.zeros((ncols, 3)) + col = 0 + + # RY + colorwheel[0:RY, 0] = 255 + colorwheel[0:RY, 1] = np.floor(255*np.arange(0,RY)/RY) + col = col+RY + # YG + colorwheel[col:col+YG, 0] = 255 - np.floor(255*np.arange(0,YG)/YG) + colorwheel[col:col+YG, 1] = 255 + col = col+YG + # GC + colorwheel[col:col+GC, 1] = 255 + colorwheel[col:col+GC, 2] = np.floor(255*np.arange(0,GC)/GC) + col = col+GC + # CB + colorwheel[col:col+CB, 1] = 255 - np.floor(255*np.arange(CB)/CB) + colorwheel[col:col+CB, 2] = 255 + col = col+CB + # BM + colorwheel[col:col+BM, 2] = 255 + colorwheel[col:col+BM, 0] = np.floor(255*np.arange(0,BM)/BM) + col = col+BM + # MR + colorwheel[col:col+MR, 2] = 255 - np.floor(255*np.arange(MR)/MR) + colorwheel[col:col+MR, 0] = 255 + return colorwheel + +def flow_uv_to_colors(u, v, convert_to_bgr=False): + """ + Applies the flow color wheel to (possibly clipped) flow components u and v. + According to the C++ source code of Daniel Scharstein + According to the Matlab source code of Deqing Sun + Args: + u (np.ndarray): Input horizontal flow of shape [H,W] + v (np.ndarray): Input vertical flow of shape [H,W] + convert_to_bgr (bool, optional): Convert output image to BGR. Defaults to False. + Returns: + np.ndarray: Flow visualization image of shape [H,W,3] + """ + flow_image = np.zeros((u.shape[0], u.shape[1], 3), np.uint8) + colorwheel = make_colorwheel() # shape [55x3] + ncols = colorwheel.shape[0] + rad = np.sqrt(np.square(u) + np.square(v)) + a = np.arctan2(-v, -u)/np.pi + fk = (a+1) / 2*(ncols-1) + k0 = np.floor(fk).astype(np.int32) + k1 = k0 + 1 + k1[k1 == ncols] = 0 + f = fk - k0 + for i in range(colorwheel.shape[1]): + tmp = colorwheel[:,i] + col0 = tmp[k0] / 255.0 + col1 = tmp[k1] / 255.0 + col = (1-f)*col0 + f*col1 + idx = (rad <= 1) + col[idx] = 1 - rad[idx] * (1-col[idx]) + col[~idx] = col[~idx] * 0.75 # out of range + # Note the 2-i => BGR instead of RGB + ch_idx = 2-i if convert_to_bgr else i + flow_image[:,:,ch_idx] = np.floor(255 * col) + return flow_image + +def flow_to_image(flow_uv, clip_flow=None, convert_to_bgr=False): + """ + Expects a two dimensional flow image of shape. + Args: + flow_uv (np.ndarray): Flow UV image of shape [H,W,2] + clip_flow (float, optional): Clip maximum of flow values. Defaults to None. + convert_to_bgr (bool, optional): Convert output image to BGR. Defaults to False. + Returns: + np.ndarray: Flow visualization image of shape [H,W,3] + """ + assert flow_uv.ndim == 3, 'input flow must have three dimensions' + assert flow_uv.shape[2] == 2, 'input flow must have shape [H,W,2]' + if clip_flow is not None: + flow_uv = np.clip(flow_uv, 0, clip_flow) + u = flow_uv[:,:,0] + v = flow_uv[:,:,1] + rad = np.sqrt(np.square(u) + np.square(v)) + rad_max = np.max(rad) + epsilon = 1e-5 + u = u / (rad_max + epsilon) + v = v / (rad_max + epsilon) + return flow_uv_to_colors(u, v, convert_to_bgr) + + + + + + + + + + + +class AverageMeter(): + def __init__(self): + self.reset() + + def reset(self): + self.val = 0. + self.avg = 0. + self.sum = 0. 
+ self.count = 0 + + def update(self, val, n=1): + self.val = val + self.sum += val * n + self.count += n + self.avg = self.sum / self.count + + +class AverageMeterGroups: + def __init__(self) -> None: + self.meter_dict = dict() + + def update(self, dict, n=1): + for name, val in dict.items(): + if self.meter_dict.get(name) is None: + self.meter_dict[name] = AverageMeter() + self.meter_dict[name].update(val, n) + + def reset(self, name=None): + if name is None: + for v in self.meter_dict.values(): + v.reset() + else: + meter = self.meter_dict.get(name) + if meter is not None: + meter.reset() + + def avg(self, name): + meter = self.meter_dict.get(name) + if meter is not None: + return meter.avg + + +class InputPadder: + """ Pads images such that dimensions are divisible by divisor """ + def __init__(self, dims, divisor=16): + self.ht, self.wd = dims[-2:] + pad_ht = (((self.ht // divisor) + 1) * divisor - self.ht) % divisor + pad_wd = (((self.wd // divisor) + 1) * divisor - self.wd) % divisor + self._pad = [pad_wd//2, pad_wd - pad_wd//2, pad_ht//2, pad_ht - pad_ht//2] + + def pad(self, input_tensor): + return F.pad(input_tensor, self._pad, mode='replicate') + + def unpad(self, input_tensor): + return self._unpad(input_tensor) + + def _unpad(self, x): + ht, wd = x.shape[-2:] + c = [self._pad[2], ht-self._pad[3], self._pad[0], wd-self._pad[1]] + return x[..., c[0]:c[1], c[2]:c[3]] + + +def img2tensor(img): + if img.shape[-1] > 3: + img = img[:,:,:3] + return torch.tensor(img).permute(2, 0, 1).unsqueeze(0) / 255.0 + + +def tensor2img(img_t): + return (img_t * 255.).detach( + ).squeeze(0).permute(1, 2, 0).cpu().numpy( + ).clip(0, 255).astype(np.uint8) + +def seed_all(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + + +def readPFM(file): + file = open(file, 'rb') + + color = None + width = None + height = None + scale = None + endian = None + + header = file.readline().rstrip() + if header.decode("ascii") == 'PF': + color = True + elif header.decode("ascii") == 'Pf': + color = False + else: + raise Exception('Not a PFM file.') + + dim_match = re.match(r'^(\d+)\s(\d+)\s$', file.readline().decode("ascii")) + if dim_match: + width, height = list(map(int, dim_match.groups())) + else: + raise Exception('Malformed PFM header.') + + scale = float(file.readline().decode("ascii").rstrip()) + if scale < 0: + endian = '<' + scale = -scale + else: + endian = '>' + + data = np.fromfile(file, endian + 'f') + shape = (height, width, 3) if color else (height, width) + + data = np.reshape(data, shape) + data = np.flipud(data) + return data, scale + + +def writePFM(file, image, scale=1): + file = open(file, 'wb') + + color = None + + if image.dtype.name != 'float32': + raise Exception('Image dtype must be float32.') + + image = np.flipud(image) + + if len(image.shape) == 3 and image.shape[2] == 3: + color = True + elif len(image.shape) == 2 or len(image.shape) == 3 and image.shape[2] == 1: + color = False + else: + raise Exception('Image must have H x W x 3, H x W x 1 or H x W dimensions.') + + file.write('PF\n' if color else 'Pf\n'.encode()) + file.write('%d %d\n'.encode() % (image.shape[1], image.shape[0])) + + endian = image.dtype.byteorder + + if endian == '<' or endian == '=' and sys.byteorder == 'little': + scale = -scale + + file.write('%f\n'.encode() % scale) + + image.tofile(file) + + +def readFlow(name): + if name.endswith('.pfm') or name.endswith('.PFM'): + return readPFM(name)[0][:,:,0:2] + + f = open(name, 'rb') + + header = f.read(4) + 
if header.decode("utf-8") != 'PIEH': + raise Exception('Flow file header does not contain PIEH') + + width = np.fromfile(f, np.int32, 1).squeeze() + height = np.fromfile(f, np.int32, 1).squeeze() + + flow = np.fromfile(f, np.float32, width * height * 2).reshape((height, width, 2)) + + return flow.astype(np.float32) + +def writeFlow(name, flow): + f = open(name, 'wb') + f.write('PIEH'.encode('utf-8')) + np.array([flow.shape[1], flow.shape[0]], dtype=np.int32).tofile(f) + flow = flow.astype(np.float32) + flow.tofile(f) + + +def readFloat(name): + f = open(name, 'rb') + + if(f.readline().decode("utf-8")) != 'float\n': + raise Exception('float file %s did not contain keyword' % name) + + dim = int(f.readline()) + + dims = [] + count = 1 + for i in range(0, dim): + d = int(f.readline()) + dims.append(d) + count *= d + + dims = list(reversed(dims)) + + data = np.fromfile(f, np.float32, count).reshape(dims) + if dim > 2: + data = np.transpose(data, (2, 1, 0)) + data = np.transpose(data, (1, 0, 2)) + + return data + + +def writeFloat(name, data): + f = open(name, 'wb') + + dim=len(data.shape) + if dim>3: + raise Exception('bad float file dimension: %d' % dim) + + f.write(('float\n').encode('ascii')) + f.write(('%d\n' % dim).encode('ascii')) + + if dim == 1: + f.write(('%d\n' % data.shape[0]).encode('ascii')) + else: + f.write(('%d\n' % data.shape[1]).encode('ascii')) + f.write(('%d\n' % data.shape[0]).encode('ascii')) + for i in range(2, dim): + f.write(('%d\n' % data.shape[i]).encode('ascii')) + + data = data.astype(np.float32) + if dim==2: + data.tofile(f) + + else: + np.transpose(data, (2, 0, 1)).tofile(f) + + +def check_dim_and_resize(tensor_list): + shape_list = [] + for t in tensor_list: + shape_list.append(t.shape[2:]) + + if len(set(shape_list)) > 1: + desired_shape = shape_list[0] + print(f'Inconsistent size of input video frames. 
All frames will be resized to {desired_shape}') + + resize_tensor_list = [] + for t in tensor_list: + resize_tensor_list.append(torch.nn.functional.interpolate(t, size=tuple(desired_shape), mode='bilinear')) + + tensor_list = resize_tensor_list + + return tensor_list + + + + + + + + + + + +class BottleneckBlock(nn.Module): + def __init__(self, in_planes, planes, norm_fn='group', stride=1): + super(BottleneckBlock, self).__init__() + + self.conv1 = nn.Conv2d(in_planes, planes//4, kernel_size=1, padding=0) + self.conv2 = nn.Conv2d(planes//4, planes//4, kernel_size=3, padding=1, stride=stride) + self.conv3 = nn.Conv2d(planes//4, planes, kernel_size=1, padding=0) + self.relu = nn.ReLU(inplace=True) + + num_groups = planes // 8 + + if norm_fn == 'group': + self.norm1 = nn.GroupNorm(num_groups=num_groups, num_channels=planes//4) + self.norm2 = nn.GroupNorm(num_groups=num_groups, num_channels=planes//4) + self.norm3 = nn.GroupNorm(num_groups=num_groups, num_channels=planes) + if not stride == 1: + self.norm4 = nn.GroupNorm(num_groups=num_groups, num_channels=planes) + + elif norm_fn == 'batch': + self.norm1 = nn.BatchNorm2d(planes//4) + self.norm2 = nn.BatchNorm2d(planes//4) + self.norm3 = nn.BatchNorm2d(planes) + if not stride == 1: + self.norm4 = nn.BatchNorm2d(planes) + + elif norm_fn == 'instance': + self.norm1 = nn.InstanceNorm2d(planes//4) + self.norm2 = nn.InstanceNorm2d(planes//4) + self.norm3 = nn.InstanceNorm2d(planes) + if not stride == 1: + self.norm4 = nn.InstanceNorm2d(planes) + + elif norm_fn == 'none': + self.norm1 = nn.Sequential() + self.norm2 = nn.Sequential() + self.norm3 = nn.Sequential() + if not stride == 1: + self.norm4 = nn.Sequential() + + if stride == 1: + self.downsample = None + + else: + self.downsample = nn.Sequential( + nn.Conv2d(in_planes, planes, kernel_size=1, stride=stride), self.norm4) + + + def forward(self, x): + y = x + y = self.relu(self.norm1(self.conv1(y))) + y = self.relu(self.norm2(self.conv2(y))) + y = self.relu(self.norm3(self.conv3(y))) + + if self.downsample is not None: + x = self.downsample(x) + + return self.relu(x+y) + + +class ResidualBlock(nn.Module): + def __init__(self, in_planes, planes, norm_fn='group', stride=1): + super(ResidualBlock, self).__init__() + + self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, padding=1, stride=stride) + self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, padding=1) + self.relu = nn.ReLU(inplace=True) + + num_groups = planes // 8 + + if norm_fn == 'group': + self.norm1 = nn.GroupNorm(num_groups=num_groups, num_channels=planes) + self.norm2 = nn.GroupNorm(num_groups=num_groups, num_channels=planes) + if not stride == 1: + self.norm3 = nn.GroupNorm(num_groups=num_groups, num_channels=planes) + + elif norm_fn == 'batch': + self.norm1 = nn.BatchNorm2d(planes) + self.norm2 = nn.BatchNorm2d(planes) + if not stride == 1: + self.norm3 = nn.BatchNorm2d(planes) + + elif norm_fn == 'instance': + self.norm1 = nn.InstanceNorm2d(planes) + self.norm2 = nn.InstanceNorm2d(planes) + if not stride == 1: + self.norm3 = nn.InstanceNorm2d(planes) + + elif norm_fn == 'none': + self.norm1 = nn.Sequential() + self.norm2 = nn.Sequential() + if not stride == 1: + self.norm3 = nn.Sequential() + + if stride == 1: + self.downsample = None + + else: + self.downsample = nn.Sequential( + nn.Conv2d(in_planes, planes, kernel_size=1, stride=stride), self.norm3) + + + def forward(self, x): + y = x + y = self.relu(self.norm1(self.conv1(y))) + y = self.relu(self.norm2(self.conv2(y))) + + if self.downsample is not None: + x = 
self.downsample(x) + + return self.relu(x+y) + + +class SmallEncoder(nn.Module): + def __init__(self, output_dim=128, norm_fn='batch', dropout=0.0): + super(SmallEncoder, self).__init__() + self.norm_fn = norm_fn + + if self.norm_fn == 'group': + self.norm1 = nn.GroupNorm(num_groups=8, num_channels=32) + + elif self.norm_fn == 'batch': + self.norm1 = nn.BatchNorm2d(32) + + elif self.norm_fn == 'instance': + self.norm1 = nn.InstanceNorm2d(32) + + elif self.norm_fn == 'none': + self.norm1 = nn.Sequential() + + self.conv1 = nn.Conv2d(3, 32, kernel_size=7, stride=2, padding=3) + self.relu1 = nn.ReLU(inplace=True) + + self.in_planes = 32 + self.layer1 = self._make_layer(32, stride=1) + self.layer2 = self._make_layer(64, stride=2) + self.layer3 = self._make_layer(96, stride=2) + + self.dropout = None + if dropout > 0: + self.dropout = nn.Dropout2d(p=dropout) + + self.conv2 = nn.Conv2d(96, output_dim, kernel_size=1) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, (nn.BatchNorm2d, nn.InstanceNorm2d, nn.GroupNorm)): + if m.weight is not None: + nn.init.constant_(m.weight, 1) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + + def _make_layer(self, dim, stride=1): + layer1 = BottleneckBlock(self.in_planes, dim, self.norm_fn, stride=stride) + layer2 = BottleneckBlock(dim, dim, self.norm_fn, stride=1) + layers = (layer1, layer2) + + self.in_planes = dim + return nn.Sequential(*layers) + + + def forward(self, x): + + # if input is list, combine batch dimension + is_list = isinstance(x, tuple) or isinstance(x, list) + if is_list: + batch_dim = x[0].shape[0] + x = torch.cat(x, dim=0) + + x = self.conv1(x) + x = self.norm1(x) + x = self.relu1(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.conv2(x) + + if self.training and self.dropout is not None: + x = self.dropout(x) + + if is_list: + x = torch.split(x, [batch_dim, batch_dim], dim=0) + + return x + +class BasicEncoder(nn.Module): + def __init__(self, output_dim=128, norm_fn='batch', dropout=0.0): + super(BasicEncoder, self).__init__() + self.norm_fn = norm_fn + + if self.norm_fn == 'group': + self.norm1 = nn.GroupNorm(num_groups=8, num_channels=64) + + elif self.norm_fn == 'batch': + self.norm1 = nn.BatchNorm2d(64) + + elif self.norm_fn == 'instance': + self.norm1 = nn.InstanceNorm2d(64) + + elif self.norm_fn == 'none': + self.norm1 = nn.Sequential() + + self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3) + self.relu1 = nn.ReLU(inplace=True) + + self.in_planes = 64 + self.layer1 = self._make_layer(64, stride=1) + self.layer2 = self._make_layer(72, stride=2) + self.layer3 = self._make_layer(128, stride=2) + + # output convolution + self.conv2 = nn.Conv2d(128, output_dim, kernel_size=1) + + self.dropout = None + if dropout > 0: + self.dropout = nn.Dropout2d(p=dropout) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, (nn.BatchNorm2d, nn.InstanceNorm2d, nn.GroupNorm)): + if m.weight is not None: + nn.init.constant_(m.weight, 1) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + + def _make_layer(self, dim, stride=1): + layer1 = ResidualBlock(self.in_planes, dim, self.norm_fn, stride=stride) + layer2 = ResidualBlock(dim, dim, self.norm_fn, stride=1) + layers = (layer1, layer2) + + self.in_planes = dim + return nn.Sequential(*layers) + + + def forward(self, x): + + # if input is list, combine 
batch dimension + is_list = isinstance(x, tuple) or isinstance(x, list) + if is_list: + batch_dim = x[0].shape[0] + x = torch.cat(x, dim=0) + + x = self.conv1(x) + x = self.norm1(x) + x = self.relu1(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + + x = self.conv2(x) + + if self.training and self.dropout is not None: + x = self.dropout(x) + + if is_list: + x = torch.split(x, [batch_dim, batch_dim], dim=0) + + return x + +class LargeEncoder(nn.Module): + def __init__(self, output_dim=128, norm_fn='batch', dropout=0.0): + super(LargeEncoder, self).__init__() + self.norm_fn = norm_fn + + if self.norm_fn == 'group': + self.norm1 = nn.GroupNorm(num_groups=8, num_channels=64) + + elif self.norm_fn == 'batch': + self.norm1 = nn.BatchNorm2d(64) + + elif self.norm_fn == 'instance': + self.norm1 = nn.InstanceNorm2d(64) + + elif self.norm_fn == 'none': + self.norm1 = nn.Sequential() + + self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3) + self.relu1 = nn.ReLU(inplace=True) + + self.in_planes = 64 + self.layer1 = self._make_layer(64, stride=1) + self.layer2 = self._make_layer(112, stride=2) + self.layer3 = self._make_layer(160, stride=2) + self.layer3_2 = self._make_layer(160, stride=1) + + # output convolution + self.conv2 = nn.Conv2d(self.in_planes, output_dim, kernel_size=1) + + self.dropout = None + if dropout > 0: + self.dropout = nn.Dropout2d(p=dropout) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, (nn.BatchNorm2d, nn.InstanceNorm2d, nn.GroupNorm)): + if m.weight is not None: + nn.init.constant_(m.weight, 1) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + + def _make_layer(self, dim, stride=1): + layer1 = ResidualBlock(self.in_planes, dim, self.norm_fn, stride=stride) + layer2 = ResidualBlock(dim, dim, self.norm_fn, stride=1) + layers = (layer1, layer2) + + self.in_planes = dim + return nn.Sequential(*layers) + + + def forward(self, x): + + # if input is list, combine batch dimension + is_list = isinstance(x, tuple) or isinstance(x, list) + if is_list: + batch_dim = x[0].shape[0] + x = torch.cat(x, dim=0) + + x = self.conv1(x) + x = self.norm1(x) + x = self.relu1(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer3_2(x) + + x = self.conv2(x) + + if self.training and self.dropout is not None: + x = self.dropout(x) + + if is_list: + x = torch.split(x, [batch_dim, batch_dim], dim=0) + + return x + + + + + + + + + + + +def resize(x, scale_factor): + return F.interpolate(x, scale_factor=scale_factor, mode="bilinear", align_corners=False) + +def convrelu(in_channels, out_channels, kernel_size=3, stride=1, padding=1, dilation=1, groups=1, bias=True): + return nn.Sequential( + nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding, dilation, groups, bias=bias), + nn.PReLU(out_channels) + ) + +class ResBlock(nn.Module): + def __init__(self, in_channels, side_channels, bias=True): + super(ResBlock, self).__init__() + self.side_channels = side_channels + self.conv1 = nn.Sequential( + nn.Conv2d(in_channels, in_channels, kernel_size=3, stride=1, padding=1, bias=bias), + nn.PReLU(in_channels) + ) + self.conv2 = nn.Sequential( + nn.Conv2d(side_channels, side_channels, kernel_size=3, stride=1, padding=1, bias=bias), + nn.PReLU(side_channels) + ) + self.conv3 = nn.Sequential( + nn.Conv2d(in_channels, in_channels, kernel_size=3, stride=1, padding=1, bias=bias), + nn.PReLU(in_channels) + ) + self.conv4 = nn.Sequential( 
+ nn.Conv2d(side_channels, side_channels, kernel_size=3, stride=1, padding=1, bias=bias), + nn.PReLU(side_channels) + ) + self.conv5 = nn.Conv2d(in_channels, in_channels, kernel_size=3, stride=1, padding=1, bias=bias) + self.prelu = nn.PReLU(in_channels) + + def forward(self, x): + out = self.conv1(x) + + res_feat = out[:, :-self.side_channels, ...] + side_feat = out[:, -self.side_channels:, :, :] + side_feat = self.conv2(side_feat) + out = self.conv3(torch.cat([res_feat, side_feat], 1)) + + res_feat = out[:, :-self.side_channels, ...] + side_feat = out[:, -self.side_channels:, :, :] + side_feat = self.conv4(side_feat) + out = self.conv5(torch.cat([res_feat, side_feat], 1)) + + out = self.prelu(x + out) + return out + +class Encoder(nn.Module): + def __init__(self, channels, large=False): + super(Encoder, self).__init__() + self.channels = channels + prev_ch = 3 + for idx, ch in enumerate(channels, 1): + k = 7 if large and idx == 1 else 3 + p = 3 if k ==7 else 1 + self.register_module(f'pyramid{idx}', + nn.Sequential( + convrelu(prev_ch, ch, k, 2, p), + convrelu(ch, ch, 3, 1, 1) + )) + prev_ch = ch + + def forward(self, in_x): + fs = [] + for idx in range(len(self.channels)): + out_x = getattr(self, f'pyramid{idx+1}')(in_x) + fs.append(out_x) + in_x = out_x + return fs + +class InitDecoder(nn.Module): + def __init__(self, in_ch, out_ch, skip_ch) -> None: + super().__init__() + self.convblock = nn.Sequential( + convrelu(in_ch*2+1, in_ch*2), + ResBlock(in_ch*2, skip_ch), + nn.ConvTranspose2d(in_ch*2, out_ch+4, 4, 2, 1, bias=True) + ) + def forward(self, f0, f1, embt): + h, w = f0.shape[2:] + embt = embt.repeat(1, 1, h, w) + out = self.convblock(torch.cat([f0, f1, embt], 1)) + flow0, flow1 = torch.chunk(out[:, :4, ...], 2, 1) + ft_ = out[:, 4:, ...] + return flow0, flow1, ft_ + +class IntermediateDecoder(nn.Module): + def __init__(self, in_ch, out_ch, skip_ch) -> None: + super().__init__() + self.convblock = nn.Sequential( + convrelu(in_ch*3+4, in_ch*3), + ResBlock(in_ch*3, skip_ch), + nn.ConvTranspose2d(in_ch*3, out_ch+4, 4, 2, 1, bias=True) + ) + def forward(self, ft_, f0, f1, flow0_in, flow1_in): + f0_warp = warp(f0, flow0_in) + f1_warp = warp(f1, flow1_in) + f_in = torch.cat([ft_, f0_warp, f1_warp, flow0_in, flow1_in], 1) + out = self.convblock(f_in) + flow0, flow1 = torch.chunk(out[:, :4, ...], 2, 1) + ft_ = out[:, 4:, ...] + flow0 = flow0 + 2.0 * resize(flow0_in, scale_factor=2.0) + flow1 = flow1 + 2.0 * resize(flow1_in, scale_factor=2.0) + return flow0, flow1, ft_ + + + + + + + + + + + +def multi_flow_combine(comb_block, img0, img1, flow0, flow1, + mask=None, img_res=None, mean=None): + ''' + A parallel implementation of multiple flow field warping + comb_block: An nn.Seqential object. + img shape: [b, c, h, w] + flow shape: [b, 2*num_flows, h, w] + mask (opt): + If 'mask' is None, the function conduct a simple average. + img_res (opt): + If 'img_res' is None, the function adds zero instead. + mean (opt): + If 'mean' is None, the function adds zero instead. 
+ ''' + b, c, h, w = flow0.shape + num_flows = c // 2 + flow0 = flow0.reshape(b, num_flows, 2, h, w).reshape(-1, 2, h, w) + flow1 = flow1.reshape(b, num_flows, 2, h, w).reshape(-1, 2, h, w) + + mask = mask.reshape(b, num_flows, 1, h, w + ).reshape(-1, 1, h, w) if mask is not None else None + img_res = img_res.reshape(b, num_flows, 3, h, w + ).reshape(-1, 3, h, w) if img_res is not None else 0 + img0 = torch.stack([img0] * num_flows, 1).reshape(-1, 3, h, w) + img1 = torch.stack([img1] * num_flows, 1).reshape(-1, 3, h, w) + mean = torch.stack([mean] * num_flows, 1).reshape(-1, 1, 1, 1 + ) if mean is not None else 0 + + img0_warp = warp(img0, flow0) + img1_warp = warp(img1, flow1) + img_warps = mask * img0_warp + (1 - mask) * img1_warp + mean + img_res + img_warps = img_warps.reshape(b, num_flows, 3, h, w) + imgt_pred = img_warps.mean(1) + comb_block(img_warps.view(b, -1, h, w)) + return imgt_pred + + +class MultiFlowDecoder(nn.Module): + def __init__(self, in_ch, skip_ch, num_flows=3): + super(MultiFlowDecoder, self).__init__() + self.num_flows = num_flows + self.convblock = nn.Sequential( + convrelu(in_ch*3+4, in_ch*3), + ResBlock(in_ch*3, skip_ch), + nn.ConvTranspose2d(in_ch*3, 8*num_flows, 4, 2, 1, bias=True) + ) + + def forward(self, ft_, f0, f1, flow0, flow1): + n = self.num_flows + f0_warp = warp(f0, flow0) + f1_warp = warp(f1, flow1) + out = self.convblock(torch.cat([ft_, f0_warp, f1_warp, flow0, flow1], 1)) + delta_flow0, delta_flow1, mask, img_res = torch.split(out, [2*n, 2*n, n, 3*n], 1) + mask = torch.sigmoid(mask) + + flow0 = delta_flow0 + 2.0 * resize(flow0, scale_factor=2.0 + ).repeat(1, self.num_flows, 1, 1) + flow1 = delta_flow1 + 2.0 * resize(flow1, scale_factor=2.0 + ).repeat(1, self.num_flows, 1, 1) + + return flow0, flow1, mask, img_res + + + + + + + + + + + +def resize(x, scale_factor): + return F.interpolate(x, scale_factor=scale_factor, mode="bilinear", align_corners=False) + + +def bilinear_sampler(img, coords, mask=False): + """ Wrapper for grid_sample, uses pixel coordinates """ + H, W = img.shape[-2:] + xgrid, ygrid = coords.split([1,1], dim=-1) + xgrid = 2*xgrid/(W-1) - 1 + ygrid = 2*ygrid/(H-1) - 1 + + grid = torch.cat([xgrid, ygrid], dim=-1) + img = F.grid_sample(img, grid, align_corners=True) + + if mask: + mask = (xgrid > -1) & (ygrid > -1) & (xgrid < 1) & (ygrid < 1) + return img, mask.float() + + return img + + +def coords_grid(batch, ht, wd, device): + coords = torch.meshgrid(torch.arange(ht, device=device), + torch.arange(wd, device=device), + indexing='ij') + coords = torch.stack(coords[::-1], dim=0).float() + return coords[None].repeat(batch, 1, 1, 1) + + +class SmallUpdateBlock(nn.Module): + def __init__(self, cdim, hidden_dim, flow_dim, corr_dim, fc_dim, + corr_levels=4, radius=3, scale_factor=None): + super(SmallUpdateBlock, self).__init__() + cor_planes = corr_levels * (2 * radius + 1) **2 + self.scale_factor = scale_factor + + self.convc1 = nn.Conv2d(2 * cor_planes, corr_dim, 1, padding=0) + self.convf1 = nn.Conv2d(4, flow_dim*2, 7, padding=3) + self.convf2 = nn.Conv2d(flow_dim*2, flow_dim, 3, padding=1) + self.conv = nn.Conv2d(corr_dim+flow_dim, fc_dim, 3, padding=1) + + self.gru = nn.Sequential( + nn.Conv2d(fc_dim+4+cdim, hidden_dim, 3, padding=1), + nn.LeakyReLU(negative_slope=0.1, inplace=True), + nn.Conv2d(hidden_dim, hidden_dim, 3, padding=1), + ) + + self.feat_head = nn.Sequential( + nn.Conv2d(hidden_dim, hidden_dim, 3, padding=1), + nn.LeakyReLU(negative_slope=0.1, inplace=True), + nn.Conv2d(hidden_dim, cdim, 3, padding=1), + ) + + 
self.flow_head = nn.Sequential( + nn.Conv2d(hidden_dim, hidden_dim, 3, padding=1), + nn.LeakyReLU(negative_slope=0.1, inplace=True), + nn.Conv2d(hidden_dim, 4, 3, padding=1), + ) + + self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True) + + def forward(self, net, flow, corr): + net = resize(net, 1 / self.scale_factor + ) if self.scale_factor is not None else net + cor = self.lrelu(self.convc1(corr)) + flo = self.lrelu(self.convf1(flow)) + flo = self.lrelu(self.convf2(flo)) + cor_flo = torch.cat([cor, flo], dim=1) + inp = self.lrelu(self.conv(cor_flo)) + inp = torch.cat([inp, flow, net], dim=1) + + out = self.gru(inp) + delta_net = self.feat_head(out) + delta_flow = self.flow_head(out) + + if self.scale_factor is not None: + delta_net = resize(delta_net, scale_factor=self.scale_factor) + delta_flow = self.scale_factor * resize(delta_flow, scale_factor=self.scale_factor) + + return delta_net, delta_flow + + +class BasicUpdateBlock(nn.Module): + def __init__(self, cdim, hidden_dim, flow_dim, corr_dim, corr_dim2, + fc_dim, corr_levels=4, radius=3, scale_factor=None, out_num=1): + super(BasicUpdateBlock, self).__init__() + cor_planes = corr_levels * (2 * radius + 1) **2 + + self.scale_factor = scale_factor + self.convc1 = nn.Conv2d(2 * cor_planes, corr_dim, 1, padding=0) + self.convc2 = nn.Conv2d(corr_dim, corr_dim2, 3, padding=1) + self.convf1 = nn.Conv2d(4, flow_dim*2, 7, padding=3) + self.convf2 = nn.Conv2d(flow_dim*2, flow_dim, 3, padding=1) + self.conv = nn.Conv2d(flow_dim+corr_dim2, fc_dim, 3, padding=1) + + self.gru = nn.Sequential( + nn.Conv2d(fc_dim+4+cdim, hidden_dim, 3, padding=1), + nn.LeakyReLU(negative_slope=0.1, inplace=True), + nn.Conv2d(hidden_dim, hidden_dim, 3, padding=1), + ) + + self.feat_head = nn.Sequential( + nn.Conv2d(hidden_dim, hidden_dim, 3, padding=1), + nn.LeakyReLU(negative_slope=0.1, inplace=True), + nn.Conv2d(hidden_dim, cdim, 3, padding=1), + ) + + self.flow_head = nn.Sequential( + nn.Conv2d(hidden_dim, hidden_dim, 3, padding=1), + nn.LeakyReLU(negative_slope=0.1, inplace=True), + nn.Conv2d(hidden_dim, 4*out_num, 3, padding=1), + ) + + self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True) + + def forward(self, net, flow, corr): + net = resize(net, 1 / self.scale_factor + ) if self.scale_factor is not None else net + cor = self.lrelu(self.convc1(corr)) + cor = self.lrelu(self.convc2(cor)) + flo = self.lrelu(self.convf1(flow)) + flo = self.lrelu(self.convf2(flo)) + cor_flo = torch.cat([cor, flo], dim=1) + inp = self.lrelu(self.conv(cor_flo)) + inp = torch.cat([inp, flow, net], dim=1) + + out = self.gru(inp) + delta_net = self.feat_head(out) + delta_flow = self.flow_head(out) + + if self.scale_factor is not None: + delta_net = resize(delta_net, scale_factor=self.scale_factor) + delta_flow = self.scale_factor * resize(delta_flow, scale_factor=self.scale_factor) + return delta_net, delta_flow + + +class BidirCorrBlock: + def __init__(self, fmap1, fmap2, num_levels=4, radius=4): + self.num_levels = num_levels + self.radius = radius + self.corr_pyramid = [] + self.corr_pyramid_T = [] + + corr = BidirCorrBlock.corr(fmap1, fmap2) + batch, h1, w1, dim, h2, w2 = corr.shape + corr_T = corr.clone().permute(0, 4, 5, 3, 1, 2) + + corr = corr.reshape(batch*h1*w1, dim, h2, w2) + corr_T = corr_T.reshape(batch*h2*w2, dim, h1, w1) + + self.corr_pyramid.append(corr) + self.corr_pyramid_T.append(corr_T) + + for _ in range(self.num_levels-1): + corr = F.avg_pool2d(corr, 2, stride=2) + corr_T = F.avg_pool2d(corr_T, 2, stride=2) + self.corr_pyramid.append(corr) + 
self.corr_pyramid_T.append(corr_T) + + def __call__(self, coords0, coords1): + r = self.radius + coords0 = coords0.permute(0, 2, 3, 1) + coords1 = coords1.permute(0, 2, 3, 1) + assert coords0.shape == coords1.shape, f"coords0 shape: [{coords0.shape}] is not equal to [{coords1.shape}]" + batch, h1, w1, _ = coords0.shape + + out_pyramid = [] + out_pyramid_T = [] + for i in range(self.num_levels): + corr = self.corr_pyramid[i] + corr_T = self.corr_pyramid_T[i] + + dx = torch.linspace(-r, r, 2*r+1, device=coords0.device) + dy = torch.linspace(-r, r, 2*r+1, device=coords0.device) + delta = torch.stack(torch.meshgrid(dy, dx, indexing='ij'), axis=-1) + delta_lvl = delta.view(1, 2*r+1, 2*r+1, 2) + + centroid_lvl_0 = coords0.reshape(batch*h1*w1, 1, 1, 2) / 2**i + centroid_lvl_1 = coords1.reshape(batch*h1*w1, 1, 1, 2) / 2**i + coords_lvl_0 = centroid_lvl_0 + delta_lvl + coords_lvl_1 = centroid_lvl_1 + delta_lvl + + corr = bilinear_sampler(corr, coords_lvl_0) + corr_T = bilinear_sampler(corr_T, coords_lvl_1) + corr = corr.view(batch, h1, w1, -1) + corr_T = corr_T.view(batch, h1, w1, -1) + out_pyramid.append(corr) + out_pyramid_T.append(corr_T) + + out = torch.cat(out_pyramid, dim=-1) + out_T = torch.cat(out_pyramid_T, dim=-1) + return out.permute(0, 3, 1, 2).contiguous().float(), out_T.permute(0, 3, 1, 2).contiguous().float() + + @staticmethod + def corr(fmap1, fmap2): + batch, dim, ht, wd = fmap1.shape + fmap1 = fmap1.view(batch, dim, ht*wd) + fmap2 = fmap2.view(batch, dim, ht*wd) + + corr = torch.matmul(fmap1.transpose(1,2), fmap2) + corr = corr.view(batch, ht, wd, 1, ht, wd) + return corr / torch.sqrt(torch.tensor(dim).float()) + + + + + + + + + + + +class AMT_S(nn.Module): + def __init__(self, + corr_radius=3, + corr_lvls=4, + num_flows=3, + channels=[20, 32, 44, 56], + skip_channels=20): + super(AMT_S, self).__init__() + self.radius = corr_radius + self.corr_levels = corr_lvls + self.num_flows = num_flows + self.channels = channels + self.skip_channels = skip_channels + + self.feat_encoder = SmallEncoder(output_dim=84, norm_fn='instance', dropout=0.) + self.encoder = Encoder(channels) + + self.decoder4 = InitDecoder(channels[3], channels[2], skip_channels) + self.decoder3 = IntermediateDecoder(channels[2], channels[1], skip_channels) + self.decoder2 = IntermediateDecoder(channels[1], channels[0], skip_channels) + self.decoder1 = MultiFlowDecoder(channels[0], skip_channels, num_flows) + + self.update4 = self._get_updateblock(44) + self.update3 = self._get_updateblock(32, 2) + self.update2 = self._get_updateblock(20, 4) + + self.comb_block = nn.Sequential( + nn.Conv2d(3*num_flows, 6*num_flows, 3, 1, 1), + nn.PReLU(6*num_flows), + nn.Conv2d(6*num_flows, 3, 3, 1, 1), + ) + + def _get_updateblock(self, cdim, scale_factor=None): + return SmallUpdateBlock(cdim=cdim, hidden_dim=76, flow_dim=20, corr_dim=64, + fc_dim=68, scale_factor=scale_factor, + corr_levels=self.corr_levels, radius=self.radius) + + def _corr_scale_lookup(self, corr_fn, coord, flow0, flow1, embt, downsample=1): + # convert t -> 0 to 0 -> 1 | convert t -> 1 to 1 -> 0 + # based on linear assumption + t1_scale = 1. / embt + t0_scale = 1. / (1. 
- embt) + if downsample != 1: + inv = 1 / downsample + flow0 = inv * resize(flow0, scale_factor=inv) + flow1 = inv * resize(flow1, scale_factor=inv) + + corr0, corr1 = corr_fn(coord + flow1 * t1_scale, coord + flow0 * t0_scale) + corr = torch.cat([corr0, corr1], dim=1) + flow = torch.cat([flow0, flow1], dim=1) + return corr, flow + + def forward(self, img0, img1, embt, scale_factor=1.0, eval=False, **kwargs): + mean_ = torch.cat([img0, img1], 2).mean(1, keepdim=True).mean(2, keepdim=True).mean(3, keepdim=True) + img0 = img0 - mean_ + img1 = img1 - mean_ + img0_ = resize(img0, scale_factor) if scale_factor != 1.0 else img0 + img1_ = resize(img1, scale_factor) if scale_factor != 1.0 else img1 + b, _, h, w = img0_.shape + coord = coords_grid(b, h // 8, w // 8, img0.device) + + fmap0, fmap1 = self.feat_encoder([img0_, img1_]) # [1, 128, H//8, W//8] + corr_fn = BidirCorrBlock(fmap0, fmap1, radius=self.radius, num_levels=self.corr_levels) + + # f0_1: [1, c0, H//2, W//2] | f0_2: [1, c1, H//4, W//4] + # f0_3: [1, c2, H//8, W//8] | f0_4: [1, c3, H//16, W//16] + f0_1, f0_2, f0_3, f0_4 = self.encoder(img0_) + f1_1, f1_2, f1_3, f1_4 = self.encoder(img1_) + + ######################################### the 4th decoder ######################################### + up_flow0_4, up_flow1_4, ft_3_ = self.decoder4(f0_4, f1_4, embt) + corr_4, flow_4 = self._corr_scale_lookup(corr_fn, coord, + up_flow0_4, up_flow1_4, + embt, downsample=1) + + # residue update with lookup corr + delta_ft_3_, delta_flow_4 = self.update4(ft_3_, flow_4, corr_4) + delta_flow0_4, delta_flow1_4 = torch.chunk(delta_flow_4, 2, 1) + up_flow0_4 = up_flow0_4 + delta_flow0_4 + up_flow1_4 = up_flow1_4 + delta_flow1_4 + ft_3_ = ft_3_ + delta_ft_3_ + + ######################################### the 3rd decoder ######################################### + up_flow0_3, up_flow1_3, ft_2_ = self.decoder3(ft_3_, f0_3, f1_3, up_flow0_4, up_flow1_4) + corr_3, flow_3 = self._corr_scale_lookup(corr_fn, + coord, up_flow0_3, up_flow1_3, + embt, downsample=2) + + # residue update with lookup corr + delta_ft_2_, delta_flow_3 = self.update3(ft_2_, flow_3, corr_3) + delta_flow0_3, delta_flow1_3 = torch.chunk(delta_flow_3, 2, 1) + up_flow0_3 = up_flow0_3 + delta_flow0_3 + up_flow1_3 = up_flow1_3 + delta_flow1_3 + ft_2_ = ft_2_ + delta_ft_2_ + + ######################################### the 2nd decoder ######################################### + up_flow0_2, up_flow1_2, ft_1_ = self.decoder2(ft_2_, f0_2, f1_2, up_flow0_3, up_flow1_3) + corr_2, flow_2 = self._corr_scale_lookup(corr_fn, + coord, up_flow0_2, up_flow1_2, + embt, downsample=4) + + # residue update with lookup corr + delta_ft_1_, delta_flow_2 = self.update2(ft_1_, flow_2, corr_2) + delta_flow0_2, delta_flow1_2 = torch.chunk(delta_flow_2, 2, 1) + up_flow0_2 = up_flow0_2 + delta_flow0_2 + up_flow1_2 = up_flow1_2 + delta_flow1_2 + ft_1_ = ft_1_ + delta_ft_1_ + + ######################################### the 1st decoder ######################################### + up_flow0_1, up_flow1_1, mask, img_res = self.decoder1(ft_1_, f0_1, f1_1, up_flow0_2, up_flow1_2) + + if scale_factor != 1.0: + up_flow0_1 = resize(up_flow0_1, scale_factor=(1.0/scale_factor)) * (1.0/scale_factor) + up_flow1_1 = resize(up_flow1_1, scale_factor=(1.0/scale_factor)) * (1.0/scale_factor) + mask = resize(mask, scale_factor=(1.0/scale_factor)) + img_res = resize(img_res, scale_factor=(1.0/scale_factor)) + + # Merge multiple predictions + imgt_pred = multi_flow_combine(self.comb_block, img0, img1, up_flow0_1, up_flow1_1, + mask, img_res, 
mean_) + imgt_pred = torch.clamp(imgt_pred, 0, 1) + + if eval: + return { 'imgt_pred': imgt_pred, } + else: + up_flow0_1 = up_flow0_1.reshape(b, self.num_flows, 2, h, w) + up_flow1_1 = up_flow1_1.reshape(b, self.num_flows, 2, h, w) + return { + 'imgt_pred': imgt_pred, + 'flow0_pred': [up_flow0_1, up_flow0_2, up_flow0_3, up_flow0_4], + 'flow1_pred': [up_flow1_1, up_flow1_2, up_flow1_3, up_flow1_4], + 'ft_pred': [ft_1_, ft_2_, ft_3_], + } + + + + + + + + + + + +class AMT_L(nn.Module): + def __init__(self, + corr_radius=3, + corr_lvls=4, + num_flows=5, + channels=[48, 64, 72, 128], + skip_channels=48 + ): + super(AMT_L, self).__init__() + self.radius = corr_radius + self.corr_levels = corr_lvls + self.num_flows = num_flows + + self.feat_encoder = BasicEncoder(output_dim=128, norm_fn='instance', dropout=0.) + self.encoder = Encoder([48, 64, 72, 128], large=True) + + self.decoder4 = InitDecoder(channels[3], channels[2], skip_channels) + self.decoder3 = IntermediateDecoder(channels[2], channels[1], skip_channels) + self.decoder2 = IntermediateDecoder(channels[1], channels[0], skip_channels) + self.decoder1 = MultiFlowDecoder(channels[0], skip_channels, num_flows) + + self.update4 = self._get_updateblock(72, None) + self.update3 = self._get_updateblock(64, 2.0) + self.update2 = self._get_updateblock(48, 4.0) + + self.comb_block = nn.Sequential( + nn.Conv2d(3*self.num_flows, 6*self.num_flows, 7, 1, 3), + nn.PReLU(6*self.num_flows), + nn.Conv2d(6*self.num_flows, 3, 7, 1, 3), + ) + + def _get_updateblock(self, cdim, scale_factor=None): + return BasicUpdateBlock(cdim=cdim, hidden_dim=128, flow_dim=48, + corr_dim=256, corr_dim2=160, fc_dim=124, + scale_factor=scale_factor, corr_levels=self.corr_levels, + radius=self.radius) + + def _corr_scale_lookup(self, corr_fn, coord, flow0, flow1, embt, downsample=1): + # convert t -> 0 to 0 -> 1 | convert t -> 1 to 1 -> 0 + # based on linear assumption + t1_scale = 1. / embt + t0_scale = 1. / (1. 
- embt) + if downsample != 1: + inv = 1 / downsample + flow0 = inv * resize(flow0, scale_factor=inv) + flow1 = inv * resize(flow1, scale_factor=inv) + + corr0, corr1 = corr_fn(coord + flow1 * t1_scale, coord + flow0 * t0_scale) + corr = torch.cat([corr0, corr1], dim=1) + flow = torch.cat([flow0, flow1], dim=1) + return corr, flow + + def forward(self, img0, img1, embt, scale_factor=1.0, eval=False, **kwargs): + mean_ = torch.cat([img0, img1], 2).mean(1, keepdim=True).mean(2, keepdim=True).mean(3, keepdim=True) + img0 = img0 - mean_ + img1 = img1 - mean_ + img0_ = resize(img0, scale_factor) if scale_factor != 1.0 else img0 + img1_ = resize(img1, scale_factor) if scale_factor != 1.0 else img1 + b, _, h, w = img0_.shape + coord = coords_grid(b, h // 8, w // 8, img0.device) + + fmap0, fmap1 = self.feat_encoder([img0_, img1_]) # [1, 128, H//8, W//8] + corr_fn = BidirCorrBlock(fmap0, fmap1, radius=self.radius, num_levels=self.corr_levels) + + # f0_1: [1, c0, H//2, W//2] | f0_2: [1, c1, H//4, W//4] + # f0_3: [1, c2, H//8, W//8] | f0_4: [1, c3, H//16, W//16] + f0_1, f0_2, f0_3, f0_4 = self.encoder(img0_) + f1_1, f1_2, f1_3, f1_4 = self.encoder(img1_) + + ######################################### the 4th decoder ######################################### + up_flow0_4, up_flow1_4, ft_3_ = self.decoder4(f0_4, f1_4, embt) + corr_4, flow_4 = self._corr_scale_lookup(corr_fn, coord, + up_flow0_4, up_flow1_4, + embt, downsample=1) + + # residue update with lookup corr + delta_ft_3_, delta_flow_4 = self.update4(ft_3_, flow_4, corr_4) + delta_flow0_4, delta_flow1_4 = torch.chunk(delta_flow_4, 2, 1) + up_flow0_4 = up_flow0_4 + delta_flow0_4 + up_flow1_4 = up_flow1_4 + delta_flow1_4 + ft_3_ = ft_3_ + delta_ft_3_ + + ######################################### the 3rd decoder ######################################### + up_flow0_3, up_flow1_3, ft_2_ = self.decoder3(ft_3_, f0_3, f1_3, up_flow0_4, up_flow1_4) + corr_3, flow_3 = self._corr_scale_lookup(corr_fn, + coord, up_flow0_3, up_flow1_3, + embt, downsample=2) + + # residue update with lookup corr + delta_ft_2_, delta_flow_3 = self.update3(ft_2_, flow_3, corr_3) + delta_flow0_3, delta_flow1_3 = torch.chunk(delta_flow_3, 2, 1) + up_flow0_3 = up_flow0_3 + delta_flow0_3 + up_flow1_3 = up_flow1_3 + delta_flow1_3 + ft_2_ = ft_2_ + delta_ft_2_ + + ######################################### the 2nd decoder ######################################### + up_flow0_2, up_flow1_2, ft_1_ = self.decoder2(ft_2_, f0_2, f1_2, up_flow0_3, up_flow1_3) + corr_2, flow_2 = self._corr_scale_lookup(corr_fn, + coord, up_flow0_2, up_flow1_2, + embt, downsample=4) + + # residue update with lookup corr + delta_ft_1_, delta_flow_2 = self.update2(ft_1_, flow_2, corr_2) + delta_flow0_2, delta_flow1_2 = torch.chunk(delta_flow_2, 2, 1) + up_flow0_2 = up_flow0_2 + delta_flow0_2 + up_flow1_2 = up_flow1_2 + delta_flow1_2 + ft_1_ = ft_1_ + delta_ft_1_ + + ######################################### the 1st decoder ######################################### + up_flow0_1, up_flow1_1, mask, img_res = self.decoder1(ft_1_, f0_1, f1_1, up_flow0_2, up_flow1_2) + + if scale_factor != 1.0: + up_flow0_1 = resize(up_flow0_1, scale_factor=(1.0/scale_factor)) * (1.0/scale_factor) + up_flow1_1 = resize(up_flow1_1, scale_factor=(1.0/scale_factor)) * (1.0/scale_factor) + mask = resize(mask, scale_factor=(1.0/scale_factor)) + img_res = resize(img_res, scale_factor=(1.0/scale_factor)) + + # Merge multiple predictions + imgt_pred = multi_flow_combine(self.comb_block, img0, img1, up_flow0_1, up_flow1_1, + mask, img_res, 
mean_) + imgt_pred = torch.clamp(imgt_pred, 0, 1) + + if eval: + return { 'imgt_pred': imgt_pred, } + else: + up_flow0_1 = up_flow0_1.reshape(b, self.num_flows, 2, h, w) + up_flow1_1 = up_flow1_1.reshape(b, self.num_flows, 2, h, w) + return { + 'imgt_pred': imgt_pred, + 'flow0_pred': [up_flow0_1, up_flow0_2, up_flow0_3, up_flow0_4], + 'flow1_pred': [up_flow1_1, up_flow1_2, up_flow1_3, up_flow1_4], + 'ft_pred': [ft_1_, ft_2_, ft_3_], + } + + + + + + + + + + + +class AMT_G(nn.Module): + def __init__(self, + corr_radius=3, + corr_lvls=4, + num_flows=5, + channels=[84, 96, 112, 128], + skip_channels=84): + super(AMT_G, self).__init__() + self.radius = corr_radius + self.corr_levels = corr_lvls + self.num_flows = num_flows + + self.feat_encoder = LargeEncoder(output_dim=128, norm_fn='instance', dropout=0.) + self.encoder = Encoder(channels, large=True) + self.decoder4 = InitDecoder(channels[3], channels[2], skip_channels) + self.decoder3 = IntermediateDecoder(channels[2], channels[1], skip_channels) + self.decoder2 = IntermediateDecoder(channels[1], channels[0], skip_channels) + self.decoder1 = MultiFlowDecoder(channels[0], skip_channels, num_flows) + + self.update4 = self._get_updateblock(112, None) + self.update3_low = self._get_updateblock(96, 2.0) + self.update2_low = self._get_updateblock(84, 4.0) + + self.update3_high = self._get_updateblock(96, None) + self.update2_high = self._get_updateblock(84, None) + + self.comb_block = nn.Sequential( + nn.Conv2d(3*self.num_flows, 6*self.num_flows, 7, 1, 3), + nn.PReLU(6*self.num_flows), + nn.Conv2d(6*self.num_flows, 3, 7, 1, 3), + ) + + def _get_updateblock(self, cdim, scale_factor=None): + return BasicUpdateBlock(cdim=cdim, hidden_dim=192, flow_dim=64, + corr_dim=256, corr_dim2=192, fc_dim=188, + scale_factor=scale_factor, corr_levels=self.corr_levels, + radius=self.radius) + + def _corr_scale_lookup(self, corr_fn, coord, flow0, flow1, embt, downsample=1): + # convert t -> 0 to 0 -> 1 | convert t -> 1 to 1 -> 0 + # based on linear assumption + t1_scale = 1. / embt + t0_scale = 1. / (1. 
- embt) + if downsample != 1: + inv = 1 / downsample + flow0 = inv * resize(flow0, scale_factor=inv) + flow1 = inv * resize(flow1, scale_factor=inv) + + corr0, corr1 = corr_fn(coord + flow1 * t1_scale, coord + flow0 * t0_scale) + corr = torch.cat([corr0, corr1], dim=1) + flow = torch.cat([flow0, flow1], dim=1) + return corr, flow + + def forward(self, img0, img1, embt, scale_factor=1.0, eval=False, **kwargs): + mean_ = torch.cat([img0, img1], 2).mean(1, keepdim=True).mean(2, keepdim=True).mean(3, keepdim=True) + img0 = img0 - mean_ + img1 = img1 - mean_ + img0_ = resize(img0, scale_factor) if scale_factor != 1.0 else img0 + img1_ = resize(img1, scale_factor) if scale_factor != 1.0 else img1 + b, _, h, w = img0_.shape + coord = coords_grid(b, h // 8, w // 8, img0.device) + + fmap0, fmap1 = self.feat_encoder([img0_, img1_]) # [1, 128, H//8, W//8] + corr_fn = BidirCorrBlock(fmap0, fmap1, radius=self.radius, num_levels=self.corr_levels) + + # f0_1: [1, c0, H//2, W//2] | f0_2: [1, c1, H//4, W//4] + # f0_3: [1, c2, H//8, W//8] | f0_4: [1, c3, H//16, W//16] + f0_1, f0_2, f0_3, f0_4 = self.encoder(img0_) + f1_1, f1_2, f1_3, f1_4 = self.encoder(img1_) + + ######################################### the 4th decoder ######################################### + up_flow0_4, up_flow1_4, ft_3_ = self.decoder4(f0_4, f1_4, embt) + corr_4, flow_4 = self._corr_scale_lookup(corr_fn, coord, + up_flow0_4, up_flow1_4, + embt, downsample=1) + + # residue update with lookup corr + delta_ft_3_, delta_flow_4 = self.update4(ft_3_, flow_4, corr_4) + delta_flow0_4, delta_flow1_4 = torch.chunk(delta_flow_4, 2, 1) + up_flow0_4 = up_flow0_4 + delta_flow0_4 + up_flow1_4 = up_flow1_4 + delta_flow1_4 + ft_3_ = ft_3_ + delta_ft_3_ + + ######################################### the 3rd decoder ######################################### + up_flow0_3, up_flow1_3, ft_2_ = self.decoder3(ft_3_, f0_3, f1_3, up_flow0_4, up_flow1_4) + corr_3, flow_3 = self._corr_scale_lookup(corr_fn, + coord, up_flow0_3, up_flow1_3, + embt, downsample=2) + + # residue update with lookup corr + delta_ft_2_, delta_flow_3 = self.update3_low(ft_2_, flow_3, corr_3) + delta_flow0_3, delta_flow1_3 = torch.chunk(delta_flow_3, 2, 1) + up_flow0_3 = up_flow0_3 + delta_flow0_3 + up_flow1_3 = up_flow1_3 + delta_flow1_3 + ft_2_ = ft_2_ + delta_ft_2_ + + # residue update with lookup corr (hr) + corr_3 = resize(corr_3, scale_factor=2.0) + up_flow_3 = torch.cat([up_flow0_3, up_flow1_3], dim=1) + delta_ft_2_, delta_up_flow_3 = self.update3_high(ft_2_, up_flow_3, corr_3) + ft_2_ += delta_ft_2_ + up_flow0_3 += delta_up_flow_3[:, 0:2] + up_flow1_3 += delta_up_flow_3[:, 2:4] + + ######################################### the 2nd decoder ######################################### + up_flow0_2, up_flow1_2, ft_1_ = self.decoder2(ft_2_, f0_2, f1_2, up_flow0_3, up_flow1_3) + corr_2, flow_2 = self._corr_scale_lookup(corr_fn, + coord, up_flow0_2, up_flow1_2, + embt, downsample=4) + + # residue update with lookup corr + delta_ft_1_, delta_flow_2 = self.update2_low(ft_1_, flow_2, corr_2) + delta_flow0_2, delta_flow1_2 = torch.chunk(delta_flow_2, 2, 1) + up_flow0_2 = up_flow0_2 + delta_flow0_2 + up_flow1_2 = up_flow1_2 + delta_flow1_2 + ft_1_ = ft_1_ + delta_ft_1_ + + # residue update with lookup corr (hr) + corr_2 = resize(corr_2, scale_factor=4.0) + up_flow_2 = torch.cat([up_flow0_2, up_flow1_2], dim=1) + delta_ft_1_, delta_up_flow_2 = self.update2_high(ft_1_, up_flow_2, corr_2) + ft_1_ += delta_ft_1_ + up_flow0_2 += delta_up_flow_2[:, 0:2] + up_flow1_2 += delta_up_flow_2[:, 2:4] + + 
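# --- Reviewer sketch (commentary, not upstream code): AMT_G runs each mid-level
# update twice. The "low" pass (update3_low/update2_low, scale_factor=2/4)
# shrinks the feature state to the 1/8 correlation resolution, runs the conv-GRU
# there, then resizes the deltas back up, multiplying the flow delta by
# scale_factor so displacements stay measured in pixels. The "high" pass
# (update3_high/update2_high, scale_factor=None) then refines at full level
# resolution on a bilinearly upsampled corr volume; AMT_S and AMT_L skip this
# second pass. The pixel-unit rule is the usual flow-resizing identity:
import torch
import torch.nn.functional as F
def resize_flow(flow, s):
    # hypothetical helper mirroring how `resize` is applied to flow fields here
    out = F.interpolate(flow, scale_factor=s, mode="bilinear", align_corners=False)
    return out * s  # upscaling the grid by s scales every displacement by s
flow = torch.ones(1, 2, 8, 8)                # uniform 1-pixel motion at 8x8
print(resize_flow(flow, 2.0).mean().item())  # ~2.0: same physical motion at 16x16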
######################################### the 1st decoder ######################################### + up_flow0_1, up_flow1_1, mask, img_res = self.decoder1(ft_1_, f0_1, f1_1, up_flow0_2, up_flow1_2) + + if scale_factor != 1.0: + up_flow0_1 = resize(up_flow0_1, scale_factor=(1.0/scale_factor)) * (1.0/scale_factor) + up_flow1_1 = resize(up_flow1_1, scale_factor=(1.0/scale_factor)) * (1.0/scale_factor) + mask = resize(mask, scale_factor=(1.0/scale_factor)) + img_res = resize(img_res, scale_factor=(1.0/scale_factor)) + + # Merge multiple predictions + imgt_pred = multi_flow_combine(self.comb_block, img0, img1, up_flow0_1, up_flow1_1, + mask, img_res, mean_) + imgt_pred = torch.clamp(imgt_pred, 0, 1) + + if eval: + return { 'imgt_pred': imgt_pred, } + else: + up_flow0_1 = up_flow0_1.reshape(b, self.num_flows, 2, h, w) + up_flow1_1 = up_flow1_1.reshape(b, self.num_flows, 2, h, w) + return { + 'imgt_pred': imgt_pred, + 'flow0_pred': [up_flow0_1, up_flow0_2, up_flow0_3, up_flow0_4], + 'flow1_pred': [up_flow1_1, up_flow1_2, up_flow1_3, up_flow1_4], + 'ft_pred': [ft_1_, ft_2_, ft_3_], + } \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/cain/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/cain/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8d1166d1fad2639bb9ce49a2e2ff3471ef6a6415 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/cain/__init__.py @@ -0,0 +1,64 @@ +import torch +from torch.utils.data import DataLoader +import pathlib +from vfi_utils import load_file_from_github_release, preprocess_frames, postprocess_frames, generic_frame_loop, InterpolationStateList +import typing +from comfy.model_management import get_torch_device + +MODEL_TYPE = pathlib.Path(__file__).parent.name +CKPT_NAMES = ["pretrained_cain.pth"] + + +class CAIN_VFI: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "ckpt_name": (CKPT_NAMES, ), + "frames": ("IMAGE", ), + "clear_cache_after_n_frames": ("INT", {"default": 10, "min": 1, "max": 1000}), + "multiplier": ("INT", {"default": 2, "min": 2, "max": 1000}) + }, + "optional": { + "optional_interpolation_states": ("INTERPOLATION_STATES", ) + } + } + + RETURN_TYPES = ("IMAGE", ) + FUNCTION = "vfi" + CATEGORY = "ComfyUI-Frame-Interpolation/VFI" + + def vfi( + self, + ckpt_name: typing.AnyStr, + frames: torch.Tensor, + clear_cache_after_n_frames: typing.SupportsInt = 1, + multiplier: typing.SupportsInt = 2, + optional_interpolation_states: InterpolationStateList = None, + **kwargs + ): + from .cain_arch import CAIN + model_path = load_file_from_github_release(MODEL_TYPE, ckpt_name) + sd = torch.load(model_path)["state_dict"] + sd = {key.replace('module.', ''): value for key, value in sd.items()} + + + global interpolation_model + interpolation_model = CAIN(depth=3) + interpolation_model.load_state_dict(sd) + interpolation_model.eval().to(get_torch_device()) + del sd + + frames = preprocess_frames(frames) + + + def return_middle_frame(frame_0, frame_1, timestep, model): + #CAIN does some direct modifications to input frame tensors so we need to clone them + return model(frame_0.detach().clone(), frame_1.detach().clone())[0] + + args = [interpolation_model] + out = postprocess_frames( + generic_frame_loop(type(self).__name__, frames, clear_cache_after_n_frames, multiplier, return_middle_frame, *args, + interpolation_states=optional_interpolation_states, use_timestep=False, dtype=torch.float32) + ) + return (out,) 
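# --- Reviewer sketch (hypothetical, not the real generic_frame_loop): what the
# CAIN_VFI.vfi call above amounts to for multiplier=2, ignoring cache clearing,
# dtype handling, and interpolation states. `middle_fn` plays the role of
# return_middle_frame bound to the loaded CAIN model.
import torch
def naive_frame_loop(frames: torch.Tensor, middle_fn):
    # frames: [N, H, W, C]; emit the originals with one synthesized
    # in-between frame per consecutive pair.
    out = []
    for i in range(frames.shape[0] - 1):
        out.append(frames[i])
        out.append(middle_fn(frames[i], frames[i + 1]))
    out.append(frames[-1])
    return torch.stack(out)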
\ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/cain/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/cain/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a02fab5511ca558fe56c2999190a5f04cc20506a Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/cain/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/cain/cain_arch.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/cain/cain_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..abd8db30b7f4260ea847da249d4d4114f8091d95 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/cain/cain_arch.py @@ -0,0 +1,74 @@ +import math +import numpy as np + +import torch +import torch.nn as nn + +from .common import * + + +class Encoder(nn.Module): + def __init__(self, in_channels=3, depth=3): + super(Encoder, self).__init__() + + # Shuffle pixels to expand in channel dimension + # shuffler_list = [PixelShuffle(0.5) for i in range(depth)] + # self.shuffler = nn.Sequential(*shuffler_list) + self.shuffler = PixelShuffle(1 / 2**depth) + + relu = nn.LeakyReLU(0.2, True) + + # FF_RCAN or FF_Resblocks + self.interpolate = Interpolation(5, 12, in_channels * (4**depth), act=relu) + + def forward(self, x1, x2): + """ + Encoder: Shuffle-spread --> Feature Fusion --> Return fused features + """ + feats1 = self.shuffler(x1) + feats2 = self.shuffler(x2) + + feats = self.interpolate(feats1, feats2) + + return feats + + +class Decoder(nn.Module): + def __init__(self, depth=3): + super(Decoder, self).__init__() + + # shuffler_list = [PixelShuffle(2) for i in range(depth)] + # self.shuffler = nn.Sequential(*shuffler_list) + self.shuffler = PixelShuffle(2**depth) + + def forward(self, feats): + out = self.shuffler(feats) + return out + + +class CAIN(nn.Module): + def __init__(self, depth=3): + super(CAIN, self).__init__() + + self.encoder = Encoder(in_channels=3, depth=depth) + self.decoder = Decoder(depth=depth) + + def forward(self, x1, x2): + x1, m1 = sub_mean(x1) + x2, m2 = sub_mean(x2) + + if not self.training: + paddingInput, paddingOutput = InOutPaddings(x1) + x1 = paddingInput(x1) + x2 = paddingInput(x2) + + feats = self.encoder(x1, x2) + out = self.decoder(feats) + + if not self.training: + out = paddingOutput(out) + + mi = (m1 + m2) / 2 + out += mi + + return out, feats \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/cain/cain_encdec_arch.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/cain/cain_encdec_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..ea10ed87e38bee995f435bb4305e221e4c13a4a6 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/cain/cain_encdec_arch.py @@ -0,0 +1,95 @@ +import math +import numpy as np + +import torch +import torch.nn as nn + +from .common import * +from comfy.model_management import get_torch_device + +class Encoder(nn.Module): + def __init__(self, in_channels=3, depth=3, nf_start=32, norm=False): + super(Encoder, self).__init__() + self.device = get_torch_device() + + nf = nf_start + relu = nn.LeakyReLU(negative_slope=0.2, inplace=True) + + self.body = nn.Sequential( + ConvNorm(in_channels, nf * 1, 7, stride=1, norm=norm), + relu, + ConvNorm(nf * 1, nf 
* 2, 5, stride=2, norm=norm), + relu, + ConvNorm(nf * 2, nf * 4, 5, stride=2, norm=norm), + relu, + ConvNorm(nf * 4, nf * 6, 5, stride=2, norm=norm) + ) + + self.interpolate = Interpolation(5, 12, nf * 6, reduction=16, act=relu) + + def forward(self, x1, x2): + """ + Encoder: Feature Extraction --> Feature Fusion --> Return + """ + feats1 = self.body(x1) + feats2 = self.body(x2) + + feats = self.interpolate(feats1, feats2) + + return feats + + +class Decoder(nn.Module): + def __init__(self, in_channels=192, out_channels=3, depth=3, norm=False, up_mode='shuffle'): + super(Decoder, self).__init__() + self.device = get_torch_device() + + relu = nn.LeakyReLU(negative_slope=0.2, inplace=True) + + nf = [in_channels, (in_channels*2)//3, in_channels//3, in_channels//6] + #nf = [192, 128, 64, 32] + #nf = [186, 124, 62, 31] + self.body = nn.Sequential( + UpConvNorm(nf[0], nf[1], mode=up_mode, norm=norm), + ResBlock(nf[1], nf[1], norm=norm, act=relu), + UpConvNorm(nf[1], nf[2], mode=up_mode, norm=norm), + ResBlock(nf[2], nf[2], norm=norm, act=relu), + UpConvNorm(nf[2], nf[3], mode=up_mode, norm=norm), + ResBlock(nf[3], nf[3], norm=norm, act=relu), + conv7x7(nf[3], out_channels) + ) + + def forward(self, feats): + out = self.body(feats) + #out = self.conv_final(out) + + return out + + +class CAIN_EncDec(nn.Module): + def __init__(self, depth=3, n_resblocks=3, start_filts=32, up_mode='shuffle'): + super(CAIN_EncDec, self).__init__() + self.depth = depth + + self.encoder = Encoder(in_channels=3, depth=depth, norm=False) + self.decoder = Decoder(in_channels=start_filts*6, depth=depth, norm=False, up_mode=up_mode) + + def forward(self, x1, x2): + x1, m1 = sub_mean(x1) + x2, m2 = sub_mean(x2) + + if not self.training: + paddingInput, paddingOutput = InOutPaddings(x1) + x1 = paddingInput(x1) + x2 = paddingInput(x2) + + feats = self.encoder(x1, x2) + out = self.decoder(feats) + + if not self.training: + out = paddingOutput(out) + + mi = (m1 + m2)/2 + out += mi + + return out, feats \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/cain/cain_noca_arch.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/cain/cain_noca_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..08fbb117e9acef30d49627d5c906622b13197623 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/cain/cain_noca_arch.py @@ -0,0 +1,73 @@ +import math +import numpy as np + +import torch +import torch.nn as nn + +from .common import * +from comfy.model_management import get_torch_device + +class Encoder(nn.Module): + def __init__(self, in_channels=3, depth=3): + super(Encoder, self).__init__() + self.device = get_torch_device() + + self.shuffler = PixelShuffle(1/2**depth) + # self.shuffler = nn.Sequential( + # PixelShuffle(1/2), + # PixelShuffle(1/2), + # PixelShuffle(1/2)) + self.interpolate = Interpolation_res(5, 12, in_channels * (4**depth)) + + def forward(self, x1, x2): + feats1 = self.shuffler(x1) + feats2 = self.shuffler(x2) + + feats = self.interpolate(feats1, feats2) + + return feats + + +class Decoder(nn.Module): + def __init__(self, depth=3): + super(Decoder, self).__init__() + self.device = get_torch_device() + + self.shuffler = PixelShuffle(2**depth) + # self.shuffler = nn.Sequential( + # PixelShuffle(2), + # PixelShuffle(2), + # PixelShuffle(2)) + + def forward(self, feats): + out = self.shuffler(feats) + return out + + +class CAIN_NoCA(nn.Module): + def __init__(self, depth=3): + super(CAIN_NoCA, 
self).__init__() + self.depth = depth + + self.encoder = Encoder(in_channels=3, depth=depth) + self.decoder = Decoder(depth=depth) + + def forward(self, x1, x2): + x1, m1 = sub_mean(x1) + x2, m2 = sub_mean(x2) + + if not self.training: + paddingInput, paddingOutput = InOutPaddings(x1) + x1 = paddingInput(x1) + x2 = paddingInput(x2) + + feats = self.encoder(x1, x2) + out = self.decoder(feats) + + if not self.training: + out = paddingOutput(out) + + mi = (m1 + m2) / 2 + out += mi + + return out, feats \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/cain/common.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/cain/common.py new file mode 100644 index 0000000000000000000000000000000000000000..aa1600a231d32eec39785883cb26b93fd9c64e62 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/cain/common.py @@ -0,0 +1,361 @@ +import math + +import torch +import torch.nn as nn +import torch.nn.functional as F + +def sub_mean(x): + mean = x.mean(2, keepdim=True).mean(3, keepdim=True) + x -= mean + return x, mean + +def InOutPaddings(x): + w, h = x.size(3), x.size(2) + padding_width, padding_height = 0, 0 + if w != ((w >> 7) << 7): + padding_width = (((w >> 7) + 1) << 7) - w + if h != ((h >> 7) << 7): + padding_height = (((h >> 7) + 1) << 7) - h + paddingInput = nn.ReflectionPad2d(padding=[padding_width // 2, padding_width - padding_width // 2, + padding_height // 2, padding_height - padding_height // 2]) + paddingOutput = nn.ReflectionPad2d(padding=[0 - padding_width // 2, padding_width // 2 - padding_width, + 0 - padding_height // 2, padding_height // 2 - padding_height]) + return paddingInput, paddingOutput + + +class ConvNorm(nn.Module): + def __init__(self, in_feat, out_feat, kernel_size, stride=1, norm=False): + super(ConvNorm, self).__init__() + + reflection_padding = kernel_size // 2 + self.reflection_pad = nn.ReflectionPad2d(reflection_padding) + self.conv = nn.Conv2d(in_feat, out_feat, stride=stride, kernel_size=kernel_size, bias=True) + + self.norm = norm + if norm == 'IN': + self.norm = nn.InstanceNorm2d(out_feat, track_running_stats=True) + elif norm == 'BN': + self.norm = nn.BatchNorm2d(out_feat) + + def forward(self, x): + out = self.reflection_pad(x) + out = self.conv(out) + if self.norm: + out = self.norm(out) + return out + + +class UpConvNorm(nn.Module): + def __init__(self, in_channels, out_channels, mode='transpose', norm=False): + super(UpConvNorm, self).__init__() + + if mode == 'transpose': + self.upconv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size=4, stride=2, padding=1) + elif mode == 'shuffle': + self.upconv = nn.Sequential( + ConvNorm(in_channels, 4*out_channels, kernel_size=3, stride=1, norm=norm), + PixelShuffle(2)) + else: + # out_channels is always going to be the same as in_channels + self.upconv = nn.Sequential( + nn.Upsample(mode='bilinear', scale_factor=2, align_corners=False), + ConvNorm(in_channels, out_channels, kernel_size=1, stride=1, norm=norm)) + + def forward(self, x): + out = self.upconv(x) + return out + + + +class meanShift(nn.Module): + def __init__(self, rgbRange, rgbMean, sign, nChannel=3): + super(meanShift, self).__init__() + if nChannel == 1: + l = rgbMean[0] * rgbRange * float(sign) + + self.shifter = nn.Conv2d(1, 1, kernel_size=1, stride=1, padding=0) + self.shifter.weight.data = torch.eye(1).view(1, 1, 1, 1) + self.shifter.bias.data = torch.Tensor([l]) + elif nChannel == 3: + r = rgbMean[0] * rgbRange * float(sign) + g = 
rgbMean[1] * rgbRange * float(sign) + b = rgbMean[2] * rgbRange * float(sign) + + self.shifter = nn.Conv2d(3, 3, kernel_size=1, stride=1, padding=0) + self.shifter.weight.data = torch.eye(3).view(3, 3, 1, 1) + self.shifter.bias.data = torch.Tensor([r, g, b]) + else: + r = rgbMean[0] * rgbRange * float(sign) + g = rgbMean[1] * rgbRange * float(sign) + b = rgbMean[2] * rgbRange * float(sign) + self.shifter = nn.Conv2d(6, 6, kernel_size=1, stride=1, padding=0) + self.shifter.weight.data = torch.eye(6).view(6, 6, 1, 1) + self.shifter.bias.data = torch.Tensor([r, g, b, r, g, b]) + + # Freeze the meanShift layer + for params in self.shifter.parameters(): + params.requires_grad = False + + def forward(self, x): + x = self.shifter(x) + + return x + + +""" CONV - (BN) - RELU - CONV - (BN) """ +class ResBlock(nn.Module): + def __init__(self, in_feat, out_feat, kernel_size=3, reduction=False, bias=True, # 'reduction' is just for placeholder + norm=False, act=nn.ReLU(True), downscale=False): + super(ResBlock, self).__init__() + + self.body = nn.Sequential( + ConvNorm(in_feat, out_feat, kernel_size=kernel_size, stride=2 if downscale else 1), + act, + ConvNorm(out_feat, out_feat, kernel_size=kernel_size, stride=1) + ) + + self.downscale = None + if downscale: + self.downscale = nn.Conv2d(in_feat, out_feat, kernel_size=1, stride=2) + + def forward(self, x): + res = x + out = self.body(x) + if self.downscale is not None: + res = self.downscale(res) + out += res + + return out + + +## Channel Attention (CA) Layer +class CALayer(nn.Module): + def __init__(self, channel, reduction=16): + super(CALayer, self).__init__() + # global average pooling: feature --> point + self.avg_pool = nn.AdaptiveAvgPool2d(1) + # feature channel downscale and upscale --> channel weight + self.conv_du = nn.Sequential( + nn.Conv2d(channel, channel // reduction, 1, padding=0, bias=True), + nn.ReLU(inplace=True), + nn.Conv2d(channel // reduction, channel, 1, padding=0, bias=True), + nn.Sigmoid() + ) + + def forward(self, x): + y = self.avg_pool(x) + y = self.conv_du(y) + return x * y, y + + +## Residual Channel Attention Block (RCAB) +class RCAB(nn.Module): + def __init__(self, in_feat, out_feat, kernel_size, reduction, bias=True, + norm=False, act=nn.ReLU(True), downscale=False, return_ca=False): + super(RCAB, self).__init__() + + self.body = nn.Sequential( + ConvNorm(in_feat, out_feat, kernel_size, stride=2 if downscale else 1, norm=norm), + act, + ConvNorm(out_feat, out_feat, kernel_size, stride=1, norm=norm), + CALayer(out_feat, reduction) + ) + self.downscale = downscale + if downscale: + self.downConv = nn.Conv2d(in_feat, out_feat, kernel_size=3, stride=2, padding=1) + self.return_ca = return_ca + + def forward(self, x): + res = x + out, ca = self.body(x) + if self.downscale: + res = self.downConv(res) + out += res + + if self.return_ca: + return out, ca + else: + return out + + +## Residual Group (RG) +class ResidualGroup(nn.Module): + def __init__(self, Block, n_resblocks, n_feat, kernel_size, reduction, act, norm=False): + super(ResidualGroup, self).__init__() + + modules_body = [Block(n_feat, n_feat, kernel_size, reduction, bias=True, norm=norm, act=act) + for _ in range(n_resblocks)] + modules_body.append(ConvNorm(n_feat, n_feat, kernel_size, stride=1, norm=norm)) + self.body = nn.Sequential(*modules_body) + + def forward(self, x): + res = self.body(x) + res += x + return res + + +def pixel_shuffle(input, scale_factor): + batch_size, channels, in_height, in_width = input.size() + + out_channels = int(int(channels / 
scale_factor) / scale_factor) + out_height = int(in_height * scale_factor) + out_width = int(in_width * scale_factor) + + if scale_factor >= 1: + input_view = input.contiguous().view(batch_size, out_channels, scale_factor, scale_factor, in_height, in_width) + shuffle_out = input_view.permute(0, 1, 4, 2, 5, 3).contiguous() + else: + block_size = int(1 / scale_factor) + input_view = input.contiguous().view(batch_size, channels, out_height, block_size, out_width, block_size) + shuffle_out = input_view.permute(0, 1, 3, 5, 2, 4).contiguous() + + return shuffle_out.view(batch_size, out_channels, out_height, out_width) + + +class PixelShuffle(nn.Module): + def __init__(self, scale_factor): + super(PixelShuffle, self).__init__() + self.scale_factor = scale_factor + + def forward(self, x): + return pixel_shuffle(x, self.scale_factor) + def extra_repr(self): + return 'scale_factor={}'.format(self.scale_factor) + + +def conv(in_channels, out_channels, kernel_size, + stride=1, bias=True, groups=1): + return nn.Conv2d( + in_channels, + out_channels, + kernel_size=kernel_size, + padding=kernel_size//2, + stride=1, + bias=bias, + groups=groups) + + +def conv1x1(in_channels, out_channels, stride=1, bias=True, groups=1): + return nn.Conv2d( + in_channels, + out_channels, + kernel_size=1, + stride=stride, + bias=bias, + groups=groups) + +def conv3x3(in_channels, out_channels, stride=1, + padding=1, bias=True, groups=1): + return nn.Conv2d( + in_channels, + out_channels, + kernel_size=3, + stride=stride, + padding=padding, + bias=bias, + groups=groups) + +def conv5x5(in_channels, out_channels, stride=1, + padding=2, bias=True, groups=1): + return nn.Conv2d( + in_channels, + out_channels, + kernel_size=5, + stride=stride, + padding=padding, + bias=bias, + groups=groups) + +def conv7x7(in_channels, out_channels, stride=1, + padding=3, bias=True, groups=1): + return nn.Conv2d( + in_channels, + out_channels, + kernel_size=7, + stride=stride, + padding=padding, + bias=bias, + groups=groups) + +def upconv2x2(in_channels, out_channels, mode='shuffle'): + if mode == 'transpose': + return nn.ConvTranspose2d( + in_channels, + out_channels, + kernel_size=4, + stride=2, + padding=1) + elif mode == 'shuffle': + return nn.Sequential( + conv3x3(in_channels, 4*out_channels), + PixelShuffle(2)) + else: + # out_channels is always going to be the same as in_channels + return nn.Sequential( + nn.Upsample(mode='bilinear', scale_factor=2, align_corners=False), + conv1x1(in_channels, out_channels)) + + + +class Interpolation(nn.Module): + def __init__(self, n_resgroups, n_resblocks, n_feats, + reduction=16, act=nn.LeakyReLU(0.2, True), norm=False): + super(Interpolation, self).__init__() + + # define modules: head, body, tail + self.headConv = conv3x3(n_feats * 2, n_feats) + + modules_body = [ + ResidualGroup( + RCAB, + n_resblocks=n_resblocks, + n_feat=n_feats, + kernel_size=3, + reduction=reduction, + act=act, + norm=norm) + for _ in range(n_resgroups)] + self.body = nn.Sequential(*modules_body) + + self.tailConv = conv3x3(n_feats, n_feats) + + def forward(self, x0, x1): + # Build input tensor + x = torch.cat([x0, x1], dim=1) + x = self.headConv(x) + + res = self.body(x) + res += x + + out = self.tailConv(res) + return out + + +class Interpolation_res(nn.Module): + def __init__(self, n_resgroups, n_resblocks, n_feats, + act=nn.LeakyReLU(0.2, True), norm=False): + super(Interpolation_res, self).__init__() + + # define modules: head, body, tail (reduces concatenated inputs to n_feat) + self.headConv = conv3x3(n_feats * 2, n_feats) 
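# --- Reviewer sketch (not upstream code): round-trip check for the
# fractional-scale pixel_shuffle defined earlier in this file. CAIN's encoder
# uses PixelShuffle(1/2**depth) as a lossless "shuffle-spread": each 2x2
# spatial block moves into the channel dimension (4x channels, half
# resolution), and PixelShuffle(2) inverts it exactly.
import torch
x = torch.arange(16.).view(1, 1, 4, 4)
down = pixel_shuffle(x, 1 / 2)     # -> [1, 4, 2, 2]
up = pixel_shuffle(down, 2)        # -> [1, 1, 4, 4]
assert torch.equal(x, up)          # exact inverse, no information lost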
+ + modules_body = [ResidualGroup(ResBlock, n_resblocks=n_resblocks, n_feat=n_feats, kernel_size=3, + reduction=0, act=act, norm=norm) + for _ in range(n_resgroups)] + self.body = nn.Sequential(*modules_body) + + self.tailConv = conv3x3(n_feats, n_feats) + + def forward(self, x0, x1): + # Build input tensor + x = torch.cat([x0, x1], dim=1) + x = self.headConv(x) + + res = x + for m in self.body: + res = m(res) + res += x + + x = self.tailConv(res) + + return x \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/eisai/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/eisai/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3602505192621731e00acde67489c6a51360adb8 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/eisai/__init__.py @@ -0,0 +1,84 @@ +import pathlib +from vfi_utils import load_file_from_github_release, preprocess_frames, postprocess_frames, generic_frame_loop, InterpolationStateList +import typing +import torch +import torch.nn as nn +from comfy.model_management import soft_empty_cache, get_torch_device + +MODEL_TYPE = pathlib.Path(__file__).parent.name +MODEL_FILE_NAMES = { + "ssl": "eisai_ssl.pt", + "dtm": "eisai_dtm.pt", + "raft": "eisai_anime_interp_full.ckpt" +} + +class EISAI(nn.Module): + def __init__(self, model_file_names) -> None: + from .eisai_arch import SoftsplatLite, DTM, RAFT + super(EISAI, self).__init__() + self.raft = RAFT(load_file_from_github_release(MODEL_TYPE, model_file_names["raft"])) + self.raft.to(get_torch_device()).eval() + + self.ssl = SoftsplatLite() + self.ssl.load_state_dict(torch.load(load_file_from_github_release(MODEL_TYPE, model_file_names["ssl"]))) + self.ssl.to(get_torch_device()).eval() + + self.dtm = DTM() + self.dtm.load_state_dict(torch.load(load_file_from_github_release(MODEL_TYPE, model_file_names["dtm"]))) + self.dtm.to(get_torch_device()).eval() + + def forward(self, img0, img1, t): + with torch.no_grad(): + flow0, _ = self.raft(img0, img1) + flow1, _ = self.raft(img1, img0) + x = { + "images": torch.stack([img0, img1], dim=1), + "flows": torch.stack([flow0, flow1], dim=1), + } + out_ssl, _ = self.ssl(x, t=t, return_more=True) + out_dtm, _ = self.dtm(x, out_ssl, _, return_more=False) + return out_dtm[:, :3] + +class EISAI_VFI: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "ckpt_name": (["eisai"], ), + "frames": ("IMAGE", ), + "clear_cache_after_n_frames": ("INT", {"default": 10, "min": 1, "max": 1000}), + "multiplier": ("INT", {"default": 2, "min": 2, "max": 1000}), + }, + "optional": { + "optional_interpolation_states": ("INTERPOLATION_STATES", ) + } + } + + RETURN_TYPES = ("IMAGE", ) + FUNCTION = "vfi" + CATEGORY = "ComfyUI-Frame-Interpolation/VFI" + + def vfi( + self, + ckpt_name: typing.AnyStr, + frames: torch.Tensor, + clear_cache_after_n_frames = 10, + multiplier: typing.SupportsInt = 2, + optional_interpolation_states: InterpolationStateList = None, + **kwargs + ): + interpolation_model = EISAI(MODEL_FILE_NAMES) + interpolation_model.eval().to(get_torch_device()) + frames = preprocess_frames(frames) + + def return_middle_frame(frame_0, frame_1, timestep, model): + return model(frame_0, frame_1, t=timestep) + + scale = 1 + + args = [interpolation_model, scale] + out = postprocess_frames( + generic_frame_loop(type(self).__name__, frames, clear_cache_after_n_frames, multiplier, return_middle_frame, *args, + interpolation_states=optional_interpolation_states, 
dtype=torch.float32) + ) + return (out,) diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/eisai/eisai_arch.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/eisai/eisai_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..4a3abae7187b26fedd86219058a7e8e024e59e30 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/eisai/eisai_arch.py @@ -0,0 +1,2586 @@ +""" +https://github.com/ShuhongChen/eisai-anime-interpolator/blob/master/_scripts/interpolate.py +https://github.com/ShuhongChen/eisai-anime-interpolator/blob/master/_train/frame_interpolation/models/ssldtm.py +https://github.com/ShuhongChen/eisai-anime-interpolator/blob/master/_util/util_v0.py +https://github.com/ShuhongChen/eisai-anime-interpolator/blob/master/_util/twodee_v0.py +https://github.com/ShuhongChen/eisai-anime-interpolator/blob/master/_util/pytorch_v0.py +https://github.com/ShuhongChen/eisai-anime-interpolator/blob/master/_util/distance_transform_v0.py +https://github.com/ShuhongChen/eisai-anime-interpolator/blob/master/_util/sketchers_v1.py +https://github.com/ShuhongChen/eisai-anime-interpolator/blob/master/_train/frame_interpolation/helpers/interpolator_v0.py +https://github.com/ShuhongChen/eisai-anime-interpolator/blob/master/_train/frame_interpolation/helpers/gridnet_v1.py +https://github.com/ShuhongChen/eisai-anime-interpolator/blob/master/_util/flow_v0.py +https://github.com/ShuhongChen/eisai-anime-interpolator/blob/master/_util/softsplat_v0.py +https://github.com/ShuhongChen/eisai-anime-interpolator/blob/master/_train/frame_interpolation/helpers/raft_v1/rfr_new.py +https://github.com/ShuhongChen/eisai-anime-interpolator/blob/master/_train/frame_interpolation/helpers/raft_v1/extractor.py +https://github.com/ShuhongChen/eisai-anime-interpolator/blob/master/_train/frame_interpolation/helpers/raft_v1/update.py +https://github.com/ShuhongChen/eisai-anime-interpolator/blob/master/_train/frame_interpolation/helpers/raft_v1/corr.py +https://github.com/ShuhongChen/eisai-anime-interpolator/blob/master/_train/frame_interpolation/helpers/raft_v1/utils.py +""" + +import copy +import cv2 +import torch.nn.functional as F +import torchvision.transforms.functional as F +import gc +from PIL import Image, ImageFile, ImageFont, ImageDraw +import inspect +from scipy import interpolate +import kornia +import math +from argparse import Namespace +import torch.nn as nn +import numpy as np +import os +from functools import partial +import pathlib +import PIL +import re +import requests +from scipy.spatial.transform import Rotation +import scipy +import shutil +import torchvision.transforms as T +import time +import torch +import torchvision as tv +import zlib +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from tqdm.auto import tqdm as std_tqdm +from tqdm.auto import trange as std_trange +from vfi_models.ops import FunctionSoftsplat, batch_edt +from comfy.model_management import get_torch_device + +device = get_torch_device() +autocast = torch.autocast +tqdm = partial(std_tqdm, dynamic_ncols=True) +trange = partial(std_trange, dynamic_ncols=True) +ImageFile.LOAD_TRUNCATED_IMAGES = True + + +def pixel_ij(x, rounding=True): + if isinstance(x, np.ndarray): + x = x.tolist() + return tuple( + pixel_rounder(i, rounding) + for i in (x if isinstance(x, tuple) or isinstance(x, list) else (x, x)) + ) + + +def rescale_dry(x, factor): + h, w = x[-2:] if isinstance(x, tuple) or isinstance(x, list) else I(x).size + 
return (h * factor, w * factor) + + +def pixel_rounder(n, mode): + if mode == True or mode == "round": + return round(n) + elif mode == "ceil": + return math.ceil(n) + elif mode == "floor": + return math.floor(n) + else: + return n + + +def diam(x): + if isinstance(x, tuple) or isinstance(x, list): + h, w = x[-2:] + elif isinstance(x, I): + h, w = x.size + else: + h, w = x.shape[-2:] + return np.sqrt(h**2 + w**2) + + +def pixel_logit(x, pixel_margin=1): + x = (x * (255 - 2 * pixel_margin) + pixel_margin) / 255 + return torch.log(x / (1 - x)) + + +class InputPadder: + """Pads images such that dimensions are divisible by 8""" + + def __init__(self, dims): + self.ht, self.wd = dims[-2:] + pad_ht = (((self.ht // 8) + 1) * 8 - self.ht) % 8 + pad_wd = (((self.wd // 8) + 1) * 8 - self.wd) % 8 + self._pad = [pad_wd // 2, pad_wd - pad_wd // 2, 0, pad_ht] + + def pad(self, *inputs): + return [F.pad(x, self._pad, mode="replicate") for x in inputs] + + def unpad(self, x): + ht, wd = x.shape[-2:] + c = [self._pad[2], ht - self._pad[3], self._pad[0], wd - self._pad[1]] + return x[..., c[0] : c[1], c[2] : c[3]] + + +def forward_interpolate(flow): + flow = flow.detach().cpu().numpy() + dx, dy = flow[0], flow[1] + + ht, wd = dx.shape + x0, y0 = np.meshgrid(np.arange(wd), np.arange(ht)) + + x1 = x0 + dx + y1 = y0 + dy + + x1 = x1.reshape(-1) + y1 = y1.reshape(-1) + dx = dx.reshape(-1) + dy = dy.reshape(-1) + + valid = (x1 > 0) & (x1 < wd) & (y1 > 0) & (y1 < ht) + x1 = x1[valid] + y1 = y1[valid] + dx = dx[valid] + dy = dy[valid] + + flow_x = interpolate.griddata((x1, y1), dx, (x0, y0), method="cubic", fill_value=0) + + flow_y = interpolate.griddata((x1, y1), dy, (x0, y0), method="cubic", fill_value=0) + + flow = np.stack([flow_x, flow_y], axis=0) + return torch.from_numpy(flow).float() + + +def bilinear_sampler(img, coords, mode="bilinear", mask=False): + """Wrapper for grid_sample, uses pixel coordinates""" + H, W = img.shape[-2:] + xgrid, ygrid = coords.split([1, 1], dim=-1) + xgrid = 2 * xgrid / (W - 1) - 1 + ygrid = 2 * ygrid / (H - 1) - 1 + + grid = torch.cat([xgrid, ygrid], dim=-1) + # print(img.size()) + img = F.grid_sample(img, grid, align_corners=True) + + if mask: + mask = (xgrid > -1) & (ygrid > -1) & (xgrid < 1) & (ygrid < 1) + return img, mask.float() + + return img + + +def coords_grid(batch, ht, wd): + coords = torch.meshgrid(torch.arange(ht), torch.arange(wd)) + coords = torch.stack(coords[::-1], dim=0).float() + return coords[None].repeat(batch, 1, 1, 1) + + +def upflow8(flow, mode="bilinear"): + new_size = (8 * flow.shape[2], 8 * flow.shape[3]) + return 8 * F.interpolate(flow, size=new_size, mode=mode, align_corners=True) + + +class CorrBlock: + def __init__(self, fmap1, fmap2, num_levels=4, radius=4): + self.num_levels = num_levels + self.radius = radius + self.corr_pyramid = [] + + # all pairs correlation + corr = CorrBlock.corr(fmap1, fmap2) + + batch, h1, w1, dim, h2, w2 = corr.shape + corr = corr.reshape(batch * h1 * w1, dim, h2, w2) + + self.corr_pyramid.append(corr) + for i in range(self.num_levels - 1): + corr = F.avg_pool2d(corr, 2, stride=2) + self.corr_pyramid.append(corr) + + def __call__(self, coords): + r = self.radius + coords = coords.permute(0, 2, 3, 1) + batch, h1, w1, _ = coords.shape + + out_pyramid = [] + for i in range(self.num_levels): + corr = self.corr_pyramid[i] + dx = torch.linspace(-r, r, 2 * r + 1) + dy = torch.linspace(-r, r, 2 * r + 1) + delta = torch.stack(torch.meshgrid(dy, dx), dim=-1).to(coords.device) + + centroid_lvl = coords.reshape(batch * h1 * w1, 1, 
1, 2) / 2**i + delta_lvl = delta.view(1, 2 * r + 1, 2 * r + 1, 2) + coords_lvl = centroid_lvl + delta_lvl + + corr = bilinear_sampler(corr, coords_lvl) + corr = corr.view(batch, h1, w1, -1) + out_pyramid.append(corr) + + out = torch.cat(out_pyramid, dim=-1) + return out.permute(0, 3, 1, 2).contiguous().float() + + @staticmethod + def corr(fmap1, fmap2): + batch, dim, ht, wd = fmap1.shape + fmap1 = fmap1.view(batch, dim, ht * wd) + fmap2 = fmap2.view(batch, dim, ht * wd) + + corr = torch.matmul(fmap1.transpose(1, 2), fmap2) + corr = corr.view(batch, ht, wd, 1, ht, wd) + return corr / torch.sqrt(torch.tensor(dim).float()) + + +class FlowHead(nn.Module): + def __init__(self, input_dim=128, hidden_dim=256): + super(FlowHead, self).__init__() + self.conv1 = nn.Conv2d(input_dim, hidden_dim, 3, padding=1) + self.conv2 = nn.Conv2d(hidden_dim, 2, 3, padding=1) + self.relu = nn.ReLU(inplace=True) + + def forward(self, x): + return self.conv2(self.relu(self.conv1(x))) + + +class ConvGRU(nn.Module): + def __init__(self, hidden_dim=128, input_dim=192 + 128): + super(ConvGRU, self).__init__() + self.convz = nn.Conv2d(hidden_dim + input_dim, hidden_dim, 3, padding=1) + self.convr = nn.Conv2d(hidden_dim + input_dim, hidden_dim, 3, padding=1) + self.convq = nn.Conv2d(hidden_dim + input_dim, hidden_dim, 3, padding=1) + + def forward(self, h, x): + hx = torch.cat([h, x], dim=1) + + z = torch.sigmoid(self.convz(hx)) + r = torch.sigmoid(self.convr(hx)) + q = torch.tanh(self.convq(torch.cat([r * h, x], dim=1))) + + h = (1 - z) * h + z * q + return h + + +class SepConvGRU(nn.Module): + def __init__(self, hidden_dim=128, input_dim=192 + 128): + super(SepConvGRU, self).__init__() + self.convz1 = nn.Conv2d( + hidden_dim + input_dim, hidden_dim, (1, 5), padding=(0, 2) + ) + self.convr1 = nn.Conv2d( + hidden_dim + input_dim, hidden_dim, (1, 5), padding=(0, 2) + ) + self.convq1 = nn.Conv2d( + hidden_dim + input_dim, hidden_dim, (1, 5), padding=(0, 2) + ) + + self.convz2 = nn.Conv2d( + hidden_dim + input_dim, hidden_dim, (5, 1), padding=(2, 0) + ) + self.convr2 = nn.Conv2d( + hidden_dim + input_dim, hidden_dim, (5, 1), padding=(2, 0) + ) + self.convq2 = nn.Conv2d( + hidden_dim + input_dim, hidden_dim, (5, 1), padding=(2, 0) + ) + + def forward(self, h, x): + # horizontal + hx = torch.cat([h, x], dim=1) + z = torch.sigmoid(self.convz1(hx)) + r = torch.sigmoid(self.convr1(hx)) + q = torch.tanh(self.convq1(torch.cat([r * h, x], dim=1))) + h = (1 - z) * h + z * q + + # vertical + hx = torch.cat([h, x], dim=1) + z = torch.sigmoid(self.convz2(hx)) + r = torch.sigmoid(self.convr2(hx)) + q = torch.tanh(self.convq2(torch.cat([r * h, x], dim=1))) + h = (1 - z) * h + z * q + + return h + + +class SmallMotionEncoder(nn.Module): + def __init__(self, args): + super(SmallMotionEncoder, self).__init__() + cor_planes = args.corr_levels * (2 * args.corr_radius + 1) ** 2 + self.convc1 = nn.Conv2d(cor_planes, 96, 1, padding=0) + self.convf1 = nn.Conv2d(2, 64, 7, padding=3) + self.convf2 = nn.Conv2d(64, 32, 3, padding=1) + self.conv = nn.Conv2d(128, 80, 3, padding=1) + + def forward(self, flow, corr): + cor = F.relu(self.convc1(corr)) + flo = F.relu(self.convf1(flow)) + flo = F.relu(self.convf2(flo)) + cor_flo = torch.cat([cor, flo], dim=1) + out = F.relu(self.conv(cor_flo)) + return torch.cat([out, flow], dim=1) + + +class BasicMotionEncoder(nn.Module): + def __init__(self, args): + super(BasicMotionEncoder, self).__init__() + cor_planes = args.corr_levels * (2 * args.corr_radius + 1) ** 2 + self.convc1 = nn.Conv2d(cor_planes, 256, 1, 
padding=0) + self.convc2 = nn.Conv2d(256, 192, 3, padding=1) + self.convf1 = nn.Conv2d(2, 128, 7, padding=3) + self.convf2 = nn.Conv2d(128, 64, 3, padding=1) + self.conv = nn.Conv2d(64 + 192, 128 - 2, 3, padding=1) + + def forward(self, flow, corr): + cor = F.relu(self.convc1(corr)) + cor = F.relu(self.convc2(cor)) + flo = F.relu(self.convf1(flow)) + flo = F.relu(self.convf2(flo)) + + cor_flo = torch.cat([cor, flo], dim=1) + out = F.relu(self.conv(cor_flo)) + return torch.cat([out, flow], dim=1) + + +class SmallUpdateBlock(nn.Module): + def __init__(self, args, hidden_dim=96): + super(SmallUpdateBlock, self).__init__() + self.encoder = SmallMotionEncoder(args) + self.gru = ConvGRU(hidden_dim=hidden_dim, input_dim=82 + 64) + self.flow_head = FlowHead(hidden_dim, hidden_dim=128) + + def forward(self, net, inp, corr, flow): + motion_features = self.encoder(flow, corr) + inp = torch.cat([inp, motion_features], dim=1) + net = self.gru(net, inp) + delta_flow = self.flow_head(net) + + return net, None, delta_flow + + +class BasicUpdateBlock(nn.Module): + def __init__(self, args, hidden_dim=128, input_dim=128): + super(BasicUpdateBlock, self).__init__() + self.args = args + self.encoder = BasicMotionEncoder(args) + self.gru = SepConvGRU(hidden_dim=hidden_dim, input_dim=128 + hidden_dim) + self.flow_head = FlowHead(hidden_dim, hidden_dim=256) + + self.mask = nn.Sequential( + nn.Conv2d(128, 256, 3, padding=1), + nn.ReLU(inplace=True), + nn.Conv2d(256, 64 * 9, 1, padding=0), + ) + + def forward(self, net, inp, corr, flow, upsample=True): + motion_features = self.encoder(flow, corr) + inp = torch.cat([inp, motion_features], dim=1) + + net = self.gru(net, inp) + delta_flow = self.flow_head(net) + + # scale mask to balence gradients + mask = 0.25 * self.mask(net) + return net, mask, delta_flow + + +class ResidualBlock(nn.Module): + def __init__(self, in_planes, planes, norm_fn="group", stride=1): + super(ResidualBlock, self).__init__() + + self.conv1 = nn.Conv2d( + in_planes, planes, kernel_size=3, padding=1, stride=stride + ) + self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, padding=1) + self.relu = nn.ReLU(inplace=True) + + num_groups = planes // 8 + + if norm_fn == "group": + self.norm1 = nn.GroupNorm(num_groups=num_groups, num_channels=planes) + self.norm2 = nn.GroupNorm(num_groups=num_groups, num_channels=planes) + if not stride == 1: + self.norm3 = nn.GroupNorm(num_groups=num_groups, num_channels=planes) + + elif norm_fn == "batch": + self.norm1 = nn.BatchNorm2d(planes) + self.norm2 = nn.BatchNorm2d(planes) + if not stride == 1: + self.norm3 = nn.BatchNorm2d(planes) + + elif norm_fn == "instance": + self.norm1 = nn.InstanceNorm2d(planes) + self.norm2 = nn.InstanceNorm2d(planes) + if not stride == 1: + self.norm3 = nn.InstanceNorm2d(planes) + + elif norm_fn == "none": + self.norm1 = nn.Sequential() + self.norm2 = nn.Sequential() + if not stride == 1: + self.norm3 = nn.Sequential() + + if stride == 1: + self.downsample = None + + else: + self.downsample = nn.Sequential( + nn.Conv2d(in_planes, planes, kernel_size=1, stride=stride), self.norm3 + ) + + def forward(self, x): + y = x + y = self.relu(self.norm1(self.conv1(y))) + y = self.relu(self.norm2(self.conv2(y))) + + if self.downsample is not None: + x = self.downsample(x) + + return self.relu(x + y) + + +class BottleneckBlock(nn.Module): + def __init__(self, in_planes, planes, norm_fn="group", stride=1): + super(BottleneckBlock, self).__init__() + + self.conv1 = nn.Conv2d(in_planes, planes // 4, kernel_size=1, padding=0) + self.conv2 = 
nn.Conv2d( + planes // 4, planes // 4, kernel_size=3, padding=1, stride=stride + ) + self.conv3 = nn.Conv2d(planes // 4, planes, kernel_size=1, padding=0) + self.relu = nn.ReLU(inplace=True) + + num_groups = planes // 8 + + if norm_fn == "group": + self.norm1 = nn.GroupNorm(num_groups=num_groups, num_channels=planes // 4) + self.norm2 = nn.GroupNorm(num_groups=num_groups, num_channels=planes // 4) + self.norm3 = nn.GroupNorm(num_groups=num_groups, num_channels=planes) + if not stride == 1: + self.norm4 = nn.GroupNorm(num_groups=num_groups, num_channels=planes) + + elif norm_fn == "batch": + self.norm1 = nn.BatchNorm2d(planes // 4) + self.norm2 = nn.BatchNorm2d(planes // 4) + self.norm3 = nn.BatchNorm2d(planes) + if not stride == 1: + self.norm4 = nn.BatchNorm2d(planes) + + elif norm_fn == "instance": + self.norm1 = nn.InstanceNorm2d(planes // 4) + self.norm2 = nn.InstanceNorm2d(planes // 4) + self.norm3 = nn.InstanceNorm2d(planes) + if not stride == 1: + self.norm4 = nn.InstanceNorm2d(planes) + + elif norm_fn == "none": + self.norm1 = nn.Sequential() + self.norm2 = nn.Sequential() + self.norm3 = nn.Sequential() + if not stride == 1: + self.norm4 = nn.Sequential() + + if stride == 1: + self.downsample = None + + else: + self.downsample = nn.Sequential( + nn.Conv2d(in_planes, planes, kernel_size=1, stride=stride), self.norm4 + ) + + def forward(self, x): + y = x + y = self.relu(self.norm1(self.conv1(y))) + y = self.relu(self.norm2(self.conv2(y))) + y = self.relu(self.norm3(self.conv3(y))) + + if self.downsample is not None: + x = self.downsample(x) + + return self.relu(x + y) + + +class BasicEncoder(nn.Module): + def __init__(self, output_dim=128, norm_fn="batch", dropout=0.0): + super(BasicEncoder, self).__init__() + self.norm_fn = norm_fn + + if self.norm_fn == "group": + self.norm1 = nn.GroupNorm(num_groups=8, num_channels=64) + + elif self.norm_fn == "batch": + self.norm1 = nn.BatchNorm2d(64) + + elif self.norm_fn == "instance": + self.norm1 = nn.InstanceNorm2d(64) + + elif self.norm_fn == "none": + self.norm1 = nn.Sequential() + + self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3) + self.relu1 = nn.ReLU(inplace=True) + + self.in_planes = 64 + self.layer1 = self._make_layer(64, stride=1) + self.layer2 = self._make_layer(96, stride=2) + self.layer3 = self._make_layer(128, stride=2) + + # output convolution + self.conv2 = nn.Conv2d(128, output_dim, kernel_size=1) + + self.dropout = None + if dropout > 0: + self.dropout = nn.Dropout2d(p=dropout) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode="fan_out", nonlinearity="relu") + elif isinstance(m, (nn.BatchNorm2d, nn.InstanceNorm2d, nn.GroupNorm)): + if m.weight is not None: + nn.init.constant_(m.weight, 1) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + + def _make_layer(self, dim, stride=1): + layer1 = ResidualBlock(self.in_planes, dim, self.norm_fn, stride=stride) + layer2 = ResidualBlock(dim, dim, self.norm_fn, stride=1) + layers = (layer1, layer2) + + self.in_planes = dim + return nn.Sequential(*layers) + + def forward(self, x): + # if input is list, combine batch dimension + is_list = isinstance(x, tuple) or isinstance(x, list) + if is_list: + batch_dim = x[0].shape[0] + x = torch.cat(x, dim=0) + + x = self.conv1(x) + x = self.norm1(x) + x = self.relu1(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + + x = self.conv2(x) + + if self.training and self.dropout is not None: + x = self.dropout(x) + + if is_list: + x = torch.split(x, [batch_dim, 
batch_dim], dim=0) + + return x + + +class BasicEncoder1(nn.Module): + def __init__(self, output_dim=128, norm_fn="batch", dropout=0.0): + super(BasicEncoder1, self).__init__() + self.norm_fn = norm_fn + + if self.norm_fn == "group": + self.norm1 = nn.GroupNorm(num_groups=8, num_channels=64) + + elif self.norm_fn == "batch": + self.norm1 = nn.BatchNorm2d(64) + + elif self.norm_fn == "instance": + self.norm1 = nn.InstanceNorm2d(64) + + elif self.norm_fn == "none": + self.norm1 = nn.Sequential() + + self.conv1 = nn.Conv2d(2, 64, kernel_size=7, stride=2, padding=3) + self.relu1 = nn.ReLU(inplace=True) + + self.in_planes = 64 + self.layer1 = self._make_layer(64, stride=1) + self.layer2 = self._make_layer(96, stride=2) + self.layer3 = self._make_layer(128, stride=2) + + # output convolution + self.conv2 = nn.Conv2d(128, output_dim, kernel_size=1) + + self.dropout = None + if dropout > 0: + self.dropout = nn.Dropout2d(p=dropout) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode="fan_out", nonlinearity="relu") + elif isinstance(m, (nn.BatchNorm2d, nn.InstanceNorm2d, nn.GroupNorm)): + if m.weight is not None: + nn.init.constant_(m.weight, 1) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + + def _make_layer(self, dim, stride=1): + layer1 = ResidualBlock(self.in_planes, dim, self.norm_fn, stride=stride) + layer2 = ResidualBlock(dim, dim, self.norm_fn, stride=1) + layers = (layer1, layer2) + + self.in_planes = dim + return nn.Sequential(*layers) + + def forward(self, x): + # if input is list, combine batch dimension + is_list = isinstance(x, tuple) or isinstance(x, list) + if is_list: + batch_dim = x[0].shape[0] + x = torch.cat(x, dim=0) + + x = self.conv1(x) + x = self.norm1(x) + x = self.relu1(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + + x = self.conv2(x) + + if self.training and self.dropout is not None: + x = self.dropout(x) + + if is_list: + x = torch.split(x, [batch_dim, batch_dim], dim=0) + + return x + + +class SmallEncoder(nn.Module): + def __init__(self, output_dim=128, norm_fn="batch", dropout=0.0): + super(SmallEncoder, self).__init__() + self.norm_fn = norm_fn + + if self.norm_fn == "group": + self.norm1 = nn.GroupNorm(num_groups=8, num_channels=32) + + elif self.norm_fn == "batch": + self.norm1 = nn.BatchNorm2d(32) + + elif self.norm_fn == "instance": + self.norm1 = nn.InstanceNorm2d(32) + + elif self.norm_fn == "none": + self.norm1 = nn.Sequential() + + self.conv1 = nn.Conv2d(3, 32, kernel_size=7, stride=2, padding=3) + self.relu1 = nn.ReLU(inplace=True) + + self.in_planes = 32 + self.layer1 = self._make_layer(32, stride=1) + self.layer2 = self._make_layer(64, stride=2) + self.layer3 = self._make_layer(96, stride=2) + + self.dropout = None + if dropout > 0: + self.dropout = nn.Dropout2d(p=dropout) + + self.conv2 = nn.Conv2d(96, output_dim, kernel_size=1) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode="fan_out", nonlinearity="relu") + elif isinstance(m, (nn.BatchNorm2d, nn.InstanceNorm2d, nn.GroupNorm)): + if m.weight is not None: + nn.init.constant_(m.weight, 1) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + + def _make_layer(self, dim, stride=1): + layer1 = BottleneckBlock(self.in_planes, dim, self.norm_fn, stride=stride) + layer2 = BottleneckBlock(dim, dim, self.norm_fn, stride=1) + layers = (layer1, layer2) + + self.in_planes = dim + return nn.Sequential(*layers) + + def forward(self, x): + # if input is list, combine batch 
dimension + is_list = isinstance(x, tuple) or isinstance(x, list) + if is_list: + batch_dim = x[0].shape[0] + x = torch.cat(x, dim=0) + + x = self.conv1(x) + x = self.norm1(x) + x = self.relu1(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.conv2(x) + + if self.training and self.dropout is not None: + x = self.dropout(x) + + if is_list: + x = torch.split(x, [batch_dim, batch_dim], dim=0) + + return x + + +################################################## +# RFR is implemented based on RAFT optical flow # +################################################## + + +def backwarp(img, flow): + _, _, H, W = img.size() + + u = flow[:, 0, :, :] + v = flow[:, 1, :, :] + + gridX, gridY = np.meshgrid(np.arange(W), np.arange(H)) + + gridX = torch.tensor( + gridX, + requires_grad=False, + ).cuda() + gridY = torch.tensor( + gridY, + requires_grad=False, + ).cuda() + x = gridX.unsqueeze(0).expand_as(u).float() + u + y = gridY.unsqueeze(0).expand_as(v).float() + v + # range -1 to 1 + x = 2 * (x / (W - 1) - 0.5) + y = 2 * (y / (H - 1) - 0.5) + # stacking X and Y + grid = torch.stack((x, y), dim=3) + # Sample pixels using bilinear interpolation. + imgOut = torch.nn.functional.grid_sample(img, grid, align_corners=True) + + return imgOut + + +class ErrorAttention(nn.Module): + """A three-layer network for predicting mask""" + + def __init__(self, input, output): + super(ErrorAttention, self).__init__() + self.conv1 = nn.Conv2d(input, 32, 5, padding=2) + self.conv2 = nn.Conv2d(32, 32, 3, padding=1) + self.conv3 = nn.Conv2d(38, output, 3, padding=1) + self.prelu1 = nn.PReLU() + self.prelu2 = nn.PReLU() + + def forward(self, x1): + x = self.prelu1(self.conv1(x1)) + x = self.prelu2(torch.cat([self.conv2(x), x1], dim=1)) + x = self.conv3(x) + return x + + +class RFR(nn.Module): + def __init__(self, args): + super(RFR, self).__init__() + self.attention2 = ErrorAttention(6, 1) + self.hidden_dim = hdim = 128 + self.context_dim = cdim = 128 + args.corr_levels = 4 + args.corr_radius = 4 + args.dropout = 0 + self.args = args + + # feature network, context network, and update block + self.fnet = BasicEncoder(output_dim=256, norm_fn="none", dropout=args.dropout) + # self.cnet = BasicEncoder(output_dim=hdim+cdim, norm_fn='none', dropout=args.dropout) + self.update_block = BasicUpdateBlock(self.args, hidden_dim=hdim) + + def freeze_bn(self): + for m in self.modules(): + if isinstance(m, nn.BatchNorm2d): + m.eval() + + def initialize_flow(self, img): + """Flow is represented as difference between two coordinate grids flow = coords1 - coords0""" + N, C, H, W = img.shape + coords0 = coords_grid(N, H // 8, W // 8).to(img.device) + coords1 = coords_grid(N, H // 8, W // 8).to(img.device) + + # optical flow computed as difference: flow = coords1 - coords0 + return coords0, coords1 + + def upsample_flow(self, flow, mask): + """Upsample flow field [H/8, W/8, 2] -> [H, W, 2] using convex combination""" + N, _, H, W = flow.shape + mask = mask.view(N, 1, 9, 8, 8, H, W) + mask = torch.softmax(mask, dim=2) + + up_flow = F.unfold(8 * flow, [3, 3], padding=1) + up_flow = up_flow.view(N, 2, 9, 1, 1, H, W) + + up_flow = torch.sum(mask * up_flow, dim=2) + up_flow = up_flow.permute(0, 1, 4, 2, 5, 3) + return up_flow.reshape(N, 2, 8 * H, 8 * W) + + def forward( + self, image1, image2, iters=12, flow_init=None, upsample=True, test_mode=False + ): + H, W = image1.size()[2:4] + H8 = H // 8 * 8 + W8 = W // 8 * 8 + + if flow_init is not None: + flow_init_resize = F.interpolate( + flow_init, size=(H8 // 8, W8 // 8), 
mode="nearest" + ) + + flow_init_resize[:, :1] = ( + flow_init_resize[:, :1].clone() * (W8 // 8 * 1.0) / flow_init.size()[3] + ) + flow_init_resize[:, 1:] = ( + flow_init_resize[:, 1:].clone() * (H8 // 8 * 1.0) / flow_init.size()[2] + ) + + if not hasattr(self.args, "not_use_rfr_mask") or ( + hasattr(self.args, "not_use_rfr_mask") + and (not self.args.not_use_rfr_mask) + ): + im18 = F.interpolate(image1, size=(H8 // 8, W8 // 8), mode="bilinear") + im28 = F.interpolate(image2, size=(H8 // 8, W8 // 8), mode="bilinear") + + warp21 = backwarp(im28, flow_init_resize) + error21 = torch.sum(torch.abs(warp21 - im18), dim=1, keepdim=True) + # print('errormin', error21.min(), error21.max()) + f12init = ( + torch.exp( + -self.attention2( + torch.cat([im18, error21, flow_init_resize], dim=1) + ) + ** 2 + ) + * flow_init_resize + ) + else: + flow_init_resize = None + flow_init = torch.zeros( + image1.size()[0], 2, image1.size()[2] // 8, image1.size()[3] // 8 + ).cuda() + error21 = torch.zeros( + image1.size()[0], 1, image1.size()[2] // 8, image1.size()[3] // 8 + ).cuda() + + f12_init = flow_init + # print('None inital flow!') + + image1 = F.interpolate(image1, size=(H8, W8), mode="bilinear") + image2 = F.interpolate(image2, size=(H8, W8), mode="bilinear") + + f12s, f12, f12_init = self.forward_pred( + image1, image2, iters, flow_init_resize, upsample, test_mode + ) + + if hasattr(self.args, "requires_sq_flow") and self.args.requires_sq_flow: + for ii in range(len(f12s)): + f12s[ii] = F.interpolate(f12s[ii], size=(H, W), mode="bilinear") + f12s[ii][:, :1] = f12s[ii][:, :1].clone() / (1.0 * W8) * W + f12s[ii][:, 1:] = f12s[ii][:, 1:].clone() / (1.0 * H8) * H + if self.training: + return f12s + else: + return [f12s[-1]], f12_init + else: + f12[:, :1] = f12[:, :1].clone() / (1.0 * W8) * W + f12[:, 1:] = f12[:, 1:].clone() / (1.0 * H8) * H + + f12 = F.interpolate(f12, size=(H, W), mode="bilinear") + # print('wo!!') + return ( + f12, + f12_init, + error21, + ) + + def forward_pred( + self, image1, image2, iters=12, flow_init=None, upsample=True, test_mode=False + ): + """Estimate optical flow between pair of frames""" + + image1 = image1.contiguous() + image2 = image2.contiguous() + + hdim = self.hidden_dim + cdim = self.context_dim + + # run the feature network + with autocast(device.type, enabled=self.args.mixed_precision): + fmap1, fmap2 = self.fnet([image1, image2]) + fmap1 = fmap1.float() + fmap2 = fmap2.float() + corr_fn = CorrBlock(fmap1, fmap2, radius=self.args.corr_radius) + + # run the context network + with autocast(device.type, enabled=self.args.mixed_precision): + cnet = self.fnet(image1) + net, inp = torch.split(cnet, [hdim, cdim], dim=1) + net = torch.tanh(net) + inp = torch.relu(inp) + + coords0, coords1 = self.initialize_flow(image1) + + if flow_init is not None: + coords1 = coords1 + flow_init + + flow_predictions = [] + for itr in range(iters): + coords1 = coords1.detach() + if itr == 0: + if flow_init is not None: + coords1 = coords1 + flow_init + corr = corr_fn(coords1) # index correlation volume + + flow = coords1 - coords0 + with autocast(device.type, enabled=self.args.mixed_precision): + net, up_mask, delta_flow = self.update_block(net, inp, corr, flow) + + # F(t+1) = F(t) + \Delta(t) + coords1 = coords1 + delta_flow + + # upsample predictions + if up_mask is None: + flow_up = upflow8(coords1 - coords0) + else: + flow_up = self.upsample_flow(coords1 - coords0, up_mask) + + flow_predictions.append(flow_up) + + return flow_predictions, flow_up, flow_init + +####################### 
WARPING ####################### + + +# expects batched tensors, considered low-level operation +# img: bs, ch, h, w +# flow: bs, xy (pix displace), h, w +def flow_backwarp( + img, flow, resample="bilinear", padding_mode="border", align_corners=False +): + if len(img.shape) != 4: + img = img[None,] + if len(flow.shape) != 4: + flow = flow[None,] + q = ( + 2 + * flow + / torch.tensor( + [ + flow.shape[-2], + flow.shape[-1], + ], + device=flow.device, + dtype=torch.float, + )[None, :, None, None] + ) + q = q + torch.stack( + torch.meshgrid( + torch.linspace(-1, 1, flow.shape[-2]), + torch.linspace(-1, 1, flow.shape[-1]), + ) + )[ + None, + ].to( + flow.device + ) + if img.dtype != q.dtype: + img = img.type(q.dtype) + + return nn.functional.grid_sample( + img, + q.flip(dims=(1,)).permute(0, 2, 3, 1), + mode=resample, # nearest, bicubic, bilinear + padding_mode=padding_mode, # border, zeros, reflection + align_corners=align_corners, + ) + + +backwarp = flow_warp = flow_backwarp + + +# mode: sum, avg, lin, softmax +# lin/softmax w/out metric defaults to avg +# must use gpu, move back to cpu if retain_device +# typical metric: -20 * | img0 - backwarp(img1,flow) | +# From Fannovel16: Changed mode params for common ops. +def flow_forewarp( + img, flow, mode="average", metric=None, mask=False, retain_device=True +): + # setup + #if mode == "sum": + # mode = "summation" + #elif mode == "avg": + # mode = "average" + if mode in ["lin", "linear"]: + #mode = "linear" if metric is not None else "average" + mode = "linear" if metric is not None else "avg" + elif mode in ["sm", "softmax"]: + #mode = "softmax" if metric is not None else "average" + mode = "soft" if metric is not None else "avg" + if len(img.shape) != 4: + img = img[None,] + if len(flow.shape) != 4: + flow = flow[None,] + if metric is not None and len(metric.shape) != 4: + metric = metric[None,] + flow = flow.flip(dims=(1,)) + if img.dtype != torch.float32: + img = img.type(torch.float32) + if flow.dtype != torch.float32: + flow = flow.type(torch.float32) + if metric is not None and metric.dtype != torch.float32: + metric = metric.type(torch.float32) + + # move to gpu if necessary + assert img.device == flow.device + if metric is not None: + assert img.device == metric.device + was_cpu = img.device.type == "cpu" + if was_cpu: + img = img.to("cuda") + flow = flow.to("cuda") + if metric is not None: + metric = metric.to("cuda") + + # add mask + if mask: + bs, ch, h, w = img.shape + img = torch.cat( + [img, torch.ones(bs, 1, h, w, dtype=img.dtype, device=img.device)], dim=1 + ) + + # forward, move back to cpu if desired + ans = FunctionSoftsplat(img, flow, metric, mode) + if was_cpu and retain_device: + ans = ans.cpu() + return ans + + +forewarp = flow_forewarp + + +# resizing utility +def flow_resize(flow, size, mode="nearest", align_corners=False): + # flow: bs,xy,h,w + size = pixel_ij(size, rounding=True) + if flow.dtype != torch.float: + flow = flow.float() + if len(flow.shape) == 3: + flow = flow[None,] + if flow.shape[-2:] == size: + return flow + return ( + nn.functional.interpolate( + flow, + size=size, + mode=mode, + align_corners=align_corners if mode != "nearest" else None, + ) + * torch.tensor( + [b / a for a, b in zip(flow.shape[-2:], size)], + device=flow.device, + )[None, :, None, None] + ) + + +####################### TRADITIONAL ####################### + +# dense +_lucaskanade = lambda a, b: np.moveaxis( + cv2.optflow.calcOpticalFlowSparseToDense( + a, + b, # grid_step=5, sigma=0.5, + ), + 2, + 0, +)[ + None, +] +_farneback = 
lambda a, b: np.moveaxis( + cv2.calcOpticalFlowFarneback( + a, + b, + None, + 0.6, + 3, + 25, + 7, + 5, + 1.2, + cv2.OPTFLOW_FARNEBACK_GAUSSIAN, + ), + 2, + 0, +)[ + None, +] +_dtvl1_ = cv2.optflow.createOptFlow_DualTVL1() +_dtvl1 = lambda a, b: np.moveaxis( + _dtvl1_.calc( + a, + b, + None, + ), + 2, + 0, +)[ + None, +] +_simple = lambda a, b: np.moveaxis( + cv2.optflow.calcOpticalFlowSF( + a, + b, + 3, + 5, + 5, + ), + 2, + 0, +)[ + None, +] +_pca_ = cv2.optflow.createOptFlow_PCAFlow() +_pca = lambda a, b: np.moveaxis( + _pca_.calc( + a, + b, + None, + ), + 2, + 0, +)[ + None, +] +_drlof = lambda a, b: np.moveaxis( + cv2.optflow.calcOpticalFlowDenseRLOF( + a, + b, + None, + ), + 2, + 0, +)[ + None, +] +_deepflow_ = cv2.optflow.createOptFlow_DeepFlow() +_deepflow = lambda a, b: np.moveaxis( + _deepflow_.calc( + a, + b, + None, + ), + 2, + 0, +)[ + None, +] + + +def cv2flow(a, b, method="lucaskanade", back=False): + if method == "lucaskanade": + f = _lucaskanade + a = a.convert("L").cv2() + b = b.convert("L").cv2() + elif method == "farneback": + f = _farneback + a = a.convert("L").cv2() + b = b.convert("L").cv2() + elif method == "dtvl1": + f = _dtvl1 + a = a.convert("L").cv2() + b = b.convert("L").cv2() + elif method == "simple": + f = _simple + a = a.convert("RGB").cv2() + b = b.convert("RGB").cv2() + elif method == "pca": + f = _pca + a = a.convert("L").cv2() + b = b.convert("L").cv2() + elif method == "drlof": + f = _drlof + a = a.convert("RGB").cv2() + b = b.convert("RGB").cv2() + elif method == "deepflow": + f = _deepflow + a = a.convert("L").cv2() + b = b.convert("L").cv2() + else: + assert 0 + ans = f(b, a) + if back: + ans = np.concatenate( + [ + ans, + f(a, b), + ] + ) + return torch.tensor(ans).flip(dims=(1,)) + + +####################### FLOWNET2 ####################### + + +def flownet2(img_a, img_b, mode="shm", back=False): + # package + url = f"http://localhost:8109/get-flow" + if mode == "shm": + t = time.time() + fn_a = img_a.save(mkfile(f"/dev/shm/_flownet2/{t}/img_a.png")) + fn_b = img_b.save(mkfile(f"/dev/shm/_flownet2/{t}/img_b.png")) + elif mode == "net": + assert False, "not impl" + q = u2d.img2uri(img.pil("RGB")) + q.decode() + resp = requests.get( + url, + params={ + "img_a": fn_a, + "img_b": fn_b, + "mode": mode, + "back": back, + # 'vis': vis, + }, + ) + + # return + ans = {"response": resp} + if resp.status_code == 200: + j = resp.json() + ans["time"] = j["time"] + ans["output"] = { + "flow": torch.tensor(load(j["fn_flow"])), + } + # if vis: + # ans['output']['vis'] = I(j['fn_vis']) + if mode == "shm": + shutil.rmtree(f"/dev/shm/_flownet2/{t}") + return ans + + +####################### VISUALIZATION ####################### + + +class Gridnet(nn.Module): + def __init__(self, channels_0, channels_1, channels_2, total_dropout_p, depth): + super().__init__() + self.channels_0 = ch0 = channels_0 + self.channels_1 = ch1 = channels_1 + self.channels_2 = ch2 = channels_2 + self.total_dropout_p = p = total_dropout_p + self.depth = depth + self.encoders = nn.ModuleList( + [GridnetEncoder(ch0, ch1, ch2) for i in range(self.depth)] + ) + self.decoders = nn.ModuleList( + [GridnetDecoder(ch0, ch1, ch2) for i in range(self.depth)] + ) + self.total_dropout = GridnetTotalDropout(p) + return + + def forward(self, x): + for e, enc in enumerate(self.encoders): + t = [self.total_dropout(i) for i in t] if e != 0 else x + t = enc(t) + for d, dec in enumerate(self.decoders): + t = [self.total_dropout(i) for i in t] + t = dec(t) + return t + + +class GridnetEncoder(nn.Module): + def 
__init__(self, channels_0, channels_1, channels_2): + super().__init__() + self.channels_0 = ch0 = channels_0 + self.channels_1 = ch1 = channels_1 + self.channels_2 = ch2 = channels_2 + self.resnet_0 = GridnetResnet(ch0) + self.resnet_1 = GridnetResnet(ch1) + self.resnet_2 = GridnetResnet(ch2) + self.downsample_01 = GridnetDownsample(ch0, ch1) + self.downsample_12 = GridnetDownsample(ch1, ch2) + return + + def forward(self, x): + out = [ + None, + ] * 3 + out[0] = self.resnet_0(x[0]) + out[1] = self.resnet_1(x[1]) + self.downsample_01(out[0]) + out[2] = self.resnet_2(x[2]) + self.downsample_12(out[1]) + return out + + +class GridnetDecoder(nn.Module): + def __init__(self, channels_0, channels_1, channels_2): + super().__init__() + self.channels_0 = ch0 = channels_0 + self.channels_1 = ch1 = channels_1 + self.channels_2 = ch2 = channels_2 + self.resnet_0 = GridnetResnet(ch0) + self.resnet_1 = GridnetResnet(ch1) + self.resnet_2 = GridnetResnet(ch2) + self.upsample_10 = GridnetUpsample(ch1, ch0) + self.upsample_21 = GridnetUpsample(ch2, ch1) + return + + def forward(self, x): + out = [ + None, + ] * 3 + out[2] = self.resnet_2(x[2]) + out[1] = self.resnet_1(x[1]) + self.upsample_21(out[2]) + out[0] = self.resnet_0(x[0]) + self.upsample_10(out[1]) + return out + + +class GridnetConverter(nn.Module): + def __init__(self, channels_in, channels_out): + super().__init__() + self.channels_in = cin = channels_in + self.channels_out = cout = channels_out + self.nets = nn.ModuleList( + [ + nn.Sequential( + nn.PReLU(a), + nn.Conv2d(a, b, kernel_size=1, padding=0), + nn.BatchNorm2d(b), + ) + for a, b in zip(cin, cout) + ] + ) + return + + def forward(self, x): + return [m(q) for m, q in zip(self.nets, x)] + + +class GridnetResnet(nn.Module): + def __init__(self, channels): + super().__init__() + self.channels = ch = channels + self.net = nn.Sequential( + nn.PReLU(ch), + nn.Conv2d(ch, ch, kernel_size=3, padding=1), + nn.BatchNorm2d(ch), + nn.PReLU(ch), + nn.Conv2d(ch, ch, kernel_size=3, padding=1), + nn.BatchNorm2d(ch), + ) + return + + def forward(self, x): + return x + self.net(x) + + +class GridnetDownsample(nn.Module): + def __init__(self, channels_in, channels_out): + super().__init__() + self.channels_in = chin = channels_in + self.channels_out = chout = channels_out + self.net = nn.Sequential( + nn.PReLU(chin), + nn.Conv2d(chin, chin, kernel_size=3, padding=1, stride=2), + nn.BatchNorm2d(chin), + nn.PReLU(chin), + nn.Conv2d(chin, chout, kernel_size=3, padding=1), + nn.BatchNorm2d(chout), + ) + return + + def forward(self, x): + return self.net(x) + + +class GridnetUpsample(nn.Module): + def __init__(self, channels_in, channels_out): + super().__init__() + self.channels_in = chin = channels_in + self.channels_out = chout = channels_out + self.net = nn.Sequential( + nn.Upsample(scale_factor=2, mode="nearest"), + nn.PReLU(chin), + nn.Conv2d(chin, chout, kernel_size=3, padding=1), + nn.BatchNorm2d(chout), + nn.PReLU(chout), + nn.Conv2d(chout, chout, kernel_size=3, padding=1), + nn.BatchNorm2d(chout), + ) + return + + def forward(self, x): + return self.net(x) + + +class GridnetTotalDropout(nn.Module): + def __init__(self, p): + super().__init__() + assert 0 <= p < 1 + self.p = p + self.weight = 1 / (1 - p) + return + + def get_drop(self, x): + d = torch.rand(len(x))[:, None, None, None] < self.p + d = (1 - d.float()).to(x.device) * self.weight + return d + + def forward(self, x, force_drop=None): + if force_drop is True: + ans = x * self.get_drop(x) + elif force_drop is False: + ans = x + else: + if 
self.training: + ans = x * self.get_drop(x) + else: + ans = x + return ans + + +class Interpolator(nn.Module): + def __init__(self, size, mode="bilinear"): + super().__init__() + self.size = size + self.mode = mode + return + + def forward(self, x, is_flow=False): + if x.shape[-2] == self.size: + return x + if len(x.shape) == 4: + # bs,ch,h,w + bs, ch, h, w = x.shape + ans = nn.functional.interpolate( + x, + size=self.size, + mode=self.mode, + align_corners=(False, None)[self.mode == "nearest"], + ) + if is_flow: + ans = ( + ans + * torch.tensor( + [b / a for a, b in zip((h, w), self.size)], + device=ans.device, + )[None, :, None, None] + ) + return ans + elif len(x.shape) == 5: + # bs,k,ch,h,w (merge bs and k) + bs, k, ch, h, w = x.shape + return self.forward( + x.view(bs * k, ch, h, w), + is_flow=is_flow, + ).view(bs, k, ch, *self.size) + else: + assert 0 + + +###################### CANNY ###################### + + +def canny(img, a=100, b=200): + img = I(img).convert("L") + return I(cv2.Canny(img.cv2(), a, b)) + + +# https://www.pyimagesearch.com/2015/04/06/zero-parameter-automatic-canny-edge-detection-with-python-and-opencv/ +def canny_pis(img, sigma=0.33): + # compute the median of the single channel pixel intensities + img = I(img).convert("L").uint8(ch_last=False) + v = np.median(img) + # apply automatic Canny edge detection using the computed median + lower = int(max(0, (1.0 - sigma) * v)) + upper = int(min(255, (1.0 + sigma) * v)) + edged = cv2.Canny(img[0], lower, upper) + # return the edged image + return I(edged) + + +# https://en.wikipedia.org/wiki/Otsu%27s_method +def canny_otsu(img): + img = I(img).convert("L").uint8(ch_last=False) + high, _ = cv2.threshold(img[0], 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU) + low = 0.5 * high + return I(cv2.Canny(img[0], low, high)) + + +def xdog(img, t=1.0, epsilon=0.04, phi=100, sigma=3, k=1.6): + img = I(img).convert("L").uint8(ch_last=False) + grey = np.asarray(img, dtype=np.float32) + g0 = scipy.ndimage.gaussian_filter(grey, sigma) + g1 = scipy.ndimage.gaussian_filter(grey, sigma * k) + + # ans = ((1+p) * g0 - p * g1) / 255 + ans = (g0 - t * g1) / 255 + ans = 1 + np.tanh(phi * (ans - epsilon)) * (ans < epsilon) + return ans + + +def dog(img, t=1.0, sigma=1.0, k=1.6, epsilon=0.01, kernel_factor=4, clip=True): + img = I(img).convert("L").tensor()[None] + kern0 = max(2 * int(sigma * kernel_factor) + 1, 3) + kern1 = max(2 * int(sigma * k * kernel_factor) + 1, 3) + g0 = kornia.filters.gaussian_blur2d( + img, + (kern0, kern0), + (sigma, sigma), + border_type="replicate", + ) + g1 = kornia.filters.gaussian_blur2d( + img, + (kern1, kern1), + (sigma * k, sigma * k), + border_type="replicate", + ) + ans = 0.5 + t * (g1 - g0) - epsilon + ans = ans.clip(0, 1) if clip else ans + return ans[0].numpy() + + +# input: (bs,rgb(a),h,w) or (bs,1,h,w) +# returns: (bs,1,h,w) +def batch_dog(img, t=1.0, sigma=1.0, k=1.6, epsilon=0.01, kernel_factor=4, clip=True): + # to grayscale if needed + bs, ch, h, w = img.shape + if ch in [3, 4]: + img = kornia.color.rgb_to_grayscale(img[:, :3]) + else: + assert ch == 1 + + # calculate dog + kern0 = max(2 * int(sigma * kernel_factor) + 1, 3) + kern1 = max(2 * int(sigma * k * kernel_factor) + 1, 3) + g0 = kornia.filters.gaussian_blur2d( + img, + (kern0, kern0), + (sigma, sigma), + border_type="replicate", + ) + g1 = kornia.filters.gaussian_blur2d( + img, + (kern1, kern1), + (sigma * k, sigma * k), + border_type="replicate", + ) + ans = 0.5 + t * (g1 - g0) - epsilon + ans = ans.clip(0, 1) if clip else ans + return ans + + 
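+# A minimal usage sketch for batch_dog above (not part of the original file;
+# the tensors here are illustrative placeholders). It shows how a batch of RGB
+# frames is reduced to a soft line map and then binarized, the same form that
+# NEDT below thresholds at 0.5 before feeding batch_edt in the derived
+# distances that follow.
+def _demo_batch_dog():
+    imgs = torch.rand(2, 3, 128, 128)          # (bs, rgb, h, w), values in [0, 1]
+    lines = batch_dog(imgs, t=2.0, sigma=1.0)  # -> (bs, 1, h, w), clipped to [0, 1]
+    return (lines > 0.5).float()               # binary line map, as thresholded in NEDT
+
+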
+############### DERIVED DISTANCES ############### + +# input: (bs,h,w) or (bs,1,h,w) +# returns: (bs,) +# normalized s.t. metric is same across proportional image scales + + +# average of two asymmetric distances +# normalized by diameter and area +def batch_chamfer_distance(gt, pred, block=1024, return_more=False): + t = batch_chamfer_distance_t(gt, pred, block=block) + p = batch_chamfer_distance_p(gt, pred, block=block) + cd = (t + p) / 2 + return cd + + +def batch_chamfer_distance_t(gt, pred, block=1024, return_more=False): + assert gt.device == pred.device and gt.shape == pred.shape + bs, h, w = gt.shape[0], gt.shape[-2], gt.shape[-1] + dpred = batch_edt(pred, block=block) + cd = (gt * dpred).float().mean((-2, -1)) / np.sqrt(h**2 + w**2) + if len(cd.shape) == 2: + assert cd.shape[1] == 1 + cd = cd.squeeze(1) + return cd + + +def batch_chamfer_distance_p(gt, pred, block=1024, return_more=False): + assert gt.device == pred.device and gt.shape == pred.shape + bs, h, w = gt.shape[0], gt.shape[-2], gt.shape[-1] + dgt = batch_edt(gt, block=block) + cd = (pred * dgt).float().mean((-2, -1)) / np.sqrt(h**2 + w**2) + if len(cd.shape) == 2: + assert cd.shape[1] == 1 + cd = cd.squeeze(1) + return cd + + +# normalized by diameter +# always between [0,1] +def batch_hausdorff_distance(gt, pred, block=1024, return_more=False): + assert gt.device == pred.device and gt.shape == pred.shape + bs, h, w = gt.shape[0], gt.shape[-2], gt.shape[-1] + dgt = batch_edt(gt, block=block) + dpred = batch_edt(pred, block=block) + hd = torch.stack( + [ + (dgt * pred).amax(dim=(-2, -1)), + (dpred * gt).amax(dim=(-2, -1)), + ] + ).amax(dim=0).float() / np.sqrt(h**2 + w**2) + if len(hd.shape) == 2: + assert hd.shape[1] == 1 + hd = hd.squeeze(1) + return hd + + +#################### UTILITIES #################### + + +def reset_parameters(model): + for layer in model.children(): + if hasattr(layer, "reset_parameters"): + layer.reset_parameters() + return model + + +def channel_squeeze(x, dim=1): + a = x.shape[:dim] + b = x.shape[dim + 2 :] + return x.reshape(*a, -1, *b) + + +def channel_unsqueeze(x, shape, dim=1): + a = x.shape[:dim] + b = x.shape[dim + 1 :] + return x.reshape(*a, *shape, *b) + + +def default_collate(items, device=None): + return to(dict(torch.utils.data.dataloader.default_collate(items)), device) + + +def to(x, device): + if device is None: + return x + if issubclass(x.__class__, dict): + return dict( + { + k: v.to(device) if isinstance(v, torch.Tensor) else v + for k, v in x.items() + } + ) + if isinstance(x, torch.Tensor): + return x.to(device) + if isinstance(x, np.ndarray): + return torch.tensor(x).to(device) + assert 0, "data not understood" + + +################ PARSING ################ + +from argparse import Namespace + +# args: all args +# bargs: base args +# pargs: data processing args +# largs: data loading args +# margs: model args +# targs: training args + + +# typically used to read dataset filters +def read_filter(fn, cast=None, sort=True, sort_key=None): + if cast is None: + cast = lambda x: x + ans = [cast(line) for line in read(fn).split("\n") if line != ""] + if sort: + return sorted(ans, key=sort_key) + else: + return ans + + +################ FILE MANAGEMENT ################ + + +def mkfile(fn, parents=True, exist_ok=True): + dn = "/".join(fn.split("/")[:-1]) + mkdir(dn, parents=parents, exist_ok=exist_ok) + return fn + + +def mkdir(dn, parents=True, exist_ok=True): + pathlib.Path(dn).mkdir(parents=parents, exist_ok=exist_ok) + return dn if (not dn[-1] == "/" or dn == "/") else 
dn[:-1] + + +def fstrip(fn, return_more=False): + dspl = fn.split("/") + dn = "/".join(dspl[:-1]) if len(dspl) > 1 else "." + fn = dspl[-1] + fspl = fn.split(".") + if len(fspl) == 1: + bn = fspl[0] + ext = "" + else: + bn = ".".join(fspl[:-1]) + ext = fspl[-1] + if return_more: + return Namespace( + dn=dn, + fn=fn, + path=f"{dn}/{fn}", + bn_path=f"{dn}/{bn}", + bn=bn, + ext=ext, + ) + else: + return bn + + +def read(fn, mode="r"): + with open(fn, mode) as handle: + return handle.read() + + +def write(text, fn, mode="w"): + mkfile(fn, parents=True, exist_ok=True) + with open(fn, mode) as handle: + return handle.write(text) + + +import pickle + + +def dump(obj, fn, mode="wb"): + mkfile(fn, parents=True, exist_ok=True) + with open(fn, mode) as handle: + return pickle.dump(obj, handle) + + +def load(fn, mode="rb"): + with open(fn, mode) as handle: + return pickle.load(handle) + + +import json + + +def jwrite(x, fn, mode="w", indent="\t", sort_keys=False): + mkfile(fn, parents=True, exist_ok=True) + with open(fn, mode) as handle: + return json.dump(x, handle, indent=indent, sort_keys=sort_keys) + + +def jread(fn, mode="r"): + with open(fn, mode) as handle: + return json.load(handle) + + +try: + import yaml + + def ywrite(x, fn, mode="w", default_flow_style=False): + mkfile(fn, parents=True, exist_ok=True) + with open(fn, mode) as handle: + return yaml.dump(x, handle, default_flow_style=default_flow_style) + + def yread(fn, mode="r"): + with open(fn, mode) as handle: + return yaml.safe_load(handle) + +except: + pass + +try: + import pyunpack +except: + pass + +try: + import mysql + import mysql.connector +except: + pass + + +################ MISC ################ + +hakase = "./env/__hakase__.jpg" +if not os.path.isfile(hakase): + hakase = "./__env__/__hakase__.jpg" + + +def mem(units="m"): + return ( + psProcess(os.getpid()).memory_info().rss + / { + "b": 1, + "k": 1e3, + "m": 1e6, + "g": 1e9, + "t": 1e12, + }[units[0].lower()] + ) + + +def chunk(array, length, colwise=True): + if colwise: + return [array[i : i + length] for i in range(0, len(array), length)] + else: + return chunk(array, int(math.ceil(len(array) / length)), colwise=True) + + +def classtree(x): + return inspect.getclasstree(inspect.getmro(x)) + + +################ AESTHETIC ################ + + +class Table: + def __init__( + self, + table, + delimiter=" ", + orientation="br", + double_colon=True, + ): + self.delimiter = delimiter + self.orientation = orientation + self.t = Table.parse(table, delimiter, orientation, double_colon) + return + + # rendering + def __str__(self): + return self.render() + + def __repr__(self): + return self.render() + + def render(self): + # set up empty entry + empty = ("", Table._spec(self.orientation, transpose=False)) + + # calculate table size + t = copy.deepcopy(self.t) + totalrows = len(t) + totalcols = [len(r) for r in t] + assert min(totalcols) == max(totalcols) + totalcols = totalcols[0] + + # string-ify + for i in range(totalrows): + for j in range(totalcols): + x, s = t[i][j] + sp = s[11] + if sp: + x = eval(f'f"{{{x}{sp}}}"') + Table._put((str(x), s), t, (i, j), empty) + + # expand delimiters + _repl = ( + lambda s: s[:2] + (1, 0, 0, 0, 0) + s[7:10] + (1,) + s[11:] + if s[2] + else s[:2] + (0, 0, 0, 0, 0) + s[7:10] + (1,) + s[11:] + ) + for i, row in enumerate(t): + for j, (x, s_own) in enumerate(row): + # expand delim_up(^) + if s_own[3]: + u, v = i, j + while 0 <= u: + _, s = t[u][v] + if (i, j) != (u, v) and (s[2] and not s[10]): + break + Table._put((x, _repl(s)), t, (u, v), empty) 
+ u -= 1 + + # expand delim_down(v) + if s_own[4]: + u, v = i, j + while u < totalrows: + _, s = t[u][v] + if (i, j) != (u, v) and (s[2] and not s[10]): + break + Table._put((x, _repl(s)), t, (u, v), empty) + u += 1 + + # expand delim_right(>) + if s_own[5]: + u, v = i, j + while v < totalcols: + _, s = t[u][v] + if (i, j) != (u, v) and (s[2] and not s[10]): + break + Table._put((x, _repl(s)), t, (u, v), empty) + v += 1 + + # expand delim_left(<) + if s_own[6]: + u, v = i, j + while 0 <= v: + _, s = t[u][v] + if (i, j) != (u, v) and (s[2] and not s[10]): + break + Table._put((x, _repl(s)), t, (u, v), empty) + v -= 1 + + # justification calculation + widths = [ + 0, + ] * totalcols # j + heights = [ + 0, + ] * totalrows # i + for i, row in enumerate(t): + for j, (x, s) in enumerate(row): + # height caclulation + heights[i] = max(heights[i], x.count("\n")) + + # width calculation; non-delim fillers no contribution + if s[2] or not s[10]: + w = max(len(q) for q in x.split("\n")) + widths[j] = max(widths[j], w) + # no newline ==> height=1 + heights = [h + 1 for h in heights] + + # render table + rend = [] + roff = 0 + for i, row in enumerate(t): + for j, (x, s) in enumerate(row): + w, h = widths[j], heights[i] + + # expand fillers and delimiters + if s[2] or s[10]: + xs = x.split("\n") + xw0 = min(len(l) for l in xs) + xw1 = max(len(l) for l in xs) + xh = len(xs) + if (xw0 == xw1 == w) and (xh == h): + pass + elif xw0 == xw1 == w: + x = "\n".join( + [ + xs[0], + ] + * h + ) + elif xh == h: + x = "\n".join([(l[0] if l else "") * w for l in xs]) + else: + x = x[0] if x else " " + x = "\n".join( + [ + x * w, + ] + * h + ) + + # justify horizontally + x = [l.rjust(w) if s[0] else l.ljust(w) for l in x.split("\n")] + + # justify vertically + plus = [ + " " * w, + ] * (h - len(x)) + x = plus + x if not s[1] else x + plus + + # input to table + for r, xline in enumerate(x): + Table._put(xline, rend, (roff + r, j), None) + roff += h + + # return rendered string + return "\n".join(["".join(r) for r in rend]) + + # parsing + def _spec(s, transpose=False): + if ":" in s: + i = s.index(":") + sp = s[i:] + s = s[:i] + else: + sp = "" + s = s.lower() + return ( + int("r" in s), # 0:: 0:left(l) 1:right(r) + int("t" in s), # 1:: 0:bottom(b) 1:top(t) + int(any([i in s for i in [".", "<", ">", "^", "v"]])), # 2:: delim_here(.) 
+ int("^" in s if not transpose else "<" in s), # 3:: delim_up(^) + int("v" in s if not transpose else ">" in s), # 4:: delim_down(v) + int(">" in s if not transpose else "v" in s), # 5:: delim_right(>) + int("<" in s if not transpose else "^" in s), # 6:: delim_left(<) + int("+" in s), # 7:: subtable(+) + int("-" in s if not transpose else "|" in s), # 8:: subtable_horiz(-) + int("|" in s if not transpose else "-" in s), # 9:: subtable_vert(|) + int("_" in s), # 10:: fill(_); if delim, overwrite; else fit + sp, # 11:: special(:) f-string for numbers + ) + + def _put(obj, t, ij, empty): + i, j = ij + while i >= len(t): + t.append([]) + while j >= len(t[i]): + t[i].append(empty) + t[i][j] = obj + return + + def parse( + table, + delimiter=" ", + orientation="br", + double_colon=True, + ): + # disabling transpose + transpose = False + + # set up empty entry + empty = ("", Table._spec(orientation, transpose)) + + # transpose + t = [] + for i, row in enumerate(table): + for j, item in enumerate(row): + ij = (i, j) if not transpose else (j, i) + if type(item) == tuple and len(item) == 2 and type(item[1]) == str: + item = (item[0], Table._spec(item[1], transpose)) + elif double_colon and type(item) == str and "::" in item: + x, s = item.split("::") + item = (x, Table._spec(s, transpose)) + else: + item = (item, Table._spec(orientation, transpose)) + Table._put(item, t, ij, empty) + + # normalization + maxcol = 0 + maxrow = len(t) + for i, row in enumerate(t): + # take element number into account + maxcol = max(maxcol, len([i for i in row if not i[1][2]])) + + # take subtables into account + for j, (x, s) in enumerate(row): + if s[7]: + r = len(x) + maxrow = max(maxrow, i + r) + c = max(len(q) for q in x) + maxcol = max(maxcol, j + c) + elif s[8]: + c = len(x) + maxcol = max(maxcol, j + c) + elif s[9]: + r = len(x) + maxrow = max(maxrow, i + r) + totalcols = 2 * maxcol + 1 + totalrows = maxrow + t += [[]] * (totalrows - len(t)) + newt = [] + delim = (delimiter, Table._spec("._" + orientation, transpose)) + for i, row in enumerate(t): + wasd = False + tcount = 0 + for j in range(totalcols): + item = t[i][tcount] if tcount < len(t[i]) else empty + isd = item[1][2] + if wasd and isd: + Table._put(empty, newt, (i, j), empty) + wasd = False + elif wasd and not isd: + Table._put(item, newt, (i, j), empty) + tcount += 1 + wasd = False + elif not wasd and isd: + Table._put(item, newt, (i, j), empty) + tcount += 1 + wasd = True + elif not wasd and not isd: + Table._put(delim, newt, (i, j), empty) + wasd = True + t = newt + + # normalization: add dummy last column for delimiter + for row in t: + row.append(empty) + + # expand subtables + delim_cols = [i for i in range(totalcols) if i % 2 == 0] + while True: + # find a table + ij = None + for i, row in enumerate(t): + for j, item in enumerate(row): + st, s = item + if s[7]: + ij = i, j, 7, st, s + break + elif s[8]: + ij = i, j, 8, st, s + break + elif s[9]: + ij = i, j, 9, st, s + break + if ij is not None: + break + if ij is None: + break + + # replace its specs + i, j, k, st, s = ij + s = list(s) + s[7] = s[8] = s[9] = 0 + s = tuple(s) + + # expand it + if k == 7: # 2d table + for x, row in enumerate(st): + for y, obj in enumerate(row): + a = i + x if not transpose else i + y + b = j + 2 * y if not transpose else j + 2 * x + Table._put((obj, s), t, (a, b), None) + if k == 8: # subtable_horiz + for y, obj in enumerate(st): + Table._put((obj, s), t, (i, j + 2 * y), None) + if k == 9: # subtable_vert + for x, obj in enumerate(st): + Table._put((obj, s), t, 
(i + x, j), None) + + # return, finally + return t + + +class Resnet(nn.Module): + def __init__(self, channels): + super().__init__() + self.channels = ch = channels + self.net = nn.Sequential( + nn.PReLU(ch), + nn.Conv2d(ch, ch, kernel_size=3, padding=1), + nn.BatchNorm2d(ch), + nn.PReLU(ch), + nn.Conv2d(ch, ch, kernel_size=3, padding=1), + nn.BatchNorm2d(ch), + ) + return + + def forward(self, x): + return x + self.net(x) + + +class Synthesizer(nn.Module): + def __init__( + self, size, channels_image, channels_flow, channels_mask, channels_feature + ): + super().__init__() + self.size = size + self.diam = diam(self.size) + self.channels_image = cimg = channels_image + self.channels_flow = cflow = channels_flow + self.channels_mask = cmask = channels_mask + self.channels_feature = cfeat = channels_feature + self.channels = ch = cimg + cflow // 2 + cmask + cfeat + self.interpolator = Interpolator(self.size, mode="bilinear") + self.net = nn.Sequential( + nn.Conv2d(ch + 3, 64, kernel_size=1, padding=0), + Resnet(64), + nn.Sequential( + nn.PReLU(64), + nn.Conv2d(64, 32, kernel_size=3, padding=1), + nn.BatchNorm2d(32), + ), + Resnet(32), + nn.Sequential( + nn.PReLU(32), + nn.Conv2d(32, 16, kernel_size=3, padding=1), + nn.BatchNorm2d(16), + ), + Resnet(16), + nn.Sequential( + nn.PReLU(16), + nn.Conv2d(16, 3, kernel_size=3, padding=1), + ), + ) + return + + def forward(self, images, flows, masks, features, return_more=False): + itp = self.interpolator + images = [ + (images[0] + images[1]) / 2, + ] + images + logimgs = [itp(pixel_logit(i[:, :3])) for i in images] + cat = torch.cat( + [ + *logimgs, + *[itp(f).norm(dim=1, keepdim=True) / self.diam for f in flows], + *[itp(m) for m in masks], + *[itp(f) for f in features], + ], + dim=1, + ) + residual = self.net(cat) + return torch.sigmoid(logimgs[0] + 0.5 * residual), ( + locals() if return_more else None + ) + + +class FlowZMetric(nn.Module): + def __init__(self): + super().__init__() + return + + def forward(self, img0, img1, flow0, flow1, return_more=False): + # B(i0,f0) = i1 + # B(i1,f1) = i0 + # F(x,f0,z0) + # F(x,f1,z1) + img0 = kornia.color.rgb_to_lab(img0[:, :3]) + img1 = kornia.color.rgb_to_lab(img1[:, :3]) + return [ + -0.1 * (img1 - flow_backwarp(img0, flow0)).norm(dim=1, keepdim=True), # z0 + -0.1 * (img0 - flow_backwarp(img1, flow1)).norm(dim=1, keepdim=True), # z1 + ], (locals() if return_more else None) + + +class NEDT(nn.Module): + def __init__(self): + super().__init__() + return + + def forward( + self, + img, + t=2.0, + sigma_factor=1 / 540, + k=1.6, + epsilon=0.01, + kernel_factor=4, + exp_factor=540 / 15, + return_more=False, + ): + with torch.no_grad(): + dog = batch_dog( + img, + t=t, + sigma=img.shape[-2] * sigma_factor, + k=k, + epsilon=epsilon, + kernel_factor=kernel_factor, + clip=False, + ) + edt = batch_edt((dog > 0.5).float()) + ans = 1 - (-edt * exp_factor / max(edt.shape[-2:])).exp() + return ans, (locals() if return_more else None) + + +class HalfWarper(nn.Module): + def __init__(self): + super().__init__() + self.channels_image = 4 * 3 + self.channels_flow = 2 * 2 + self.channels_mask = 2 * 1 + self.channels = self.channels_image + self.channels_flow + self.channels_mask + + def morph_open(self, x, k): + if k == 0: + return x + else: + with torch.no_grad(): + return kornia.morphology.opening(x, torch.ones(k, k, device=x.device)) + + def forward(self, img0, img1, flow0, flow1, z0, z1, k, t=0.5, return_more=False): + # forewarps + flow0_ = (1 - t) * flow0 + flow1_ = t * flow1 + f01 = forewarp(img0, flow1_, 
mode="sm", metric=z1, mask=True) + f10 = forewarp(img1, flow0_, mode="sm", metric=z0, mask=True) + f01i, f01m = f01[:, :-1], self.morph_open(f01[:, -1:], k=k) + f10i, f10m = f10[:, :-1], self.morph_open(f10[:, -1:], k=k) + + # base guess + base0 = f01m * f01i + (1 - f01m) * f10i + base1 = f10m * f10i + (1 - f10m) * f01i + ans = [ + [ # images + base0, + base1, + f01i, + f10i, + ], + [ # flows + flow0_, + flow1_, + ], + [ # masks + f01m, + f10m, + ], + ] + return ans, (locals() if return_more else None) + + +class ResnetFeatureExtractor(nn.Module): + def __init__(self, inferserve_query, size_in=None): + super().__init__() + self.inferserve_query = iq = inferserve_query + self.size_in = si = size_in + if iq[0] == "torchvision": + # use pytorch pretrained resnet50 + self.base_hparams = None + resnet = tv.models.resnet50(pretrained=True) + + self.resize = T.Resize(256) + self.resnet_preprocess = T.Normalize( + mean=[0.485, 0.456, 0.406], + std=[0.229, 0.224, 0.225], + ) + self.conv1 = resnet.conv1 + self.bn1 = resnet.bn1 + self.relu = resnet.relu # 64ch, 128p (assuming 256p input) + self.maxpool = resnet.maxpool + self.layer1 = resnet.layer1 # 256ch, 64p + self.layer2 = resnet.layer2 # 512ch, 32p + else: + base = userving.infer_model_load(*iq).eval() + self.base_hparams = base.hparams + + self.resize = T.Resize(base.hparams.largs.size) + self.resnet_preprocess = base.resnet_preprocess + self.conv1 = base.resnet.conv1 + self.bn1 = base.resnet.bn1 + self.relu = base.resnet.relu # 64ch, 128p (assuming 256p input) + self.maxpool = base.resnet.maxpool + self.layer1 = base.resnet.layer1 # 256ch, 64p + self.layer2 = base.resnet.layer2 # 512ch, 32p + if self.size_in is None: + self.sizes_out = None + else: + s = self.resize.size + self.sizes_out = [ + pixel_ij( + rescale_dry(si, (s // 2) / si[0]), rounding="ceil" + ), # conv1, 128p + pixel_ij( + rescale_dry(si, (s // 4) / si[0]), rounding="ceil" + ), # layer1, 64p + pixel_ij( + rescale_dry(si, (s // 8) / si[0]), rounding="ceil" + ), # layer2, 32p + ] + self.channels = [ + 64, + 256, + 512, + ] + return + + def forward(self, x, force_sizes_out=False, return_more=False): + ans = [] + x = x[:, :3] + x = self.resize(x) + x = self.resnet_preprocess(x) + x = self.conv1(x) + x = self.bn1(x) + x = self.relu(x) + ans.append(x) # conv1 + x = self.maxpool(x) + x = self.layer1(x) + ans.append(x) # layer1 + x = self.layer2(x) + ans.append(x) # layer2 + if force_sizes_out or (self.sizes_out is None): + self.sizes_out = [tuple(q.shape[-2:]) for q in ans] + return ans, (locals() if return_more else None) + + +class NetNedt(nn.Module): + def __init__(self): + super().__init__() + chin = 3 + 1 + 4 + 4 + 1 + 1 + ch = 16 + chout = 1 + self.net = nn.Sequential( + nn.PReLU(chin), + nn.Conv2d(chin, ch, kernel_size=3, padding=1), + nn.BatchNorm2d(ch), + nn.PReLU(ch), + nn.Conv2d(ch, ch, kernel_size=3, padding=1), + nn.BatchNorm2d(ch), + nn.PReLU(ch), + nn.Conv2d(ch, chout, kernel_size=3, padding=1), + ) + return + + def forward(self, out_base, out_base_nedt, hw_imgs, hw_masks, return_more=False): + cat = torch.cat( + [ + out_base, # 3 + out_base_nedt, # 1 + hw_imgs[0], # 4 + hw_imgs[1], # 4 + hw_masks[0], # 1 + hw_masks[1], # 1 + ], + dim=1, + ) + log = pixel_logit(cat.clip(0, 1)) + ans = torch.sigmoid(self.net(log)) + return ans, (locals() if return_more else None) + + +class NetTail(nn.Module): + def __init__(self): + super().__init__() + chin = 3 + 1 + 1 + ch = 16 + chout = 3 + self.net = nn.Sequential( + nn.PReLU(chin), + nn.Conv2d(chin, ch, kernel_size=3, padding=1), + 
nn.BatchNorm2d(ch), + nn.PReLU(ch), + nn.Conv2d(ch, ch, kernel_size=3, padding=1), + nn.BatchNorm2d(ch), + nn.PReLU(ch), + nn.Conv2d(ch, ch, kernel_size=3, padding=1), + nn.BatchNorm2d(ch), + nn.PReLU(ch), + nn.Conv2d(ch, chout, kernel_size=3, padding=1), + ) + return + + def forward(self, out_base, out_base_nedt, pred_nedt, return_more=False): + cat = torch.cat( + [ + out_base, # 3 + out_base_nedt, # 1 + pred_nedt, # 1 + ], + dim=1, + ) + log = pixel_logit(cat.clip(0, 1)) + ans = torch.sigmoid(log[:, :3] + self.net(log)) + return ans, (locals() if return_more else None) + + +class SoftsplatLite(nn.Module): + def __init__(self): + super().__init__() + self.feature_extractor = ResnetFeatureExtractor( + ("torchvision", "resnet50"), + (540, 960), + ) + self.z_metric = FlowZMetric() + self.flow_downsamplers = [ + Interpolator(s, mode="bilinear") for s in self.feature_extractor.sizes_out + ] + self.gridnet_converter = GridnetConverter( + self.feature_extractor.channels, + [32, 64, 128], + ) + self.gridnet = Gridnet( + *[32, 64, 128], + total_dropout_p=0.0, + depth=1, # equivalent to u-net + ) + self.nedt = NEDT() + self.half_warper = HalfWarper() + self.synthesizer = Synthesizer( + (540, 960), + self.half_warper.channels_image, + self.half_warper.channels_flow, + self.half_warper.channels_mask, + self.gridnet.channels_0, + ) + return + + def forward(self, x, t=0.5, k=5, return_more=False): + rm = return_more + flow0, flow1 = x["flows"].swapaxes(0, 1) + img0, img1 = x["images"][:, 0], x["images"][:, -1] + (z0, z1), locs_z = self.z_metric(img0, img1, flow0, flow1, return_more=rm) + img0 = torch.cat([img0, self.nedt(img0)[0]], dim=1) + img1 = torch.cat([img1, self.nedt(img1)[0]], dim=1) + + # images and flows + (hw_imgs, hw_flows, hw_masks), locs_hw = self.half_warper( + img0, + img1, + flow0, + flow1, + z0, + z1, + k, + t=t, + return_more=rm, + ) + + # features + feats0, locs_fe0 = self.feature_extractor(img0, return_more=rm) + feats1, locs_fe1 = self.feature_extractor(img1, return_more=rm) + warps = [] + for ft0, ft1, ds in zip(feats0, feats1, self.flow_downsamplers): + (w, _, _), _ = self.half_warper( + ft0, + ft1, + ds(flow0, 1), + ds(flow1, 1), + ds(z0), + ds(z1), + k, + t=t, + ) + warps.append((w[0] + w[1]) / 2) + feats = self.gridnet(self.gridnet_converter(warps)) + + # synthesis + pred, locs_synth = self.synthesizer( + hw_imgs, + hw_flows, + hw_masks, + [ + feats[0], + ], + return_more=rm, + ) + return pred, (locals() if rm else None) + + +class DTM(nn.Module): + def __init__(self): + super().__init__() + self.net_nedt = NetNedt() + self.net_tail = NetTail() + self.nedt = NEDT() + return + + def forward(self, x, out_base, locs_base, return_more=False): + rm = return_more + with torch.no_grad(): + out_base_nedt, locs_base_nedt = self.nedt(out_base, return_more=rm) + hw_imgs, hw_masks = locs_base["hw_imgs"], locs_base["hw_masks"] + pred_nedt, locs_nedt = self.net_nedt( + out_base, out_base_nedt, hw_imgs, hw_masks, return_more=rm + ) + pred, locs_tail = self.net_tail( + out_base, out_base_nedt, pred_nedt.clone().detach(), return_more=rm + ) + return torch.cat([pred, pred_nedt], dim=1), (locals() if rm else None) + + +class RAFT(nn.Module): + def __init__(self, path="/workspace/tensorrt/models/anime_interp_full.ckpt"): + super().__init__() + self.raft = RFR( + Namespace( + small=False, + mixed_precision=False, + ) + ) + if path is not None: + sd = torch.load(path)["model_state_dict"] + self.raft.load_state_dict( + { + k[len("module.flownet.") :]: v + for k, v in sd.items() + if 
k.startswith("module.flownet.") + }, + strict=False, + ) + return + + def forward(self, img0, img1, flow0=None, iters=12, return_more=False): + if flow0 is not None: + flow0 = flow0.flip(dims=(1,)) + out = self.raft(img1, img0, iters=iters, flow_init=flow0) + return out[0].flip(dims=(1,)), (locals() if return_more else None) diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/film/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/film/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1f89f1bdc63d149a36c72614b3f1c7ed7ffa7962 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/film/__init__.py @@ -0,0 +1,113 @@ +import torch +from comfy.model_management import get_torch_device, soft_empty_cache +import bisect +import numpy as np +import typing +from vfi_utils import InterpolationStateList, load_file_from_github_release, preprocess_frames, postprocess_frames +import pathlib +import gc + +MODEL_TYPE = pathlib.Path(__file__).parent.name +DEVICE = get_torch_device() +def inference(model, img_batch_1, img_batch_2, inter_frames): + results = [ + img_batch_1, + img_batch_2 + ] + + idxes = [0, inter_frames + 1] + remains = list(range(1, inter_frames + 1)) + + splits = torch.linspace(0, 1, inter_frames + 2) + + for _ in range(len(remains)): + starts = splits[idxes[:-1]] + ends = splits[idxes[1:]] + distances = ((splits[None, remains] - starts[:, None]) / (ends[:, None] - starts[:, None]) - .5).abs() + matrix = torch.argmin(distances).item() + start_i, step = np.unravel_index(matrix, distances.shape) + end_i = start_i + 1 + + x0 = results[start_i].to(DEVICE) + x1 = results[end_i].to(DEVICE) + dt = x0.new_full((1, 1), (splits[remains[step]] - splits[idxes[start_i]])) / (splits[idxes[end_i]] - splits[idxes[start_i]]) + + with torch.no_grad(): + prediction = model(x0, x1, dt) + insert_position = bisect.bisect_left(idxes, remains[step]) + idxes.insert(insert_position, remains[step]) + results.insert(insert_position, prediction.clamp(0, 1).float()) + del remains[step] + + return [tensor.flip(0) for tensor in results] + +class FILM_VFI: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "ckpt_name": (["film_net_fp32.pt"], ), + "frames": ("IMAGE", ), + "clear_cache_after_n_frames": ("INT", {"default": 10, "min": 1, "max": 1000}), + "multiplier": ("INT", {"default": 2, "min": 2, "max": 1000}), + }, + "optional": { + "optional_interpolation_states": ("INTERPOLATION_STATES", ) + } + } + + RETURN_TYPES = ("IMAGE", ) + FUNCTION = "vfi" + CATEGORY = "ComfyUI-Frame-Interpolation/VFI" + + def vfi( + self, + ckpt_name: typing.AnyStr, + frames: torch.Tensor, + clear_cache_after_n_frames = 10, + multiplier: typing.SupportsInt = 2, + optional_interpolation_states: InterpolationStateList = None, + **kwargs + ): + interpolation_states = optional_interpolation_states + model_path = load_file_from_github_release(MODEL_TYPE, ckpt_name) + model = torch.jit.load(model_path, map_location='cpu') + model.eval() + model = model.to(DEVICE) + dtype = torch.float32 + + frames = preprocess_frames(frames) + number_of_frames_processed_since_last_cleared_cuda_cache = 0 + output_frames = [] + + if type(multiplier) == int: + multipliers = [multiplier] * len(frames) + else: + multipliers = list(map(int, multiplier)) + multipliers += [2] * (len(frames) - len(multipliers) - 1) + for frame_itr in range(len(frames) - 1): # Skip the final frame since there are no frames after it + if interpolation_states is not None 
and interpolation_states.is_frame_skipped(frame_itr):
+                continue
+            # Ensure that input frames are fp32 - the same dtype as the model
+            frame_0 = frames[frame_itr:frame_itr+1].to(DEVICE).float()
+            frame_1 = frames[frame_itr+1:frame_itr+2].to(DEVICE).float()
+            result = inference(model, frame_0, frame_1, multipliers[frame_itr] - 1)
+            output_frames.extend([frame.detach().cpu().to(dtype=dtype) for frame in result[:-1]])
+
+            number_of_frames_processed_since_last_cleared_cuda_cache += 1
+            # Try to avoid a memory overflow by clearing the CUDA cache regularly
+            if number_of_frames_processed_since_last_cleared_cuda_cache >= clear_cache_after_n_frames:
+                print("Comfy-VFI: Clearing cache...", end=' ')
+                soft_empty_cache()
+                number_of_frames_processed_since_last_cleared_cuda_cache = 0
+                print("Done cache clearing")
+            gc.collect()
+
+        output_frames.append(frames[-1:].to(dtype=dtype))  # Append final frame
+        output_frames = [frame.cpu() for frame in output_frames]  # Ensure all frames are on the CPU
+        out = torch.cat(output_frames, dim=0)
+        # clear cache for courtesy
+        print("Comfy-VFI: Final clearing cache...", end=' ')
+        soft_empty_cache()
+        print("Done cache clearing")
+        return (postprocess_frames(out), )
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/film/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/film/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c22fc1b72c5ee81a7872d94e8b336fa1c88ef2d0
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/film/__pycache__/__init__.cpython-310.pyc differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/film/film_arch.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/film/film_arch.py
new file mode 100644
index 0000000000000000000000000000000000000000..cee86d8b0c9c3f9a5b3032716bb235aa9eaa1f50
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/film/film_arch.py
@@ -0,0 +1,788 @@
+"""
+https://github.com/dajes/frame-interpolation-pytorch/blob/main/feature_extractor.py
+https://github.com/dajes/frame-interpolation-pytorch/blob/main/fusion.py
+https://github.com/dajes/frame-interpolation-pytorch/blob/main/interpolator.py
+https://github.com/dajes/frame-interpolation-pytorch/blob/main/pyramid_flow_estimator.py
+https://github.com/dajes/frame-interpolation-pytorch/blob/main/util.py
+"""
+
+"""PyTorch layer for extracting image features for the film_net interpolator.
+
+The feature extractor implemented here converts an image pyramid into a pyramid
+of deep features. The feature pyramid serves a similar purpose as the encoder
+of a U-Net architecture, but we use a special cascaded architecture described
+in Multi-view Image Fusion [1].
+
+For completeness, below is a short description of the idea. While the
+description is a bit involved, the cascaded feature pyramid can be used just
+like any image feature pyramid.
+
+Why cascaded architecture?
+==========================
+To understand the concept it is worth reviewing a traditional feature pyramid
+first: *a traditional feature pyramid*, as in U-net or in many optical flow
+networks, is built by alternating between convolutions and pooling, starting
+from the input image.
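+
+For example, a three-level traditional pyramid is built schematically as
+(the names here are illustrative, not actual symbols from this module):
+
+  level_0 = convs(image)
+  level_1 = convs(pool(level_0))
+  level_2 = convs(pool(level_1))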
+
+It is well known that early features of such an architecture correspond to
+low-level concepts such as edges in the image, whereas later layers extract
+semantically higher-level concepts such as object classes. In other words,
+the meaning of the filters in each resolution level is different. For problems
+such as semantic segmentation and many others this is a desirable property.
+
+However, the asymmetric features preclude sharing weights across resolution
+levels in the feature extractor itself and in any subsequent neural networks
+that follow. This can be a downside, since optical flow prediction, for
+instance, is symmetric across resolution levels. The cascaded feature
+architecture addresses this shortcoming.
+
+How is it built?
+================
+The *cascaded* feature pyramid contains feature vectors that have constant
+length and meaning on each resolution level, except for a few of the finest
+ones. The advantage of this is that the subsequent optical flow layer can
+learn synergistically from many resolutions. This means that coarse-level
+prediction can benefit from finer-resolution training examples, which can be
+useful with moderately sized datasets to avoid overfitting.
+
+The cascaded feature pyramid is built by extracting shallower subtree
+pyramids, each of them similar to the traditional architecture. Each subtree
+pyramid S_i is extracted starting from each resolution level:
+
+image resolution 0 -> S_0
+image resolution 1 -> S_1
+image resolution 2 -> S_2
+...
+
+If we denote the features at level j of subtree i as S_i_j, the cascaded pyramid
+is constructed by concatenating features as follows (assuming subtree depth=3):
+
+lvl
+feat_0 = concat( S_0_0 )
+feat_1 = concat( S_1_0 S_0_1 )
+feat_2 = concat( S_2_0 S_1_1 S_0_2 )
+feat_3 = concat( S_3_0 S_2_1 S_1_2 )
+feat_4 = concat( S_4_0 S_3_1 S_2_2 )
+feat_5 = concat( S_5_0 S_4_1 S_3_2 )
+ ....
+
+In the above, all levels except feat_0 and feat_1 have the same number of
+features with similar semantic meaning. This enables training a single optical
+flow predictor module shared by levels 2, 3, 4, 5, ... For more details and
+evaluation see [1].
+
+[1] Multi-view Image Fusion, Trinidad et al. 2019
+"""
+from typing import List
+
+import torch
+from torch import nn
+from torch.nn import functional as F
+
+
+class SubTreeExtractor(nn.Module):
+    """Extracts a hierarchical set of features from an image.
+
+    This is a conventional, hierarchical image feature extractor that extracts
+    [k, k*2, k*4, ...] filters for the image pyramid, where k is the base
+    number of filters (the `channels` argument). Each level is followed by
+    average pooling.
+    """
+
+    def __init__(self, in_channels=3, channels=64, n_layers=4):
+        super().__init__()
+        convs = []
+        for i in range(n_layers):
+            convs.append(nn.Sequential(
+                conv(in_channels, (channels << i), 3),
+                conv((channels << i), (channels << i), 3)
+            ))
+            in_channels = channels << i
+        self.convs = nn.ModuleList(convs)
+
+    def forward(self, image: torch.Tensor, n: int) -> List[torch.Tensor]:
+        """Extracts a pyramid of features from the image.
+
+        Args:
+            image: torch.Tensor with shape BATCH_SIZE x CHANNELS x HEIGHT x WIDTH.
+            n: number of pyramid levels to extract. This can be less than or
+                equal to n_layers given in the __init__.
+        Returns:
+            The pyramid of features, starting from the finest level. Each element
+            contains the output after the last convolution on the corresponding
+            pyramid level.
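+
+        Example (shapes only; assumes channels=64, n=3, a 128x128 RGB input
+        in B x C x H x W layout, and that `conv` preserves spatial size): the
+        returned pyramid is [B x 64 x 128 x 128, B x 128 x 64 x 64,
+        B x 256 x 32 x 32], since the filter count doubles and the resolution
+        halves at each level.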
+ """ + head = image + pyramid = [] + for i, layer in enumerate(self.convs): + head = layer(head) + pyramid.append(head) + if i < n - 1: + head = F.avg_pool2d(head, kernel_size=2, stride=2) + return pyramid + + +class FeatureExtractor(nn.Module): + """Extracts features from an image pyramid using a cascaded architecture. + """ + + def __init__(self, in_channels=3, channels=64, sub_levels=4): + super().__init__() + self.extract_sublevels = SubTreeExtractor(in_channels, channels, sub_levels) + self.sub_levels = sub_levels + + def forward(self, image_pyramid: List[torch.Tensor]) -> List[torch.Tensor]: + """Extracts a cascaded feature pyramid. + + Args: + image_pyramid: Image pyramid as a list, starting from the finest level. + Returns: + A pyramid of cascaded features. + """ + sub_pyramids: List[List[torch.Tensor]] = [] + for i in range(len(image_pyramid)): + # At each level of the image pyramid, creates a sub_pyramid of features + # with 'sub_levels' pyramid levels, re-using the same SubTreeExtractor. + # We use the same instance since we want to share the weights. + # + # However, we cap the depth of the sub_pyramid so we don't create features + # that are beyond the coarsest level of the cascaded feature pyramid we + # want to generate. + capped_sub_levels = min(len(image_pyramid) - i, self.sub_levels) + sub_pyramids.append(self.extract_sublevels(image_pyramid[i], capped_sub_levels)) + # Below we generate the cascades of features on each level of the feature + # pyramid. Assuming sub_levels=3, The layout of the features will be + # as shown in the example on file documentation above. + feature_pyramid: List[torch.Tensor] = [] + for i in range(len(image_pyramid)): + features = sub_pyramids[i][0] + for j in range(1, self.sub_levels): + if j <= i: + features = torch.cat([features, sub_pyramids[i - j][j]], dim=1) + feature_pyramid.append(features) + return feature_pyramid + + + + + + + + + + + +"""The final fusion stage for the film_net frame interpolator. + +The inputs to this module are the warped input images, image features and +flow fields, all aligned to the target frame (often midway point between the +two original inputs). The output is the final image. FILM has no explicit +occlusion handling -- instead using the abovementioned information this module +automatically decides how to best blend the inputs together to produce content +in areas where the pixels can only be borrowed from one of the inputs. + +Similarly, this module also decides on how much to blend in each input in case +of fractional timestep that is not at the halfway point. For example, if the two +inputs images are at t=0 and t=1, and we were to synthesize a frame at t=0.1, +it often makes most sense to favor the first input. However, this is not +always the case -- in particular in occluded pixels. + +The architecture of the Fusion module follows U-net [1] architecture's decoder +side, e.g. each pyramid level consists of concatenation with upsampled coarser +level output, and two 3x3 convolutions. + +The upsampling is implemented as 'resize convolution', e.g. nearest neighbor +upsampling followed by 2x2 convolution as explained in [2]. The classic U-net +uses max-pooling which has a tendency to create checkerboard artifacts. + +[1] Ronneberger et al. 
U-Net: Convolutional Networks for Biomedical Image
+    Segmentation, 2015, https://arxiv.org/pdf/1505.04597.pdf
+[2] https://distill.pub/2016/deconv-checkerboard/
+"""
+from typing import List
+
+import torch
+from torch import nn
+from torch.nn import functional as F
+
+
+_NUMBER_OF_COLOR_CHANNELS = 3
+
+
+def get_channels_at_level(level, filters):
+    n_images = 2
+    channels = _NUMBER_OF_COLOR_CHANNELS
+    flows = 2
+
+    return (sum(filters << i for i in range(level)) + channels + flows) * n_images
+
+
+class Fusion(nn.Module):
+    """The decoder."""
+
+    def __init__(self, n_layers=4, specialized_layers=3, filters=64):
+        """
+        Args:
+            n_layers: number of fusion pyramid levels.
+            specialized_layers: number of finest levels that get their own
+                (non-shared) filter counts.
+            filters: base number of filters.
+        """
+        super().__init__()
+
+        # The final convolution that outputs RGB:
+        self.output_conv = nn.Conv2d(filters, 3, kernel_size=1)
+
+        # Each item 'convs[i]' will contain the list of convolutions to be applied
+        # for pyramid level 'i'.
+        self.convs = nn.ModuleList()
+
+        # Create the convolutions. Roughly following the feature extractor, we
+        # double the number of filters when the resolution halves, but only up to
+        # the specialized_layers, after which we use the same number of filters on
+        # all levels.
+        #
+        # We create the convs in fine-to-coarse order, so that the array index
+        # for the convs will correspond to our normal indexing (0=finest level).
+        # in_channels: tuple = (128, 202, 256, 522, 512, 1162, 1930, 2442)
+
+        in_channels = get_channels_at_level(n_layers, filters)
+        increase = 0
+        for i in range(n_layers)[::-1]:
+            num_filters = (filters << i) if i < specialized_layers else (filters << specialized_layers)
+            convs = nn.ModuleList([
+                conv(in_channels, num_filters, size=2, activation=None),
+                conv(in_channels + (increase or num_filters), num_filters, size=3),
+                conv(num_filters, num_filters, size=3)]
+            )
+            self.convs.append(convs)
+            in_channels = num_filters
+            increase = get_channels_at_level(i, filters) - num_filters // 2
+
+    def forward(self, pyramid: List[torch.Tensor]) -> torch.Tensor:
+        """Runs the fusion module.
+
+        Args:
+            pyramid: The input feature pyramid as a list of tensors. Each tensor
+                is in (B x C x H x W) format, with the finest-level tensor first.
+
+        Returns:
+            A batch of RGB images.
+        Raises:
+            ValueError, if len(pyramid) != fusion_pyramid_levels as provided in
+                the constructor.
+        """
+
+        # As a slight difference to a conventional decoder (e.g. U-net), we don't
+        # apply any extra convolutions to the coarsest level, but just pass it
+        # to finer levels for concatenation. This choice has not been thoroughly
+        # evaluated, but is motivated by the educated guess that the fusion part
+        # probably does not need large spatial context, because at this point the
+        # features are spatially aligned by the preceding warp.
+        net = pyramid[-1]
+
+        # Loop starting from the 2nd coarsest level:
+        # for i in reversed(range(0, len(pyramid) - 1)):
+        for k, layers in enumerate(self.convs):
+            i = len(self.convs) - 1 - k
+            # Resize the tensor from the coarser level to match for concatenation.
+            level_size = pyramid[i].shape[2:4]
+            net = F.interpolate(net, size=level_size, mode='nearest')
+            net = layers[0](net)
+            net = torch.cat([pyramid[i], net], dim=1)
+            net = layers[1](net)
+            net = layers[2](net)
+        net = self.output_conv(net)
+        return net
+
+
+
+
+
+
+
+
+
+
+
+"""The film_net frame interpolator main model code.
+
+Basics
+======
+The film_net is an end-to-end learned neural frame interpolator implemented as
+a PyTorch model. It has the following inputs and outputs:
+
+Inputs:
+    x0: image A.
+    x1: image B.
+    time: desired sub-frame time.
+
+Outputs:
+    image: the predicted in-between image at the chosen time in range [0, 1].
+
+Additional outputs include forward and backward warped image pyramids, flow
+pyramids, etc., that can be visualized for debugging and analysis.
+
+Note that many training sets only contain triplets with ground truth at
+time=0.5. If a model has been trained with such a training set, it will only
+work well for synthesizing frames at time=0.5. Such models can only generate
+more in-between frames using recursion.
+
+Architecture
+============
+The inference consists of three main stages: 1) feature extraction, 2) warping,
+3) fusion. At a high level, the architecture has similarities to Context-aware
+Synthesis for Video Frame Interpolation [1], but the exact architecture is
+closer to Multi-view Image Fusion [2] with some modifications for the frame
+interpolation use case.
+
+The feature extraction stage employs the cascaded multi-scale architecture
+described in [2]. The advantage of this architecture is that coarse-level flow
+prediction can be learned from finer-resolution image samples. This is
+especially useful to avoid overfitting with moderately sized datasets.
+
+The warping stage uses a residual flow prediction idea that is similar to
+PWC-Net [3], Multi-view Image Fusion [2] and many others.
+
+The fusion stage is similar to U-Net's decoder, where the skip connections are
+connected to the warped image and feature pyramids. This is described in [2].
+
+Implementation Conventions
+==========================
+Pyramids
+--------
+Throughout the model, all image and feature pyramids are stored as Python lists
+with the finest level first, followed by downscaled versions obtained by
+successively halving the resolution. The depths of all pyramids are determined
+by options.pyramid_levels. The only exception to this is internal to the
+feature extractor, where smaller feature pyramids are temporarily constructed
+with depth options.sub_levels.
+
+Color ranges & gamma
+--------------------
+The model code makes no assumptions about whether the images are in gamma or
+linearized space, or what the range of RGB color values is. So a model can be
+trained with different choices. This does not mean that all the choices lead to
+similar results. In practice, the model has been shown to work well with RGB
+values in [0, 1] and gamma-space images (i.e. not linearized).
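+
+A minimal usage sketch (illustrative, not part of the original upstream
+documentation; it assumes x0 and x1 are B x 3 x H x W tensors in [0, 1] whose
+H and W have been padded to a multiple of 2**pyramid_levels):
+
+    model = Interpolator()
+    dt = torch.full((1, 1), 0.5)   # this port fixes the timestep at 0.5 internally
+    mid_frame = model(x0, x1, dt)  # B x 3 x H x W predicted frame at t=0.5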
+
+[1] Context-aware Synthesis for Video Frame Interpolation, Niklaus and Liu, 2018
+[2] Multi-view Image Fusion, Trinidad et al., 2019
+[3] PWC-Net: CNNs for Optical Flow Using Pyramid, Warping, and Cost Volume
+"""
+from typing import Dict, List
+
+import torch
+from torch import nn
+
+
+
+class Interpolator(nn.Module):
+    def __init__(
+        self,
+        pyramid_levels=7,
+        fusion_pyramid_levels=5,
+        specialized_levels=3,
+        sub_levels=4,
+        filters=64,
+        flow_convs=(3, 3, 3, 3),
+        flow_filters=(32, 64, 128, 256),
+    ):
+        super().__init__()
+        self.pyramid_levels = pyramid_levels
+        self.fusion_pyramid_levels = fusion_pyramid_levels
+
+        self.extract = FeatureExtractor(3, filters, sub_levels)
+        self.predict_flow = PyramidFlowEstimator(filters, flow_convs, flow_filters)
+        self.fuse = Fusion(sub_levels, specialized_levels, filters)
+
+    def shuffle_images(self, x0, x1):
+        return [
+            build_image_pyramid(x0, self.pyramid_levels),
+            build_image_pyramid(x1, self.pyramid_levels)
+        ]
+
+    def debug_forward(self, x0, x1, batch_dt) -> Dict[str, List[torch.Tensor]]:
+        image_pyramids = self.shuffle_images(x0, x1)
+
+        # Siamese feature pyramids:
+        feature_pyramids = [self.extract(image_pyramids[0]), self.extract(image_pyramids[1])]
+
+        # Predict forward flow.
+        forward_residual_flow_pyramid = self.predict_flow(feature_pyramids[0], feature_pyramids[1])
+
+        # Predict backward flow.
+        backward_residual_flow_pyramid = self.predict_flow(feature_pyramids[1], feature_pyramids[0])
+
+        # Concatenate features and images:
+
+        # Note that we keep up to 'fusion_pyramid_levels' levels as only those
+        # are used by the fusion module.
+
+        forward_flow_pyramid = flow_pyramid_synthesis(forward_residual_flow_pyramid)[:self.fusion_pyramid_levels]
+
+        backward_flow_pyramid = flow_pyramid_synthesis(backward_residual_flow_pyramid)[:self.fusion_pyramid_levels]
+
+        # We multiply the flows by t and 1-t to warp to the desired fractional time.
+        #
+        # Note: In film_net we fix time to be 0.5, and recursively invoke the
+        # interpolator for multi-frame interpolation. Below, we create a constant
+        # tensor of shape [B]. We use the `batch_dt` tensor to infer the batch size.
+        mid_time = torch.full_like(batch_dt, .5)
+        backward_flow = multiply_pyramid(backward_flow_pyramid, mid_time[:, 0])
+        forward_flow = multiply_pyramid(forward_flow_pyramid, 1 - mid_time[:, 0])
+
+        pyramids_to_warp = [
+            concatenate_pyramids(image_pyramids[0][:self.fusion_pyramid_levels],
+                                 feature_pyramids[0][:self.fusion_pyramid_levels]),
+            concatenate_pyramids(image_pyramids[1][:self.fusion_pyramid_levels],
+                                 feature_pyramids[1][:self.fusion_pyramid_levels])
+        ]
+
+        # Warp features and images using the flow. Note that we use backward warping
+        # and backward flow is used to read from image 0 and forward flow from
+        # image 1.
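+        # Backward warping reads the source at (position + flow), so the
+        # t-scaled backward flow samples image 0 at the target time, and the
+        # (1-t)-scaled forward flow samples image 1.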
+        forward_warped_pyramid = pyramid_warp(pyramids_to_warp[0], backward_flow)
+        backward_warped_pyramid = pyramid_warp(pyramids_to_warp[1], forward_flow)
+
+        aligned_pyramid = concatenate_pyramids(forward_warped_pyramid,
+                                               backward_warped_pyramid)
+        aligned_pyramid = concatenate_pyramids(aligned_pyramid, backward_flow)
+        aligned_pyramid = concatenate_pyramids(aligned_pyramid, forward_flow)
+
+        return {
+            'image': [self.fuse(aligned_pyramid)],
+            'forward_residual_flow_pyramid': forward_residual_flow_pyramid,
+            'backward_residual_flow_pyramid': backward_residual_flow_pyramid,
+            'forward_flow_pyramid': forward_flow_pyramid,
+            'backward_flow_pyramid': backward_flow_pyramid,
+        }
+
+
+    def forward(self, x0, x1, batch_dt) -> torch.Tensor:
+        return self.debug_forward(x0, x1, batch_dt)['image'][0]
+
+
+
+
+
+
+
+
+
+
+"""PyTorch layer for estimating optical flow by a residual flow pyramid.
+
+This approach of estimating optical flow between two images can be traced back
+to [1], but is also used by later neural optical flow computation methods such
+as SpyNet [2] and PWC-Net [3].
+
+The basic idea is that the optical flow is first estimated at a coarse
+resolution, then the flow is upsampled to warp the higher-resolution image, and
+then a residual correction is computed and added to the estimated flow. This
+process is repeated in a pyramid in coarse-to-fine order to successively
+increase the resolution of both the optical flow and the warped image.
+
+Here, the optical flow predictor is used as an internal component of the
+film_net frame interpolator, to warp the two input images toward the in-between
+target frame.
+
+[1] F. Glazer, Hierarchical motion detection. PhD thesis, 1987.
+[2] A. Ranjan and M. J. Black, Optical Flow Estimation using a Spatial Pyramid
+    Network, 2016
+[3] D. Sun, X. Yang, M.-Y. Liu and J. Kautz, PWC-Net: CNNs for Optical Flow Using
+    Pyramid, Warping, and Cost Volume, 2017
+"""
+from typing import List
+
+import torch
+from torch import nn
+from torch.nn import functional as F
+
+
+
+class FlowEstimator(nn.Module):
+    """Small receptive-field predictor for computing the flow between two images.
+
+    This is used to compute the residual flow fields in PyramidFlowEstimator.
+
+    Note that while the number of 3x3 convolutions & filters to apply is
+    configurable, two extra 1x1 convolutions are appended to extract the flow at
+    the end.
+
+    Args:
+        in_channels: Number of input channels (features of both images, concatenated)
+        num_convs: Number of 3x3 convolutions to apply
+        num_filters: Number of filters in each 3x3 convolution
+    """
+
+    def __init__(self, in_channels: int, num_convs: int, num_filters: int):
+        super(FlowEstimator, self).__init__()
+
+        self._convs = nn.ModuleList()
+        for i in range(num_convs):
+            self._convs.append(conv(in_channels=in_channels, out_channels=num_filters, size=3))
+            in_channels = num_filters
+        self._convs.append(conv(in_channels, num_filters // 2, size=1))
+        in_channels = num_filters // 2
+        # For the final convolution, we want no activation at all to predict the
+        # optical flow vector values. We have done extensive testing on explicitly
+        # bounding these values using sigmoid, but it turned out that having no
+        # activation gives better results.
+        self._convs.append(conv(in_channels, 2, size=1, activation=None))
+
+    def forward(self, features_a: torch.Tensor, features_b: torch.Tensor) -> torch.Tensor:
+        """Estimates optical flow between two images.
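+
+        The two feature maps are concatenated along the channel dimension and
+        refined by the stack of 3x3 convolutions; the final activation-free
+        1x1 convolution regresses the two flow channels.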
+
+        Args:
+            features_a: per-pixel feature vectors for image A (B x C x H x W)
+            features_b: per-pixel feature vectors for image B (B x C x H x W)
+
+        Returns:
+            A tensor with optical flow from A to B
+        """
+        net = torch.cat([features_a, features_b], dim=1)
+        for conv in self._convs:
+            net = conv(net)
+        return net
+
+
+class PyramidFlowEstimator(nn.Module):
+    """Predicts optical flow by coarse-to-fine refinement.
+    """
+
+    def __init__(self, filters: int = 64,
+                 flow_convs: tuple = (3, 3, 3, 3),
+                 flow_filters: tuple = (32, 64, 128, 256)):
+        super(PyramidFlowEstimator, self).__init__()
+
+        in_channels = filters << 1
+        predictors = []
+        for i in range(len(flow_convs)):
+            predictors.append(
+                FlowEstimator(
+                    in_channels=in_channels,
+                    num_convs=flow_convs[i],
+                    num_filters=flow_filters[i]))
+            in_channels += filters << (i + 2)
+        self._predictor = predictors[-1]
+        self._predictors = nn.ModuleList(predictors[:-1][::-1])
+
+    def forward(self, feature_pyramid_a: List[torch.Tensor],
+                feature_pyramid_b: List[torch.Tensor]) -> List[torch.Tensor]:
+        """Estimates residual flow pyramids between two image pyramids.
+
+        Each image pyramid is represented as a list of tensors in fine-to-coarse
+        order. Each individual image is represented as a tensor where each pixel
+        is a vector of image features.
+
+        flow_pyramid_synthesis can be used to convert the residual flow
+        pyramid returned by this method into a flow pyramid, where each level
+        encodes the flow instead of a residual correction.
+
+        Args:
+            feature_pyramid_a: image pyramid as a list in fine-to-coarse order
+            feature_pyramid_b: image pyramid as a list in fine-to-coarse order
+
+        Returns:
+            List of flow tensors, in fine-to-coarse order, each level encoding the
+            difference against the bilinearly upsampled version from the coarser
+            level. The coarsest flow tensor, i.e. the last element in the list, is
+            the 'DC term', i.e. not a residual (alternatively, you can think of it
+            as a residual against zero).
+        """
+        levels = len(feature_pyramid_a)
+        v = self._predictor(feature_pyramid_a[-1], feature_pyramid_b[-1])
+        residuals = [v]
+        for i in range(levels - 2, len(self._predictors) - 1, -1):
+            # Upsamples the flow to match the current pyramid level. Also, scales the
+            # magnitude by two to reflect the new size.
+            level_size = feature_pyramid_a[i].shape[2:4]
+            v = F.interpolate(2 * v, size=level_size, mode='bilinear')
+            # Warp feature_pyramid_b[i] image based on the current flow estimate.
+            warped = warp(feature_pyramid_b[i], v)
+            # Estimate the residual flow between pyramid_a[i] and warped image:
+            v_residual = self._predictor(feature_pyramid_a[i], warped)
+            residuals.insert(0, v_residual)
+            v = v_residual + v
+
+        for k, predictor in enumerate(self._predictors):
+            i = len(self._predictors) - 1 - k
+            # Upsamples the flow to match the current pyramid level. Also, scales the
+            # magnitude by two to reflect the new size.
+            level_size = feature_pyramid_a[i].shape[2:4]
+            v = F.interpolate(2 * v, size=level_size, mode='bilinear')
+            # Warp feature_pyramid_b[i] image based on the current flow estimate.
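+            # warp() performs backward sampling: feature_pyramid_b[i] is read at
+            # positions offset by v, bringing it into alignment with
+            # feature_pyramid_a[i].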
+            warped = warp(feature_pyramid_b[i], v)
+            # Estimate the residual flow between pyramid_a[i] and warped image:
+            v_residual = predictor(feature_pyramid_a[i], warped)
+            residuals.insert(0, v_residual)
+            v = v_residual + v
+        return residuals
+
+
+
+
+
+
+
+
+
+
+"""Various utilities used in the film_net frame interpolator model."""
+from typing import List, Optional
+
+import cv2
+import numpy as np
+import torch
+from torch import nn
+from torch.nn import functional as F
+
+
+def pad_batch(batch, align):
+    height, width = batch.shape[1:3]
+    height_to_pad = (align - height % align) if height % align != 0 else 0
+    width_to_pad = (align - width % align) if width % align != 0 else 0
+
+    crop_region = [height_to_pad >> 1, width_to_pad >> 1, height + (height_to_pad >> 1), width + (width_to_pad >> 1)]
+    batch = np.pad(batch, ((0, 0), (height_to_pad >> 1, height_to_pad - (height_to_pad >> 1)),
+                           (width_to_pad >> 1, width_to_pad - (width_to_pad >> 1)), (0, 0)), mode='constant')
+    return batch, crop_region
+
+
+def load_image(path, align=64):
+    image = cv2.cvtColor(cv2.imread(path), cv2.COLOR_BGR2RGB).astype(np.float32) / np.float32(255)
+    image_batch, crop_region = pad_batch(np.expand_dims(image, axis=0), align)
+    return image_batch, crop_region
+
+
+def build_image_pyramid(image: torch.Tensor, pyramid_levels: int = 3) -> List[torch.Tensor]:
+    """Builds an image pyramid from a given image.
+
+    The original image is included in the pyramid and the rest are generated by
+    successively halving the resolution.
+
+    Args:
+        image: the input image.
+        pyramid_levels: number of pyramid levels to build.
+
+    Returns:
+        A list of images starting from the finest, with pyramid_levels items
+    """
+
+    pyramid = []
+    for i in range(pyramid_levels):
+        pyramid.append(image)
+        if i < pyramid_levels - 1:
+            image = F.avg_pool2d(image, 2, 2)
+    return pyramid
+
+
+def warp(image: torch.Tensor, flow: torch.Tensor) -> torch.Tensor:
+    """Backward warps the image using the given flow.
+
+    Specifically, the output pixel in batch b, at position x, y will be computed
+    as follows:
+      (flowed_y, flowed_x) = (y+flow[b, y, x, 1], x+flow[b, y, x, 0])
+      output[b, y, x] = bilinear_lookup(image, b, flowed_y, flowed_x)
+
+    Note that the flow vectors are expected as [x, y], i.e. x in position 0 and
+    y in position 1.
+
+    Args:
+        image: An image with shape B x C x H x W.
+        flow: A flow with shape B x 2 x H x W, with the two channels denoting the
+            relative offset in order: (dx, dy).
+    Returns:
+        A warped image.
+    """
+    flow = -flow.flip(1)
+
+    dtype = flow.dtype
+    device = flow.device
+
+    # warped = tfa_image.dense_image_warp(image, flow)
+    # Same as above but with pytorch
+    ls1 = 1 - 1 / flow.shape[3]
+    ls2 = 1 - 1 / flow.shape[2]
+
+    normalized_flow2 = flow.permute(0, 2, 3, 1) / torch.tensor(
+        [flow.shape[2] * .5, flow.shape[3] * .5], dtype=dtype, device=device)[None, None, None]
+    normalized_flow2 = torch.stack([
+        torch.linspace(-ls1, ls1, flow.shape[3], dtype=dtype, device=device)[None, None, :] - normalized_flow2[..., 1],
+        torch.linspace(-ls2, ls2, flow.shape[2], dtype=dtype, device=device)[None, :, None] - normalized_flow2[..., 0],
+    ], dim=3)
+
+    warped = F.grid_sample(image, normalized_flow2,
+                           mode='bilinear', padding_mode='border', align_corners=False)
+    return warped.reshape(image.shape)
+
+
+def multiply_pyramid(pyramid: List[torch.Tensor],
+                     scalar: torch.Tensor) -> List[torch.Tensor]:
+    """Multiplies all image batches in the pyramid by a batch of scalars.
+
+    Args:
+        pyramid: Pyramid of image batches.
+        scalar: Batch of scalars.
+
+    Returns:
+        An image pyramid with all images multiplied by the scalar.
+    """
+    # The original TF code transposed each image batch so that a batch of
+    # per-image scalars could be multiplied in; this port relies on plain
+    # broadcasting instead (equivalent for the batch-of-one inputs used here
+    # at inference time).
+    return [image * scalar for image in pyramid]
+
+
+def flow_pyramid_synthesis(
+        residual_pyramid: List[torch.Tensor]) -> List[torch.Tensor]:
+    """Converts a residual flow pyramid into a flow pyramid."""
+    flow = residual_pyramid[-1]
+    flow_pyramid: List[torch.Tensor] = [flow]
+    for residual_flow in residual_pyramid[:-1][::-1]:
+        level_size = residual_flow.shape[2:4]
+        flow = F.interpolate(2 * flow, size=level_size, mode='bilinear')
+        flow = residual_flow + flow
+        flow_pyramid.insert(0, flow)
+    return flow_pyramid
+
+
+def pyramid_warp(feature_pyramid: List[torch.Tensor],
+                 flow_pyramid: List[torch.Tensor]) -> List[torch.Tensor]:
+    """Warps the feature pyramid using the flow pyramid.
+
+    Args:
+        feature_pyramid: feature pyramid starting from the finest level.
+        flow_pyramid: flow fields, starting from the finest level.
+
+    Returns:
+        Reverse warped feature pyramid.
+    """
+    warped_feature_pyramid = []
+    for features, flow in zip(feature_pyramid, flow_pyramid):
+        warped_feature_pyramid.append(warp(features, flow))
+    return warped_feature_pyramid
+
+
+def concatenate_pyramids(pyramid1: List[torch.Tensor],
+                         pyramid2: List[torch.Tensor]) -> List[torch.Tensor]:
+    """Concatenates each pyramid level together in the channel dimension."""
+    result = []
+    for features1, features2 in zip(pyramid1, pyramid2):
+        result.append(torch.cat([features1, features2], dim=1))
+    return result
+
+
+def conv(in_channels, out_channels, size, activation: Optional[str] = 'relu'):
+    # Since PyTorch doesn't have an in-built activation in Conv2d, we use a
+    # Sequential layer to combine Conv2d and Leaky ReLU in one module.
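+    # Note that activation='relu' selects the LeakyReLU(0.2) below rather than
+    # a plain ReLU; this follows the PyTorch port of FILM referenced at the top
+    # of this file.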
_conv = nn.Conv2d(
+        in_channels=in_channels,
+        out_channels=out_channels,
+        kernel_size=size,
+        padding='same')
+    if activation is None:
+        return _conv
+    assert activation == 'relu'
+    return nn.Sequential(
+        _conv,
+        nn.LeakyReLU(.2)
+    )
\ No newline at end of file
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/flavr/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/flavr/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..64a4b6bed3ba40712317708b175214bd773633fd
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/flavr/__init__.py
@@ -0,0 +1,115 @@
+import torch
+from comfy.model_management import get_torch_device, soft_empty_cache
+import numpy as np
+import typing
+from vfi_utils import InterpolationStateList, load_file_from_github_release, preprocess_frames, postprocess_frames, assert_batch_size
+import pathlib
+import warnings
+from .flavr_arch import UNet_3D_3D, InputPadder
+import gc
+
+device = get_torch_device()
+NBR_FRAME = 4
+
+def build_flavr(model_path):
+    sd = torch.load(model_path)['state_dict']
+    sd = {k.partition("module.")[-1]:v for k,v in sd.items()}
+
+    #Ref: Class UNet_3D_3D
+    model = UNet_3D_3D("unet_18", n_inputs=NBR_FRAME, n_outputs=sd["outconv.1.weight"].shape[0] // 3, joinType="concat" , upmode="transpose")
+    model.load_state_dict(sd)
+    model.to(device).eval()
+    del sd
+    return model
+
+MODEL_TYPE = pathlib.Path(__file__).parent.name
+CKPT_NAMES = ["FLAVR_2x.pth", "FLAVR_4x.pth", "FLAVR_8x.pth"]
+
+class FLAVR_VFI:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {
+            "required": {
+                "ckpt_name": (CKPT_NAMES, ),
+                "frames": ("IMAGE", ),
+                "clear_cache_after_n_frames": ("INT", {"default": 10, "min": 1, "max": 1000}),
+                "multiplier": ("INT", {"default": 2, "min": 2, "max": 2}), #TODO: Implement recursively invoking interpolator for multi-frame interpolation
+                "duplicate_first_last_frames": ("BOOLEAN", {"default": False})
+            },
+            "optional": {
+                "optional_interpolation_states": ("INTERPOLATION_STATES", )
+            }
+        }
+
+    RETURN_TYPES = ("IMAGE", )
+    FUNCTION = "vfi"
+    CATEGORY = "ComfyUI-Frame-Interpolation/VFI"
+
+    #Reference: https://github.com/danier97/ST-MFNet/blob/main/interpolate_yuv.py#L93
+    def vfi(
+        self,
+        ckpt_name: typing.AnyStr,
+        frames: torch.Tensor,
+        clear_cache_after_n_frames = 10,
+        multiplier: typing.SupportsInt = 2,
+        duplicate_first_last_frames: bool = False,
+        optional_interpolation_states: InterpolationStateList = None,
+        **kwargs
+    ):
+        if multiplier != 2:
+            warnings.warn("Currently, FLAVR only supports 2x interpolation. The process will continue, but please set multiplier=2.")
+
+        assert_batch_size(frames, batch_size=4, vfi_name="FLAVR")
+        interpolation_states = optional_interpolation_states
+        model_path = load_file_from_github_release(MODEL_TYPE, ckpt_name)
+        model = build_flavr(model_path)
+        frames = preprocess_frames(frames)
+        padder = InputPadder(frames.shape, 16)
+        frames = padder.pad(frames)
+
+        number_of_frames_processed_since_last_cleared_cuda_cache = 0
+        output_frames = []
+        for frame_itr in range(len(frames) - 3):
+            #Does skipping frame i+1 make sense in this case?
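+            #FLAVR consumes a sliding window of four consecutive frames and only
+            #synthesizes the frame between the middle pair (frame1, frame2), so
+            #with N input frames this loop runs N - 3 times and the first and
+            #last inter-frame gaps receive no synthesized frame.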
+ if interpolation_states is not None and interpolation_states.is_frame_skipped(frame_itr) and interpolation_states.is_frame_skipped(frame_itr + 1): + continue + + #Ensure that input frames are in fp32 - the same dtype as model + frame0, frame1, frame2, frame3 = ( + frames[frame_itr:frame_itr+1].float(), + frames[frame_itr+1:frame_itr+2].float(), + frames[frame_itr+2:frame_itr+3].float(), + frames[frame_itr+3:frame_itr+4].float() + ) + new_frame = model([frame0.to(device), frame1.to(device), frame2.to(device), frame3.to(device)])[0].detach().cpu() + number_of_frames_processed_since_last_cleared_cuda_cache += 2 + + if frame_itr == 0: + output_frames.append(frame0) + if duplicate_first_last_frames: + output_frames.append(frame0) # repeat the first frame + output_frames.append(frame1) + output_frames.append(new_frame) + output_frames.append(frame2) + if frame_itr == len(frames) - 4: + output_frames.append(frame3) + if duplicate_first_last_frames: + output_frames.append(frame3) # repeat the last frame + + # Try to avoid a memory overflow by clearing cuda cache regularly + if number_of_frames_processed_since_last_cleared_cuda_cache >= clear_cache_after_n_frames: + print("Comfy-VFI: Clearing cache...", end = ' ') + soft_empty_cache() + number_of_frames_processed_since_last_cleared_cuda_cache = 0 + print("Done cache clearing") + gc.collect() + + dtype = torch.float32 + output_frames = [frame.cpu().to(dtype=dtype) for frame in output_frames] #Ensure all frames are in cpu + out = torch.cat(output_frames, dim=0) + out = padder.unpad(out) + # clear cache for courtesy + print("Comfy-VFI: Final clearing cache...", end=' ') + soft_empty_cache() + print("Done cache clearing") + return (postprocess_frames(out), ) diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/flavr/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/flavr/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..556ba828caa03d2a5d041d339f4e3f598a0cbc33 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/flavr/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/flavr/__pycache__/flavr_arch.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/flavr/__pycache__/flavr_arch.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..68a222e391d487e941cc9b0c206464065b36c0a0 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/flavr/__pycache__/flavr_arch.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/flavr/flavr_arch.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/flavr/flavr_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..60c27d35864dbceef3af73ba230ba175a18f9b12 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/flavr/flavr_arch.py @@ -0,0 +1,217 @@ +""" +https://github.com/tarun005/FLAVR/blob/main/model/FLAVR_arch.py +https://github.com/tarun005/FLAVR/blob/main/model/resnet_3D.py (only SEGating) +""" +import math +import numpy as np +import importlib + +import torch +import torch.nn as nn +import torch.nn.functional as F + +class SEGating(nn.Module): + + def __init__(self , inplanes , reduction=16): + + super().__init__() + + self.pool = nn.AdaptiveAvgPool3d(1) + self.attn_layer = 
nn.Sequential(
+            nn.Conv3d(inplanes , inplanes , kernel_size=1 , stride=1 , bias=True),
+            nn.Sigmoid()
+        )
+
+    def forward(self , x):
+
+        out = self.pool(x)
+        y = self.attn_layer(out)
+        return x * y
+
+def joinTensors(X1 , X2 , type="concat"):
+
+    if type == "concat":
+        return torch.cat([X1 , X2] , dim=1)
+    elif type == "add":
+        return X1 + X2
+    else:
+        return X1
+
+
+class Conv_2d(nn.Module):
+
+    def __init__(self, in_ch, out_ch, kernel_size, stride=1, padding=0, bias=False, batchnorm=False):
+
+        super().__init__()
+        self.conv = [nn.Conv2d(in_ch, out_ch, kernel_size=kernel_size, stride=stride, padding=padding, bias=bias)]
+
+        if batchnorm:
+            self.conv += [nn.BatchNorm2d(out_ch)]
+
+        self.conv = nn.Sequential(*self.conv)
+
+    def forward(self, x):
+
+        return self.conv(x)
+
+class upConv3D(nn.Module):
+
+    def __init__(self, in_ch, out_ch, kernel_size, stride, padding, upmode="transpose" , batchnorm=False):
+
+        super().__init__()
+
+        self.upmode = upmode
+
+        if self.upmode=="transpose":
+            self.upconv = nn.ModuleList(
+                [nn.ConvTranspose3d(in_ch, out_ch, kernel_size=kernel_size, stride=stride, padding=padding),
+                 SEGating(out_ch)
+                ]
+            )
+
+        else:
+            self.upconv = nn.ModuleList(
+                [nn.Upsample(mode='trilinear', scale_factor=(1,2,2), align_corners=False),
+                 nn.Conv3d(in_ch, out_ch , kernel_size=1 , stride=1),
+                 SEGating(out_ch)
+                ]
+            )
+
+        if batchnorm:
+            self.upconv += [nn.BatchNorm3d(out_ch)]
+
+        self.upconv = nn.Sequential(*self.upconv)
+
+    def forward(self, x):
+
+        return self.upconv(x)
+
+class Conv_3d(nn.Module):
+
+    def __init__(self, in_ch, out_ch, kernel_size, stride=1, padding=0, bias=True, batchnorm=False):
+
+        super().__init__()
+        self.conv = [nn.Conv3d(in_ch, out_ch, kernel_size=kernel_size, stride=stride, padding=padding, bias=bias),
+                     SEGating(out_ch)
+                    ]
+
+        if batchnorm:
+            self.conv += [nn.BatchNorm3d(out_ch)]
+
+        self.conv = nn.Sequential(*self.conv)
+
+    def forward(self, x):
+
+        return self.conv(x)
+
+class upConv2D(nn.Module):
+
+    def __init__(self, in_ch, out_ch, kernel_size, stride, padding, upmode="transpose" , batchnorm=False):
+
+        super().__init__()
+
+        self.upmode = upmode
+
+        if self.upmode=="transpose":
+            self.upconv = [nn.ConvTranspose2d(in_ch, out_ch, kernel_size=kernel_size, stride=stride, padding=padding)]
+
+        else:
+            self.upconv = [
+                nn.Upsample(mode='bilinear', scale_factor=2, align_corners=False),
+                nn.Conv2d(in_ch, out_ch , kernel_size=1 , stride=1)
+            ]
+
+        if batchnorm:
+            self.upconv += [nn.BatchNorm2d(out_ch)]
+
+        self.upconv = nn.Sequential(*self.upconv)
+
+    def forward(self, x):
+
+        return self.upconv(x)
+
+
+class UNet_3D_3D(nn.Module):
+    def __init__(self, block , n_inputs, n_outputs, batchnorm=False , joinType="concat" , upmode="transpose"):
+        super().__init__()
+
+        nf = [512 , 256 , 128 , 64]
+        out_channels = 3*n_outputs
+        self.joinType = joinType
+        self.n_outputs = n_outputs
+
+        growth = 2 if joinType == "concat" else 1
+        self.lrelu = nn.LeakyReLU(0.2, True)
+
+        # The anchor package must match this file's location (vfi_models/flavr)
+        # so that the relative import of resnet_3D resolves.
+        unet_3D = importlib.import_module(".resnet_3D", "vfi_models.flavr")
+        if n_outputs > 1:
+            unet_3D.useBias = True
+        self.encoder = getattr(unet_3D , block)(pretrained=False , bn=batchnorm)
+
+        self.decoder = nn.Sequential(
+            Conv_3d(nf[0], nf[1] , kernel_size=3, padding=1, bias=True, batchnorm=batchnorm),
+            upConv3D(nf[1]*growth, nf[2], kernel_size=(3,4,4), stride=(1,2,2), padding=(1,1,1) , upmode=upmode, batchnorm=batchnorm),
+            upConv3D(nf[2]*growth, nf[3], kernel_size=(3,4,4), stride=(1,2,2), padding=(1,1,1) , upmode=upmode, batchnorm=batchnorm),
+            Conv_3d(nf[3]*growth, nf[3] , kernel_size=3,
padding=1, bias=True, batchnorm=batchnorm), + upConv3D(nf[3]*growth , nf[3], kernel_size=(3,4,4), stride=(1,2,2), padding=(1,1,1) , upmode=upmode, batchnorm=batchnorm) + ) + + self.feature_fuse = Conv_2d(nf[3]*n_inputs , nf[3] , kernel_size=1 , stride=1, batchnorm=batchnorm) + + self.outconv = nn.Sequential( + nn.ReflectionPad2d(3), + nn.Conv2d(nf[3], out_channels , kernel_size=7 , stride=1, padding=0) + ) + + def forward(self, images): + + images = torch.stack(images , dim=2) + + ## Batch mean normalization works slightly better than global mean normalization, thanks to https://github.com/myungsub/CAIN + mean_ = images.mean(2, keepdim=True).mean(3, keepdim=True).mean(4,keepdim=True) + images = images-mean_ + + x_0 , x_1 , x_2 , x_3 , x_4 = self.encoder(images) + + dx_3 = self.lrelu(self.decoder[0](x_4)) + dx_3 = joinTensors(dx_3 , x_3 , type=self.joinType) + + dx_2 = self.lrelu(self.decoder[1](dx_3)) + dx_2 = joinTensors(dx_2 , x_2 , type=self.joinType) + + dx_1 = self.lrelu(self.decoder[2](dx_2)) + dx_1 = joinTensors(dx_1 , x_1 , type=self.joinType) + + dx_0 = self.lrelu(self.decoder[3](dx_1)) + dx_0 = joinTensors(dx_0 , x_0 , type=self.joinType) + + dx_out = self.lrelu(self.decoder[4](dx_0)) + dx_out = torch.cat(torch.unbind(dx_out , 2) , 1) + + out = self.lrelu(self.feature_fuse(dx_out)) + out = self.outconv(out) + + out = torch.split(out, dim=1, split_size_or_sections=3) + mean_ = mean_.squeeze(2) + out = [o+mean_ for o in out] + + return out + +class InputPadder: + """ Pads images such that dimensions are divisible by divisor """ + def __init__(self, dims, divisor=16): + self.ht, self.wd = dims[-2:] + pad_ht = (((self.ht // divisor) + 1) * divisor - self.ht) % divisor + pad_wd = (((self.wd // divisor) + 1) * divisor - self.wd) % divisor + self._pad = [pad_wd//2, pad_wd - pad_wd//2, pad_ht//2, pad_ht - pad_ht//2] + + def pad(self, input_tensor): + return F.pad(input_tensor, self._pad, mode='replicate') + + def unpad(self, input_tensor): + return self._unpad(input_tensor) + + def _unpad(self, x): + ht, wd = x.shape[-2:] + c = [self._pad[2], ht-self._pad[3], self._pad[0], wd-self._pad[1]] + return x[..., c[0]:c[1], c[2]:c[3]] \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/flavr/resnet_3D.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/flavr/resnet_3D.py new file mode 100644 index 0000000000000000000000000000000000000000..ba8b9a85a9edcf24ed8ff685c667dbcdbff55e69 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/flavr/resnet_3D.py @@ -0,0 +1,288 @@ +# Modified from https://github.com/pytorch/vision/tree/master/torchvision/models/video + +import torch +import torch.nn as nn + +__all__ = ['unet_18', 'unet_34'] + +useBias = False + +class identity(nn.Module): + + def __init__(self , *args , **kwargs): + + super().__init__() + + def forward(self , x): + return x + +class Conv3DSimple(nn.Conv3d): + def __init__(self, + in_planes, + out_planes, + midplanes=None, + stride=1, + padding=1): + + super(Conv3DSimple, self).__init__( + in_channels=in_planes, + out_channels=out_planes, + kernel_size=(3, 3, 3), + stride=stride, + padding=padding, + bias=useBias) + + @staticmethod + def get_downsample_stride(stride , temporal_stride): + if temporal_stride: + return (temporal_stride, stride, stride) + else: + return (stride , stride , stride) + +class BasicStem(nn.Sequential): + """The default conv-batchnorm-relu stem + """ + def __init__(self): + super().__init__( + nn.Conv3d(3, 64, 
kernel_size=(3, 7, 7), stride=(1, 2, 2), + padding=(1, 3, 3), bias=useBias), + batchnorm(64), + nn.ReLU(inplace=False)) + + +class Conv2Plus1D(nn.Sequential): + + def __init__(self, + in_planes, + out_planes, + midplanes, + stride=1, + padding=1): + if not isinstance(stride , int): + temporal_stride , stride , stride = stride + else: + temporal_stride = stride + + super(Conv2Plus1D, self).__init__( + nn.Conv3d(in_planes, midplanes, kernel_size=(1, 3, 3), + stride=(1, stride, stride), padding=(0, padding, padding), + bias=False), + # batchnorm(midplanes), + nn.ReLU(inplace=True), + nn.Conv3d(midplanes, out_planes, kernel_size=(3, 1, 1), + stride=(temporal_stride, 1, 1), padding=(padding, 0, 0), + bias=False)) + + @staticmethod + def get_downsample_stride(stride , temporal_stride): + if temporal_stride: + return (temporal_stride, stride, stride) + else: + return (stride , stride , stride) + +class R2Plus1dStem(nn.Sequential): + """R(2+1)D stem is different than the default one as it uses separated 3D convolution + """ + def __init__(self): + super().__init__( + nn.Conv3d(3, 45, kernel_size=(1, 7, 7), + stride=(1, 2, 2), padding=(0, 3, 3), + bias=False), + batchnorm(45), + nn.ReLU(inplace=True), + nn.Conv3d(45, 64, kernel_size=(3, 1, 1), + stride=(1, 1, 1), padding=(1, 0, 0), + bias=False), + batchnorm(64), + nn.ReLU(inplace=True)) + + +class SEGating(nn.Module): + + def __init__(self , inplanes , reduction=16): + + super().__init__() + + self.pool = nn.AdaptiveAvgPool3d(1) + self.attn_layer = nn.Sequential( + nn.Conv3d(inplanes , inplanes , kernel_size=1 , stride=1 , bias=True), + nn.Sigmoid() + ) + + def forward(self , x): + + out = self.pool(x) + y = self.attn_layer(out) + return x * y + +class BasicBlock(nn.Module): + + expansion = 1 + + def __init__(self, inplanes, planes, conv_builder, stride=1, downsample=None): + midplanes = (inplanes * planes * 3 * 3 * 3) // (inplanes * 3 * 3 + 3 * planes) + + super(BasicBlock, self).__init__() + self.conv1 = nn.Sequential( + conv_builder(inplanes, planes, midplanes, stride), + batchnorm(planes), + nn.ReLU(inplace=True) + ) + self.conv2 = nn.Sequential( + conv_builder(planes, planes, midplanes), + batchnorm(planes) + ) + self.fg = SEGating(planes) ## Feature Gating + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + out = self.conv1(x) + out = self.conv2(out) + out = self.fg(out) + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + +class VideoResNet(nn.Module): + + def __init__(self, block, conv_makers, layers, + stem, zero_init_residual=False): + """Generic resnet video generator. + + Args: + block (nn.Module): resnet building block + conv_makers (list(functions)): generator function for each layer + layers (List[int]): number of blocks per layer + stem (nn.Module, optional): Resnet stem, if None, defaults to conv-bn-relu. Defaults to None. 
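+            zero_init_residual (bool, optional): retained from the torchvision
+                original; the Bottleneck class it checks for is not defined in
+                this trimmed file, so it should be left False here.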
+ """ + super(VideoResNet, self).__init__() + self.inplanes = 64 + + self.stem = stem() + + self.layer1 = self._make_layer(block, conv_makers[0], 64, layers[0], stride=1 ) + self.layer2 = self._make_layer(block, conv_makers[1], 128, layers[1], stride=2 , temporal_stride=1) + self.layer3 = self._make_layer(block, conv_makers[2], 256, layers[2], stride=2 , temporal_stride=1) + self.layer4 = self._make_layer(block, conv_makers[3], 512, layers[3], stride=1, temporal_stride=1) + + # init weights + self._initialize_weights() + + if zero_init_residual: + for m in self.modules(): + if isinstance(m, Bottleneck): + nn.init.constant_(m.bn3.weight, 0) + + def forward(self, x): + x_0 = self.stem(x) + x_1 = self.layer1(x_0) + x_2 = self.layer2(x_1) + x_3 = self.layer3(x_2) + x_4 = self.layer4(x_3) + return x_0 , x_1 , x_2 , x_3 , x_4 + + def _make_layer(self, block, conv_builder, planes, blocks, stride=1, temporal_stride=None): + downsample = None + + if stride != 1 or self.inplanes != planes * block.expansion: + ds_stride = conv_builder.get_downsample_stride(stride , temporal_stride) + downsample = nn.Sequential( + nn.Conv3d(self.inplanes, planes * block.expansion, + kernel_size=1, stride=ds_stride, bias=False), + batchnorm(planes * block.expansion) + ) + stride = ds_stride + + layers = [] + layers.append(block(self.inplanes, planes, conv_builder, stride, downsample )) + + self.inplanes = planes * block.expansion + for i in range(1, blocks): + layers.append(block(self.inplanes, planes, conv_builder )) + + return nn.Sequential(*layers) + + def _initialize_weights(self): + for m in self.modules(): + if isinstance(m, nn.Conv3d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', + nonlinearity='relu') + if m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.BatchNorm3d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, 0.01) + nn.init.constant_(m.bias, 0) + + +def _video_resnet(arch, pretrained=False, progress=True, **kwargs): + model = VideoResNet(**kwargs) + ## TODO: Other 3D resnet models, like S3D, r(2+1)D. 
+
+    if pretrained:
+        state_dict = load_state_dict_from_url(model_urls[arch],
+                                              progress=progress)
+        model.load_state_dict(state_dict)
+    return model
+
+
+def unet_18(pretrained=False, bn=False, progress=True, **kwargs):
+    """
+    Construct 18 layer Unet3D model as in
+    https://arxiv.org/abs/1711.11248
+
+    Args:
+        pretrained (bool): If True, returns a model pre-trained on Kinetics-400
+        progress (bool): If True, displays a progress bar of the download to stderr
+
+    Returns:
+        nn.Module: R3D-18 encoder
+    """
+    global batchnorm
+    if bn:
+        batchnorm = nn.BatchNorm3d
+    else:
+        batchnorm = identity
+
+    return _video_resnet('r3d_18',
+                         pretrained, progress,
+                         block=BasicBlock,
+                         conv_makers=[Conv3DSimple] * 4,
+                         layers=[2, 2, 2, 2],
+                         stem=BasicStem, **kwargs)
+
+def unet_34(pretrained=False, bn=False, progress=True, **kwargs):
+    """
+    Construct 34 layer Unet3D model as in
+    https://arxiv.org/abs/1711.11248
+
+    Args:
+        pretrained (bool): If True, returns a model pre-trained on Kinetics-400
+        progress (bool): If True, displays a progress bar of the download to stderr
+
+    Returns:
+        nn.Module: R3D-34 encoder
+    """
+    global batchnorm
+    # bn = False
+    if bn:
+        batchnorm = nn.BatchNorm3d
+    else:
+        batchnorm = identity
+
+
+    return _video_resnet('r3d_34',
+                         pretrained, progress,
+                         block=BasicBlock,
+                         conv_makers=[Conv3DSimple] * 4,
+                         layers=[3, 4, 6, 3],
+                         stem=BasicStem, **kwargs)
\ No newline at end of file
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/gmfss_fortuna/GMFSS_Fortuna.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/gmfss_fortuna/GMFSS_Fortuna.py
new file mode 100644
index 0000000000000000000000000000000000000000..949e5130c59b3cf090666dff229bfa9a2be3a072
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/gmfss_fortuna/GMFSS_Fortuna.py
@@ -0,0 +1,24 @@
+import itertools
+import numpy as np
+import vapoursynth as vs
+from .GMFSS_Fortuna_arch import Model_inference
+import torch
+import traceback
+
+
+class GMFSS_Fortuna:
+    def __init__(self):
+        self.cache = False
+        self.amount_input_img = 2
+
+        torch.set_grad_enabled(False)
+        torch.backends.cudnn.enabled = True
+        torch.backends.cudnn.benchmark = True
+
+        self.model = Model_inference()
+        self.model.eval()
+
+    def execute(self, I0, I1, timestep):
+        with torch.inference_mode():
+            middle = self.model(I0, I1, timestep).cpu()
+        return middle
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/gmfss_fortuna/GMFSS_Fortuna_arch.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/gmfss_fortuna/GMFSS_Fortuna_arch.py
new file mode 100644
index 0000000000000000000000000000000000000000..062489675ef58d541c0311a8a88f3510a737075f
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/gmfss_fortuna/GMFSS_Fortuna_arch.py
@@ -0,0 +1,1850 @@
+"""
+https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/GMFSS_infer_b.py
+https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/softsplat.py
+https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/FusionNet_b.py
+https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/FeatureNet.py
+https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/MetricNet.py
+https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/IFNet_HDv3.py
+https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/gmflow/gmflow.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/gmflow/utils.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/gmflow/position.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/gmflow/geometry.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/gmflow/matching.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/gmflow/transformer.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/gmflow/backbone.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/gmflow/trident_conv.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/warplayer.py +""" + +from torch import nn +from torch.nn import functional as F +from torch.nn.modules.utils import _pair +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch +import math +from vfi_models.rife.rife_arch import IFNet +from vfi_models.ops import softsplat +from comfy.model_management import get_torch_device + +device = get_torch_device() +backwarp_tenGrid = {} + + +def warp(tenInput, tenFlow): + k = (str(tenFlow.device), str(tenFlow.size())) + if k not in backwarp_tenGrid: + tenHorizontal = ( + torch.linspace(-1.0, 1.0, tenFlow.shape[3], device=device) + .view(1, 1, 1, tenFlow.shape[3]) + .expand(tenFlow.shape[0], -1, tenFlow.shape[2], -1) + ) + tenVertical = ( + torch.linspace(-1.0, 1.0, tenFlow.shape[2], device=device) + .view(1, 1, tenFlow.shape[2], 1) + .expand(tenFlow.shape[0], -1, -1, tenFlow.shape[3]) + ) + backwarp_tenGrid[k] = torch.cat([tenHorizontal, tenVertical], 1).to(device) + + tenFlow = torch.cat( + [ + tenFlow[:, 0:1, :, :] / ((tenInput.shape[3] - 1.0) / 2.0), + tenFlow[:, 1:2, :, :] / ((tenInput.shape[2] - 1.0) / 2.0), + ], + 1, + ) + + g = (backwarp_tenGrid[k] + tenFlow).permute(0, 2, 3, 1) + return torch.nn.functional.grid_sample( + input=tenInput, + grid=g, + mode="bilinear", + padding_mode="border", + align_corners=True, + ) + + +class MultiScaleTridentConv(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + strides=1, + paddings=0, + dilations=1, + dilation=1, + groups=1, + num_branch=1, + test_branch_idx=-1, + bias=False, + norm=None, + activation=None, + ): + super(MultiScaleTridentConv, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = _pair(kernel_size) + self.num_branch = num_branch + self.stride = _pair(stride) + self.groups = groups + self.with_bias = bias + self.dilation = dilation + if isinstance(paddings, int): + paddings = [paddings] * self.num_branch + if isinstance(dilations, int): + dilations = [dilations] * self.num_branch + if isinstance(strides, int): + strides = [strides] * self.num_branch + self.paddings = [_pair(padding) for padding in paddings] + self.dilations = [_pair(dilation) for dilation in dilations] + self.strides = [_pair(stride) for stride in strides] + self.test_branch_idx = test_branch_idx + self.norm = norm + self.activation = activation + + assert len({self.num_branch, len(self.paddings), len(self.strides)}) == 1 + + self.weight = nn.Parameter( + torch.Tensor(out_channels, in_channels // groups, 
*self.kernel_size) + ) + if bias: + self.bias = nn.Parameter(torch.Tensor(out_channels)) + else: + self.bias = None + + nn.init.kaiming_uniform_(self.weight, nonlinearity="relu") + if self.bias is not None: + nn.init.constant_(self.bias, 0) + + def forward(self, inputs): + num_branch = ( + self.num_branch if self.training or self.test_branch_idx == -1 else 1 + ) + assert len(inputs) == num_branch + + if self.training or self.test_branch_idx == -1: + outputs = [ + F.conv2d( + input, + self.weight, + self.bias, + stride, + padding, + self.dilation, + self.groups, + ) + for input, stride, padding in zip(inputs, self.strides, self.paddings) + ] + else: + outputs = [ + F.conv2d( + inputs[0], + self.weight, + self.bias, + self.strides[self.test_branch_idx] + if self.test_branch_idx == -1 + else self.strides[-1], + self.paddings[self.test_branch_idx] + if self.test_branch_idx == -1 + else self.paddings[-1], + self.dilation, + self.groups, + ) + ] + + if self.norm is not None: + outputs = [self.norm(x) for x in outputs] + if self.activation is not None: + outputs = [self.activation(x) for x in outputs] + return outputs + + +class ResidualBlock_class(nn.Module): + def __init__( + self, + in_planes, + planes, + norm_layer=nn.InstanceNorm2d, + stride=1, + dilation=1, + ): + super(ResidualBlock_class, self).__init__() + + self.conv1 = nn.Conv2d( + in_planes, + planes, + kernel_size=3, + dilation=dilation, + padding=dilation, + stride=stride, + bias=False, + ) + self.conv2 = nn.Conv2d( + planes, + planes, + kernel_size=3, + dilation=dilation, + padding=dilation, + bias=False, + ) + self.relu = nn.ReLU(inplace=True) + + self.norm1 = norm_layer(planes) + self.norm2 = norm_layer(planes) + if not stride == 1 or in_planes != planes: + self.norm3 = norm_layer(planes) + + if stride == 1 and in_planes == planes: + self.downsample = None + else: + self.downsample = nn.Sequential( + nn.Conv2d(in_planes, planes, kernel_size=1, stride=stride), self.norm3 + ) + + def forward(self, x): + y = x + y = self.relu(self.norm1(self.conv1(y))) + y = self.relu(self.norm2(self.conv2(y))) + + if self.downsample is not None: + x = self.downsample(x) + + return self.relu(x + y) + + +class CNNEncoder(nn.Module): + def __init__( + self, + output_dim=128, + norm_layer=nn.InstanceNorm2d, + num_output_scales=1, + **kwargs, + ): + super(CNNEncoder, self).__init__() + self.num_branch = num_output_scales + + feature_dims = [64, 96, 128] + + self.conv1 = nn.Conv2d( + 3, feature_dims[0], kernel_size=7, stride=2, padding=3, bias=False + ) # 1/2 + self.norm1 = norm_layer(feature_dims[0]) + self.relu1 = nn.ReLU(inplace=True) + + self.in_planes = feature_dims[0] + self.layer1 = self._make_layer( + feature_dims[0], stride=1, norm_layer=norm_layer + ) # 1/2 + self.layer2 = self._make_layer( + feature_dims[1], stride=2, norm_layer=norm_layer + ) # 1/4 + + # highest resolution 1/4 or 1/8 + stride = 2 if num_output_scales == 1 else 1 + self.layer3 = self._make_layer( + feature_dims[2], + stride=stride, + norm_layer=norm_layer, + ) # 1/4 or 1/8 + + self.conv2 = nn.Conv2d(feature_dims[2], output_dim, 1, 1, 0) + + if self.num_branch > 1: + if self.num_branch == 4: + strides = (1, 2, 4, 8) + elif self.num_branch == 3: + strides = (1, 2, 4) + elif self.num_branch == 2: + strides = (1, 2) + else: + raise ValueError + + self.trident_conv = MultiScaleTridentConv( + output_dim, + output_dim, + kernel_size=3, + strides=strides, + paddings=1, + num_branch=self.num_branch, + ) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + 
nn.init.kaiming_normal_(m.weight, mode="fan_out", nonlinearity="relu") + elif isinstance(m, (nn.BatchNorm2d, nn.InstanceNorm2d, nn.GroupNorm)): + if m.weight is not None: + nn.init.constant_(m.weight, 1) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + + def _make_layer(self, dim, stride=1, dilation=1, norm_layer=nn.InstanceNorm2d): + layer1 = ResidualBlock_class( + self.in_planes, dim, norm_layer=norm_layer, stride=stride, dilation=dilation + ) + layer2 = ResidualBlock_class( + dim, dim, norm_layer=norm_layer, stride=1, dilation=dilation + ) + + layers = (layer1, layer2) + + self.in_planes = dim + return nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + x = self.norm1(x) + x = self.relu1(x) + + x = self.layer1(x) # 1/2 + x = self.layer2(x) # 1/4 + x = self.layer3(x) # 1/8 or 1/4 + + x = self.conv2(x) + + if self.num_branch > 1: + out = self.trident_conv([x] * self.num_branch) # high to low res + else: + out = [x] + + return out + + +def single_head_full_attention(q, k, v): + # q, k, v: [B, L, C] + assert q.dim() == k.dim() == v.dim() == 3 + + scores = torch.matmul(q, k.permute(0, 2, 1)) / (q.size(2) ** 0.5) # [B, L, L] + attn = torch.softmax(scores, dim=2) # [B, L, L] + out = torch.matmul(attn, v) # [B, L, C] + + return out + + +def generate_shift_window_attn_mask( + input_resolution, + window_size_h, + window_size_w, + shift_size_h, + shift_size_w, + device=get_torch_device(), +): + # Ref: https://github.com/microsoft/Swin-Transformer/blob/main/models/swin_transformer.py + # calculate attention mask for SW-MSA + h, w = input_resolution + img_mask = torch.zeros((1, h, w, 1)).to(device) # 1 H W 1 + h_slices = ( + slice(0, -window_size_h), + slice(-window_size_h, -shift_size_h), + slice(-shift_size_h, None), + ) + w_slices = ( + slice(0, -window_size_w), + slice(-window_size_w, -shift_size_w), + slice(-shift_size_w, None), + ) + cnt = 0 + for h in h_slices: + for w in w_slices: + img_mask[:, h, w, :] = cnt + cnt += 1 + + mask_windows = split_feature( + img_mask, num_splits=input_resolution[-1] // window_size_w, channel_last=True + ) + + mask_windows = mask_windows.view(-1, window_size_h * window_size_w) + attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2) + attn_mask = attn_mask.masked_fill(attn_mask != 0, float(-100.0)).masked_fill( + attn_mask == 0, float(0.0) + ) + + return attn_mask + + +def single_head_split_window_attention( + q, + k, + v, + num_splits=1, + with_shift=False, + h=None, + w=None, + attn_mask=None, +): + # Ref: https://github.com/microsoft/Swin-Transformer/blob/main/models/swin_transformer.py + # q, k, v: [B, L, C] + assert q.dim() == k.dim() == v.dim() == 3 + + assert h is not None and w is not None + assert q.size(1) == h * w + + b, _, c = q.size() + + b_new = b * num_splits * num_splits + + window_size_h = h // num_splits + window_size_w = w // num_splits + + q = q.view(b, h, w, c) # [B, H, W, C] + k = k.view(b, h, w, c) + v = v.view(b, h, w, c) + + scale_factor = c**0.5 + + if with_shift: + assert attn_mask is not None # compute once + shift_size_h = window_size_h // 2 + shift_size_w = window_size_w // 2 + + q = torch.roll(q, shifts=(-shift_size_h, -shift_size_w), dims=(1, 2)) + k = torch.roll(k, shifts=(-shift_size_h, -shift_size_w), dims=(1, 2)) + v = torch.roll(v, shifts=(-shift_size_h, -shift_size_w), dims=(1, 2)) + + q = split_feature( + q, num_splits=num_splits, channel_last=True + ) # [B*K*K, H/K, W/K, C] + k = split_feature(k, num_splits=num_splits, channel_last=True) + v = split_feature(v, num_splits=num_splits, 
channel_last=True) + + scores = ( + torch.matmul(q.view(b_new, -1, c), k.view(b_new, -1, c).permute(0, 2, 1)) + / scale_factor + ) # [B*K*K, H/K*W/K, H/K*W/K] + + if with_shift: + scores += attn_mask.repeat(b, 1, 1) + + attn = torch.softmax(scores, dim=-1) + + out = torch.matmul(attn, v.view(b_new, -1, c)) # [B*K*K, H/K*W/K, C] + + out = merge_splits( + out.view(b_new, h // num_splits, w // num_splits, c), + num_splits=num_splits, + channel_last=True, + ) # [B, H, W, C] + + # shift back + if with_shift: + out = torch.roll(out, shifts=(shift_size_h, shift_size_w), dims=(1, 2)) + + out = out.view(b, -1, c) + + return out + + +class TransformerLayer(nn.Module): + def __init__( + self, + d_model=256, + nhead=1, + attention_type="swin", + no_ffn=False, + ffn_dim_expansion=4, + with_shift=False, + **kwargs, + ): + super(TransformerLayer, self).__init__() + + self.dim = d_model + self.nhead = nhead + self.attention_type = attention_type + self.no_ffn = no_ffn + + self.with_shift = with_shift + + # multi-head attention + self.q_proj = nn.Linear(d_model, d_model, bias=False) + self.k_proj = nn.Linear(d_model, d_model, bias=False) + self.v_proj = nn.Linear(d_model, d_model, bias=False) + + self.merge = nn.Linear(d_model, d_model, bias=False) + + self.norm1 = nn.LayerNorm(d_model) + + # no ffn after self-attn, with ffn after cross-attn + if not self.no_ffn: + in_channels = d_model * 2 + self.mlp = nn.Sequential( + nn.Linear(in_channels, in_channels * ffn_dim_expansion, bias=False), + nn.GELU(), + nn.Linear(in_channels * ffn_dim_expansion, d_model, bias=False), + ) + + self.norm2 = nn.LayerNorm(d_model) + + def forward( + self, + source, + target, + height=None, + width=None, + shifted_window_attn_mask=None, + attn_num_splits=None, + **kwargs, + ): + # source, target: [B, L, C] + query, key, value = source, target, target + + # single-head attention + query = self.q_proj(query) # [B, L, C] + key = self.k_proj(key) # [B, L, C] + value = self.v_proj(value) # [B, L, C] + + if self.attention_type == "swin" and attn_num_splits > 1: + if self.nhead > 1: + # we observe that multihead attention slows down the speed and increases the memory consumption + # without bringing obvious performance gains and thus the implementation is removed + raise NotImplementedError + else: + message = single_head_split_window_attention( + query, + key, + value, + num_splits=attn_num_splits, + with_shift=self.with_shift, + h=height, + w=width, + attn_mask=shifted_window_attn_mask, + ) + else: + message = single_head_full_attention(query, key, value) # [B, L, C] + + message = self.merge(message) # [B, L, C] + message = self.norm1(message) + + if not self.no_ffn: + message = self.mlp(torch.cat([source, message], dim=-1)) + message = self.norm2(message) + + return source + message + + +class TransformerBlock(nn.Module): + """self attention + cross attention + FFN""" + + def __init__( + self, + d_model=256, + nhead=1, + attention_type="swin", + ffn_dim_expansion=4, + with_shift=False, + **kwargs, + ): + super(TransformerBlock, self).__init__() + + self.self_attn = TransformerLayer( + d_model=d_model, + nhead=nhead, + attention_type=attention_type, + no_ffn=True, + ffn_dim_expansion=ffn_dim_expansion, + with_shift=with_shift, + ) + + self.cross_attn_ffn = TransformerLayer( + d_model=d_model, + nhead=nhead, + attention_type=attention_type, + ffn_dim_expansion=ffn_dim_expansion, + with_shift=with_shift, + ) + + def forward( + self, + source, + target, + height=None, + width=None, + shifted_window_attn_mask=None, + attn_num_splits=None, 
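+        # attn_num_splits selects the window partition used by both the
+        # self-attention and the cross-attention + FFN layers below.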
+ **kwargs, + ): + # source, target: [B, L, C] + + # self attention + source = self.self_attn( + source, + source, + height=height, + width=width, + shifted_window_attn_mask=shifted_window_attn_mask, + attn_num_splits=attn_num_splits, + ) + + # cross attention and ffn + source = self.cross_attn_ffn( + source, + target, + height=height, + width=width, + shifted_window_attn_mask=shifted_window_attn_mask, + attn_num_splits=attn_num_splits, + ) + + return source + + +class FeatureTransformer(nn.Module): + def __init__( + self, + num_layers=6, + d_model=128, + nhead=1, + attention_type="swin", + ffn_dim_expansion=4, + **kwargs, + ): + super(FeatureTransformer, self).__init__() + + self.attention_type = attention_type + + self.d_model = d_model + self.nhead = nhead + + self.layers = nn.ModuleList( + [ + TransformerBlock( + d_model=d_model, + nhead=nhead, + attention_type=attention_type, + ffn_dim_expansion=ffn_dim_expansion, + with_shift=True + if attention_type == "swin" and i % 2 == 1 + else False, + ) + for i in range(num_layers) + ] + ) + + for p in self.parameters(): + if p.dim() > 1: + nn.init.xavier_uniform_(p) + + def forward( + self, + feature0, + feature1, + attn_num_splits=None, + **kwargs, + ): + b, c, h, w = feature0.shape + assert self.d_model == c + + feature0 = feature0.flatten(-2).permute(0, 2, 1) # [B, H*W, C] + feature1 = feature1.flatten(-2).permute(0, 2, 1) # [B, H*W, C] + + if self.attention_type == "swin" and attn_num_splits > 1: + # global and refine use different number of splits + window_size_h = h // attn_num_splits + window_size_w = w // attn_num_splits + + # compute attn mask once + shifted_window_attn_mask = generate_shift_window_attn_mask( + input_resolution=(h, w), + window_size_h=window_size_h, + window_size_w=window_size_w, + shift_size_h=window_size_h // 2, + shift_size_w=window_size_w // 2, + device=feature0.device, + ) # [K*K, H/K*W/K, H/K*W/K] + else: + shifted_window_attn_mask = None + + # concat feature0 and feature1 in batch dimension to compute in parallel + concat0 = torch.cat((feature0, feature1), dim=0) # [2B, H*W, C] + concat1 = torch.cat((feature1, feature0), dim=0) # [2B, H*W, C] + + for layer in self.layers: + concat0 = layer( + concat0, + concat1, + height=h, + width=w, + shifted_window_attn_mask=shifted_window_attn_mask, + attn_num_splits=attn_num_splits, + ) + + # update feature1 + concat1 = torch.cat(concat0.chunk(chunks=2, dim=0)[::-1], dim=0) + + feature0, feature1 = concat0.chunk(chunks=2, dim=0) # [B, H*W, C] + + # reshape back + feature0 = ( + feature0.view(b, h, w, c).permute(0, 3, 1, 2).contiguous() + ) # [B, C, H, W] + feature1 = ( + feature1.view(b, h, w, c).permute(0, 3, 1, 2).contiguous() + ) # [B, C, H, W] + + return feature0, feature1 + + +class FeatureFlowAttention(nn.Module): + """ + flow propagation with self-attention on feature + query: feature0, key: feature0, value: flow + """ + + def __init__( + self, + in_channels, + **kwargs, + ): + super(FeatureFlowAttention, self).__init__() + + self.q_proj = nn.Linear(in_channels, in_channels) + self.k_proj = nn.Linear(in_channels, in_channels) + + for p in self.parameters(): + if p.dim() > 1: + nn.init.xavier_uniform_(p) + + def forward( + self, + feature0, + flow, + local_window_attn=False, + local_window_radius=1, + **kwargs, + ): + # q, k: feature [B, C, H, W], v: flow [B, 2, H, W] + if local_window_attn: + return self.forward_local_window_attn( + feature0, flow, local_window_radius=local_window_radius + ) + + b, c, h, w = feature0.size() + + query = feature0.view(b, c, h * 
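+            # Global flow propagation: features are flattened to [B, H*W, C] and
+            # every position attends to all others; the attention weights are then
+            # used to redistribute the flow field (the value tensor).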
w).permute(0, 2, 1) # [B, H*W, C] + + # a note: the ``correct'' implementation should be: + # ``query = self.q_proj(query), key = self.k_proj(query)'' + # this problem is observed while cleaning up the code + # however, this doesn't affect the performance since the projection is a linear operation, + # thus the two projection matrices for key can be merged + # so I just leave it as is in order to not re-train all models :) + query = self.q_proj(query) # [B, H*W, C] + key = self.k_proj(query) # [B, H*W, C] + + value = flow.view(b, flow.size(1), h * w).permute(0, 2, 1) # [B, H*W, 2] + + scores = torch.matmul(query, key.permute(0, 2, 1)) / (c**0.5) # [B, H*W, H*W] + prob = torch.softmax(scores, dim=-1) + + out = torch.matmul(prob, value) # [B, H*W, 2] + out = out.view(b, h, w, value.size(-1)).permute(0, 3, 1, 2) # [B, 2, H, W] + + return out + + def forward_local_window_attn( + self, + feature0, + flow, + local_window_radius=1, + ): + assert flow.size(1) == 2 + assert local_window_radius > 0 + + b, c, h, w = feature0.size() + + feature0_reshape = self.q_proj( + feature0.view(b, c, -1).permute(0, 2, 1) + ).reshape( + b * h * w, 1, c + ) # [B*H*W, 1, C] + + kernel_size = 2 * local_window_radius + 1 + + feature0_proj = ( + self.k_proj(feature0.view(b, c, -1).permute(0, 2, 1)) + .permute(0, 2, 1) + .reshape(b, c, h, w) + ) + + feature0_window = F.unfold( + feature0_proj, kernel_size=kernel_size, padding=local_window_radius + ) # [B, C*(2R+1)^2), H*W] + + feature0_window = ( + feature0_window.view(b, c, kernel_size**2, h, w) + .permute(0, 3, 4, 1, 2) + .reshape(b * h * w, c, kernel_size**2) + ) # [B*H*W, C, (2R+1)^2] + + flow_window = F.unfold( + flow, kernel_size=kernel_size, padding=local_window_radius + ) # [B, 2*(2R+1)^2), H*W] + + flow_window = ( + flow_window.view(b, 2, kernel_size**2, h, w) + .permute(0, 3, 4, 2, 1) + .reshape(b * h * w, kernel_size**2, 2) + ) # [B*H*W, (2R+1)^2, 2] + + scores = torch.matmul(feature0_reshape, feature0_window) / ( + c**0.5 + ) # [B*H*W, 1, (2R+1)^2] + + prob = torch.softmax(scores, dim=-1) + + out = ( + torch.matmul(prob, flow_window) + .view(b, h, w, 2) + .permute(0, 3, 1, 2) + .contiguous() + ) # [B, 2, H, W] + + return out + + +def global_correlation_softmax( + feature0, + feature1, + pred_bidir_flow=False, +): + # global correlation + b, c, h, w = feature0.shape + feature0 = feature0.view(b, c, -1).permute(0, 2, 1) # [B, H*W, C] + feature1 = feature1.view(b, c, -1) # [B, C, H*W] + + correlation = torch.matmul(feature0, feature1).view(b, h, w, h, w) / ( + c**0.5 + ) # [B, H, W, H, W] + + # flow from softmax + init_grid = coords_grid(b, h, w).to(correlation.device) # [B, 2, H, W] + grid = init_grid.view(b, 2, -1).permute(0, 2, 1) # [B, H*W, 2] + + correlation = correlation.view(b, h * w, h * w) # [B, H*W, H*W] + + if pred_bidir_flow: + correlation = torch.cat( + (correlation, correlation.permute(0, 2, 1)), dim=0 + ) # [2*B, H*W, H*W] + init_grid = init_grid.repeat(2, 1, 1, 1) # [2*B, 2, H, W] + grid = grid.repeat(2, 1, 1) # [2*B, H*W, 2] + b = b * 2 + + prob = F.softmax(correlation, dim=-1) # [B, H*W, H*W] + + correspondence = ( + torch.matmul(prob, grid).view(b, h, w, 2).permute(0, 3, 1, 2) + ) # [B, 2, H, W] + + # when predicting bidirectional flow, flow is the concatenation of forward flow and backward flow + flow = correspondence - init_grid + + return flow, prob + + +def local_correlation_softmax( + feature0, + feature1, + local_radius, + padding_mode="zeros", +): + b, c, h, w = feature0.size() + coords_init = coords_grid(b, h, w).to(feature0.device) 
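+    # Local matching: each source pixel is correlated only with a
+    # (2R+1) x (2R+1) window of feature1 around its own location, rather
+    # than against the full global correlation volume.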
# [B, 2, H, W] + coords = coords_init.view(b, 2, -1).permute(0, 2, 1) # [B, H*W, 2] + + local_h = 2 * local_radius + 1 + local_w = 2 * local_radius + 1 + + window_grid = generate_window_grid( + -local_radius, + local_radius, + -local_radius, + local_radius, + local_h, + local_w, + device=feature0.device, + ) # [2R+1, 2R+1, 2] + window_grid = window_grid.reshape(-1, 2).repeat(b, 1, 1, 1) # [B, 1, (2R+1)^2, 2] + sample_coords = coords.unsqueeze(-2) + window_grid # [B, H*W, (2R+1)^2, 2] + + sample_coords_softmax = sample_coords + + # exclude coords that are out of image space + valid_x = (sample_coords[:, :, :, 0] >= 0) & ( + sample_coords[:, :, :, 0] < w + ) # [B, H*W, (2R+1)^2] + valid_y = (sample_coords[:, :, :, 1] >= 0) & ( + sample_coords[:, :, :, 1] < h + ) # [B, H*W, (2R+1)^2] + + valid = ( + valid_x & valid_y + ) # [B, H*W, (2R+1)^2], used to mask out invalid values when softmax + + # normalize coordinates to [-1, 1] + sample_coords_norm = normalize_coords(sample_coords, h, w) # [-1, 1] + window_feature = F.grid_sample( + feature1, sample_coords_norm, padding_mode=padding_mode, align_corners=True + ).permute( + 0, 2, 1, 3 + ) # [B, H*W, C, (2R+1)^2] + feature0_view = feature0.permute(0, 2, 3, 1).view(b, h * w, 1, c) # [B, H*W, 1, C] + + corr = torch.matmul(feature0_view, window_feature).view(b, h * w, -1) / ( + c**0.5 + ) # [B, H*W, (2R+1)^2] + + # mask invalid locations + corr[~valid] = -1e9 + + prob = F.softmax(corr, -1) # [B, H*W, (2R+1)^2] + + correspondence = ( + torch.matmul(prob.unsqueeze(-2), sample_coords_softmax) + .squeeze(-2) + .view(b, h, w, 2) + .permute(0, 3, 1, 2) + ) # [B, 2, H, W] + + flow = correspondence - coords_init + match_prob = prob + + return flow, match_prob + + +def coords_grid(b, h, w, homogeneous=False, device=None): + y, x = torch.meshgrid(torch.arange(h), torch.arange(w)) # [H, W] + + stacks = [x, y] + + if homogeneous: + ones = torch.ones_like(x) # [H, W] + stacks.append(ones) + + grid = torch.stack(stacks, dim=0).float() # [2, H, W] or [3, H, W] + + grid = grid[None].repeat(b, 1, 1, 1) # [B, 2, H, W] or [B, 3, H, W] + + if device is not None: + grid = grid.to(device) + + return grid + + +def generate_window_grid(h_min, h_max, w_min, w_max, len_h, len_w, device=None): + assert device is not None + + x, y = torch.meshgrid( + [ + torch.linspace(w_min, w_max, len_w, device=device), + torch.linspace(h_min, h_max, len_h, device=device), + ], + ) + grid = torch.stack((x, y), -1).transpose(0, 1).float() # [H, W, 2] + + return grid + + +def normalize_coords(coords, h, w): + # coords: [B, H, W, 2] + c = torch.Tensor([(w - 1) / 2.0, (h - 1) / 2.0]).float().to(coords.device) + return (coords - c) / c # [-1, 1] + + +def bilinear_sample( + img, sample_coords, mode="bilinear", padding_mode="zeros", return_mask=False +): + # img: [B, C, H, W] + # sample_coords: [B, 2, H, W] in image scale + if sample_coords.size(1) != 2: # [B, H, W, 2] + sample_coords = sample_coords.permute(0, 3, 1, 2) + + b, _, h, w = sample_coords.shape + + # Normalize to [-1, 1] + x_grid = 2 * sample_coords[:, 0] / (w - 1) - 1 + y_grid = 2 * sample_coords[:, 1] / (h - 1) - 1 + + grid = torch.stack([x_grid, y_grid], dim=-1) # [B, H, W, 2] + + img = F.grid_sample( + img, grid, mode=mode, padding_mode=padding_mode, align_corners=True + ) + + if return_mask: + mask = ( + (x_grid >= -1) & (y_grid >= -1) & (x_grid <= 1) & (y_grid <= 1) + ) # [B, H, W] + + return img, mask + + return img + + +def flow_warp(feature, flow, mask=False, padding_mode="zeros"): + b, c, h, w = feature.size() + assert 
flow.size(1) == 2 + + grid = coords_grid(b, h, w).to(flow.device) + flow # [B, 2, H, W] + + return bilinear_sample(feature, grid, padding_mode=padding_mode, return_mask=mask) + + +def forward_backward_consistency_check(fwd_flow, bwd_flow, alpha=0.01, beta=0.5): + # fwd_flow, bwd_flow: [B, 2, H, W] + # alpha and beta values are following UnFlow (https://arxiv.org/abs/1711.07837) + assert fwd_flow.dim() == 4 and bwd_flow.dim() == 4 + assert fwd_flow.size(1) == 2 and bwd_flow.size(1) == 2 + flow_mag = torch.norm(fwd_flow, dim=1) + torch.norm(bwd_flow, dim=1) # [B, H, W] + + warped_bwd_flow = flow_warp(bwd_flow, fwd_flow) # [B, 2, H, W] + warped_fwd_flow = flow_warp(fwd_flow, bwd_flow) # [B, 2, H, W] + + diff_fwd = torch.norm(fwd_flow + warped_bwd_flow, dim=1) # [B, H, W] + diff_bwd = torch.norm(bwd_flow + warped_fwd_flow, dim=1) + + threshold = alpha * flow_mag + beta + + fwd_occ = (diff_fwd > threshold).float() # [B, H, W] + bwd_occ = (diff_bwd > threshold).float() + + return fwd_occ, bwd_occ + + +class PositionEmbeddingSine(nn.Module): + """ + This is a more standard version of the position embedding, very similar to the one + used by the Attention is all you need paper, generalized to work on images. + """ + + def __init__(self, num_pos_feats=64, temperature=10000, normalize=True, scale=None): + super().__init__() + self.num_pos_feats = num_pos_feats + self.temperature = temperature + self.normalize = normalize + if scale is not None and normalize is False: + raise ValueError("normalize should be True if scale is passed") + if scale is None: + scale = 2 * math.pi + self.scale = scale + + def forward(self, x): + # x = tensor_list.tensors # [B, C, H, W] + # mask = tensor_list.mask # [B, H, W], input with padding, valid as 0 + b, c, h, w = x.size() + mask = torch.ones((b, h, w), device=x.device) # [B, H, W] + y_embed = mask.cumsum(1, dtype=torch.float32) + x_embed = mask.cumsum(2, dtype=torch.float32) + if self.normalize: + eps = 1e-6 + y_embed = y_embed / (y_embed[:, -1:, :] + eps) * self.scale + x_embed = x_embed / (x_embed[:, :, -1:] + eps) * self.scale + + dim_t = torch.arange(self.num_pos_feats, dtype=torch.float32, device=x.device) + dim_t = self.temperature ** (2 * (dim_t // 2) / self.num_pos_feats) + + pos_x = x_embed[:, :, :, None] / dim_t + pos_y = y_embed[:, :, :, None] / dim_t + pos_x = torch.stack( + (pos_x[:, :, :, 0::2].sin(), pos_x[:, :, :, 1::2].cos()), dim=4 + ).flatten(3) + pos_y = torch.stack( + (pos_y[:, :, :, 0::2].sin(), pos_y[:, :, :, 1::2].cos()), dim=4 + ).flatten(3) + pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2) + return pos + + +def split_feature( + feature, + num_splits=2, + channel_last=False, +): + if channel_last: # [B, H, W, C] + b, h, w, c = feature.size() + assert h % num_splits == 0 and w % num_splits == 0 + + b_new = b * num_splits * num_splits + h_new = h // num_splits + w_new = w // num_splits + + feature = ( + feature.view(b, num_splits, h // num_splits, num_splits, w // num_splits, c) + .permute(0, 1, 3, 2, 4, 5) + .reshape(b_new, h_new, w_new, c) + ) # [B*K*K, H/K, W/K, C] + else: # [B, C, H, W] + b, c, h, w = feature.size() + assert h % num_splits == 0 and w % num_splits == 0 + + b_new = b * num_splits * num_splits + h_new = h // num_splits + w_new = w // num_splits + + feature = ( + feature.view(b, c, num_splits, h // num_splits, num_splits, w // num_splits) + .permute(0, 2, 4, 1, 3, 5) + .reshape(b_new, c, h_new, w_new) + ) # [B*K*K, C, H/K, W/K] + + return feature + + +def merge_splits( + splits, + num_splits=2, + channel_last=False, 
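+    # Inverse of split_feature: reassembles the K*K window tensors
+    # (K = num_splits) back into the full-resolution feature map.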
+):
+    if channel_last:  # [B*K*K, H/K, W/K, C]
+        b, h, w, c = splits.size()
+        new_b = b // num_splits // num_splits
+
+        splits = splits.view(new_b, num_splits, num_splits, h, w, c)
+        merge = (
+            splits.permute(0, 1, 3, 2, 4, 5)
+            .contiguous()
+            .view(new_b, num_splits * h, num_splits * w, c)
+        )  # [B, H, W, C]
+    else:  # [B*K*K, C, H/K, W/K]
+        b, c, h, w = splits.size()
+        new_b = b // num_splits // num_splits
+
+        splits = splits.view(new_b, num_splits, num_splits, c, h, w)
+        merge = (
+            splits.permute(0, 3, 1, 4, 2, 5)
+            .contiguous()
+            .view(new_b, c, num_splits * h, num_splits * w)
+        )  # [B, C, H, W]
+
+    return merge
+
+
+def normalize_img(img0, img1):
+    # normalize by ImageNet mean and std; note that no /255 rescaling is
+    # applied here, so inputs are expected to already be in [0, 1]
+    mean = torch.tensor([0.485, 0.456, 0.406]).view(1, 3, 1, 1).to(img1.device)
+    std = torch.tensor([0.229, 0.224, 0.225]).view(1, 3, 1, 1).to(img1.device)
+    img0 = (img0 - mean) / std
+    img1 = (img1 - mean) / std
+
+    return img0, img1
+
+
+def feature_add_position(feature0, feature1, attn_splits, feature_channels):
+    pos_enc = PositionEmbeddingSine(num_pos_feats=feature_channels // 2)
+
+    if attn_splits > 1:  # add position embedding within each split window
+        feature0_splits = split_feature(feature0, num_splits=attn_splits)
+        feature1_splits = split_feature(feature1, num_splits=attn_splits)
+
+        position = pos_enc(feature0_splits)
+
+        feature0_splits = feature0_splits + position
+        feature1_splits = feature1_splits + position
+
+        feature0 = merge_splits(feature0_splits, num_splits=attn_splits)
+        feature1 = merge_splits(feature1_splits, num_splits=attn_splits)
+    else:
+        position = pos_enc(feature0)
+
+        feature0 = feature0 + position
+        feature1 = feature1 + position
+
+    return feature0, feature1
+
+
+class GMFlow(nn.Module):
+    def __init__(
+        self,
+        num_scales=2,
+        upsample_factor=4,
+        feature_channels=128,
+        attention_type="swin",
+        num_transformer_layers=6,
+        ffn_dim_expansion=4,
+        num_head=1,
+        **kwargs,
+    ):
+        super(GMFlow, self).__init__()
+
+        self.num_scales = num_scales
+        self.feature_channels = feature_channels
+        self.upsample_factor = upsample_factor
+        self.attention_type = attention_type
+        self.num_transformer_layers = num_transformer_layers
+
+        # CNN backbone
+        self.backbone = CNNEncoder(
+            output_dim=feature_channels, num_output_scales=num_scales
+        )
+
+        # Transformer
+        self.transformer = FeatureTransformer(
+            num_layers=num_transformer_layers,
+            d_model=feature_channels,
+            nhead=num_head,
+            attention_type=attention_type,
+            ffn_dim_expansion=ffn_dim_expansion,
+        )
+
+        # flow propagation with self-attn
+        self.feature_flow_attn = FeatureFlowAttention(in_channels=feature_channels)
+
+        # convex upsampling: concat feature0 and flow as input
+        self.upsampler = nn.Sequential(
+            nn.Conv2d(2 + feature_channels, 256, 3, 1, 1),
+            nn.ReLU(inplace=True),
+            nn.Conv2d(256, upsample_factor**2 * 9, 1, 1, 0),
+        )
+
+    def extract_feature(self, img0, img1):
+        concat = torch.cat((img0, img1), dim=0)  # [2B, C, H, W]
+        features = self.backbone(
+            concat
+        )  # list of [2B, C, H, W], resolution from high to low
+
+        # reverse: resolution from low to high
+        features = features[::-1]
+
+        feature0, feature1 = [], []
+
+        for i in range(len(features)):
+            feature = features[i]
+            chunks = torch.chunk(feature, 2, 0)  # tuple
+            feature0.append(chunks[0])
+            feature1.append(chunks[1])
+
+        return feature0, feature1
+
+    def upsample_flow(
+        self,
+        flow,
+        feature,
+        bilinear=False,
+        upsample_factor=8,
+    ):
+        if bilinear:
+            up_flow = (
+                F.interpolate(
+                    flow,
+                    scale_factor=upsample_factor,
+                    mode="bilinear",
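+                    # flow values are displacements in pixels, so after spatial
+                    # upsampling they are also scaled by upsample_factor below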
+                    align_corners=True,
+                )
+                * upsample_factor
+            )
+
+        else:
+            # convex upsampling
+            concat = torch.cat((flow, feature), dim=1)
+
+            mask = self.upsampler(concat)
+            b, flow_channel, h, w = flow.shape
+            mask = mask.view(
+                b, 1, 9, self.upsample_factor, self.upsample_factor, h, w
+            )  # [B, 1, 9, K, K, H, W]
+            mask = torch.softmax(mask, dim=2)
+
+            up_flow = F.unfold(self.upsample_factor * flow, [3, 3], padding=1)
+            up_flow = up_flow.view(
+                b, flow_channel, 9, 1, 1, h, w
+            )  # [B, 2, 9, 1, 1, H, W]
+
+            up_flow = torch.sum(mask * up_flow, dim=2)  # [B, 2, K, K, H, W]
+            up_flow = up_flow.permute(0, 1, 4, 2, 5, 3)  # [B, 2, K, H, K, W]
+            up_flow = up_flow.reshape(
+                b, flow_channel, self.upsample_factor * h, self.upsample_factor * w
+            )  # [B, 2, K*H, K*W]
+
+        return up_flow
+
+    def forward(
+        self,
+        img0,
+        img1,
+        attn_splits_list=[2, 8],
+        corr_radius_list=[-1, 4],
+        prop_radius_list=[-1, 1],
+        pred_bidir_flow=False,
+        **kwargs,
+    ):
+        img0, img1 = normalize_img(img0, img1)  # [B, 3, H, W]
+
+        # resolution low to high
+        feature0_list, feature1_list = self.extract_feature(
+            img0, img1
+        )  # list of features
+
+        flow = None
+
+        assert (
+            len(attn_splits_list)
+            == len(corr_radius_list)
+            == len(prop_radius_list)
+            == self.num_scales
+        )
+
+        for scale_idx in range(self.num_scales):
+            feature0, feature1 = feature0_list[scale_idx], feature1_list[scale_idx]
+
+            if pred_bidir_flow and scale_idx > 0:
+                # predicting bidirectional flow with refinement
+                feature0, feature1 = torch.cat((feature0, feature1), dim=0), torch.cat(
+                    (feature1, feature0), dim=0
+                )
+
+            upsample_factor = self.upsample_factor * (
+                2 ** (self.num_scales - 1 - scale_idx)
+            )
+
+            if scale_idx > 0:
+                flow = (
+                    F.interpolate(
+                        flow, scale_factor=2, mode="bilinear", align_corners=True
+                    )
+                    * 2
+                )
+
+            if flow is not None:
+                flow = flow.detach()
+                feature1 = flow_warp(feature1, flow)  # [B, C, H, W]
+
+            attn_splits = attn_splits_list[scale_idx]
+            corr_radius = corr_radius_list[scale_idx]
+            prop_radius = prop_radius_list[scale_idx]
+
+            # add position to features
+            feature0, feature1 = feature_add_position(
+                feature0, feature1, attn_splits, self.feature_channels
+            )
+
+            # Transformer
+            feature0, feature1 = self.transformer(
+                feature0, feature1, attn_num_splits=attn_splits
+            )
+
+            # correlation and softmax
+            if corr_radius == -1:  # global matching
+                flow_pred = global_correlation_softmax(
+                    feature0, feature1, pred_bidir_flow
+                )[0]
+            else:  # local matching
+                flow_pred = local_correlation_softmax(feature0, feature1, corr_radius)[0]
+
+            # flow or residual flow
+            flow = flow + flow_pred if flow is not None else flow_pred
+
+            # upsample to the original resolution for supervision
+            if self.training:
+                # only need to upsample intermediate flow predictions at training time
+                flow_bilinear = self.upsample_flow(
+                    flow, None, bilinear=True, upsample_factor=upsample_factor
+                )
+
+            # flow propagation with self-attn
+            if pred_bidir_flow and scale_idx == 0:
+                feature0 = torch.cat(
+                    (feature0, feature1), dim=0
+                )  # [2*B, C, H, W] for propagation
+            flow = self.feature_flow_attn(
+                feature0,
+                flow.detach(),
+                local_window_attn=prop_radius > 0,
+                local_window_radius=prop_radius,
+            )
+
+            # bilinear upsampling at training time except the last one
+            if self.training and scale_idx < self.num_scales - 1:
+                flow_up = self.upsample_flow(
+                    flow, feature0, bilinear=True, upsample_factor=upsample_factor
+                )
+
+            if scale_idx == self.num_scales - 1:
+                flow_up = self.upsample_flow(flow, feature0)
+
+        return flow_up
+
+
+backwarp_tenGrid = {}
+
+
+def 
backwarp(tenIn, tenflow): + if str(tenflow.shape) not in backwarp_tenGrid: + tenHor = ( + torch.linspace( + start=-1.0, + end=1.0, + steps=tenflow.shape[3], + dtype=tenflow.dtype, + device=tenflow.device, + ) + .view(1, 1, 1, -1) + .repeat(1, 1, tenflow.shape[2], 1) + ) + tenVer = ( + torch.linspace( + start=-1.0, + end=1.0, + steps=tenflow.shape[2], + dtype=tenflow.dtype, + device=tenflow.device, + ) + .view(1, 1, -1, 1) + .repeat(1, 1, 1, tenflow.shape[3]) + ) + + backwarp_tenGrid[str(tenflow.shape)] = torch.cat([tenHor, tenVer], 1).to(get_torch_device()) + # end + + tenflow = torch.cat( + [ + tenflow[:, 0:1, :, :] / ((tenIn.shape[3] - 1.0) / 2.0), + tenflow[:, 1:2, :, :] / ((tenIn.shape[2] - 1.0) / 2.0), + ], + 1, + ) + + return torch.nn.functional.grid_sample( + input=tenIn, + grid=(backwarp_tenGrid[str(tenflow.shape)] + tenflow).permute(0, 2, 3, 1), + mode="bilinear", + padding_mode="zeros", + align_corners=True, + ) + + +class MetricNet(nn.Module): + def __init__(self): + super(MetricNet, self).__init__() + self.metric_in = nn.Conv2d(14, 64, 3, 1, 1) + self.metric_net1 = nn.Sequential(nn.PReLU(), nn.Conv2d(64, 64, 3, 1, 1)) + self.metric_net2 = nn.Sequential(nn.PReLU(), nn.Conv2d(64, 64, 3, 1, 1)) + self.metric_net3 = nn.Sequential(nn.PReLU(), nn.Conv2d(64, 64, 3, 1, 1)) + self.metric_out = nn.Sequential(nn.PReLU(), nn.Conv2d(64, 2, 3, 1, 1)) + + def forward(self, img0, img1, flow01, flow10): + metric0 = F.l1_loss(img0, backwarp(img1, flow01), reduction="none").mean( + [1], True + ) + metric1 = F.l1_loss(img1, backwarp(img0, flow10), reduction="none").mean( + [1], True + ) + + fwd_occ, bwd_occ = forward_backward_consistency_check(flow01, flow10) + + flow01 = torch.cat( + [ + flow01[:, 0:1, :, :] / ((flow01.shape[3] - 1.0) / 2.0), + flow01[:, 1:2, :, :] / ((flow01.shape[2] - 1.0) / 2.0), + ], + 1, + ) + flow10 = torch.cat( + [ + flow10[:, 0:1, :, :] / ((flow10.shape[3] - 1.0) / 2.0), + flow10[:, 1:2, :, :] / ((flow10.shape[2] - 1.0) / 2.0), + ], + 1, + ) + + img = torch.cat((img0, img1), 1) + metric = torch.cat((-metric0, -metric1), 1) + flow = torch.cat((flow01, flow10), 1) + occ = torch.cat((fwd_occ.unsqueeze(1), bwd_occ.unsqueeze(1)), 1) + + feat = self.metric_in(torch.cat((img, metric, flow, occ), 1)) + feat = self.metric_net1(feat) + feat + feat = self.metric_net2(feat) + feat + feat = self.metric_net3(feat) + feat + metric = self.metric_out(feat) + + metric = torch.tanh(metric) * 10 + + return metric[:, :1], metric[:, 1:2] + + +class FeatureNet(nn.Module): + """The quadratic model""" + + def __init__(self): + super(FeatureNet, self).__init__() + self.block1 = nn.Sequential( + nn.PReLU(), + nn.Conv2d(3, 64, 3, 2, 1), + nn.PReLU(), + nn.Conv2d(64, 64, 3, 1, 1), + ) + self.block2 = nn.Sequential( + nn.PReLU(), + nn.Conv2d(64, 128, 3, 2, 1), + nn.PReLU(), + nn.Conv2d(128, 128, 3, 1, 1), + ) + self.block3 = nn.Sequential( + nn.PReLU(), + nn.Conv2d(128, 192, 3, 2, 1), + nn.PReLU(), + nn.Conv2d(192, 192, 3, 1, 1), + ) + + def forward(self, x): + x1 = self.block1(x) + x2 = self.block2(x1) + x3 = self.block3(x2) + + return x1, x2, x3 + + +# Residual Block +def ResidualBlock(in_channels, out_channels, stride=1): + return torch.nn.Sequential( + nn.PReLU(), + nn.Conv2d( + in_channels, + out_channels, + kernel_size=3, + stride=stride, + padding=1, + bias=True, + ), + nn.PReLU(), + nn.Conv2d( + out_channels, + out_channels, + kernel_size=3, + stride=stride, + padding=1, + bias=True, + ), + ) + + +# downsample block +def DownsampleBlock(in_channels, out_channels, stride=2): + return 
torch.nn.Sequential( + nn.PReLU(), + nn.Conv2d( + in_channels, + out_channels, + kernel_size=3, + stride=stride, + padding=1, + bias=True, + ), + nn.PReLU(), + nn.Conv2d( + out_channels, out_channels, kernel_size=3, stride=1, padding=1, bias=True + ), + ) + + +# upsample block +def UpsampleBlock(in_channels, out_channels, stride=2): + return torch.nn.Sequential( + nn.PReLU(), + nn.ConvTranspose2d( + in_channels, + out_channels, + kernel_size=4, + stride=stride, + padding=1, + bias=True, + ), + nn.PReLU(), + nn.Conv2d( + out_channels, out_channels, kernel_size=3, stride=1, padding=1, bias=True + ), + ) + + +class PixelShuffleBlcok(nn.Module): + def __init__(self, in_feat, num_feat, num_out_ch): + super(PixelShuffleBlcok, self).__init__() + self.conv_before_upsample = nn.Sequential( + nn.Conv2d(in_feat, num_feat, 3, 1, 1), nn.PReLU() + ) + self.upsample = nn.Sequential( + nn.Conv2d(num_feat, 4 * num_feat, 3, 1, 1), nn.PixelShuffle(2) + ) + self.conv_last = nn.Conv2d(num_feat, num_out_ch, 3, 1, 1) + + def forward(self, x): + x = self.conv_before_upsample(x) + x = self.conv_last(self.upsample(x)) + return x + + +# grid network +class GridNet(nn.Module): + def __init__( + self, + in_channels=12, + in_channels1=128, + in_channels2=256, + in_channels3=384, + out_channels=3, + ): + super(GridNet, self).__init__() + + self.residual_model_head = ResidualBlock(in_channels, 64) + self.residual_model_head1 = ResidualBlock(in_channels1, 64) + self.residual_model_head2 = ResidualBlock(in_channels2, 128) + self.residual_model_head3 = ResidualBlock(in_channels3, 192) + + self.residual_model_01 = ResidualBlock(64, 64) + # self.residual_model_02=ResidualBlock(64, 64) + # self.residual_model_03=ResidualBlock(64, 64) + self.residual_model_04 = ResidualBlock(64, 64) + self.residual_model_05 = ResidualBlock(64, 64) + self.residual_model_tail = PixelShuffleBlcok(64, 64, out_channels) + + self.residual_model_11 = ResidualBlock(128, 128) + # self.residual_model_12=ResidualBlock(128, 128) + # self.residual_model_13=ResidualBlock(128, 128) + self.residual_model_14 = ResidualBlock(128, 128) + self.residual_model_15 = ResidualBlock(128, 128) + + self.residual_model_21 = ResidualBlock(192, 192) + # self.residual_model_22=ResidualBlock(192, 192) + # self.residual_model_23=ResidualBlock(192, 192) + self.residual_model_24 = ResidualBlock(192, 192) + self.residual_model_25 = ResidualBlock(192, 192) + + # + + self.downsample_model_10 = DownsampleBlock(64, 128) + self.downsample_model_20 = DownsampleBlock(128, 192) + + self.downsample_model_11 = DownsampleBlock(64, 128) + self.downsample_model_21 = DownsampleBlock(128, 192) + + # self.downsample_model_12=DownsampleBlock(64, 128) + # self.downsample_model_22=DownsampleBlock(128, 192) + + # + + # self.upsample_model_03=UpsampleBlock(128, 64) + # self.upsample_model_13=UpsampleBlock(192, 128) + + self.upsample_model_04 = UpsampleBlock(128, 64) + self.upsample_model_14 = UpsampleBlock(192, 128) + + self.upsample_model_05 = UpsampleBlock(128, 64) + self.upsample_model_15 = UpsampleBlock(192, 128) + + def forward(self, x, x1, x2, x3): + X00 = self.residual_model_head(x) + self.residual_model_head1( + x1 + ) # --- 182 ~ 185 + # X10 = self.residual_model_head1(x1) + + X01 = self.residual_model_01(X00) + X00 # --- 208 ~ 211 ,AddBackward1213 + + X10 = self.downsample_model_10(X00) + self.residual_model_head2( + x2 + ) # --- 186 ~ 189 + X20 = self.downsample_model_20(X10) + self.residual_model_head3( + x3 + ) # --- 190 ~ 193 + + residual_11 = ( + self.residual_model_11(X10) + X10 + ) # 
201 ~ 204 , sum AddBackward1206 + downsample_11 = self.downsample_model_11(X01) # 214 ~ 217 + X11 = residual_11 + downsample_11 # --- AddBackward1218 + + residual_21 = ( + self.residual_model_21(X20) + X20 + ) # 194 ~ 197 , sum AddBackward1199 + downsample_21 = self.downsample_model_21(X11) # 219 ~ 222 + X21 = residual_21 + downsample_21 # AddBackward1223 + + X24 = self.residual_model_24(X21) + X21 # --- 224 ~ 227 , AddBackward1229 + X25 = self.residual_model_25(X24) + X24 # --- 230 ~ 233 , AddBackward1235 + + upsample_14 = self.upsample_model_14(X24) # 242 ~ 246 + residual_14 = self.residual_model_14(X11) + X11 # 248 ~ 251, AddBackward1253 + X14 = upsample_14 + residual_14 # --- AddBackward1254 + + upsample_04 = self.upsample_model_04(X14) # 268 ~ 272 + residual_04 = self.residual_model_04(X01) + X01 # 274 ~ 277, AddBackward1279 + X04 = upsample_04 + residual_04 # --- AddBackward1280 + + upsample_15 = self.upsample_model_15(X25) # 236 ~ 240 + residual_15 = self.residual_model_15(X14) + X14 # 255 ~ 258, AddBackward1260 + X15 = upsample_15 + residual_15 # AddBackward1261 + + upsample_05 = self.upsample_model_05(X15) # 262 ~ 266 + residual_05 = self.residual_model_05(X04) + X04 # 281 ~ 284,AddBackward1286 + X05 = upsample_05 + residual_05 # AddBackward1287 + + X_tail = self.residual_model_tail(X05) # 288 ~ 291 + + return X_tail +# end + +class Model: + def __init__(self): + self.flownet = GMFlow() + self.metricnet = MetricNet() + self.feat_ext = FeatureNet() + self.fusionnet = GridNet() + self.version = 3.9 + + def eval(self): + self.flownet.eval() + self.metricnet.eval() + self.feat_ext.eval() + self.fusionnet.eval() + + def device(self): + self.flownet.to(device) + self.metricnet.to(device) + self.feat_ext.to(device) + self.fusionnet.to(device) + + def load_model(self, path_dict): + #models/GMFSS_fortuna_flownet.pkl + self.flownet.load_state_dict(torch.load(path_dict["flownet"])) + #models/GMFSS_fortuna_metric.pkl + self.metricnet.load_state_dict(torch.load(path_dict["metricnet"])) + #models/GMFSS_fortuna_feat.pkl + self.feat_ext.load_state_dict(torch.load(path_dict["feat_ext"])) + #models/GMFSS_fortuna_fusionnet.pkl + self.fusionnet.load_state_dict(torch.load(path_dict["fusionnet"])) + + def reuse(self, img0, img1, scale): + feat11, feat12, feat13 = self.feat_ext(img0) + feat21, feat22, feat23 = self.feat_ext(img1) + + img0 = F.interpolate( + img0, scale_factor=0.5, mode="bilinear", align_corners=False + ) + img1 = F.interpolate( + img1, scale_factor=0.5, mode="bilinear", align_corners=False + ) + + if scale != 1.0: + imgf0 = F.interpolate( + img0, scale_factor=scale, mode="bilinear", align_corners=False + ) + imgf1 = F.interpolate( + img1, scale_factor=scale, mode="bilinear", align_corners=False + ) + else: + imgf0 = img0 + imgf1 = img1 + flow01 = self.flownet(imgf0, imgf1, return_flow=True) + flow10 = self.flownet(imgf1, imgf0, return_flow=True) + if scale != 1.0: + flow01 = ( + F.interpolate( + flow01, + scale_factor=1.0 / scale, + mode="bilinear", + align_corners=False, + ) + / scale + ) + flow10 = ( + F.interpolate( + flow10, + scale_factor=1.0 / scale, + mode="bilinear", + align_corners=False, + ) + / scale + ) + + metric0, metric1 = self.metricnet(img0, img1, flow01, flow10) + + return ( + flow01, + flow10, + metric0, + metric1, + feat11, + feat12, + feat13, + feat21, + feat22, + feat23, + ) + + def inference( + self, + img0, + img1, + flow01, + flow10, + metric0, + metric1, + feat11, + feat12, + feat13, + feat21, + feat22, + feat23, + timestep, + ): + F1t = timestep * flow01 + F2t 
= (1 - timestep) * flow10 + + Z1t = timestep * metric0 + Z2t = (1 - timestep) * metric1 + + img0 = F.interpolate( + img0, scale_factor=0.5, mode="bilinear", align_corners=False + ) + I1t = softsplat(img0, F1t, Z1t, strMode="soft") + img1 = F.interpolate( + img1, scale_factor=0.5, mode="bilinear", align_corners=False + ) + I2t = softsplat(img1, F2t, Z2t, strMode="soft") + + feat1t1 = softsplat(feat11, F1t, Z1t, strMode="soft") + feat2t1 = softsplat(feat21, F2t, Z2t, strMode="soft") + + F1td = ( + F.interpolate(F1t, scale_factor=0.5, mode="bilinear", align_corners=False) + * 0.5 + ) + Z1d = F.interpolate(Z1t, scale_factor=0.5, mode="bilinear", align_corners=False) + feat1t2 = softsplat(feat12, F1td, Z1d, strMode="soft") + F2td = ( + F.interpolate(F2t, scale_factor=0.5, mode="bilinear", align_corners=False) + * 0.5 + ) + Z2d = F.interpolate(Z2t, scale_factor=0.5, mode="bilinear", align_corners=False) + feat2t2 = softsplat(feat22, F2td, Z2d, strMode="soft") + + F1tdd = ( + F.interpolate(F1t, scale_factor=0.25, mode="bilinear", align_corners=False) + * 0.25 + ) + Z1dd = F.interpolate( + Z1t, scale_factor=0.25, mode="bilinear", align_corners=False + ) + feat1t3 = softsplat(feat13, F1tdd, Z1dd, strMode="soft") + F2tdd = ( + F.interpolate(F2t, scale_factor=0.25, mode="bilinear", align_corners=False) + * 0.25 + ) + Z2dd = F.interpolate( + Z2t, scale_factor=0.25, mode="bilinear", align_corners=False + ) + feat2t3 = softsplat(feat23, F2tdd, Z2dd, strMode="soft") + + out = self.fusionnet( + torch.cat([img0, I1t, I2t, img1], dim=1), + torch.cat([feat1t1, feat2t1], dim=1), + torch.cat([feat1t2, feat2t2], dim=1), + torch.cat([feat1t3, feat2t3], dim=1), + ) + + return torch.clamp(out, 0, 1) diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/gmfss_fortuna/GMFSS_Fortuna_union.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/gmfss_fortuna/GMFSS_Fortuna_union.py new file mode 100644 index 0000000000000000000000000000000000000000..41e92ddfa62fae5877eb12b43677d28ee9d3e29e --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/gmfss_fortuna/GMFSS_Fortuna_union.py @@ -0,0 +1,23 @@ +import itertools +import numpy as np +import vapoursynth as vs +from .GMFSS_Fortuna_union_arch import Model_inference +import torch + + +class GMFSS_Fortuna_union: + def __init__(self): + self.cache = False + self.amount_input_img = 2 + + torch.set_grad_enabled(False) + torch.backends.cudnn.enabled = True + torch.backends.cudnn.benchmark = True + + self.model = Model_inference() + self.model.eval() + + def execute(self, I0, I1, timestep): + with torch.inference_mode(): + middle = self.model(I0, I1, timestep).cpu() + return middle diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/gmfss_fortuna/GMFSS_Fortuna_union_arch.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/gmfss_fortuna/GMFSS_Fortuna_union_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..665e56534b647fe884d236693833f12060daa148 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/gmfss_fortuna/GMFSS_Fortuna_union_arch.py @@ -0,0 +1,1857 @@ +""" +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/GMFSS_infer_u.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/softsplat.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/FusionNet_u.py 
+https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/FeatureNet.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/MetricNet.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/IFNet_HDv3.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/gmflow/gmflow.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/gmflow/utils.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/gmflow/position.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/gmflow/geometry.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/gmflow/matching.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/gmflow/transformer.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/gmflow/backbone.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/gmflow/trident_conv.py +https://github.com/98mxr/GMFSS_Fortuna/blob/b5d0bd544e3f1eee6a059e49c69bcd3124c8343c/model/warplayer.py +""" + +from torch import nn +from torch.nn import functional as F +from torch.nn.modules.utils import _pair +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch +import math +from vfi_models.rife.rife_arch import IFNet +from vfi_models.ops import softsplat +from comfy.model_management import get_torch_device + +device = get_torch_device() +backwarp_tenGrid = {} + + +def warp(tenInput, tenFlow): + k = (str(tenFlow.device), str(tenFlow.size())) + if k not in backwarp_tenGrid: + tenHorizontal = ( + torch.linspace(-1.0, 1.0, tenFlow.shape[3], device=device) + .view(1, 1, 1, tenFlow.shape[3]) + .expand(tenFlow.shape[0], -1, tenFlow.shape[2], -1) + ) + tenVertical = ( + torch.linspace(-1.0, 1.0, tenFlow.shape[2], device=device) + .view(1, 1, tenFlow.shape[2], 1) + .expand(tenFlow.shape[0], -1, -1, tenFlow.shape[3]) + ) + backwarp_tenGrid[k] = torch.cat([tenHorizontal, tenVertical], 1).to(device) + + tenFlow = torch.cat( + [ + tenFlow[:, 0:1, :, :] / ((tenInput.shape[3] - 1.0) / 2.0), + tenFlow[:, 1:2, :, :] / ((tenInput.shape[2] - 1.0) / 2.0), + ], + 1, + ) + + g = (backwarp_tenGrid[k] + tenFlow).permute(0, 2, 3, 1) + return torch.nn.functional.grid_sample( + input=tenInput, + grid=g, + mode="bilinear", + padding_mode="border", + align_corners=True, + ) + + +class MultiScaleTridentConv(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + strides=1, + paddings=0, + dilations=1, + dilation=1, + groups=1, + num_branch=1, + test_branch_idx=-1, + bias=False, + norm=None, + activation=None, + ): + super(MultiScaleTridentConv, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = _pair(kernel_size) + self.num_branch = num_branch + self.stride = _pair(stride) + self.groups = groups + self.with_bias = bias + self.dilation = dilation + if isinstance(paddings, int): + paddings = [paddings] * self.num_branch + if isinstance(dilations, int): + dilations = [dilations] * self.num_branch + if isinstance(strides, int): + strides = [strides] * self.num_branch + self.paddings = [_pair(padding) for padding in paddings] + self.dilations = [_pair(dilation) for dilation in dilations] + self.strides = 
[_pair(stride) for stride in strides] + self.test_branch_idx = test_branch_idx + self.norm = norm + self.activation = activation + + assert len({self.num_branch, len(self.paddings), len(self.strides)}) == 1 + + self.weight = nn.Parameter( + torch.Tensor(out_channels, in_channels // groups, *self.kernel_size) + ) + if bias: + self.bias = nn.Parameter(torch.Tensor(out_channels)) + else: + self.bias = None + + nn.init.kaiming_uniform_(self.weight, nonlinearity="relu") + if self.bias is not None: + nn.init.constant_(self.bias, 0) + + def forward(self, inputs): + num_branch = ( + self.num_branch if self.training or self.test_branch_idx == -1 else 1 + ) + assert len(inputs) == num_branch + + if self.training or self.test_branch_idx == -1: + outputs = [ + F.conv2d( + input, + self.weight, + self.bias, + stride, + padding, + self.dilation, + self.groups, + ) + for input, stride, padding in zip(inputs, self.strides, self.paddings) + ] + else: + outputs = [ + F.conv2d( + inputs[0], + self.weight, + self.bias, + self.strides[self.test_branch_idx] + if self.test_branch_idx == -1 + else self.strides[-1], + self.paddings[self.test_branch_idx] + if self.test_branch_idx == -1 + else self.paddings[-1], + self.dilation, + self.groups, + ) + ] + + if self.norm is not None: + outputs = [self.norm(x) for x in outputs] + if self.activation is not None: + outputs = [self.activation(x) for x in outputs] + return outputs + + +class ResidualBlock_class(nn.Module): + def __init__( + self, + in_planes, + planes, + norm_layer=nn.InstanceNorm2d, + stride=1, + dilation=1, + ): + super(ResidualBlock_class, self).__init__() + + self.conv1 = nn.Conv2d( + in_planes, + planes, + kernel_size=3, + dilation=dilation, + padding=dilation, + stride=stride, + bias=False, + ) + self.conv2 = nn.Conv2d( + planes, + planes, + kernel_size=3, + dilation=dilation, + padding=dilation, + bias=False, + ) + self.relu = nn.ReLU(inplace=True) + + self.norm1 = norm_layer(planes) + self.norm2 = norm_layer(planes) + if not stride == 1 or in_planes != planes: + self.norm3 = norm_layer(planes) + + if stride == 1 and in_planes == planes: + self.downsample = None + else: + self.downsample = nn.Sequential( + nn.Conv2d(in_planes, planes, kernel_size=1, stride=stride), self.norm3 + ) + + def forward(self, x): + y = x + y = self.relu(self.norm1(self.conv1(y))) + y = self.relu(self.norm2(self.conv2(y))) + + if self.downsample is not None: + x = self.downsample(x) + + return self.relu(x + y) + + +class CNNEncoder(nn.Module): + def __init__( + self, + output_dim=128, + norm_layer=nn.InstanceNorm2d, + num_output_scales=1, + **kwargs, + ): + super(CNNEncoder, self).__init__() + self.num_branch = num_output_scales + + feature_dims = [64, 96, 128] + + self.conv1 = nn.Conv2d( + 3, feature_dims[0], kernel_size=7, stride=2, padding=3, bias=False + ) # 1/2 + self.norm1 = norm_layer(feature_dims[0]) + self.relu1 = nn.ReLU(inplace=True) + + self.in_planes = feature_dims[0] + self.layer1 = self._make_layer( + feature_dims[0], stride=1, norm_layer=norm_layer + ) # 1/2 + self.layer2 = self._make_layer( + feature_dims[1], stride=2, norm_layer=norm_layer + ) # 1/4 + + # highest resolution 1/4 or 1/8 + stride = 2 if num_output_scales == 1 else 1 + self.layer3 = self._make_layer( + feature_dims[2], + stride=stride, + norm_layer=norm_layer, + ) # 1/4 or 1/8 + + self.conv2 = nn.Conv2d(feature_dims[2], output_dim, 1, 1, 0) + + if self.num_branch > 1: + if self.num_branch == 4: + strides = (1, 2, 4, 8) + elif self.num_branch == 3: + strides = (1, 2, 4) + elif self.num_branch == 
2: + strides = (1, 2) + else: + raise ValueError + + self.trident_conv = MultiScaleTridentConv( + output_dim, + output_dim, + kernel_size=3, + strides=strides, + paddings=1, + num_branch=self.num_branch, + ) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode="fan_out", nonlinearity="relu") + elif isinstance(m, (nn.BatchNorm2d, nn.InstanceNorm2d, nn.GroupNorm)): + if m.weight is not None: + nn.init.constant_(m.weight, 1) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + + def _make_layer(self, dim, stride=1, dilation=1, norm_layer=nn.InstanceNorm2d): + layer1 = ResidualBlock_class( + self.in_planes, dim, norm_layer=norm_layer, stride=stride, dilation=dilation + ) + layer2 = ResidualBlock_class( + dim, dim, norm_layer=norm_layer, stride=1, dilation=dilation + ) + + layers = (layer1, layer2) + + self.in_planes = dim + return nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + x = self.norm1(x) + x = self.relu1(x) + + x = self.layer1(x) # 1/2 + x = self.layer2(x) # 1/4 + x = self.layer3(x) # 1/8 or 1/4 + + x = self.conv2(x) + + if self.num_branch > 1: + out = self.trident_conv([x] * self.num_branch) # high to low res + else: + out = [x] + + return out + + +def single_head_full_attention(q, k, v): + # q, k, v: [B, L, C] + assert q.dim() == k.dim() == v.dim() == 3 + + scores = torch.matmul(q, k.permute(0, 2, 1)) / (q.size(2) ** 0.5) # [B, L, L] + attn = torch.softmax(scores, dim=2) # [B, L, L] + out = torch.matmul(attn, v) # [B, L, C] + + return out + + +def generate_shift_window_attn_mask( + input_resolution, + window_size_h, + window_size_w, + shift_size_h, + shift_size_w, + device=get_torch_device(), +): + # Ref: https://github.com/microsoft/Swin-Transformer/blob/main/models/swin_transformer.py + # calculate attention mask for SW-MSA + h, w = input_resolution + img_mask = torch.zeros((1, h, w, 1)).to(device) # 1 H W 1 + h_slices = ( + slice(0, -window_size_h), + slice(-window_size_h, -shift_size_h), + slice(-shift_size_h, None), + ) + w_slices = ( + slice(0, -window_size_w), + slice(-window_size_w, -shift_size_w), + slice(-shift_size_w, None), + ) + cnt = 0 + for h in h_slices: + for w in w_slices: + img_mask[:, h, w, :] = cnt + cnt += 1 + + mask_windows = split_feature( + img_mask, num_splits=input_resolution[-1] // window_size_w, channel_last=True + ) + + mask_windows = mask_windows.view(-1, window_size_h * window_size_w) + attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2) + attn_mask = attn_mask.masked_fill(attn_mask != 0, float(-100.0)).masked_fill( + attn_mask == 0, float(0.0) + ) + + return attn_mask + + +def single_head_split_window_attention( + q, + k, + v, + num_splits=1, + with_shift=False, + h=None, + w=None, + attn_mask=None, +): + # Ref: https://github.com/microsoft/Swin-Transformer/blob/main/models/swin_transformer.py + # q, k, v: [B, L, C] + assert q.dim() == k.dim() == v.dim() == 3 + + assert h is not None and w is not None + assert q.size(1) == h * w + + b, _, c = q.size() + + b_new = b * num_splits * num_splits + + window_size_h = h // num_splits + window_size_w = w // num_splits + + q = q.view(b, h, w, c) # [B, H, W, C] + k = k.view(b, h, w, c) + v = v.view(b, h, w, c) + + scale_factor = c**0.5 + + if with_shift: + assert attn_mask is not None # compute once + shift_size_h = window_size_h // 2 + shift_size_w = window_size_w // 2 + + q = torch.roll(q, shifts=(-shift_size_h, -shift_size_w), dims=(1, 2)) + k = torch.roll(k, shifts=(-shift_size_h, -shift_size_w), dims=(1, 2)) + v = 
torch.roll(v, shifts=(-shift_size_h, -shift_size_w), dims=(1, 2)) + + q = split_feature( + q, num_splits=num_splits, channel_last=True + ) # [B*K*K, H/K, W/K, C] + k = split_feature(k, num_splits=num_splits, channel_last=True) + v = split_feature(v, num_splits=num_splits, channel_last=True) + + scores = ( + torch.matmul(q.view(b_new, -1, c), k.view(b_new, -1, c).permute(0, 2, 1)) + / scale_factor + ) # [B*K*K, H/K*W/K, H/K*W/K] + + if with_shift: + scores += attn_mask.repeat(b, 1, 1) + + attn = torch.softmax(scores, dim=-1) + + out = torch.matmul(attn, v.view(b_new, -1, c)) # [B*K*K, H/K*W/K, C] + + out = merge_splits( + out.view(b_new, h // num_splits, w // num_splits, c), + num_splits=num_splits, + channel_last=True, + ) # [B, H, W, C] + + # shift back + if with_shift: + out = torch.roll(out, shifts=(shift_size_h, shift_size_w), dims=(1, 2)) + + out = out.view(b, -1, c) + + return out + + +class TransformerLayer(nn.Module): + def __init__( + self, + d_model=256, + nhead=1, + attention_type="swin", + no_ffn=False, + ffn_dim_expansion=4, + with_shift=False, + **kwargs, + ): + super(TransformerLayer, self).__init__() + + self.dim = d_model + self.nhead = nhead + self.attention_type = attention_type + self.no_ffn = no_ffn + + self.with_shift = with_shift + + # multi-head attention + self.q_proj = nn.Linear(d_model, d_model, bias=False) + self.k_proj = nn.Linear(d_model, d_model, bias=False) + self.v_proj = nn.Linear(d_model, d_model, bias=False) + + self.merge = nn.Linear(d_model, d_model, bias=False) + + self.norm1 = nn.LayerNorm(d_model) + + # no ffn after self-attn, with ffn after cross-attn + if not self.no_ffn: + in_channels = d_model * 2 + self.mlp = nn.Sequential( + nn.Linear(in_channels, in_channels * ffn_dim_expansion, bias=False), + nn.GELU(), + nn.Linear(in_channels * ffn_dim_expansion, d_model, bias=False), + ) + + self.norm2 = nn.LayerNorm(d_model) + + def forward( + self, + source, + target, + height=None, + width=None, + shifted_window_attn_mask=None, + attn_num_splits=None, + **kwargs, + ): + # source, target: [B, L, C] + query, key, value = source, target, target + + # single-head attention + query = self.q_proj(query) # [B, L, C] + key = self.k_proj(key) # [B, L, C] + value = self.v_proj(value) # [B, L, C] + + if self.attention_type == "swin" and attn_num_splits > 1: + if self.nhead > 1: + # we observe that multihead attention slows down the speed and increases the memory consumption + # without bringing obvious performance gains and thus the implementation is removed + raise NotImplementedError + else: + message = single_head_split_window_attention( + query, + key, + value, + num_splits=attn_num_splits, + with_shift=self.with_shift, + h=height, + w=width, + attn_mask=shifted_window_attn_mask, + ) + else: + message = single_head_full_attention(query, key, value) # [B, L, C] + + message = self.merge(message) # [B, L, C] + message = self.norm1(message) + + if not self.no_ffn: + message = self.mlp(torch.cat([source, message], dim=-1)) + message = self.norm2(message) + + return source + message + + +class TransformerBlock(nn.Module): + """self attention + cross attention + FFN""" + + def __init__( + self, + d_model=256, + nhead=1, + attention_type="swin", + ffn_dim_expansion=4, + with_shift=False, + **kwargs, + ): + super(TransformerBlock, self).__init__() + + self.self_attn = TransformerLayer( + d_model=d_model, + nhead=nhead, + attention_type=attention_type, + no_ffn=True, + ffn_dim_expansion=ffn_dim_expansion, + with_shift=with_shift, + ) + + self.cross_attn_ffn = 
TransformerLayer( + d_model=d_model, + nhead=nhead, + attention_type=attention_type, + ffn_dim_expansion=ffn_dim_expansion, + with_shift=with_shift, + ) + + def forward( + self, + source, + target, + height=None, + width=None, + shifted_window_attn_mask=None, + attn_num_splits=None, + **kwargs, + ): + # source, target: [B, L, C] + + # self attention + source = self.self_attn( + source, + source, + height=height, + width=width, + shifted_window_attn_mask=shifted_window_attn_mask, + attn_num_splits=attn_num_splits, + ) + + # cross attention and ffn + source = self.cross_attn_ffn( + source, + target, + height=height, + width=width, + shifted_window_attn_mask=shifted_window_attn_mask, + attn_num_splits=attn_num_splits, + ) + + return source + + +class FeatureTransformer(nn.Module): + def __init__( + self, + num_layers=6, + d_model=128, + nhead=1, + attention_type="swin", + ffn_dim_expansion=4, + **kwargs, + ): + super(FeatureTransformer, self).__init__() + + self.attention_type = attention_type + + self.d_model = d_model + self.nhead = nhead + + self.layers = nn.ModuleList( + [ + TransformerBlock( + d_model=d_model, + nhead=nhead, + attention_type=attention_type, + ffn_dim_expansion=ffn_dim_expansion, + with_shift=True + if attention_type == "swin" and i % 2 == 1 + else False, + ) + for i in range(num_layers) + ] + ) + + for p in self.parameters(): + if p.dim() > 1: + nn.init.xavier_uniform_(p) + + def forward( + self, + feature0, + feature1, + attn_num_splits=None, + **kwargs, + ): + b, c, h, w = feature0.shape + assert self.d_model == c + + feature0 = feature0.flatten(-2).permute(0, 2, 1) # [B, H*W, C] + feature1 = feature1.flatten(-2).permute(0, 2, 1) # [B, H*W, C] + + if self.attention_type == "swin" and attn_num_splits > 1: + # global and refine use different number of splits + window_size_h = h // attn_num_splits + window_size_w = w // attn_num_splits + + # compute attn mask once + shifted_window_attn_mask = generate_shift_window_attn_mask( + input_resolution=(h, w), + window_size_h=window_size_h, + window_size_w=window_size_w, + shift_size_h=window_size_h // 2, + shift_size_w=window_size_w // 2, + device=feature0.device, + ) # [K*K, H/K*W/K, H/K*W/K] + else: + shifted_window_attn_mask = None + + # concat feature0 and feature1 in batch dimension to compute in parallel + concat0 = torch.cat((feature0, feature1), dim=0) # [2B, H*W, C] + concat1 = torch.cat((feature1, feature0), dim=0) # [2B, H*W, C] + + for layer in self.layers: + concat0 = layer( + concat0, + concat1, + height=h, + width=w, + shifted_window_attn_mask=shifted_window_attn_mask, + attn_num_splits=attn_num_splits, + ) + + # update feature1 + concat1 = torch.cat(concat0.chunk(chunks=2, dim=0)[::-1], dim=0) + + feature0, feature1 = concat0.chunk(chunks=2, dim=0) # [B, H*W, C] + + # reshape back + feature0 = ( + feature0.view(b, h, w, c).permute(0, 3, 1, 2).contiguous() + ) # [B, C, H, W] + feature1 = ( + feature1.view(b, h, w, c).permute(0, 3, 1, 2).contiguous() + ) # [B, C, H, W] + + return feature0, feature1 + + +class FeatureFlowAttention(nn.Module): + """ + flow propagation with self-attention on feature + query: feature0, key: feature0, value: flow + """ + + def __init__( + self, + in_channels, + **kwargs, + ): + super(FeatureFlowAttention, self).__init__() + + self.q_proj = nn.Linear(in_channels, in_channels) + self.k_proj = nn.Linear(in_channels, in_channels) + + for p in self.parameters(): + if p.dim() > 1: + nn.init.xavier_uniform_(p) + + def forward( + self, + feature0, + flow, + local_window_attn=False, + 
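+        # local_window_attn=True switches to forward_local_window_attn, which
+        # restricts attention to a (2R+1) x (2R+1) neighborhood instead of
+        # materializing the full [B, H*W, H*W] score matrix.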
local_window_radius=1, + **kwargs, + ): + # q, k: feature [B, C, H, W], v: flow [B, 2, H, W] + if local_window_attn: + return self.forward_local_window_attn( + feature0, flow, local_window_radius=local_window_radius + ) + + b, c, h, w = feature0.size() + + query = feature0.view(b, c, h * w).permute(0, 2, 1) # [B, H*W, C] + + # a note: the ``correct'' implementation should be: + # ``query = self.q_proj(query), key = self.k_proj(query)'' + # this problem is observed while cleaning up the code + # however, this doesn't affect the performance since the projection is a linear operation, + # thus the two projection matrices for key can be merged + # so I just leave it as is in order to not re-train all models :) + query = self.q_proj(query) # [B, H*W, C] + key = self.k_proj(query) # [B, H*W, C] + + value = flow.view(b, flow.size(1), h * w).permute(0, 2, 1) # [B, H*W, 2] + + scores = torch.matmul(query, key.permute(0, 2, 1)) / (c**0.5) # [B, H*W, H*W] + prob = torch.softmax(scores, dim=-1) + + out = torch.matmul(prob, value) # [B, H*W, 2] + out = out.view(b, h, w, value.size(-1)).permute(0, 3, 1, 2) # [B, 2, H, W] + + return out + + def forward_local_window_attn( + self, + feature0, + flow, + local_window_radius=1, + ): + assert flow.size(1) == 2 + assert local_window_radius > 0 + + b, c, h, w = feature0.size() + + feature0_reshape = self.q_proj( + feature0.view(b, c, -1).permute(0, 2, 1) + ).reshape( + b * h * w, 1, c + ) # [B*H*W, 1, C] + + kernel_size = 2 * local_window_radius + 1 + + feature0_proj = ( + self.k_proj(feature0.view(b, c, -1).permute(0, 2, 1)) + .permute(0, 2, 1) + .reshape(b, c, h, w) + ) + + feature0_window = F.unfold( + feature0_proj, kernel_size=kernel_size, padding=local_window_radius + ) # [B, C*(2R+1)^2), H*W] + + feature0_window = ( + feature0_window.view(b, c, kernel_size**2, h, w) + .permute(0, 3, 4, 1, 2) + .reshape(b * h * w, c, kernel_size**2) + ) # [B*H*W, C, (2R+1)^2] + + flow_window = F.unfold( + flow, kernel_size=kernel_size, padding=local_window_radius + ) # [B, 2*(2R+1)^2), H*W] + + flow_window = ( + flow_window.view(b, 2, kernel_size**2, h, w) + .permute(0, 3, 4, 2, 1) + .reshape(b * h * w, kernel_size**2, 2) + ) # [B*H*W, (2R+1)^2, 2] + + scores = torch.matmul(feature0_reshape, feature0_window) / ( + c**0.5 + ) # [B*H*W, 1, (2R+1)^2] + + prob = torch.softmax(scores, dim=-1) + + out = ( + torch.matmul(prob, flow_window) + .view(b, h, w, 2) + .permute(0, 3, 1, 2) + .contiguous() + ) # [B, 2, H, W] + + return out + + +def global_correlation_softmax( + feature0, + feature1, + pred_bidir_flow=False, +): + # global correlation + b, c, h, w = feature0.shape + feature0 = feature0.view(b, c, -1).permute(0, 2, 1) # [B, H*W, C] + feature1 = feature1.view(b, c, -1) # [B, C, H*W] + + correlation = torch.matmul(feature0, feature1).view(b, h, w, h, w) / ( + c**0.5 + ) # [B, H, W, H, W] + + # flow from softmax + init_grid = coords_grid(b, h, w).to(correlation.device) # [B, 2, H, W] + grid = init_grid.view(b, 2, -1).permute(0, 2, 1) # [B, H*W, 2] + + correlation = correlation.view(b, h * w, h * w) # [B, H*W, H*W] + + if pred_bidir_flow: + correlation = torch.cat( + (correlation, correlation.permute(0, 2, 1)), dim=0 + ) # [2*B, H*W, H*W] + init_grid = init_grid.repeat(2, 1, 1, 1) # [2*B, 2, H, W] + grid = grid.repeat(2, 1, 1) # [2*B, H*W, 2] + b = b * 2 + + prob = F.softmax(correlation, dim=-1) # [B, H*W, H*W] + + correspondence = ( + torch.matmul(prob, grid).view(b, h, w, 2).permute(0, 3, 1, 2) + ) # [B, 2, H, W] + + # when predicting bidirectional flow, flow is the 
+ + +def local_correlation_softmax( + feature0, + feature1, + local_radius, + padding_mode="zeros", +): + b, c, h, w = feature0.size() + coords_init = coords_grid(b, h, w).to(feature0.device) # [B, 2, H, W] + coords = coords_init.view(b, 2, -1).permute(0, 2, 1) # [B, H*W, 2] + + local_h = 2 * local_radius + 1 + local_w = 2 * local_radius + 1 + + window_grid = generate_window_grid( + -local_radius, + local_radius, + -local_radius, + local_radius, + local_h, + local_w, + device=feature0.device, + ) # [2R+1, 2R+1, 2] + window_grid = window_grid.reshape(-1, 2).repeat(b, 1, 1, 1) # [B, 1, (2R+1)^2, 2] + sample_coords = coords.unsqueeze(-2) + window_grid # [B, H*W, (2R+1)^2, 2] + + sample_coords_softmax = sample_coords + + # exclude coords that are out of image space + valid_x = (sample_coords[:, :, :, 0] >= 0) & ( + sample_coords[:, :, :, 0] < w + ) # [B, H*W, (2R+1)^2] + valid_y = (sample_coords[:, :, :, 1] >= 0) & ( + sample_coords[:, :, :, 1] < h + ) # [B, H*W, (2R+1)^2] + + valid = ( + valid_x & valid_y + ) # [B, H*W, (2R+1)^2], used to mask out invalid values when softmax + + # normalize coordinates to [-1, 1] + sample_coords_norm = normalize_coords(sample_coords, h, w) # [-1, 1] + window_feature = F.grid_sample( + feature1, sample_coords_norm, padding_mode=padding_mode, align_corners=True + ).permute( + 0, 2, 1, 3 + ) # [B, H*W, C, (2R+1)^2] + feature0_view = feature0.permute(0, 2, 3, 1).view(b, h * w, 1, c) # [B, H*W, 1, C] + + corr = torch.matmul(feature0_view, window_feature).view(b, h * w, -1) / ( + c**0.5 + ) # [B, H*W, (2R+1)^2] + + # mask invalid locations + corr[~valid] = -1e9 + + prob = F.softmax(corr, -1) # [B, H*W, (2R+1)^2] + + correspondence = ( + torch.matmul(prob.unsqueeze(-2), sample_coords_softmax) + .squeeze(-2) + .view(b, h, w, 2) + .permute(0, 3, 1, 2) + ) # [B, 2, H, W] + + flow = correspondence - coords_init + match_prob = prob + + return flow, match_prob + + +def coords_grid(b, h, w, homogeneous=False, device=None): + y, x = torch.meshgrid(torch.arange(h), torch.arange(w)) # [H, W] + + stacks = [x, y] + + if homogeneous: + ones = torch.ones_like(x) # [H, W] + stacks.append(ones) + + grid = torch.stack(stacks, dim=0).float() # [2, H, W] or [3, H, W] + + grid = grid[None].repeat(b, 1, 1, 1) # [B, 2, H, W] or [B, 3, H, W] + + if device is not None: + grid = grid.to(device) + + return grid + + +def generate_window_grid(h_min, h_max, w_min, w_max, len_h, len_w, device=None): + assert device is not None + + x, y = torch.meshgrid( + [ + torch.linspace(w_min, w_max, len_w, device=device), + torch.linspace(h_min, h_max, len_h, device=device), + ], + ) + grid = torch.stack((x, y), -1).transpose(0, 1).float() # [H, W, 2] + + return grid + + +def normalize_coords(coords, h, w): + # coords: [B, H, W, 2] + c = torch.Tensor([(w - 1) / 2.0, (h - 1) / 2.0]).float().to(coords.device) + return (coords - c) / c # [-1, 1] + + +def bilinear_sample( + img, sample_coords, mode="bilinear", padding_mode="zeros", return_mask=False +): + # img: [B, C, H, W] + # sample_coords: [B, 2, H, W] in image scale + if sample_coords.size(1) != 2: # [B, H, W, 2] + sample_coords = sample_coords.permute(0, 3, 1, 2) + + b, _, h, w = sample_coords.shape + + # Normalize to [-1, 1] + x_grid = 2 * sample_coords[:, 0] / (w - 1) - 1 + y_grid = 2 * sample_coords[:, 1] / (h - 1) - 1 + + grid = torch.stack([x_grid, y_grid], dim=-1) # [B, H, W, 2] + + img = F.grid_sample( + img, grid, mode=mode,
padding_mode=padding_mode, align_corners=True + ) + + if return_mask: + mask = ( + (x_grid >= -1) & (y_grid >= -1) & (x_grid <= 1) & (y_grid <= 1) + ) # [B, H, W] + + return img, mask + + return img + + +def flow_warp(feature, flow, mask=False, padding_mode="zeros"): + b, c, h, w = feature.size() + assert flow.size(1) == 2 + + grid = coords_grid(b, h, w).to(flow.device) + flow # [B, 2, H, W] + + return bilinear_sample(feature, grid, padding_mode=padding_mode, return_mask=mask) + + +def forward_backward_consistency_check(fwd_flow, bwd_flow, alpha=0.01, beta=0.5): + # fwd_flow, bwd_flow: [B, 2, H, W] + # alpha and beta values are following UnFlow (https://arxiv.org/abs/1711.07837) + assert fwd_flow.dim() == 4 and bwd_flow.dim() == 4 + assert fwd_flow.size(1) == 2 and bwd_flow.size(1) == 2 + flow_mag = torch.norm(fwd_flow, dim=1) + torch.norm(bwd_flow, dim=1) # [B, H, W] + + warped_bwd_flow = flow_warp(bwd_flow, fwd_flow) # [B, 2, H, W] + warped_fwd_flow = flow_warp(fwd_flow, bwd_flow) # [B, 2, H, W] + + diff_fwd = torch.norm(fwd_flow + warped_bwd_flow, dim=1) # [B, H, W] + diff_bwd = torch.norm(bwd_flow + warped_fwd_flow, dim=1) + + threshold = alpha * flow_mag + beta + + fwd_occ = (diff_fwd > threshold).float() # [B, H, W] + bwd_occ = (diff_bwd > threshold).float() + + return fwd_occ, bwd_occ + + +class PositionEmbeddingSine(nn.Module): + """ + This is a more standard version of the position embedding, very similar to the one + used by the Attention is all you need paper, generalized to work on images. + """ + + def __init__(self, num_pos_feats=64, temperature=10000, normalize=True, scale=None): + super().__init__() + self.num_pos_feats = num_pos_feats + self.temperature = temperature + self.normalize = normalize + if scale is not None and normalize is False: + raise ValueError("normalize should be True if scale is passed") + if scale is None: + scale = 2 * math.pi + self.scale = scale + + def forward(self, x): + # x = tensor_list.tensors # [B, C, H, W] + # mask = tensor_list.mask # [B, H, W], input with padding, valid as 0 + b, c, h, w = x.size() + mask = torch.ones((b, h, w), device=x.device) # [B, H, W] + y_embed = mask.cumsum(1, dtype=torch.float32) + x_embed = mask.cumsum(2, dtype=torch.float32) + if self.normalize: + eps = 1e-6 + y_embed = y_embed / (y_embed[:, -1:, :] + eps) * self.scale + x_embed = x_embed / (x_embed[:, :, -1:] + eps) * self.scale + + dim_t = torch.arange(self.num_pos_feats, dtype=torch.float32, device=x.device) + dim_t = self.temperature ** (2 * (dim_t // 2) / self.num_pos_feats) + + pos_x = x_embed[:, :, :, None] / dim_t + pos_y = y_embed[:, :, :, None] / dim_t + pos_x = torch.stack( + (pos_x[:, :, :, 0::2].sin(), pos_x[:, :, :, 1::2].cos()), dim=4 + ).flatten(3) + pos_y = torch.stack( + (pos_y[:, :, :, 0::2].sin(), pos_y[:, :, :, 1::2].cos()), dim=4 + ).flatten(3) + pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2) + return pos + + +def split_feature( + feature, + num_splits=2, + channel_last=False, +): + if channel_last: # [B, H, W, C] + b, h, w, c = feature.size() + assert h % num_splits == 0 and w % num_splits == 0 + + b_new = b * num_splits * num_splits + h_new = h // num_splits + w_new = w // num_splits + + feature = ( + feature.view(b, num_splits, h // num_splits, num_splits, w // num_splits, c) + .permute(0, 1, 3, 2, 4, 5) + .reshape(b_new, h_new, w_new, c) + ) # [B*K*K, H/K, W/K, C] + else: # [B, C, H, W] + b, c, h, w = feature.size() + assert h % num_splits == 0 and w % num_splits == 0 + + b_new = b * num_splits * num_splits + h_new = h // 
num_splits + w_new = w // num_splits + + feature = ( + feature.view(b, c, num_splits, h // num_splits, num_splits, w // num_splits) + .permute(0, 2, 4, 1, 3, 5) + .reshape(b_new, c, h_new, w_new) + ) # [B*K*K, C, H/K, W/K] + + return feature + + +def merge_splits( + splits, + num_splits=2, + channel_last=False, +): + if channel_last: # [B*K*K, H/K, W/K, C] + b, h, w, c = splits.size() + new_b = b // num_splits // num_splits + + splits = splits.view(new_b, num_splits, num_splits, h, w, c) + merge = ( + splits.permute(0, 1, 3, 2, 4, 5) + .contiguous() + .view(new_b, num_splits * h, num_splits * w, c) + ) # [B, H, W, C] + else: # [B*K*K, C, H/K, W/K] + b, c, h, w = splits.size() + new_b = b // num_splits // num_splits + + splits = splits.view(new_b, num_splits, num_splits, c, h, w) + merge = ( + splits.permute(0, 3, 1, 4, 2, 5) + .contiguous() + .view(new_b, c, num_splits * h, num_splits * w) + ) # [B, C, H, W] + + return merge + + +def normalize_img(img0, img1): + # input images are expected as floats in [0, 1] + # normalize by ImageNet mean and std + mean = torch.tensor([0.485, 0.456, 0.406]).view(1, 3, 1, 1).to(img1.device) + std = torch.tensor([0.229, 0.224, 0.225]).view(1, 3, 1, 1).to(img1.device) + img0 = (img0 - mean) / std + img1 = (img1 - mean) / std + + return img0, img1 + + +def feature_add_position(feature0, feature1, attn_splits, feature_channels): + pos_enc = PositionEmbeddingSine(num_pos_feats=feature_channels // 2) + + if attn_splits > 1: # add position embedding within each split window + feature0_splits = split_feature(feature0, num_splits=attn_splits) + feature1_splits = split_feature(feature1, num_splits=attn_splits) + + position = pos_enc(feature0_splits) + + feature0_splits = feature0_splits + position + feature1_splits = feature1_splits + position + + feature0 = merge_splits(feature0_splits, num_splits=attn_splits) + feature1 = merge_splits(feature1_splits, num_splits=attn_splits) + else: + position = pos_enc(feature0) + + feature0 = feature0 + position + feature1 = feature1 + position + + return feature0, feature1
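# A small round-trip sketch (hypothetical demo, not part of the patch):
# split_feature and merge_splits are exact inverses, which is what lets
# feature_add_position add per-window position encodings and merge back
# without losing anything.
def _demo_window_split_roundtrip():
    import torch
    x = torch.randn(2, 128, 32, 32)            # [B, C, H, W]
    windows = split_feature(x, num_splits=2)   # [B*2*2, C, H/2, W/2]
    assert windows.shape == (8, 128, 16, 16)
    assert torch.equal(merge_splits(windows, num_splits=2), x)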
+ + +class GMFlow(nn.Module): + def __init__( + self, + num_scales=2, + upsample_factor=4, + feature_channels=128, + attention_type="swin", + num_transformer_layers=6, + ffn_dim_expansion=4, + num_head=1, + **kwargs, + ): + super(GMFlow, self).__init__() + + self.num_scales = num_scales + self.feature_channels = feature_channels + self.upsample_factor = upsample_factor + self.attention_type = attention_type + self.num_transformer_layers = num_transformer_layers + + # CNN backbone + self.backbone = CNNEncoder( + output_dim=feature_channels, num_output_scales=num_scales + ) + + # Transformer + self.transformer = FeatureTransformer( + num_layers=num_transformer_layers, + d_model=feature_channels, + nhead=num_head, + attention_type=attention_type, + ffn_dim_expansion=ffn_dim_expansion, + ) + + # flow propagation with self-attn + self.feature_flow_attn = FeatureFlowAttention(in_channels=feature_channels) + + # convex upsampling: concat feature0 and flow as input + self.upsampler = nn.Sequential( + nn.Conv2d(2 + feature_channels, 256, 3, 1, 1), + nn.ReLU(inplace=True), + nn.Conv2d(256, upsample_factor**2 * 9, 1, 1, 0), + ) + + def extract_feature(self, img0, img1): + concat = torch.cat((img0, img1), dim=0) # [2B, C, H, W] + features = self.backbone( + concat + ) # list of [2B, C, H, W], resolution from high to low + + # reverse: resolution from low to high + features = features[::-1] + + feature0, feature1 = [], [] + + for i in range(len(features)): + feature = features[i] + chunks = torch.chunk(feature, 2, 0) # tuple + feature0.append(chunks[0]) + feature1.append(chunks[1]) + + return feature0, feature1 + + def upsample_flow( + self, + flow, + feature, + bilinear=False, + upsample_factor=8, + ): + if bilinear: + up_flow = ( + F.interpolate( + flow, + scale_factor=upsample_factor, + mode="bilinear", + align_corners=True, + ) + * upsample_factor + ) + + else: + # convex upsampling + concat = torch.cat((flow, feature), dim=1) + + mask = self.upsampler(concat) + b, flow_channel, h, w = flow.shape + mask = mask.view( + b, 1, 9, self.upsample_factor, self.upsample_factor, h, w + ) # [B, 1, 9, K, K, H, W] + mask = torch.softmax(mask, dim=2) + + up_flow = F.unfold(self.upsample_factor * flow, [3, 3], padding=1) + up_flow = up_flow.view( + b, flow_channel, 9, 1, 1, h, w + ) # [B, 2, 9, 1, 1, H, W] + + up_flow = torch.sum(mask * up_flow, dim=2) # [B, 2, K, K, H, W] + up_flow = up_flow.permute(0, 1, 4, 2, 5, 3) # [B, 2, H, K, W, K] + up_flow = up_flow.reshape( + b, flow_channel, self.upsample_factor * h, self.upsample_factor * w + ) # [B, 2, K*H, K*W] + + return up_flow + + def forward( + self, + img0, + img1, + attn_splits_list=[2, 8], + corr_radius_list=[-1, 4], + prop_radius_list=[-1, 1], + pred_bidir_flow=False, + **kwargs, + ): + img0, img1 = normalize_img(img0, img1) # [B, 3, H, W] + + # resolution low to high + feature0_list, feature1_list = self.extract_feature( + img0, img1 + ) # list of features + + flow = None + + assert ( + len(attn_splits_list) + == len(corr_radius_list) + == len(prop_radius_list) + == self.num_scales + ) + + for scale_idx in range(self.num_scales): + feature0, feature1 = feature0_list[scale_idx], feature1_list[scale_idx] + + if pred_bidir_flow and scale_idx > 0: + # predicting bidirectional flow with refinement + feature0, feature1 = torch.cat((feature0, feature1), dim=0), torch.cat( + (feature1, feature0), dim=0 + ) + + upsample_factor = self.upsample_factor * ( + 2 ** (self.num_scales - 1 - scale_idx) + ) + + if scale_idx > 0: + flow = ( + F.interpolate( + flow, scale_factor=2, mode="bilinear", align_corners=True + ) + * 2 + ) + + if flow is not None: + flow = flow.detach() + feature1 = flow_warp(feature1, flow) # [B, C, H, W] + + attn_splits = attn_splits_list[scale_idx] + corr_radius = corr_radius_list[scale_idx] + prop_radius = prop_radius_list[scale_idx] + + # add position to features + feature0, feature1 = feature_add_position( + feature0, feature1, attn_splits, self.feature_channels + ) + + # Transformer + feature0, feature1 = self.transformer( + feature0, feature1, attn_num_splits=attn_splits + ) + + # correlation and softmax + if corr_radius == -1: # global matching + flow_pred = global_correlation_softmax( + feature0, feature1, pred_bidir_flow + )[0] + else: # local matching + flow_pred = local_correlation_softmax(feature0, feature1, corr_radius)[ + 0 + ] + + # flow or residual flow + flow = flow + flow_pred if flow is not None else flow_pred + + # upsample to the original resolution for supervision + if ( + self.training + ): # only need to upsample intermediate flow predictions at training time + flow_bilinear = self.upsample_flow( + flow, None, bilinear=True, upsample_factor=upsample_factor + ) + + # flow propagation with self-attn + if pred_bidir_flow and scale_idx == 0: + feature0 = torch.cat( + (feature0, feature1), dim=0 + ) # [2*B, C, H, W] for propagation + flow = self.feature_flow_attn( + feature0, + flow.detach(), + local_window_attn=prop_radius > 0, + local_window_radius=prop_radius, + )
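# A standalone sketch of the convex upsampling arithmetic used by
# upsample_flow above (hypothetical demo with made-up sizes): each fine pixel
# is a softmax-weighted combination of a 3x3 neighborhood of coarse flow
# vectors; in the model the weights come from self.upsampler.
def _demo_convex_upsampling():
    import torch
    import torch.nn.functional as F
    b, k, h, w = 1, 4, 8, 8                          # k = upsample factor
    flow = torch.randn(b, 2, h, w)
    mask = torch.softmax(torch.randn(b, 1, 9, k, k, h, w), dim=2)
    up = F.unfold(k * flow, [3, 3], padding=1)       # [B, 2*9, H*W]
    up = up.view(b, 2, 9, 1, 1, h, w)
    up = torch.sum(mask * up, dim=2)                 # [B, 2, k, k, H, W]
    up = up.permute(0, 1, 4, 2, 5, 3)                # [B, 2, H, k, W, k]
    assert up.reshape(b, 2, k * h, k * w).shape == (1, 2, 32, 32)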
+ + # bilinear upsampling at training time for all scales except the last + if self.training and scale_idx < self.num_scales - 1: + flow_up = self.upsample_flow( + flow, feature0, bilinear=True, upsample_factor=upsample_factor + ) + + if scale_idx == self.num_scales - 1: + flow_up = self.upsample_flow(flow, feature0) + + return flow_up + + +backwarp_tenGrid = {} + + +def backwarp(tenIn, tenflow): + if str(tenflow.shape) not in backwarp_tenGrid: + tenHor = ( + torch.linspace( + start=-1.0, + end=1.0, + steps=tenflow.shape[3], + dtype=tenflow.dtype, + device=tenflow.device, + ) + .view(1, 1, 1, -1) + .repeat(1, 1, tenflow.shape[2], 1) + ) + tenVer = ( + torch.linspace( + start=-1.0, + end=1.0, + steps=tenflow.shape[2], + dtype=tenflow.dtype, + device=tenflow.device, + ) + .view(1, 1, -1, 1) + .repeat(1, 1, 1, tenflow.shape[3]) + ) + + backwarp_tenGrid[str(tenflow.shape)] = torch.cat([tenHor, tenVer], 1).to(get_torch_device()) + # end + + tenflow = torch.cat( + [ + tenflow[:, 0:1, :, :] / ((tenIn.shape[3] - 1.0) / 2.0), + tenflow[:, 1:2, :, :] / ((tenIn.shape[2] - 1.0) / 2.0), + ], + 1, + ) + + return torch.nn.functional.grid_sample( + input=tenIn, + grid=(backwarp_tenGrid[str(tenflow.shape)] + tenflow).permute(0, 2, 3, 1), + mode="bilinear", + padding_mode="zeros", + align_corners=True, + ) + + +class MetricNet(nn.Module): + def __init__(self): + super(MetricNet, self).__init__() + self.metric_in = nn.Conv2d(14, 64, 3, 1, 1) + self.metric_net1 = nn.Sequential(nn.PReLU(), nn.Conv2d(64, 64, 3, 1, 1)) + self.metric_net2 = nn.Sequential(nn.PReLU(), nn.Conv2d(64, 64, 3, 1, 1)) + self.metric_net3 = nn.Sequential(nn.PReLU(), nn.Conv2d(64, 64, 3, 1, 1)) + self.metric_out = nn.Sequential(nn.PReLU(), nn.Conv2d(64, 2, 3, 1, 1)) + + def forward(self, img0, img1, flow01, flow10): + metric0 = F.l1_loss(img0, backwarp(img1, flow01), reduction="none").mean( + [1], True + ) + metric1 = F.l1_loss(img1, backwarp(img0, flow10), reduction="none").mean( + [1], True + ) + + fwd_occ, bwd_occ = forward_backward_consistency_check(flow01, flow10) + + flow01 = torch.cat( + [ + flow01[:, 0:1, :, :] / ((flow01.shape[3] - 1.0) / 2.0), + flow01[:, 1:2, :, :] / ((flow01.shape[2] - 1.0) / 2.0), + ], + 1, + ) + flow10 = torch.cat( + [ + flow10[:, 0:1, :, :] / ((flow10.shape[3] - 1.0) / 2.0), + flow10[:, 1:2, :, :] / ((flow10.shape[2] - 1.0) / 2.0), + ], + 1, + ) + + img = torch.cat((img0, img1), 1) + metric = torch.cat((-metric0, -metric1), 1) + flow = torch.cat((flow01, flow10), 1) + occ = torch.cat((fwd_occ.unsqueeze(1), bwd_occ.unsqueeze(1)), 1) + + feat = self.metric_in(torch.cat((img, metric, flow, occ), 1)) + feat = self.metric_net1(feat) + feat + feat = self.metric_net2(feat) + feat + feat = self.metric_net3(feat) + feat + metric = self.metric_out(feat) + + metric = torch.tanh(metric) * 10 + + return metric[:, :1], metric[:, 1:2] + + +class FeatureNet(nn.Module): + """The quadratic model""" + + def __init__(self): + super(FeatureNet, self).__init__() + self.block1 = nn.Sequential( + nn.PReLU(), + nn.Conv2d(3, 64, 3, 2, 1), + nn.PReLU(), + nn.Conv2d(64, 64, 3, 1, 1), + ) + self.block2 = nn.Sequential( + nn.PReLU(), + nn.Conv2d(64, 128, 3, 2, 1), + nn.PReLU(), + nn.Conv2d(128, 128, 3, 1, 1), + ) + self.block3 = nn.Sequential( + nn.PReLU(), + nn.Conv2d(128, 192, 3, 2, 1), + nn.PReLU(), + nn.Conv2d(192, 192, 3, 1, 1), + ) + + def forward(self, x): + x1 = self.block1(x) + x2 = self.block2(x1) + x3 = self.block3(x2) + + return x1, x2, x3
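# A tiny sanity sketch (hypothetical demo) of the UnFlow-style
# forward/backward consistency check that MetricNet consumes above:
# perfectly consistent (here: all-zero) flows produce empty occlusion masks,
# because |fwd + warp(bwd)| stays below alpha * |flow| + beta.
def _demo_consistency_check():
    import torch
    fwd = torch.zeros(1, 2, 8, 8)
    bwd = torch.zeros(1, 2, 8, 8)
    fwd_occ, bwd_occ = forward_backward_consistency_check(fwd, bwd)
    assert fwd_occ.sum() == 0 and bwd_occ.sum() == 0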
+ + +# Residual Block +def ResidualBlock(in_channels, out_channels, stride=1): + return torch.nn.Sequential( + nn.PReLU(), + nn.Conv2d( + in_channels, + out_channels, + kernel_size=3, + stride=stride, + padding=1, + bias=True, + ), + nn.PReLU(), + nn.Conv2d( + out_channels, + out_channels, + kernel_size=3, + stride=stride, + padding=1, + bias=True, + ), + ) + + +# downsample block +def DownsampleBlock(in_channels, out_channels, stride=2): + return torch.nn.Sequential( + nn.PReLU(), + nn.Conv2d( + in_channels, + out_channels, + kernel_size=3, + stride=stride, + padding=1, + bias=True, + ), + nn.PReLU(), + nn.Conv2d( + out_channels, out_channels, kernel_size=3, stride=1, padding=1, bias=True + ), + ) + + +# upsample block +def UpsampleBlock(in_channels, out_channels, stride=2): + return torch.nn.Sequential( + nn.PReLU(), + nn.ConvTranspose2d( + in_channels, + out_channels, + kernel_size=4, + stride=stride, + padding=1, + bias=True, + ), + nn.PReLU(), + nn.Conv2d( + out_channels, out_channels, kernel_size=3, stride=1, padding=1, bias=True + ), + ) + + +class PixelShuffleBlock(nn.Module): + def __init__(self, in_feat, num_feat, num_out_ch): + super(PixelShuffleBlock, self).__init__() + self.conv_before_upsample = nn.Sequential( + nn.Conv2d(in_feat, num_feat, 3, 1, 1), nn.PReLU() + ) + self.upsample = nn.Sequential( + nn.Conv2d(num_feat, 4 * num_feat, 3, 1, 1), nn.PixelShuffle(2) + ) + self.conv_last = nn.Conv2d(num_feat, num_out_ch, 3, 1, 1) + + def forward(self, x): + x = self.conv_before_upsample(x) + x = self.conv_last(self.upsample(x)) + return x + + +# grid network +class GridNet(nn.Module): + def __init__( + self, + in_channels=9, + in_channels1=128, + in_channels2=256, + in_channels3=384, + out_channels=3, + ): + super(GridNet, self).__init__() + + self.residual_model_head0 = ResidualBlock(in_channels, 64) + self.residual_model_head1 = ResidualBlock(in_channels1, 64) + self.residual_model_head2 = ResidualBlock(in_channels2, 128) + self.residual_model_head3 = ResidualBlock(in_channels3, 192) + + self.residual_model_01 = ResidualBlock(64, 64) + # self.residual_model_02=ResidualBlock(64, 64) + # self.residual_model_03=ResidualBlock(64, 64) + self.residual_model_04 = ResidualBlock(64, 64) + self.residual_model_05 = ResidualBlock(64, 64) + self.residual_model_tail = PixelShuffleBlock(64, 64, out_channels) + + self.residual_model_11 = ResidualBlock(128, 128) + # self.residual_model_12=ResidualBlock(128, 128) + # self.residual_model_13=ResidualBlock(128, 128) + self.residual_model_14 = ResidualBlock(128, 128) + self.residual_model_15 = ResidualBlock(128, 128) + + self.residual_model_21 = ResidualBlock(192, 192) + # self.residual_model_22=ResidualBlock(192, 192) + # self.residual_model_23=ResidualBlock(192, 192) + self.residual_model_24 = ResidualBlock(192, 192) + self.residual_model_25 = ResidualBlock(192, 192) + + # + + self.downsample_model_10 = DownsampleBlock(64, 128) + self.downsample_model_20 = DownsampleBlock(128, 192) + + self.downsample_model_11 = DownsampleBlock(64, 128) + self.downsample_model_21 = DownsampleBlock(128, 192) + + # self.downsample_model_12=DownsampleBlock(64, 128) + # self.downsample_model_22=DownsampleBlock(128, 192) + + # + + # self.upsample_model_03=UpsampleBlock(128, 64) + # self.upsample_model_13=UpsampleBlock(192, 128) + + self.upsample_model_04 = UpsampleBlock(128, 64) + self.upsample_model_14 = UpsampleBlock(192, 128) + + self.upsample_model_05 = UpsampleBlock(128, 64) + self.upsample_model_15 = UpsampleBlock(192, 128) + + def forward(self, x, x1, x2, x3): + X00 = self.residual_model_head0(x) + self.residual_model_head1( + x1 + ) # --- 182 ~ 185 + # X10 = self.residual_model_head1(x1) + + X01 = 
self.residual_model_01(X00) + X00 # --- 208 ~ 211 ,AddBackward1213 + + X10 = self.downsample_model_10(X00) + self.residual_model_head2( + x2 + ) # --- 186 ~ 189 + X20 = self.downsample_model_20(X10) + self.residual_model_head3( + x3 + ) # --- 190 ~ 193 + + residual_11 = ( + self.residual_model_11(X10) + X10 + ) # 201 ~ 204 , sum AddBackward1206 + downsample_11 = self.downsample_model_11(X01) # 214 ~ 217 + X11 = residual_11 + downsample_11 # --- AddBackward1218 + + residual_21 = ( + self.residual_model_21(X20) + X20 + ) # 194 ~ 197 , sum AddBackward1199 + downsample_21 = self.downsample_model_21(X11) # 219 ~ 222 + X21 = residual_21 + downsample_21 # AddBackward1223 + + X24 = self.residual_model_24(X21) + X21 # --- 224 ~ 227 , AddBackward1229 + X25 = self.residual_model_25(X24) + X24 # --- 230 ~ 233 , AddBackward1235 + + upsample_14 = self.upsample_model_14(X24) # 242 ~ 246 + residual_14 = self.residual_model_14(X11) + X11 # 248 ~ 251, AddBackward1253 + X14 = upsample_14 + residual_14 # --- AddBackward1254 + + upsample_04 = self.upsample_model_04(X14) # 268 ~ 272 + residual_04 = self.residual_model_04(X01) + X01 # 274 ~ 277, AddBackward1279 + X04 = upsample_04 + residual_04 # --- AddBackward1280 + + upsample_15 = self.upsample_model_15(X25) # 236 ~ 240 + residual_15 = self.residual_model_15(X14) + X14 # 255 ~ 258, AddBackward1260 + X15 = upsample_15 + residual_15 # AddBackward1261 + + upsample_05 = self.upsample_model_05(X15) # 262 ~ 266 + residual_05 = self.residual_model_05(X04) + X04 # 281 ~ 284,AddBackward1286 + X05 = upsample_05 + residual_05 # AddBackward1287 + + X_tail = self.residual_model_tail(X05) # 288 ~ 291 + + return X_tail +# end + + +class Model: + def __init__(self): + self.flownet = GMFlow() + self.ifnet = IFNet(arch_ver="4.6") + self.metricnet = MetricNet() + self.feat_ext = FeatureNet() + self.fusionnet = GridNet() + self.version = 3.9 + + def eval(self): + self.flownet.eval() + self.ifnet.eval() + self.metricnet.eval() + self.feat_ext.eval() + self.fusionnet.eval() + + def device(self): + self.flownet.to(device) + self.ifnet.to(device) + self.metricnet.to(device) + self.feat_ext.to(device) + self.fusionnet.to(device) + + def load_model(self, path_dict): + #models/rife46.pth + self.ifnet.load_state_dict(torch.load(path_dict["ifnet"])) + #models/GMFSS_fortuna_flownet.pkl + self.flownet.load_state_dict(torch.load(path_dict["flownet"])) + #models/GMFSS_fortuna_union_metric.pkl + self.metricnet.load_state_dict(torch.load(path_dict["metricnet"])) + #models/GMFSS_fortuna_union_feat.pkl + self.feat_ext.load_state_dict(torch.load(path_dict["feat_ext"])) + #models/GMFSS_fortuna_union_fusionnet.pkl + self.fusionnet.load_state_dict(torch.load(path_dict["fusionnet"])) + + def reuse(self, img0, img1, scale): + feat11, feat12, feat13 = self.feat_ext(img0) + feat21, feat22, feat23 = self.feat_ext(img1) + + img0 = F.interpolate( + img0, scale_factor=0.5, mode="bilinear", align_corners=False + ) + img1 = F.interpolate( + img1, scale_factor=0.5, mode="bilinear", align_corners=False + ) + + if scale != 1.0: + imgf0 = F.interpolate( + img0, scale_factor=scale, mode="bilinear", align_corners=False + ) + imgf1 = F.interpolate( + img1, scale_factor=scale, mode="bilinear", align_corners=False + ) + else: + imgf0 = img0 + imgf1 = img1 + flow01 = self.flownet(imgf0, imgf1, return_flow=True) + flow10 = self.flownet(imgf1, imgf0, return_flow=True) + if scale != 1.0: + flow01 = ( + F.interpolate( + flow01, + scale_factor=1.0 / scale, + mode="bilinear", + align_corners=False, + ) + / scale + ) + flow10 
= ( + F.interpolate( + flow10, + scale_factor=1.0 / scale, + mode="bilinear", + align_corners=False, + ) + / scale + ) + + metric0, metric1 = self.metricnet(img0, img1, flow01, flow10) + + return ( + flow01, + flow10, + metric0, + metric1, + feat11, + feat12, + feat13, + feat21, + feat22, + feat23, + ) + + def inference( + self, + img0, + img1, + flow01, + flow10, + metric0, + metric1, + feat11, + feat12, + feat13, + feat21, + feat22, + feat23, + timestep, + ): + F1t = timestep * flow01 + F2t = (1 - timestep) * flow10 + + Z1t = timestep * metric0 + Z2t = (1 - timestep) * metric1 + + img0 = F.interpolate( + img0, scale_factor=0.5, mode="bilinear", align_corners=False + ) + I1t = softsplat(img0, F1t, Z1t, strMode="soft") + img1 = F.interpolate( + img1, scale_factor=0.5, mode="bilinear", align_corners=False + ) + I2t = softsplat(img1, F2t, Z2t, strMode="soft") + + rife = self.ifnet(img0, img1, timestep, scale_list=[8, 4, 2, 1]) + + feat1t1 = softsplat(feat11, F1t, Z1t, strMode="soft") + feat2t1 = softsplat(feat21, F2t, Z2t, strMode="soft") + + F1td = ( + F.interpolate(F1t, scale_factor=0.5, mode="bilinear", align_corners=False) + * 0.5 + ) + Z1d = F.interpolate(Z1t, scale_factor=0.5, mode="bilinear", align_corners=False) + feat1t2 = softsplat(feat12, F1td, Z1d, strMode="soft") + F2td = ( + F.interpolate(F2t, scale_factor=0.5, mode="bilinear", align_corners=False) + * 0.5 + ) + Z2d = F.interpolate(Z2t, scale_factor=0.5, mode="bilinear", align_corners=False) + feat2t2 = softsplat(feat22, F2td, Z2d, strMode="soft") + + F1tdd = ( + F.interpolate(F1t, scale_factor=0.25, mode="bilinear", align_corners=False) + * 0.25 + ) + Z1dd = F.interpolate( + Z1t, scale_factor=0.25, mode="bilinear", align_corners=False + ) + feat1t3 = softsplat(feat13, F1tdd, Z1dd, strMode="soft") + F2tdd = ( + F.interpolate(F2t, scale_factor=0.25, mode="bilinear", align_corners=False) + * 0.25 + ) + Z2dd = F.interpolate( + Z2t, scale_factor=0.25, mode="bilinear", align_corners=False + ) + feat2t3 = softsplat(feat23, F2tdd, Z2dd, strMode="soft") + + out = self.fusionnet( + torch.cat([I1t, rife, I2t], dim=1), + torch.cat([feat1t1, feat2t1], dim=1), + torch.cat([feat1t2, feat2t2], dim=1), + torch.cat([feat1t3, feat2t3], dim=1), + ) + + return torch.clamp(out, 0, 1) diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/gmfss_fortuna/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/gmfss_fortuna/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3e55bb606f30d7d42eeac118fe8832bd7493957f --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/gmfss_fortuna/__init__.py @@ -0,0 +1,143 @@ +import pathlib +from vfi_utils import load_file_from_github_release, preprocess_frames, postprocess_frames, generic_frame_loop, InterpolationStateList +import typing +import torch +import torch.nn as nn +import torch.nn.functional as F +from comfy.model_management import get_torch_device + + +GLOBAL_MODEL_TYPE = pathlib.Path(__file__).parent.name +CKPTS_PATH_CONFIG = { + "GMFSS_fortuna_union": { + "ifnet": ("rife", "rife46.pth"), + "flownet": (GLOBAL_MODEL_TYPE, "GMFSS_fortuna_flownet.pkl"), + "metricnet": (GLOBAL_MODEL_TYPE, "GMFSS_fortuna_union_metric.pkl"), + "feat_ext": (GLOBAL_MODEL_TYPE, "GMFSS_fortuna_union_feat.pkl"), + "fusionnet": (GLOBAL_MODEL_TYPE, "GMFSS_fortuna_union_fusionnet.pkl") + }, + "GMFSS_fortuna": { + "flownet": (GLOBAL_MODEL_TYPE, "GMFSS_fortuna_flownet.pkl"), + "metricnet": (GLOBAL_MODEL_TYPE, 
"GMFSS_fortuna_metric.pkl"), + "feat_ext": (GLOBAL_MODEL_TYPE, "GMFSS_fortuna_feat.pkl"), + "fusionnet": (GLOBAL_MODEL_TYPE, "GMFSS_fortuna_fusionnet.pkl") + } +} + +class CommonModelInference(nn.Module): + def __init__(self, model_type): + super(CommonModelInference, self).__init__() + from .GMFSS_Fortuna_arch import Model as GMFSS + from .GMFSS_Fortuna_union_arch import Model as GMFSS_Union + self.model = GMFSS_Union() if "union" in model_type else GMFSS() + self.model.eval() + self.model.device() + _model_path_config = CKPTS_PATH_CONFIG[model_type] + self.model.load_model({ + key: load_file_from_github_release(*_model_path_config[key]) + for key in _model_path_config + }) + + def forward(self, I0, I1, timestep, scale=1.0): + n, c, h, w = I0.shape + tmp = max(64, int(64 / scale)) + ph = ((h - 1) // tmp + 1) * tmp + pw = ((w - 1) // tmp + 1) * tmp + padding = (0, pw - w, 0, ph - h) + I0 = F.pad(I0, padding) + I1 = F.pad(I1, padding) + ( + flow01, + flow10, + metric0, + metric1, + feat11, + feat12, + feat13, + feat21, + feat22, + feat23, + ) = self.model.reuse(I0, I1, scale) + + output = self.model.inference( + I0, + I1, + flow01, + flow10, + metric0, + metric1, + feat11, + feat12, + feat13, + feat21, + feat22, + feat23, + timestep, + ) + return output[:, :, :h, :w] + +class GMFSS_Fortuna_VFI: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "ckpt_name": (list(CKPTS_PATH_CONFIG.keys()), ), + "frames": ("IMAGE", ), + "clear_cache_after_n_frames": ("INT", {"default": 10, "min": 1, "max": 1000}), + "multiplier": ("INT", {"default": 2, "min": 2, "max": 1000}), + }, + "optional": { + "optional_interpolation_states": ("INTERPOLATION_STATES", ) + } + } + + RETURN_TYPES = ("IMAGE", ) + FUNCTION = "vfi" + CATEGORY = "ComfyUI-Frame-Interpolation/VFI" + + def vfi( + self, + ckpt_name: typing.AnyStr, + frames: torch.Tensor, + clear_cache_after_n_frames = 10, + multiplier: typing.SupportsInt = 2, + optional_interpolation_states: InterpolationStateList = None, + **kwargs + ): + """ + Perform video frame interpolation using a given checkpoint model. + + Args: + ckpt_name (str): The name of the checkpoint model to use. + frames (torch.Tensor): A tensor containing input video frames. + clear_cache_after_n_frames (int, optional): The number of frames to process before clearing CUDA cache + to prevent memory overflow. Defaults to 10. Lower numbers are safer but mean more processing time. + How high you should set it depends on how many input frames there are, input resolution (after upscaling), + how many times you want to multiply them, and how long you're willing to wait for the process to complete. + multiplier (int, optional): The multiplier for each input frame. 60 input frames * 2 = 120 output frames. Defaults to 2. + + Returns: + tuple: A tuple containing the output interpolated frames. + + Note: + This method interpolates frames in a video sequence using a specified checkpoint model. + It processes each frame sequentially, generating interpolated frames between them. + + To prevent memory overflow, it clears the CUDA cache after processing a specified number of frames. 
+ """ + + interpolation_model = CommonModelInference(model_type=ckpt_name) + interpolation_model.eval().to(get_torch_device()) + frames = preprocess_frames(frames) + + def return_middle_frame(frame_0, frame_1, timestep, model, scale): + return model(frame_0, frame_1, timestep, scale) + + scale = 1 + + args = [interpolation_model, scale] + out = postprocess_frames( + generic_frame_loop(type(self).__name__, frames, clear_cache_after_n_frames, multiplier, return_middle_frame, *args, + interpolation_states=optional_interpolation_states, dtype=torch.float32) + ) + return (out,) diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/gmfss_fortuna/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/gmfss_fortuna/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..60d4309965a57ca14a0bc46653cfda6cd3cf342b Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/gmfss_fortuna/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifrnet/IFRNet_L_arch.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifrnet/IFRNet_L_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..f9554a0bbfbe274ccfa021c618512f5220bbb220 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifrnet/IFRNet_L_arch.py @@ -0,0 +1,293 @@ +# https://github.com/ltkong218/IFRNet/blob/main/models/IFRNet_L.py +# https://github.com/ltkong218/IFRNet/blob/main/utils.py +import torch +import torch.nn as nn +import torch.nn.functional as F +from comfy.model_management import get_torch_device + + +def warp(img, flow): + B, _, H, W = flow.shape + xx = torch.linspace(-1.0, 1.0, W).view(1, 1, 1, W).expand(B, -1, H, -1) + yy = torch.linspace(-1.0, 1.0, H).view(1, 1, H, 1).expand(B, -1, -1, W) + grid = torch.cat([xx, yy], 1).to(img) + flow_ = torch.cat( + [ + flow[:, 0:1, :, :] / ((W - 1.0) / 2.0), + flow[:, 1:2, :, :] / ((H - 1.0) / 2.0), + ], + 1, + ) + grid_ = (grid + flow_).permute(0, 2, 3, 1) + output = F.grid_sample( + input=img, + grid=grid_, + mode="bilinear", + padding_mode="border", + align_corners=True, + ) + return output + + +def get_robust_weight(flow_pred, flow_gt, beta): + epe = ((flow_pred.detach() - flow_gt) ** 2).sum(dim=1, keepdim=True) ** 0.5 + robust_weight = torch.exp(-beta * epe) + return robust_weight + + +def resize(x, scale_factor): + return F.interpolate( + x, scale_factor=scale_factor, mode="bilinear", align_corners=False + ) + + +def convrelu( + in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1, + dilation=1, + groups=1, + bias=True, +): + return nn.Sequential( + nn.Conv2d( + in_channels, + out_channels, + kernel_size, + stride, + padding, + dilation, + groups, + bias=bias, + ), + nn.PReLU(out_channels), + ) + + +class ResBlock(nn.Module): + def __init__(self, in_channels, side_channels, bias=True): + super(ResBlock, self).__init__() + self.side_channels = side_channels + self.conv1 = nn.Sequential( + nn.Conv2d( + in_channels, in_channels, kernel_size=3, stride=1, padding=1, bias=bias + ), + nn.PReLU(in_channels), + ) + self.conv2 = nn.Sequential( + nn.Conv2d( + side_channels, + side_channels, + kernel_size=3, + stride=1, + padding=1, + bias=bias, + ), + nn.PReLU(side_channels), + ) + self.conv3 = nn.Sequential( + nn.Conv2d( + in_channels, in_channels, kernel_size=3, stride=1, padding=1, bias=bias + ), + 
nn.PReLU(in_channels), + ) + self.conv4 = nn.Sequential( + nn.Conv2d( + side_channels, + side_channels, + kernel_size=3, + stride=1, + padding=1, + bias=bias, + ), + nn.PReLU(side_channels), + ) + self.conv5 = nn.Conv2d( + in_channels, in_channels, kernel_size=3, stride=1, padding=1, bias=bias + ) + self.prelu = nn.PReLU(in_channels) + + def forward(self, x): + out = self.conv1(x) + out[:, -self.side_channels :, :, :] = self.conv2( + out[:, -self.side_channels :, :, :] + ) + out = self.conv3(out) + out[:, -self.side_channels :, :, :] = self.conv4( + out[:, -self.side_channels :, :, :] + ) + out = self.prelu(x + self.conv5(out)) + return out + + +class Encoder(nn.Module): + def __init__(self): + super(Encoder, self).__init__() + self.pyramid1 = nn.Sequential( + convrelu(3, 64, 7, 2, 3), convrelu(64, 64, 3, 1, 1) + ) + self.pyramid2 = nn.Sequential( + convrelu(64, 96, 3, 2, 1), convrelu(96, 96, 3, 1, 1) + ) + self.pyramid3 = nn.Sequential( + convrelu(96, 144, 3, 2, 1), convrelu(144, 144, 3, 1, 1) + ) + self.pyramid4 = nn.Sequential( + convrelu(144, 192, 3, 2, 1), convrelu(192, 192, 3, 1, 1) + ) + + def forward(self, img): + f1 = self.pyramid1(img) + f2 = self.pyramid2(f1) + f3 = self.pyramid3(f2) + f4 = self.pyramid4(f3) + return f1, f2, f3, f4 + + +class Decoder4(nn.Module): + def __init__(self): + super(Decoder4, self).__init__() + self.convblock = nn.Sequential( + convrelu(384 + 1, 384), + ResBlock(384, 64), + nn.ConvTranspose2d(384, 148, 4, 2, 1, bias=True), + ) + + def forward(self, f0, f1, embt): + b, c, h, w = f0.shape + embt = embt.repeat(1, 1, h, w) + f_in = torch.cat([f0, f1, embt], 1) + f_out = self.convblock(f_in) + return f_out + + +class Decoder3(nn.Module): + def __init__(self): + super(Decoder3, self).__init__() + self.convblock = nn.Sequential( + convrelu(436, 432), + ResBlock(432, 64), + nn.ConvTranspose2d(432, 100, 4, 2, 1, bias=True), + ) + + def forward(self, ft_, f0, f1, up_flow0, up_flow1): + f0_warp = warp(f0, up_flow0) + f1_warp = warp(f1, up_flow1) + f_in = torch.cat([ft_, f0_warp, f1_warp, up_flow0, up_flow1], 1) + f_out = self.convblock(f_in) + return f_out + + +class Decoder2(nn.Module): + def __init__(self): + super(Decoder2, self).__init__() + self.convblock = nn.Sequential( + convrelu(292, 288), + ResBlock(288, 64), + nn.ConvTranspose2d(288, 68, 4, 2, 1, bias=True), + ) + + def forward(self, ft_, f0, f1, up_flow0, up_flow1): + f0_warp = warp(f0, up_flow0) + f1_warp = warp(f1, up_flow1) + f_in = torch.cat([ft_, f0_warp, f1_warp, up_flow0, up_flow1], 1) + f_out = self.convblock(f_in) + return f_out + + +class Decoder1(nn.Module): + def __init__(self): + super(Decoder1, self).__init__() + self.convblock = nn.Sequential( + convrelu(196, 192), + ResBlock(192, 64), + nn.ConvTranspose2d(192, 8, 4, 2, 1, bias=True), + ) + + def forward(self, ft_, f0, f1, up_flow0, up_flow1): + f0_warp = warp(f0, up_flow0) + f1_warp = warp(f1, up_flow1) + f_in = torch.cat([ft_, f0_warp, f1_warp, up_flow0, up_flow1], 1) + f_out = self.convblock(f_in) + return f_out + + +class IRFNet_L(nn.Module): + def __init__(self): + super(IRFNet_L, self).__init__() + self.encoder = Encoder() + self.decoder4 = Decoder4() + self.decoder3 = Decoder3() + self.decoder2 = Decoder2() + self.decoder1 = Decoder1() + + def forward(self, img0, img1, scale_factor=1.0, timestep=0.5): + # emb1 = torch.tensor(1/2).view(1, 1, 1, 1).float() + # emb2 = torch.tensor(2/2).view(1, 1, 1, 1).float() + # embt = torch.cat([emb1, emb2], 0) + n, c, h, w = img0.shape + + ph = ((h - 1) // 64 + 1) * 64 + pw = ((w - 1) // 64 + 1) 
* 64 + padding = (0, pw - w, 0, ph - h) + img0 = F.pad(img0, padding) + img1 = F.pad(img1, padding) + + #Support multiple batches + embt = torch.tensor([timestep] * n).view(n, 1, 1, 1).float().to(get_torch_device()) + if "HalfTensor" in str(img0.type()): + embt = embt.half() + + mean_ = ( + torch.cat([img0, img1], 2) + .mean(1, keepdim=True) + .mean(2, keepdim=True) + .mean(3, keepdim=True) + ) + img0 = img0 - mean_ + img1 = img1 - mean_ + + img0_ = resize(img0, scale_factor=scale_factor) + img1_ = resize(img1, scale_factor=scale_factor) + + f0_1, f0_2, f0_3, f0_4 = self.encoder(img0_) + f1_1, f1_2, f1_3, f1_4 = self.encoder(img1_) + + out4 = self.decoder4(f0_4, f1_4, embt) + up_flow0_4 = out4[:, 0:2] + up_flow1_4 = out4[:, 2:4] + ft_3_ = out4[:, 4:] + + out3 = self.decoder3(ft_3_, f0_3, f1_3, up_flow0_4, up_flow1_4) + up_flow0_3 = out3[:, 0:2] + 2.0 * resize(up_flow0_4, scale_factor=2.0) + up_flow1_3 = out3[:, 2:4] + 2.0 * resize(up_flow1_4, scale_factor=2.0) + ft_2_ = out3[:, 4:] + + out2 = self.decoder2(ft_2_, f0_2, f1_2, up_flow0_3, up_flow1_3) + up_flow0_2 = out2[:, 0:2] + 2.0 * resize(up_flow0_3, scale_factor=2.0) + up_flow1_2 = out2[:, 2:4] + 2.0 * resize(up_flow1_3, scale_factor=2.0) + ft_1_ = out2[:, 4:] + + out1 = self.decoder1(ft_1_, f0_1, f1_1, up_flow0_2, up_flow1_2) + up_flow0_1 = out1[:, 0:2] + 2.0 * resize(up_flow0_2, scale_factor=2.0) + up_flow1_1 = out1[:, 2:4] + 2.0 * resize(up_flow1_2, scale_factor=2.0) + up_mask_1 = torch.sigmoid(out1[:, 4:5]) + up_res_1 = out1[:, 5:] + + up_flow0_1 = resize(up_flow0_1, scale_factor=(1.0 / scale_factor)) * ( + 1.0 / scale_factor + ) + up_flow1_1 = resize(up_flow1_1, scale_factor=(1.0 / scale_factor)) * ( + 1.0 / scale_factor + ) + up_mask_1 = resize(up_mask_1, scale_factor=(1.0 / scale_factor)) + up_res_1 = resize(up_res_1, scale_factor=(1.0 / scale_factor)) + + img0_warp = warp(img0, up_flow0_1) + img1_warp = warp(img1, up_flow1_1) + imgt_merge = up_mask_1 * img0_warp + (1 - up_mask_1) * img1_warp + mean_ + imgt_pred = imgt_merge + up_res_1 + imgt_pred = torch.clamp(imgt_pred, 0, 1) + return imgt_pred[:, :, :h, :w] diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifrnet/IFRNet_S_arch.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifrnet/IFRNet_S_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..4a2282b93b74eddf5792190f77dff0de457ff8b4 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifrnet/IFRNet_S_arch.py @@ -0,0 +1,293 @@ +# https://github.com/ltkong218/IFRNet/blob/main/models/IFRNet_S.py +# https://github.com/ltkong218/IFRNet/blob/main/utils.py +import torch +import torch.nn as nn +import torch.nn.functional as F +from comfy.model_management import get_torch_device + + +def warp(img, flow): + B, _, H, W = flow.shape + xx = torch.linspace(-1.0, 1.0, W).view(1, 1, 1, W).expand(B, -1, H, -1) + yy = torch.linspace(-1.0, 1.0, H).view(1, 1, H, 1).expand(B, -1, -1, W) + grid = torch.cat([xx, yy], 1).to(img) + flow_ = torch.cat( + [ + flow[:, 0:1, :, :] / ((W - 1.0) / 2.0), + flow[:, 1:2, :, :] / ((H - 1.0) / 2.0), + ], + 1, + ) + grid_ = (grid + flow_).permute(0, 2, 3, 1) + output = F.grid_sample( + input=img, + grid=grid_, + mode="bilinear", + padding_mode="border", + align_corners=True, + ) + return output + + +def get_robust_weight(flow_pred, flow_gt, beta): + epe = ((flow_pred.detach() - flow_gt) ** 2).sum(dim=1, keepdim=True) ** 0.5 + robust_weight = torch.exp(-beta * epe) + return robust_weight + + +def resize(x, 
scale_factor): + return F.interpolate( + x, scale_factor=scale_factor, mode="bilinear", align_corners=False + ) + + +def convrelu( + in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1, + dilation=1, + groups=1, + bias=True, +): + return nn.Sequential( + nn.Conv2d( + in_channels, + out_channels, + kernel_size, + stride, + padding, + dilation, + groups, + bias=bias, + ), + nn.PReLU(out_channels), + ) + + +class ResBlock(nn.Module): + def __init__(self, in_channels, side_channels, bias=True): + super(ResBlock, self).__init__() + self.side_channels = side_channels + self.conv1 = nn.Sequential( + nn.Conv2d( + in_channels, in_channels, kernel_size=3, stride=1, padding=1, bias=bias + ), + nn.PReLU(in_channels), + ) + self.conv2 = nn.Sequential( + nn.Conv2d( + side_channels, + side_channels, + kernel_size=3, + stride=1, + padding=1, + bias=bias, + ), + nn.PReLU(side_channels), + ) + self.conv3 = nn.Sequential( + nn.Conv2d( + in_channels, in_channels, kernel_size=3, stride=1, padding=1, bias=bias + ), + nn.PReLU(in_channels), + ) + self.conv4 = nn.Sequential( + nn.Conv2d( + side_channels, + side_channels, + kernel_size=3, + stride=1, + padding=1, + bias=bias, + ), + nn.PReLU(side_channels), + ) + self.conv5 = nn.Conv2d( + in_channels, in_channels, kernel_size=3, stride=1, padding=1, bias=bias + ) + self.prelu = nn.PReLU(in_channels) + + def forward(self, x): + out = self.conv1(x) + out[:, -self.side_channels :, :, :] = self.conv2( + out[:, -self.side_channels :, :, :] + ) + out = self.conv3(out) + out[:, -self.side_channels :, :, :] = self.conv4( + out[:, -self.side_channels :, :, :] + ) + out = self.prelu(x + self.conv5(out)) + return out + + +class Encoder(nn.Module): + def __init__(self): + super(Encoder, self).__init__() + self.pyramid1 = nn.Sequential( + convrelu(3, 24, 3, 2, 1), convrelu(24, 24, 3, 1, 1) + ) + self.pyramid2 = nn.Sequential( + convrelu(24, 36, 3, 2, 1), convrelu(36, 36, 3, 1, 1) + ) + self.pyramid3 = nn.Sequential( + convrelu(36, 54, 3, 2, 1), convrelu(54, 54, 3, 1, 1) + ) + self.pyramid4 = nn.Sequential( + convrelu(54, 72, 3, 2, 1), convrelu(72, 72, 3, 1, 1) + ) + + def forward(self, img): + f1 = self.pyramid1(img) + f2 = self.pyramid2(f1) + f3 = self.pyramid3(f2) + f4 = self.pyramid4(f3) + return f1, f2, f3, f4 + + +class Decoder4(nn.Module): + def __init__(self): + super(Decoder4, self).__init__() + self.convblock = nn.Sequential( + convrelu(144 + 1, 144), + ResBlock(144, 24), + nn.ConvTranspose2d(144, 58, 4, 2, 1, bias=True), + ) + + def forward(self, f0, f1, embt): + b, c, h, w = f0.shape + embt = embt.repeat(1, 1, h, w) + f_in = torch.cat([f0, f1, embt], 1) + f_out = self.convblock(f_in) + return f_out + + +class Decoder3(nn.Module): + def __init__(self): + super(Decoder3, self).__init__() + self.convblock = nn.Sequential( + convrelu(166, 162), + ResBlock(162, 24), + nn.ConvTranspose2d(162, 40, 4, 2, 1, bias=True), + ) + + def forward(self, ft_, f0, f1, up_flow0, up_flow1): + f0_warp = warp(f0, up_flow0) + f1_warp = warp(f1, up_flow1) + f_in = torch.cat([ft_, f0_warp, f1_warp, up_flow0, up_flow1], 1) + f_out = self.convblock(f_in) + return f_out + + +class Decoder2(nn.Module): + def __init__(self): + super(Decoder2, self).__init__() + self.convblock = nn.Sequential( + convrelu(112, 108), + ResBlock(108, 24), + nn.ConvTranspose2d(108, 28, 4, 2, 1, bias=True), + ) + + def forward(self, ft_, f0, f1, up_flow0, up_flow1): + f0_warp = warp(f0, up_flow0) + f1_warp = warp(f1, up_flow1) + f_in = torch.cat([ft_, f0_warp, f1_warp, up_flow0, up_flow1], 1) + f_out 
= self.convblock(f_in) + return f_out + + +class Decoder1(nn.Module): + def __init__(self): + super(Decoder1, self).__init__() + self.convblock = nn.Sequential( + convrelu(76, 72), + ResBlock(72, 24), + nn.ConvTranspose2d(72, 8, 4, 2, 1, bias=True), + ) + + def forward(self, ft_, f0, f1, up_flow0, up_flow1): + f0_warp = warp(f0, up_flow0) + f1_warp = warp(f1, up_flow1) + f_in = torch.cat([ft_, f0_warp, f1_warp, up_flow0, up_flow1], 1) + f_out = self.convblock(f_in) + return f_out + + +class IRFNet_S(nn.Module): + def __init__(self): + super(IRFNet_S, self).__init__() + self.encoder = Encoder() + self.decoder4 = Decoder4() + self.decoder3 = Decoder3() + self.decoder2 = Decoder2() + self.decoder1 = Decoder1() + + def forward(self, img0, img1, scale_factor=1.0, timestep=0.5): + # emb1 = torch.tensor(1/2).view(1, 1, 1, 1).float() + # emb2 = torch.tensor(2/2).view(1, 1, 1, 1).float() + # embt = torch.cat([emb1, emb2], 0) + n, c, h, w = img0.shape + + ph = ((h - 1) // 64 + 1) * 64 + pw = ((w - 1) // 64 + 1) * 64 + padding = (0, pw - w, 0, ph - h) + img0 = F.pad(img0, padding) + img1 = F.pad(img1, padding) + + #Support multiple batches + embt = torch.tensor([timestep] * n).view(n, 1, 1, 1).float().to(get_torch_device()) + if "HalfTensor" in str(img0.type()): + embt = embt.half() + + mean_ = ( + torch.cat([img0, img1], 2) + .mean(1, keepdim=True) + .mean(2, keepdim=True) + .mean(3, keepdim=True) + ) + img0 = img0 - mean_ + img1 = img1 - mean_ + + img0_ = resize(img0, scale_factor=scale_factor) + img1_ = resize(img1, scale_factor=scale_factor) + + f0_1, f0_2, f0_3, f0_4 = self.encoder(img0_) + f1_1, f1_2, f1_3, f1_4 = self.encoder(img1_) + + out4 = self.decoder4(f0_4, f1_4, embt) + up_flow0_4 = out4[:, 0:2] + up_flow1_4 = out4[:, 2:4] + ft_3_ = out4[:, 4:] + + out3 = self.decoder3(ft_3_, f0_3, f1_3, up_flow0_4, up_flow1_4) + up_flow0_3 = out3[:, 0:2] + 2.0 * resize(up_flow0_4, scale_factor=2.0) + up_flow1_3 = out3[:, 2:4] + 2.0 * resize(up_flow1_4, scale_factor=2.0) + ft_2_ = out3[:, 4:] + + out2 = self.decoder2(ft_2_, f0_2, f1_2, up_flow0_3, up_flow1_3) + up_flow0_2 = out2[:, 0:2] + 2.0 * resize(up_flow0_3, scale_factor=2.0) + up_flow1_2 = out2[:, 2:4] + 2.0 * resize(up_flow1_3, scale_factor=2.0) + ft_1_ = out2[:, 4:] + + out1 = self.decoder1(ft_1_, f0_1, f1_1, up_flow0_2, up_flow1_2) + up_flow0_1 = out1[:, 0:2] + 2.0 * resize(up_flow0_2, scale_factor=2.0) + up_flow1_1 = out1[:, 2:4] + 2.0 * resize(up_flow1_2, scale_factor=2.0) + up_mask_1 = torch.sigmoid(out1[:, 4:5]) + up_res_1 = out1[:, 5:] + + up_flow0_1 = resize(up_flow0_1, scale_factor=(1.0 / scale_factor)) * ( + 1.0 / scale_factor + ) + up_flow1_1 = resize(up_flow1_1, scale_factor=(1.0 / scale_factor)) * ( + 1.0 / scale_factor + ) + up_mask_1 = resize(up_mask_1, scale_factor=(1.0 / scale_factor)) + up_res_1 = resize(up_res_1, scale_factor=(1.0 / scale_factor)) + + img0_warp = warp(img0, up_flow0_1) + img1_warp = warp(img1, up_flow1_1) + imgt_merge = up_mask_1 * img0_warp + (1 - up_mask_1) * img1_warp + mean_ + imgt_pred = imgt_merge + up_res_1 + imgt_pred = torch.clamp(imgt_pred, 0, 1) + return imgt_pred[:, :, :h, :w] diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifrnet/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifrnet/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2d7d3cec3a2c4074de091e798800329d5da9d19f --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifrnet/__init__.py @@ -0,0 +1,57 @@ +import torch 
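# Worked example (hypothetical numbers) of the pad-to-multiple-of-64
# arithmetic used by both IFRNet archs above; note F.pad takes
# (left, right, top, bottom) for the last two dimensions.
def _demo_pad_to_64():
    import torch
    import torch.nn.functional as F
    h, w = 540, 960
    ph = ((h - 1) // 64 + 1) * 64   # 576
    pw = ((w - 1) // 64 + 1) * 64   # 960 (already a multiple of 64)
    x = F.pad(torch.zeros(1, 3, h, w), (0, pw - w, 0, ph - h))
    assert x.shape[-2:] == (576, 960)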
+import pathlib +from vfi_utils import load_file_from_github_release, preprocess_frames, postprocess_frames +import typing +from comfy.model_management import get_torch_device +from vfi_utils import generic_frame_loop, InterpolationStateList + +MODEL_TYPE = pathlib.Path(__file__).parent.name +CKPT_NAMES = ["IFRNet_S_Vimeo90K.pth", "IFRNet_L_Vimeo90K.pth"] + +class IFRNet_VFI: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "ckpt_name": (CKPT_NAMES, ), + "frames": ("IMAGE", ), + "clear_cache_after_n_frames": ("INT", {"default": 10, "min": 1, "max": 1000}), + "multiplier": ("INT", {"default": 2, "min": 2, "max": 1000}), + "scale_factor": ([0.25, 0.5, 1.0, 2.0, 4.0], {"default": 1.0}), + }, + "optional": { + "optional_interpolation_states": ("INTERPOLATION_STATES", ) + } + } + + RETURN_TYPES = ("IMAGE", ) + FUNCTION = "vfi" + CATEGORY = "ComfyUI-Frame-Interpolation/VFI" + + def vfi( + self, + ckpt_name: typing.AnyStr, + frames: torch.Tensor, + clear_cache_after_n_frames: typing.SupportsInt = 1, + multiplier: typing.SupportsInt = 2, + scale_factor: typing.SupportsFloat = 1.0, + optional_interpolation_states: InterpolationStateList = None, + **kwargs + ): + from .IFRNet_S_arch import IRFNet_S + from .IFRNet_L_arch import IRFNet_L + model_path = load_file_from_github_release(MODEL_TYPE, ckpt_name) + interpolation_model = IRFNet_S() if 'S' in ckpt_name else IRFNet_L() + interpolation_model.load_state_dict(torch.load(model_path)) + interpolation_model.eval().to(get_torch_device()) + frames = preprocess_frames(frames) + + def return_middle_frame(frame_0, frame_1, timestep, model, scale_factor): + return model(frame_0, frame_1, timestep, scale_factor) + + args = [interpolation_model, scale_factor] + out = postprocess_frames( + generic_frame_loop(type(self).__name__, frames, clear_cache_after_n_frames, multiplier, return_middle_frame, *args, + interpolation_states=optional_interpolation_states, dtype=torch.float32) + ) + return (out,) diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifrnet/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifrnet/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..41e10714ed8d39e5e3da4f1e3363322d75c6e0b6 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifrnet/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifunet/IFUNet_arch.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifunet/IFUNet_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..82066132e8fb370d8016afbedb47afed7779c9bb --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifunet/IFUNet_arch.py @@ -0,0 +1,766 @@ +""" +https://github.com/98mxr/IFUNet/blob/main/model/IFUNet.py +https://github.com/98mxr/IFUNet/blob/main/model/cbam.py +https://github.com/98mxr/IFUNet/blob/main/model/warplayer.py +https://github.com/98mxr/IFUNet/blob/5be535c8cff66d6fa1967252685719df4c0620e4/model/RIFE.py +https://github.com/98mxr/IFUNet/blob/main/model/rrdb.py +https://github.com/98mxr/IFUNet/blob/main/model/ResynNet.py +""" +import torch +import torch.nn as nn +import torch.nn.functional as F +from comfy.model_management import get_torch_device + +backwarp_tenGrid = {} +device = get_torch_device() + + +def conv(in_planes, out_planes, kernel_size=3, stride=1, padding=1, dilation=1): + return 
nn.Sequential( + nn.Conv2d( + in_planes, + out_planes, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + bias=True, + ), + nn.PReLU(out_planes), + ) + + +def conv_bn(in_planes, out_planes, kernel_size=3, stride=1, padding=1, dilation=1): + return nn.Sequential( + nn.Conv2d( + in_planes, + out_planes, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + bias=False, + ), + nn.BatchNorm2d(out_planes), + nn.PReLU(out_planes), + ) + + +class DegCNN(nn.Module): + def __init__(self): + super(DegCNN, self).__init__() + self.conv0 = conv(3, 32, 3, 2, 1) + self.conv1 = conv(32, 32, 3, 2, 1) + self.conv2 = conv(32, 32, 3, 2, 1) + self.conv3 = conv(32, 32, 3, 2, 1) + self.deconv = nn.Sequential( + nn.Dropout2d(0.95), + nn.ConvTranspose2d(4 * 32, 32, 4, 2, 1), + nn.PReLU(32), + nn.Conv2d(32, 3, 3, 1, 1), + nn.Sigmoid(), + ) + + def forward(self, x): + f0 = self.conv0(x) + f1 = self.conv1(f0) + f2 = self.conv2(f1) + f3 = self.conv3(f2) + f1 = F.interpolate(f1, scale_factor=2.0, mode="bilinear", align_corners=False) + f2 = F.interpolate(f2, scale_factor=4.0, mode="bilinear", align_corners=False) + f3 = F.interpolate(f3, scale_factor=8.0, mode="bilinear", align_corners=False) + return self.deconv(torch.cat((f0, f1, f2, f3), 1)) + + +class FlowBlock(nn.Module): + def __init__(self, in_planes, c=64): + super(FlowBlock, self).__init__() + self.conv0 = nn.Sequential( + conv_bn(in_planes, c // 2, 3, 2, 1), + conv_bn(c // 2, c, 3, 2, 1), + conv_bn(c, 2 * c, 3, 2, 1), + ) + self.convblock = nn.Sequential( + conv_bn(2 * c, 2 * c), + conv_bn(2 * c, 2 * c), + conv_bn(2 * c, 2 * c), + conv_bn(2 * c, 2 * c), + conv_bn(2 * c, 2 * c), + conv_bn(2 * c, 2 * c), + ) + self.lastconv = nn.ConvTranspose2d(2 * c, 4, 4, 2, 1) + + def forward(self, x, flow, scale=1): + x = F.interpolate( + x, scale_factor=1.0 / scale, mode="bilinear", align_corners=False + ) + if flow is not None: + flow = ( + F.interpolate( + flow, scale_factor=1.0 / scale, mode="bilinear", align_corners=False + ) + * 1.0 + / scale + ) + x = torch.cat((x, flow), 1) + feat = self.conv0(x) + feat = self.convblock(feat) + feat + tmp = self.lastconv(feat) + tmp = F.interpolate( + tmp, scale_factor=scale * 4, mode="bilinear", align_corners=False + ) + flow = tmp[:, :2] * scale * 4 + mask = tmp[:, 2:3] + return flow, mask + + +class ResynNet(nn.Module): + def __init__(self): + super(ResynNet, self).__init__() + self.block0 = FlowBlock(6, c=128) + self.block1 = FlowBlock(12, c=128) + self.block2 = FlowBlock(12, c=128) + self.degrad = DegCNN() + # Contextual Refinement context + decode + self.context0 = nn.Sequential( + conv(3, 16, 3, 2, 1), + conv(16, 32, 3, 2, 1), + ) + self.context1 = nn.Sequential( + conv(3, 16, 3, 2, 1), + conv(16, 32, 3, 2, 1), + ) + self.decode = nn.Sequential( + nn.ConvTranspose2d(64, 32, 4, 2, 1), + nn.ConvTranspose2d(32, 3, 4, 2, 1), + nn.Tanh(), + ) + + def calflow(self, img0, lowres, scale): + flow = None + stu = [self.block0, self.block1, self.block2] + for i in range(3): + if flow is not None: + flow_d, mask_d = stu[i]( + torch.cat((img0, lowres, warped_img0, mask), 1), + flow, + scale=scale[i], + ) + flow = flow + flow_d + mask = mask + mask_d + else: + flow, mask = stu[i](torch.cat((img0, lowres), 1), None, scale=scale[i]) + warped_img0 = warp(img0, flow) + flow_down = ( + F.interpolate(flow, scale_factor=0.25, mode="bilinear", align_corners=False) + * 0.25 + ) + c0 = warp(self.context0(img0), flow_down) + c1 = self.context1(warped_img0) + warped_img0 = warped_img0 + 
self.decode(torch.cat((c0, c1), 1)) + return flow, mask, torch.clamp(warped_img0, 0, 1) + + def forward( + self, x, deg=None, gt=None, scale=[4, 2, 1], training=False, blend=True + ): + if training: + deg = self.degrad(gt) + loss_cons = (gt - deg).abs().mean() + else: + loss_cons = torch.tensor([0]) + img_list = [] + N = x.shape[1] // 3 + for i in range(N): + img_list.append(x[:, i * 3 : i * 3 + 3]) + warped_list = [] + merged = [] + mask_list = [] + flow_list = [] + for i in range(N): + f, m, img = self.calflow(img_list[i], deg.detach(), scale) + mask_list.append(m) + warped_list.append(img) + flow_list.append(f) + if blend: + N += 1 + mask_list.append(m * 0) + warped_list.append(deg) + mask = F.softmax(torch.clamp(torch.cat(mask_list, 1), -4, 4), dim=1) + merged = 0 + for i in range(N): + merged += warped_list[i] * mask[:, i : i + 1] + return merged, loss_cons + + +def make_layer(basic_block, num_basic_block, **kwarg): + """Make layers by stacking the same blocks. + Args: + basic_block (nn.module): nn.module class for basic block. + num_basic_block (int): number of blocks. + Returns: + nn.Sequential: Stacked blocks in nn.Sequential. + """ + layers = [] + for _ in range(num_basic_block): + layers.append(basic_block(**kwarg)) + return nn.Sequential(*layers) + + +class ResidualDenseBlock(nn.Module): + """Residual Dense Block. + + Used in RRDB block in ESRGAN. + + Args: + num_feat (int): Channel number of intermediate features. + num_grow_ch (int): Channels for each growth. + """ + + def __init__(self, num_feat=64, num_grow_ch=32): + super(ResidualDenseBlock, self).__init__() + self.conv1 = nn.Conv2d(num_feat, num_grow_ch, 3, 1, 1) + self.conv2 = nn.Conv2d(num_feat + num_grow_ch, num_grow_ch, 3, 1, 1) + self.conv3 = nn.Conv2d(num_feat + 2 * num_grow_ch, num_grow_ch, 3, 1, 1) + self.conv4 = nn.Conv2d(num_feat + 3 * num_grow_ch, num_grow_ch, 3, 1, 1) + self.conv5 = nn.Conv2d(num_feat + 4 * num_grow_ch, num_feat, 3, 1, 1) + + self.lrelu = nn.LeakyReLU(negative_slope=0.2, inplace=True) + + # initialization + # default_init_weights([self.conv1, self.conv2, self.conv3, self.conv4, self.conv5], 0.1) + # disabled for now: default_init_weights comes from basicsr.arch_util + + def forward(self, x): + x1 = self.lrelu(self.conv1(x)) + x2 = self.lrelu(self.conv2(torch.cat((x, x1), 1))) + x3 = self.lrelu(self.conv3(torch.cat((x, x1, x2), 1))) + x4 = self.lrelu(self.conv4(torch.cat((x, x1, x2, x3), 1))) + x5 = self.conv5(torch.cat((x, x1, x2, x3, x4), 1)) + # Empirically, we use 0.2 to scale the residual for better performance + # per the original authors, so we keep it + return x5 * 0.2 + x + + +class RRDB(nn.Module): + """Residual in Residual Dense Block. + + Used in RRDB-Net in ESRGAN. + + Args: + num_feat (int): Channel number of intermediate features. + num_grow_ch (int): Channels for each growth. + """ + + def __init__(self, num_feat, num_grow_ch=32): + super(RRDB, self).__init__() + self.rdb1 = ResidualDenseBlock(num_feat, num_grow_ch) + self.rdb2 = ResidualDenseBlock(num_feat, num_grow_ch) + self.rdb3 = ResidualDenseBlock(num_feat, num_grow_ch) + + def forward(self, x): + out = self.rdb1(x) + out = self.rdb2(out) + out = self.rdb3(out) + # Empirically, we use 0.2 to scale the residual for better performance + # per the original authors, so we keep it + return out * 0.2 + x
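# A minimal usage sketch (hypothetical demo): make_layer stacks RRDB blocks
# into a trunk, exactly as RRDBNet below does internally; the
# residual-in-residual blocks preserve the input shape.
def _demo_rrdb_trunk():
    import torch
    trunk = make_layer(RRDB, 3, num_feat=64, num_grow_ch=32)
    feat = torch.randn(1, 64, 32, 32)
    assert trunk(feat).shape == feat.shape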
+ We first employ the pixel-unshuffle (an inverse operation of pixelshuffle) to reduce the spatial size + and enlarge the channel size before feeding inputs into the main ESRGAN architecture. + + Args: + num_in_ch (int): Channel number of inputs. + num_out_ch (int): Channel number of outputs. + num_feat (int): Channel number of intermediate features. + Default: 64 + num_block (int): Block number in the trunk network. Default: 23 + num_grow_ch (int): Channels for each growth. Default: 32. + """ + + def __init__( + self, num_in_ch=16, num_out_ch=1, num_feat=64, num_block=6, num_grow_ch=32 + ): + super(RRDBNet, self).__init__() + self.conv_first = nn.Conv2d(num_in_ch, num_feat, 3, 1, 1) + self.body = make_layer( + RRDB, num_block, num_feat=num_feat, num_grow_ch=num_grow_ch + ) + self.conv_body = nn.Conv2d(num_feat, num_feat, 3, 1, 1) + # upsample + self.conv_up1 = nn.Conv2d(num_feat, num_feat, 3, 1, 1) + self.conv_up2 = nn.Conv2d(num_feat, num_feat, 3, 1, 1) + self.conv_hr = nn.Conv2d(num_feat, num_feat, 3, 1, 1) + self.conv_last = nn.Conv2d(num_feat, num_out_ch, 3, 1, 1) + + self.lrelu = nn.LeakyReLU(negative_slope=0.2, inplace=True) + + def forward(self, img0, img1, warped_img0, warped_img1, flow): + x = torch.cat((img0, img1, warped_img0, warped_img1), 1) + x = F.interpolate(x, scale_factor=0.25, mode="bilinear", align_corners=False) + flow = ( + F.interpolate(flow, scale_factor=0.25, mode="bilinear", align_corners=False) + * 0.25 + ) + feat = torch.cat((x, flow), 1) + + feat = self.conv_first(feat) + body_feat = self.conv_body(self.body(feat)) + feat = feat + body_feat + # upsample, making full use of the 4x enlargement + feat = self.lrelu( + self.conv_up1(F.interpolate(feat, scale_factor=2.0, mode="nearest")) + ) + feat = self.lrelu( + self.conv_up2(F.interpolate(feat, scale_factor=2.0, mode="nearest")) + ) + out = self.conv_last(self.lrelu(self.conv_hr(feat))) + + out = torch.sigmoid(out) + return out + + +def warp(tenInput, tenFlow): + k = (str(tenFlow.device), str(tenFlow.size())) + if k not in backwarp_tenGrid: + tenHorizontal = ( + torch.linspace(-1.0, 1.0, tenFlow.shape[3], device=device) + .view(1, 1, 1, tenFlow.shape[3]) + .expand(tenFlow.shape[0], -1, tenFlow.shape[2], -1) + ) + tenVertical = ( + torch.linspace(-1.0, 1.0, tenFlow.shape[2], device=device) + .view(1, 1, tenFlow.shape[2], 1) + .expand(tenFlow.shape[0], -1, -1, tenFlow.shape[3]) + ) + backwarp_tenGrid[k] = torch.cat([tenHorizontal, tenVertical], 1).to(device) + + tenFlow = torch.cat( + [ + tenFlow[:, 0:1, :, :] / ((tenInput.shape[3] - 1.0) / 2.0), + tenFlow[:, 1:2, :, :] / ((tenInput.shape[2] - 1.0) / 2.0), + ], + 1, + ) + + g = (backwarp_tenGrid[k] + tenFlow).permute(0, 2, 3, 1) + return torch.nn.functional.grid_sample( + input=tenInput, + grid=g, + mode="bilinear", + padding_mode="border", + align_corners=True, + ) + + +class BasicConv(nn.Module): + def __init__( + self, + in_planes, + out_planes, + kernel_size, + stride=1, + padding=0, + dilation=1, + groups=1, + relu=True, + bn=True, + bias=False, + ): + super(BasicConv, self).__init__() + self.out_channels = out_planes + self.conv = nn.Conv2d( + in_planes, + out_planes, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + groups=groups, + bias=bias, + ) + self.bn = ( + nn.BatchNorm2d(out_planes, eps=1e-5, momentum=0.01, affine=True) + if bn + else None + ) + self.relu = nn.ReLU() if relu else None + + def forward(self, x): + x = self.conv(x) + if self.bn is not None: + x = self.bn(x) + if self.relu is not None: + x = self.relu(x) + return x + + +class 
Flatten(nn.Module): + def forward(self, x): + return x.view(x.size(0), -1) + + +class ChannelGate(nn.Module): + def __init__(self, gate_channels, reduction_ratio=16, pool_types=["avg", "max"]): + super(ChannelGate, self).__init__() + self.gate_channels = gate_channels + self.mlp = nn.Sequential( + Flatten(), + nn.Linear(gate_channels, gate_channels // reduction_ratio), + nn.ReLU(), + nn.Linear(gate_channels // reduction_ratio, gate_channels), + ) + self.pool_types = pool_types + + def forward(self, x): + channel_att_sum = None + for pool_type in self.pool_types: + if pool_type == "avg": + avg_pool = F.avg_pool2d( + x, (x.size(2), x.size(3)), stride=(x.size(2), x.size(3)) + ) + channel_att_raw = self.mlp(avg_pool) + elif pool_type == "max": + max_pool = F.max_pool2d( + x, (x.size(2), x.size(3)), stride=(x.size(2), x.size(3)) + ) + channel_att_raw = self.mlp(max_pool) + elif pool_type == "lp": + lp_pool = F.lp_pool2d( + x, 2, (x.size(2), x.size(3)), stride=(x.size(2), x.size(3)) + ) + channel_att_raw = self.mlp(lp_pool) + elif pool_type == "lse": + # LSE pool only + lse_pool = logsumexp_2d(x) + channel_att_raw = self.mlp(lse_pool) + + if channel_att_sum is None: + channel_att_sum = channel_att_raw + else: + channel_att_sum = channel_att_sum + channel_att_raw + + scale = torch.sigmoid(channel_att_sum).unsqueeze(2).unsqueeze(3).expand_as(x) + return x * scale + + +def logsumexp_2d(tensor): + tensor_flatten = tensor.view(tensor.size(0), tensor.size(1), -1) + s, _ = torch.max(tensor_flatten, dim=2, keepdim=True) + outputs = s + (tensor_flatten - s).exp().sum(dim=2, keepdim=True).log() + return outputs + + +class ChannelPool(nn.Module): + def forward(self, x): + return torch.cat( + (torch.max(x, 1)[0].unsqueeze(1), torch.mean(x, 1).unsqueeze(1)), dim=1 + ) + + +class SpatialGate(nn.Module): + def __init__(self): + super(SpatialGate, self).__init__() + kernel_size = 7 + self.compress = ChannelPool() + self.spatial = BasicConv( + 2, 1, kernel_size, stride=1, padding=(kernel_size - 1) // 2, relu=False + ) + + def forward(self, x): + x_compress = self.compress(x) + x_out = self.spatial(x_compress) + scale = torch.sigmoid(x_out) # broadcasting + return x * scale + + +class CBAM(nn.Module): + def __init__( + self, + gate_channels, + reduction_ratio=16, + pool_types=["avg", "max"], + no_spatial=False, + ): + super(CBAM, self).__init__() + self.ChannelGate = ChannelGate(gate_channels, reduction_ratio, pool_types) + self.no_spatial = no_spatial + if not no_spatial: + self.SpatialGate = SpatialGate() + + def forward(self, x): + x_out = self.ChannelGate(x) + if not self.no_spatial: + x_out = self.SpatialGate(x_out) + return x_out + + +def conv(in_planes, out_planes, kernel_size=3, stride=1, padding=1, dilation=1): + return nn.Sequential( + nn.Conv2d( + in_planes, + out_planes, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + bias=True, + ), + nn.PReLU(out_planes), + ) + + +class UNetConv(nn.Module): + def __init__(self, in_planes, out_planes, att=True): + super(UNetConv, self).__init__() + self.conv1 = conv(in_planes, out_planes, 3, 2, 1) + self.conv2 = conv(out_planes, out_planes, 3, 1, 1) + + if att: + self.cbam = CBAM(out_planes, 16) # this step means the channel count can be no lower than 128 + else: + self.cbam = None + + def forward(self, x): + x = self.conv1(x) + x = self.conv2(x) + if self.cbam is not None: + x = self.cbam(x) + return x + + +class UpConv(nn.Module): + def __init__(self, in_planes, out_planes, att=True): + super(UpConv, self).__init__() + self.deconv = nn.Sequential( + 
nn.ConvTranspose2d(in_planes, in_planes // 2, 4, 2, 1), + nn.PReLU(in_planes // 2), + ) + + # Possibly fewer convolutions would suffice here; not verified + self.conv1 = conv(in_planes, in_planes // 2, 3, 1, 1) + self.conv2 = conv(in_planes // 2, out_planes, 3, 1, 1) + + if att: + self.cbam = CBAM(out_planes, 16) + else: + self.cbam = None + + def forward(self, x1, x2): + x1 = self.deconv(x1) + y = self.conv1(torch.cat((x1, x2), 1)) + y = self.conv2(y) + if self.cbam is not None: + y = self.cbam(y) + return y + + +class FeatureNet(nn.Module): + def __init__(self, in_planes, out_planes): + super(FeatureNet, self).__init__() + # handle the channel-count mismatch when feeding IFBlock0 + self.conv0 = conv(7, in_planes, 1, 1, 0) + + self.conv1 = UNetConv(in_planes, out_planes // 8, att=False) + self.conv2 = UNetConv(out_planes // 8, out_planes // 4, att=True) + self.conv3 = UNetConv(out_planes // 4, out_planes // 2, att=True) + self.conv4 = UNetConv(out_planes // 2, out_planes, att=True) + self.conv5 = UNetConv(out_planes, 2 * out_planes, att=True) + + self.deconv5 = UpConv(2 * out_planes, out_planes, att=True) + self.deconv4 = UpConv(out_planes, out_planes // 2, att=False) + self.deconv3 = UpConv(out_planes // 2, out_planes // 4, att=False) + + def forward(self, x, level=0): + if x.shape[1] != 17: + x = self.conv0(x) + x2 = self.conv1(x) + x4 = self.conv2(x2) + x8 = self.conv3(x4) + x16 = self.conv4(x8) + x32 = self.conv5(x16) + y = self.deconv5(x32, x16) # match IFBlock0's channels and resolution + + # "Early exit" so the same UNet can extract features for every level; whether this affects training is unverified + if level != 0: + y = self.deconv4(y, x8) # match IFBlock1's channels and resolution + if level == 2: + y = self.deconv3(y, x4) # match IFBlock2's channels and resolution + return y + + +class IFBlock(nn.Module): + def __init__(self, c=64, level=0): + super(IFBlock, self).__init__() + self.convblock = nn.Sequential( + conv(c, c), + conv(c, c), + conv(c, c), + conv(c, c), + conv(c, c), + conv(c, c), + ) + self.flowconv = nn.Conv2d(c, 4, 3, 1, 1) + self.maskconvx16 = nn.Conv2d(c, 16 * 16 * 9, 1, 1, 0) + self.maskconvx8 = nn.Conv2d(c, 8 * 8 * 9, 1, 1, 0) + self.maskconvx4 = nn.Conv2d(c, 4 * 4 * 9, 1, 1, 0) + + self.level = level + assert self.level in [4, 8, 16], "level must be 4, 8 or 16" + + def mask_conv(self, x): + if self.level == 4: + return self.maskconvx4(x) + if self.level == 8: + return self.maskconvx8(x) + if self.level == 16: + return self.maskconvx16(x) + + def upsample_flow(self, flow, mask): + # RAFT-style convex upsampling: a learned softmax over each 3x3 neighborhood + N, _, H, W = flow.shape + mask = mask.view(N, 1, 9, self.level, self.level, H, W) + mask = torch.softmax(mask, dim=2) + + up_flow = F.unfold(self.level * flow, [3, 3], padding=1) + up_flow = up_flow.view(N, 4, 9, 1, 1, H, W) + + up_flow = torch.sum(mask * up_flow, dim=2) + up_flow = up_flow.permute(0, 1, 4, 2, 5, 3) + return up_flow.reshape(N, 4, self.level * H, self.level * W) + + def forward(self, x, scale): + x = self.convblock(x) + x # ResNet-style residual: f(x) + x + tmp = self.flowconv(x) + up_mask = self.mask_conv(x) + flow_up = self.upsample_flow(tmp, up_mask) + flow = ( + F.interpolate( + flow_up, scale_factor=scale, mode="bilinear", align_corners=False + ) + * scale + ) + return flow + + +class IFUNet(nn.Module): + def __init__(self): + super(IFUNet, self).__init__() + # block0's channel count must be an integer multiple of 128 + self.fmap = FeatureNet(in_planes=17, out_planes=256) + self.block0 = IFBlock(c=256, level=16) + self.block1 = IFBlock(c=128, level=8) + self.block2 = IFBlock(c=64, level=4) + + def forward(self, x, scale=1.0, timestep=0.5, ensemble=True): + channel = x.shape[1] // 2 + img0 = x[:, :channel] + img1 = x[:, channel:] + if not torch.is_tensor(timestep): + timestep = (x[:, :1].clone() * 0 + 1) * timestep + else: + timestep = timestep.repeat(1, 
1, img0.shape[2], img0.shape[3]) + warped_img0 = img0 + warped_img1 = img1 + flow = None + block = [self.block0, self.block1, self.block2] + for i in range(3): + if flow is not None: + x = torch.cat((img0, img1, timestep, warped_img0, warped_img1), 1) + flowtmp = flow + if scale != 1: + x = F.interpolate( + x, scale_factor=scale, mode="bilinear", align_corners=False + ) + flowtmp = ( + F.interpolate( + flow, + scale_factor=scale, + mode="bilinear", + align_corners=False, + ) + * scale + ) + x = torch.cat((x, flowtmp), 1) + # The hope is that the shared UNet features are strong enough that ensembling becomes unnecessary + Fmap = self.fmap(x, level=i) + flow_d = block[i](Fmap, scale=1.0 / scale) + flow = flow + flow_d + + if ensemble: + x = torch.cat( + (img1, img0, 1 - timestep, warped_img0, warped_img1), 1 + ) + flowtmp = flow + if scale != 1: + x = F.interpolate( + x, scale_factor=scale, mode="bilinear", align_corners=False + ) + flowtmp = ( + F.interpolate( + flow, + scale_factor=scale, + mode="bilinear", + align_corners=False, + ) + * scale + ) + x = torch.cat((x, flowtmp), 1) + # The hope is that the shared UNet features are strong enough that ensembling becomes unnecessary + Fmap = self.fmap(x, level=i) + flow_d = block[i](Fmap, scale=1.0 / scale) + flow2 = flow + flow_d + flow = (flow + flow2) / 2 + else: + x = torch.cat((img0, img1, timestep), 1) + if scale != 1: + x = F.interpolate( + x, scale_factor=scale, mode="bilinear", align_corners=False + ) + Fmap = self.fmap(x, level=i) + flow = block[i](Fmap, scale=1.0 / scale) + + if ensemble: + x = torch.cat((img1, img0, 1 - timestep), 1) + if scale != 1: + x = F.interpolate( + x, scale_factor=scale, mode="bilinear", align_corners=False + ) + Fmap = self.fmap(x, level=i) + flow2 = block[i](Fmap, scale=1.0 / scale) + flow = (flow + flow2) / 2 + + warped_img0 = warp(img0, flow[:, :2]) + warped_img1 = warp(img1, flow[:, 2:4]) + return flow, warped_img0, warped_img1 + + +class IFUNetModel(nn.Module): + def __init__(self, local_rank=-1): + super(IFUNetModel, self).__init__() + self.flownet = IFUNet() + self.fusionnet = RRDBNet() + self.refinenet = ResynNet() + + def forward(self, img0, img1, timestep=0.5, scale=1.0, ensemble=False): + n, c, h, w = img0.shape + ph = ((h - 1) // 64 + 1) * 64 + pw = ((w - 1) // 64 + 1) * 64 + padding = (0, pw - w, 0, ph - h) + img0 = F.pad(img0, padding) + img1 = F.pad(img1, padding) + + imgs = torch.cat((img0, img1), 1) + flow, warped_img0, warped_img1 = self.flownet(imgs, scale, timestep, ensemble) + mask = self.fusionnet(img0, img1, warped_img0, warped_img1, flow) + merged = warped_img0 * mask + warped_img1 * (1 - mask) + merged, _ = self.refinenet(imgs, deg=merged, scale=[4, 2, 1]) + return merged[:, :, :h, :w] diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifunet/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifunet/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bec98675f6cc459a026a0837c90915016c058d79 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifunet/__init__.py @@ -0,0 +1,59 @@ +import torch +from torch.utils.data import DataLoader +import pathlib +from vfi_utils import load_file_from_github_release, preprocess_frames, postprocess_frames, generic_frame_loop, InterpolationStateList +import typing +from comfy.model_management import get_torch_device + +MODEL_TYPE = pathlib.Path(__file__).parent.name +CKPT_NAMES = ["IFUNet.pth"] + +class IFUnet_VFI: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "ckpt_name": (CKPT_NAMES, ), + "frames": ("IMAGE", ), + "clear_cache_after_n_frames": ("INT", 
{"default": 10, "min": 1, "max": 1000}), + "multiplier": ("INT", {"default": 2, "min": 2, "max": 1000}), + "scale_factor": ("FLOAT", {"default": 1.0, "min": 0.1, "max": 100, "step": 0.1}), + "ensemble": ("BOOLEAN", {"default":True}) + }, + "optional": { + "optional_interpolation_states": ("INTERPOLATION_STATES", ) + } + } + + RETURN_TYPES = ("IMAGE", ) + FUNCTION = "vfi" + CATEGORY = "ComfyUI-Frame-Interpolation/VFI" + + def vfi( + self, + ckpt_name: typing.AnyStr, + frames: torch.Tensor, + clear_cache_after_n_frames: typing.SupportsInt = 1, + multiplier: typing.SupportsInt = 2, + scale_factor: typing.SupportsFloat = 1.0, + ensemble: bool = True, + optional_interpolation_states: InterpolationStateList = None, + **kwargs + ): + from .IFUNet_arch import IFUNetModel + model_path = load_file_from_github_release(MODEL_TYPE, ckpt_name) + interpolation_model = IFUNetModel() + interpolation_model.load_state_dict(torch.load(model_path)) + interpolation_model.eval().to(get_torch_device()) + frames = preprocess_frames(frames) + + def return_middle_frame(frame_0, frame_1, timestep, model, scale_factor, ensemble): + return model(frame_0, frame_1, timestep=timestep, scale=scale_factor, ensemble=ensemble) + + args = [interpolation_model, scale_factor, ensemble] + out = postprocess_frames( + generic_frame_loop(type(self).__name__, frames, clear_cache_after_n_frames, multiplier, return_middle_frame, *args, + interpolation_states=optional_interpolation_states, dtype=torch.float32) + ) + return (out,) + diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifunet/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifunet/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9799f98755d697df2987a639a0d304a9e81c3558 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ifunet/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/m2m/M2M_arch.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/m2m/M2M_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..536f915efa4678bd55884852ad6e3ea386fd3ffe --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/m2m/M2M_arch.py @@ -0,0 +1,1037 @@ +""" +https://github.com/feinanshan/M2M_VFI/blob/main/Test/model/py +https://raw.githubusercontent.com/feinanshan/M2M_VFI/main/Test/model/py +https://github.com/feinanshan/M2M_VFI/blob/main/Test/model/py +https://github.com/feinanshan/M2M_VFI/blob/main/Test/model/py +https://github.com/feinanshan/M2M_VFI/blob/main/Test/model/m2m.py +""" + +import collections +import math +import os +import re +import torch +import typing +from vfi_models.ops import softsplat_func +from vfi_models.ops import costvol_func + +########################################################## + + +objBackwarpcache = {} + + +def backwarp(tenIn: torch.Tensor, tenFlow: torch.Tensor): + if ( + "grid" + + str(tenFlow.dtype) + + str(tenFlow.device) + + str(tenFlow.shape[2]) + + str(tenFlow.shape[3]) + not in objBackwarpcache + ): + tenHor = ( + torch.linspace( + start=-1.0, + end=1.0, + steps=tenFlow.shape[3], + dtype=tenFlow.dtype, + device=tenFlow.device, + ) + .view(1, 1, 1, -1) + .repeat(1, 1, tenFlow.shape[2], 1) + ) + tenVer = ( + torch.linspace( + start=-1.0, + end=1.0, + steps=tenFlow.shape[2], + dtype=tenFlow.dtype, + device=tenFlow.device, + ) + .view(1, 1, -1, 
1) + .repeat(1, 1, 1, tenFlow.shape[3]) + ) + + objBackwarpcache[ + "grid" + + str(tenFlow.dtype) + + str(tenFlow.device) + + str(tenFlow.shape[2]) + + str(tenFlow.shape[3]) + ] = torch.cat([tenHor, tenVer], 1) + # end + + if tenFlow.shape[3] == tenFlow.shape[2]: + tenFlow = tenFlow * (2.0 / ((tenFlow.shape[3] and tenFlow.shape[2]) - 1.0)) + + elif tenFlow.shape[3] != tenFlow.shape[2]: + tenFlow = tenFlow * torch.tensor( + data=[2.0 / (tenFlow.shape[3] - 1.0), 2.0 / (tenFlow.shape[2] - 1.0)], + dtype=tenFlow.dtype, + device=tenFlow.device, + ).view(1, 2, 1, 1) + + # end + + return torch.nn.functional.grid_sample( + input=tenIn, + grid=( + objBackwarpcache[ + "grid" + + str(tenFlow.dtype) + + str(tenFlow.device) + + str(tenFlow.shape[2]) + + str(tenFlow.shape[3]) + ] + + tenFlow + ).permute(0, 2, 3, 1), + mode="bilinear", + padding_mode="zeros", + align_corners=True, + ) + + +# end + +########################################################## + + +class Basic(torch.nn.Module): + def __init__( + self, + strType: str, + intChans: typing.List[int], + objScratch: typing.Optional[typing.Dict] = None, + ): + super().__init__() + + self.strType = strType + self.netEvenize = None + self.netMain = None + self.netShortcut = None + + intIn = intChans[0] + intOut = intChans[-1] + netMain = [] + intChans = intChans.copy() + fltStride = 1.0 + + for intPart, strPart in enumerate(self.strType.split("+")[0].split("-")): + if strPart.startswith("evenize") == True and intPart == 0: + + class Evenize(torch.nn.Module): + def __init__(self, strPad): + super().__init__() + + self.strPad = strPad + + # end + + def forward(self, tenIn: torch.Tensor) -> torch.Tensor: + intPad = [0, 0, 0, 0] + + if tenIn.shape[3] % 2 != 0: + intPad[1] = 1 + if tenIn.shape[2] % 2 != 0: + intPad[3] = 1 + + if min(intPad) != 0 or max(intPad) != 0: + tenIn = torch.nn.functional.pad( + input=tenIn, + pad=intPad, + mode=self.strPad + if self.strPad != "zeros" + else "constant", + value=0.0, + ) + # end + + return tenIn + + # end + + # end + + strPad = "zeros" + + if "(" in strPart: + if "replpad" in strPart.split("(")[1].split(")")[0].split(","): + strPad = "replicate" + if "reflpad" in strPart.split("(")[1].split(")")[0].split(","): + strPad = "reflect" + # end + + self.netEvenize = Evenize(strPad) + + elif strPart.startswith("conv") == True: + intKsize = 3 + intPad = 1 + strPad = "zeros" + + if "(" in strPart: + intKsize = int(strPart.split("(")[1].split(")")[0].split(",")[0]) + intPad = int(math.floor(0.5 * (intKsize - 1))) + + if "replpad" in strPart.split("(")[1].split(")")[0].split(","): + strPad = "replicate" + if "reflpad" in strPart.split("(")[1].split(")")[0].split(","): + strPad = "reflect" + # end + + if "nopad" in self.strType.split("+"): + intPad = 0 + # end + + netMain += [ + torch.nn.Conv2d( + in_channels=intChans[0], + out_channels=intChans[1], + kernel_size=intKsize, + stride=1, + padding=intPad, + padding_mode=strPad, + bias="nobias" not in self.strType.split("+"), + ) + ] + intChans = intChans[1:] + fltStride *= 1.0 + + elif strPart.startswith("sconv") == True: + intKsize = 3 + intPad = 1 + strPad = "zeros" + + if "(" in strPart: + intKsize = int(strPart.split("(")[1].split(")")[0].split(",")[0]) + intPad = int(math.floor(0.5 * (intKsize - 1))) + + if "replpad" in strPart.split("(")[1].split(")")[0].split(","): + strPad = "replicate" + if "reflpad" in strPart.split("(")[1].split(")")[0].split(","): + strPad = "reflect" + # end + + if "nopad" in self.strType.split("+"): + intPad = 0 + # end + + netMain += [ + 
torch.nn.Conv2d( + in_channels=intChans[0], + out_channels=intChans[1], + kernel_size=intKsize, + stride=2, + padding=intPad, + padding_mode=strPad, + bias="nobias" not in self.strType.split("+"), + ) + ] + intChans = intChans[1:] + fltStride *= 2.0 + + elif strPart.startswith("up") == True: + + class Up(torch.nn.Module): + def __init__(self, strType): + super().__init__() + + self.strType = strType + + # end + + def forward(self, tenIn: torch.Tensor) -> torch.Tensor: + if self.strType == "nearest": + return torch.nn.functional.interpolate( + input=tenIn, + scale_factor=2.0, + mode="nearest-exact", + align_corners=False, + ) + + elif self.strType == "bilinear": + return torch.nn.functional.interpolate( + input=tenIn, + scale_factor=2.0, + mode="bilinear", + align_corners=False, + ) + + elif self.strType == "pyramid": + return pyramid(tenIn, None, "up") + + elif self.strType == "shuffle": + return torch.nn.functional.pixel_shuffle( + tenIn, upscale_factor=2 + ) # https://github.com/pytorch/pytorch/issues/62854 + + # end + + assert False # to make torchscript happy + + # end + + # end + + strType = "bilinear" + + if "(" in strPart: + if "nearest" in strPart.split("(")[1].split(")")[0].split(","): + strType = "nearest" + if "pyramid" in strPart.split("(")[1].split(")")[0].split(","): + strType = "pyramid" + if "shuffle" in strPart.split("(")[1].split(")")[0].split(","): + strType = "shuffle" + # end + + netMain += [Up(strType)] + fltStride *= 0.5 + + elif strPart.startswith("prelu") == True: + netMain += [ + torch.nn.PReLU( + num_parameters=1, + init=float(strPart.split("(")[1].split(")")[0].split(",")[0]), + ) + ] + fltStride *= 1.0 + + elif True: + assert False + + # end + # end + + self.netMain = torch.nn.Sequential(*netMain) + + for strPart in self.strType.split("+")[1:]: + if strPart.startswith("skip") == True: + if intIn == intOut and fltStride == 1.0: + self.netShortcut = torch.nn.Identity() + + elif intIn != intOut and fltStride == 1.0: + self.netShortcut = torch.nn.Conv2d( + in_channels=intIn, + out_channels=intOut, + kernel_size=1, + stride=1, + padding=0, + bias="nobias" not in self.strType.split("+"), + ) + + elif intIn == intOut and fltStride != 1.0: + + class Down(torch.nn.Module): + def __init__(self, fltScale): + super().__init__() + + self.fltScale = fltScale + + # end + + def forward(self, tenIn: torch.Tensor) -> torch.Tensor: + return torch.nn.functional.interpolate( + input=tenIn, + scale_factor=self.fltScale, + mode="bilinear", + align_corners=False, + ) + + # end + + # end + + self.netShortcut = Down(1.0 / fltStride) + + elif intIn != intOut and fltStride != 1.0: + + class Down(torch.nn.Module): + def __init__(self, fltScale): + super().__init__() + + self.fltScale = fltScale + + # end + + def forward(self, tenIn: torch.Tensor) -> torch.Tensor: + return torch.nn.functional.interpolate( + input=tenIn, + scale_factor=self.fltScale, + mode="bilinear", + align_corners=False, + ) + + # end + + # end + + self.netShortcut = torch.nn.Sequential( + Down(1.0 / fltStride), + torch.nn.Conv2d( + in_channels=intIn, + out_channels=intOut, + kernel_size=1, + stride=1, + padding=0, + bias="nobias" not in self.strType.split("+"), + ), + ) + + # end + + elif strPart.startswith("...") == True: + pass + + # end + # end + + assert len(intChans) == 1 + + # end + + def forward(self, tenIn: torch.Tensor) -> torch.Tensor: + if self.netEvenize is not None: + tenIn = self.netEvenize(tenIn) + # end + + tenOut = self.netMain(tenIn) + + if self.netShortcut is not None: + tenOut = tenOut + 
self.netShortcut(tenIn) + # end + + return tenOut + + # end + + +# end + + +########################################################## + + +class Network(torch.nn.Module): + def __init__(self): + super().__init__() + + class Extractor(torch.nn.Module): + def __init__(self): + super().__init__() + + self.netOne = Basic( + "evenize(replpad)-sconv(2)-prelu(0.25)-conv(3,replpad)-prelu(0.25)-conv(3,replpad)-prelu(0.25)", + [3, 32, 32, 32], + None, + ) + self.netTwo = Basic( + "evenize(replpad)-sconv(2)-prelu(0.25)-conv(3,replpad)-prelu(0.25)-conv(3,replpad)-prelu(0.25)", + [32, 32, 32, 32], + None, + ) + self.netThr = Basic( + "evenize(replpad)-sconv(2)-prelu(0.25)-conv(3,replpad)-prelu(0.25)-conv(3,replpad)-prelu(0.25)", + [32, 32, 32, 32], + None, + ) + + # end + + def forward(self, tenIn): + tenOne = self.netOne(tenIn) + tenTwo = self.netTwo(tenOne) + tenThr = self.netThr(tenTwo) + tenFou = torch.nn.functional.avg_pool2d( + input=tenThr, kernel_size=2, stride=2, count_include_pad=False + ) + tenFiv = torch.nn.functional.avg_pool2d( + input=tenFou, kernel_size=2, stride=2, count_include_pad=False + ) + + return [tenOne, tenTwo, tenThr, tenFou, tenFiv] + + # end + + # end + + class Decoder(torch.nn.Module): + def __init__(self, intChannels): + super().__init__() + + self.netCostacti = torch.nn.PReLU(num_parameters=1, init=0.25) + self.netMain = Basic( + "conv(3,replpad)-prelu(0.25)-conv(3,replpad)-prelu(0.25)-conv(3,replpad)-prelu(0.25)-conv(3,replpad)-prelu(0.25)-conv(3,replpad)-prelu(0.25)-conv(3,replpad)", + [intChannels, 128, 128, 96, 64, 32, 2], + None, + ) + + # end + + def forward(self, tenOne, tenTwo, tenFlow): + if tenFlow is not None: + tenFlow = 2.0 * torch.nn.functional.interpolate( + input=tenFlow, + scale_factor=2.0, + mode="bilinear", + align_corners=False, + ) + # end + + tenMain = [] + + if tenFlow is None: + tenMain.append(tenOne) + tenMain.append(self.netCostacti(costvol_func.apply(tenOne, tenTwo))) + + elif tenFlow is not None: + tenMain.append(tenOne) + tenMain.append( + self.netCostacti( + costvol_func.apply( + tenOne, backwarp(tenTwo, tenFlow.detach()) + ) + ) + ) + tenMain.append(tenFlow) + + # end + + return (tenFlow if tenFlow is not None else 0.0) + self.netMain( + torch.cat(tenMain, 1) + ) + + # end + + # end + + self.netExtractor = Extractor() + + self.netFiv = Decoder(32 + 81 + 0) + self.netFou = Decoder(32 + 81 + 2) + self.netThr = Decoder(32 + 81 + 2) + self.netTwo = Decoder(32 + 81 + 2) + self.netOne = Decoder(32 + 81 + 2) + + # end + + def bidir(self, tenOne, tenTwo): + tenOne, tenTwo = list( + zip( + *[ + torch.split(tenFeat, [tenOne.shape[0], tenTwo.shape[0]], 0) + for tenFeat in self.netExtractor(torch.cat([tenOne, tenTwo], 0)) + ] + ) + ) + + tenFwd = None + tenFwd = self.netFiv(tenOne[-1], tenTwo[-1], tenFwd) + tenFwd = self.netFou(tenOne[-2], tenTwo[-2], tenFwd) + tenFwd = self.netThr(tenOne[-3], tenTwo[-3], tenFwd) + tenFwd = self.netTwo(tenOne[-4], tenTwo[-4], tenFwd) + tenFwd = self.netOne(tenOne[-5], tenTwo[-5], tenFwd) + + tenBwd = None + tenBwd = self.netFiv(tenTwo[-1], tenOne[-1], tenBwd) + tenBwd = self.netFou(tenTwo[-2], tenOne[-2], tenBwd) + tenBwd = self.netThr(tenTwo[-3], tenOne[-3], tenBwd) + tenBwd = self.netTwo(tenTwo[-4], tenOne[-4], tenBwd) + tenBwd = self.netOne(tenTwo[-5], tenOne[-5], tenBwd) + + return tenFwd, tenBwd + + # end + + +# end + +########################################################## + + +def forwarp_mframe_mask( + tenIn1, tenFlow1, t1, tenIn2, tenFlow2, t2, tenMetric1=None, tenMetric2=None +): + def one_fdir(tenIn, 
tenFlow, td, tenMetric): + tenIn = torch.cat( + [ + tenIn * td * (tenMetric).clip(-20.0, 20.0).exp(), + td * (tenMetric).clip(-20.0, 20.0).exp(), + ], + 1, + ) + + tenOut = softsplat_func.apply(tenIn, tenFlow) + + return tenOut[:, :-1, :, :], tenOut[:, -1:, :, :] + 0.0000001 + + flow_num = tenFlow1.shape[0] + tenOut = 0 + tenNormalize = 0 + for idx in range(flow_num): + tenOutF, tenNormalizeF = one_fdir( + tenIn1[idx], tenFlow1[idx], t1[idx], tenMetric1[idx] + ) + tenOutB, tenNormalizeB = one_fdir( + tenIn2[idx], tenFlow2[idx], t2[idx], tenMetric2[idx] + ) + + tenOut += tenOutF + tenOutB + tenNormalize += tenNormalizeF + tenNormalizeB + + return tenOut / tenNormalize, tenNormalize < 0.00001 + + +################################################################### + +c = 16 + + +def conv(in_planes, out_planes, kernel_size=3, stride=1, padding=1, dilation=1): + return torch.nn.Sequential( + torch.nn.Conv2d( + in_planes, + out_planes, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + bias=True, + ), + torch.nn.PReLU(out_planes), + ) + + +def deconv(in_planes, out_planes, kernel_size=4, stride=2, padding=1): + # NOTE (editor): the kernel_size/stride/padding arguments are ignored; 4/2/1 are hardcoded below + return torch.nn.Sequential( + torch.nn.ConvTranspose2d( + in_channels=in_planes, + out_channels=out_planes, + kernel_size=4, + stride=2, + padding=1, + bias=True, + ), + torch.nn.PReLU(out_planes), + ) + + +class Conv2(torch.nn.Module): + def __init__(self, in_planes, out_planes, stride=2): + super(Conv2, self).__init__() + self.conv1 = conv(in_planes, out_planes, 3, stride, 1) + self.conv2 = conv(out_planes, out_planes, 3, 1, 1) + + def forward(self, x): + x = self.conv1(x) + x = self.conv2(x) + return x + + +class Conv2n(torch.nn.Module): + def __init__(self, in_planes, out_planes, stride=2): + super(Conv2n, self).__init__() + self.conv1 = conv(in_planes, in_planes, 3, stride, 1) + self.conv2 = conv(in_planes, in_planes, 3, 1, 1) + self.conv3 = conv(in_planes, in_planes, 1, 1, 0) + self.conv4 = conv(in_planes, out_planes, 1, 1, 0) + + def forward(self, x): + x = self.conv1(x) + x = self.conv2(x) + x = self.conv3(x) + x = self.conv4(x) + return x + + +##################################################### + + +class ImgPyramid(torch.nn.Module): + def __init__(self): + super(ImgPyramid, self).__init__() + self.conv1 = Conv2(3, c) + self.conv2 = Conv2(c, 2 * c) + self.conv3 = Conv2(2 * c, 4 * c) + self.conv4 = Conv2(4 * c, 8 * c) + + def forward(self, x): + x1 = self.conv1(x) + x2 = self.conv2(x1) + x3 = self.conv3(x2) + x4 = self.conv4(x3) + return [x1, x2, x3, x4] + + +class EncDec(torch.nn.Module): + def __init__(self, branch): + super(EncDec, self).__init__() + self.branch = branch + + self.down0 = Conv2(8, 2 * c) + self.down1 = Conv2(6 * c, 4 * c) + self.down2 = Conv2(12 * c, 8 * c) + self.down3 = Conv2(24 * c, 16 * c) + + self.up0 = deconv(48 * c, 8 * c) + self.up1 = deconv(16 * c, 4 * c) + self.up2 = deconv(8 * c, 2 * c) + self.up3 = deconv(4 * c, c) + self.conv = torch.nn.Conv2d(c, 2 * self.branch, 3, 1, 1) + + self.conv_m = torch.nn.Conv2d(c, 1, 3, 1, 1) + + # For Channel dimension + self.conv_C = torch.nn.Sequential( + torch.nn.AdaptiveAvgPool2d(1), + torch.nn.Conv2d( + 16 * c, + 16 * 16 * c, + kernel_size=(1, 1), + stride=(1, 1), + padding=(0, 0), + bias=True, + ), + torch.nn.Sigmoid(), + ) + + # For Height dimension + self.conv_H = torch.nn.Sequential( + torch.nn.AdaptiveAvgPool2d((None, 1)), + torch.nn.Conv2d( + 16 * c, 16, kernel_size=(1, 1), stride=(1, 1), padding=(0, 0), bias=True + ), + torch.nn.Sigmoid(), + ) + + # For Width 
dimension + self.conv_W = torch.nn.Sequential( + torch.nn.AdaptiveAvgPool2d((1, None)), + torch.nn.Conv2d( + 16 * c, 16, kernel_size=(1, 1), stride=(1, 1), padding=(0, 0), bias=True + ), + torch.nn.Sigmoid(), + ) + + self.sigmoid = torch.nn.Sigmoid() + + def forward(self, flow0, flow1, im0, im1, c0, c1): + N_, C_, H_, W_ = im0.shape + + wim1 = backwarp(im1, flow0) + wim0 = backwarp(im0, flow1) + s0_0 = self.down0(torch.cat((flow0, im0, wim1), 1)) + s1_0 = self.down0(torch.cat((flow1, im1, wim0), 1)) + + ######################################################################################### + flow0 = ( + torch.nn.functional.interpolate( + flow0, scale_factor=0.5, mode="bilinear", align_corners=False + ) + * 0.5 + ) + flow1 = ( + torch.nn.functional.interpolate( + flow1, scale_factor=0.5, mode="bilinear", align_corners=False + ) + * 0.5 + ) + + wf0 = backwarp(torch.cat((s0_0, c0[0]), 1), flow1) + wf1 = backwarp(torch.cat((s1_0, c1[0]), 1), flow0) + + s0_1 = self.down1(torch.cat((s0_0, c0[0], wf1), 1)) + s1_1 = self.down1(torch.cat((s1_0, c1[0], wf0), 1)) + + ######################################################################################### + flow0 = ( + torch.nn.functional.interpolate( + flow0, scale_factor=0.5, mode="bilinear", align_corners=False + ) + * 0.5 + ) + flow1 = ( + torch.nn.functional.interpolate( + flow1, scale_factor=0.5, mode="bilinear", align_corners=False + ) + * 0.5 + ) + + wf0 = backwarp(torch.cat((s0_1, c0[1]), 1), flow1) + wf1 = backwarp(torch.cat((s1_1, c1[1]), 1), flow0) + + s0_2 = self.down2(torch.cat((s0_1, c0[1], wf1), 1)) + s1_2 = self.down2(torch.cat((s1_1, c1[1], wf0), 1)) + + ######################################################################################### + flow0 = ( + torch.nn.functional.interpolate( + flow0, scale_factor=0.5, mode="bilinear", align_corners=False + ) + * 0.5 + ) + flow1 = ( + torch.nn.functional.interpolate( + flow1, scale_factor=0.5, mode="bilinear", align_corners=False + ) + * 0.5 + ) + + wf0 = backwarp(torch.cat((s0_2, c0[2]), 1), flow1) + wf1 = backwarp(torch.cat((s1_2, c1[2]), 1), flow0) + + s0_3 = self.down3(torch.cat((s0_2, c0[2], wf1), 1)) + s1_3 = self.down3(torch.cat((s1_2, c1[2], wf0), 1)) + + ######################################################################################### + + s0_3_c = self.conv_C(s0_3) + s0_3_c = s0_3_c.view(N_, 16, -1, 1, 1) + + s0_3_h = self.conv_H(s0_3) + s0_3_h = s0_3_h.view(N_, 16, 1, -1, 1) + + s0_3_w = self.conv_W(s0_3) + s0_3_w = s0_3_w.view(N_, 16, 1, 1, -1) + + cube0 = (s0_3_c * s0_3_h * s0_3_w).mean(1) + + s0_3 = s0_3 * cube0 + + s1_3_c = self.conv_C(s1_3) + s1_3_c = s1_3_c.view(N_, 16, -1, 1, 1) + + s1_3_h = self.conv_H(s1_3) + s1_3_h = s1_3_h.view(N_, 16, 1, -1, 1) + + s1_3_w = self.conv_W(s1_3) + s1_3_w = s1_3_w.view(N_, 16, 1, 1, -1) + + cube1 = (s1_3_c * s1_3_h * s1_3_w).mean(1) + + s1_3 = s1_3 * cube1 + + ######################################################################################### + flow0 = ( + torch.nn.functional.interpolate( + flow0, scale_factor=0.5, mode="bilinear", align_corners=False + ) + * 0.5 + ) + flow1 = ( + torch.nn.functional.interpolate( + flow1, scale_factor=0.5, mode="bilinear", align_corners=False + ) + * 0.5 + ) + + wf0 = backwarp(torch.cat((s0_3, c0[3]), 1), flow1) + wf1 = backwarp(torch.cat((s1_3, c1[3]), 1), flow0) + + x0 = self.up0(torch.cat((s0_3, c0[3], wf1), 1)) + x1 = self.up0(torch.cat((s1_3, c1[3], wf0), 1)) + + x0 = self.up1(torch.cat((s0_2, x0), 1)) + x1 = self.up1(torch.cat((s1_2, x1), 1)) + + x0 = self.up2(torch.cat((s0_1, 
x0), 1)) + x1 = self.up2(torch.cat((s1_1, x1), 1)) + + x0 = self.up3(torch.cat((s0_0, x0), 1)) + x1 = self.up3(torch.cat((s1_0, x1), 1)) + + m0 = self.sigmoid(self.conv_m(x0)) * 0.8 + 0.1 + m1 = self.sigmoid(self.conv_m(x1)) * 0.8 + 0.1 + + x0 = self.conv(x0) + x1 = self.conv(x1) + + return x0, x1, m0.repeat(1, self.branch, 1, 1), m1.repeat(1, self.branch, 1, 1) + + +class M2M_PWC(torch.nn.Module): + def __init__(self, ratio=4): + super(M2M_PWC, self).__init__() + self.branch = 4 + self.ratio = ratio + + self.netFlow = Network() + + self.paramAlpha = torch.nn.Parameter(10.0 * torch.ones(1, 1, 1, 1)) + + class MotionRefineNet(torch.nn.Module): + def __init__(self, branch): + super(MotionRefineNet, self).__init__() + self.branch = branch + self.img_pyramid = ImgPyramid() + self.motion_encdec = EncDec(branch) + + def forward(self, flow0, flow1, im0, im1, ratio): + flow0 = ratio * torch.nn.functional.interpolate( + input=flow0, + scale_factor=ratio, + mode="bilinear", + align_corners=False, + ) + flow1 = ratio * torch.nn.functional.interpolate( + input=flow1, + scale_factor=ratio, + mode="bilinear", + align_corners=False, + ) + + c0 = self.img_pyramid(im0) + c1 = self.img_pyramid(im1) + + flow_res = self.motion_encdec(flow0, flow1, im0, im1, c0, c1) + + flow0 = flow0.repeat(1, self.branch, 1, 1) + flow_res[0] + flow1 = flow1.repeat(1, self.branch, 1, 1) + flow_res[1] + + return flow0, flow1, flow_res[2], flow_res[3] + + self.MRN = MotionRefineNet(self.branch) + + def forward(self, im0, im1, fltTimes=[0.5], ratio=None): + if ratio is None: + ratio = self.ratio + + intWidth = im0.shape[3] and im1.shape[3] + intHeight = im0.shape[2] and im1.shape[2] + + intPadr = ((ratio * 16) - (intWidth % (ratio * 16))) % (ratio * 16) + intPadb = ((ratio * 16) - (intHeight % (ratio * 16))) % (ratio * 16) + + im0 = torch.nn.functional.pad( + input=im0, pad=[0, intPadr, 0, intPadb], mode="replicate" + ) + im1 = torch.nn.functional.pad( + input=im1, pad=[0, intPadr, 0, intPadb], mode="replicate" + ) + + N_, C_, H_, W_ = im0.shape + + outputs = [] + + with torch.set_grad_enabled(False): + tenStats = [im0, im1] + tenMean_ = sum([tenIn.mean([1, 2, 3], True) for tenIn in tenStats]) / len( + tenStats + ) + tenStd_ = ( + sum( + [ + tenIn.std([1, 2, 3], False, True).square() + + (tenMean_ - tenIn.mean([1, 2, 3], True)).square() + for tenIn in tenStats + ] + ) + / len(tenStats) + ).sqrt() + + im0_o = (im0 - tenMean_) / (tenStd_ + 0.0000001) + im1_o = (im1 - tenMean_) / (tenStd_ + 0.0000001) + + im0 = (im0 - tenMean_) / (tenStd_ + 0.0000001) + im1 = (im1 - tenMean_) / (tenStd_ + 0.0000001) + + im0_ = torch.nn.functional.interpolate( + input=im0, scale_factor=2.0 / ratio, mode="bilinear", align_corners=False + ) + im1_ = torch.nn.functional.interpolate( + input=im1, scale_factor=2.0 / ratio, mode="bilinear", align_corners=False + ) + + tenFwd, tenBwd = self.netFlow.bidir(im0_, im1_) + + tenFwd, tenBwd, WeiMF, WeiMB = self.MRN(tenFwd, tenBwd, im0, im1, ratio) + + for fltTime_ in fltTimes: + im0 = im0_o.repeat(1, self.branch, 1, 1) + im1 = im1_o.repeat(1, self.branch, 1, 1) + tenStd = tenStd_.repeat(1, self.branch, 1, 1) + tenMean = tenMean_.repeat(1, self.branch, 1, 1) + fltTime = fltTime_.repeat(1, self.branch, 1, 1) + + tenFwd = tenFwd.reshape(N_, self.branch, 2, H_, W_).view( + N_ * self.branch, 2, H_, W_ + ) + tenBwd = tenBwd.reshape(N_, self.branch, 2, H_, W_).view( + N_ * self.branch, 2, H_, W_ + ) + + WeiMF = WeiMF.reshape(N_, self.branch, 1, H_, W_).view( + N_ * self.branch, 1, H_, W_ + ) + WeiMB = WeiMB.reshape(N_, 
self.branch, 1, H_, W_).view( + N_ * self.branch, 1, H_, W_ + ) + + im0 = im0.reshape(N_, self.branch, 3, H_, W_).view( + N_ * self.branch, 3, H_, W_ + ) + im1 = im1.reshape(N_, self.branch, 3, H_, W_).view( + N_ * self.branch, 3, H_, W_ + ) + + tenStd = tenStd.reshape(N_, self.branch, 1, 1, 1).view( + N_ * self.branch, 1, 1, 1 + ) + tenMean = tenMean.reshape(N_, self.branch, 1, 1, 1).view( + N_ * self.branch, 1, 1, 1 + ) + fltTime = fltTime.reshape(N_, self.branch, 1, 1, 1).view( + N_ * self.branch, 1, 1, 1 + ) + + tenPhotoone = ( + ( + 1.0 + - ( + WeiMF + * (im0 - backwarp(im1, tenFwd).detach()).abs().mean([1], True) + ) + ) + .clip(0.001, None) + .square() + ) + tenPhototwo = ( + ( + 1.0 + - ( + WeiMB + * (im1 - backwarp(im0, tenBwd).detach()).abs().mean([1], True) + ) + ) + .clip(0.001, None) + .square() + ) + + t0 = fltTime + flow0 = tenFwd * t0 + metric0 = self.paramAlpha * tenPhotoone + + t1 = 1.0 - fltTime + flow1 = tenBwd * t1 + metric1 = self.paramAlpha * tenPhototwo + + flow0 = flow0.reshape(N_, self.branch, 2, H_, W_).permute(1, 0, 2, 3, 4) + flow1 = flow1.reshape(N_, self.branch, 2, H_, W_).permute(1, 0, 2, 3, 4) + + metric0 = metric0.reshape(N_, self.branch, 1, H_, W_).permute(1, 0, 2, 3, 4) + metric1 = metric1.reshape(N_, self.branch, 1, H_, W_).permute(1, 0, 2, 3, 4) + + im0 = im0.reshape(N_, self.branch, 3, H_, W_).permute(1, 0, 2, 3, 4) + im1 = im1.reshape(N_, self.branch, 3, H_, W_).permute(1, 0, 2, 3, 4) + + t0 = t0.reshape(N_, self.branch, 1, 1, 1).permute(1, 0, 2, 3, 4) + t1 = t1.reshape(N_, self.branch, 1, 1, 1).permute(1, 0, 2, 3, 4) + + tenOutput, mask = forwarp_mframe_mask( + im0, flow0, t1, im1, flow1, t0, metric0, metric1 + ) + + tenOutput = tenOutput + mask * (t1.mean(0) * im0_o + t0.mean(0) * im1_o) + + outputs.append((tenOutput * (tenStd_ + 0.0000001)) + tenMean_) + + return [output[:, :, :intHeight, :intWidth] for output in outputs] diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/m2m/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/m2m/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..88e489e7825b005010e85009843b122aee8edbd6 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/m2m/__init__.py @@ -0,0 +1,60 @@ +import pathlib +import torch +from torch.utils.data import DataLoader +import pathlib +from vfi_utils import load_file_from_github_release, preprocess_frames, postprocess_frames +import typing +from comfy.model_management import get_torch_device +from vfi_utils import InterpolationStateList, generic_frame_loop + +MODEL_TYPE = pathlib.Path(__file__).parent.name +CKPT_NAMES = ["M2M.pth"] + + +class M2M_VFI: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "ckpt_name": (CKPT_NAMES, ), + "frames": ("IMAGE", ), + "clear_cache_after_n_frames": ("INT", {"default": 10, "min": 1, "max": 1000}), + "multiplier": ("INT", {"default": 2, "min": 2, "max": 1000}), + }, + "optional": { + "optional_interpolation_states": ("INTERPOLATION_STATES", ) + } + } + + RETURN_TYPES = ("IMAGE", ) + FUNCTION = "vfi" + CATEGORY = "ComfyUI-Frame-Interpolation/VFI" + + def vfi( + self, + ckpt_name: typing.AnyStr, + frames: torch.Tensor, + clear_cache_after_n_frames: typing.SupportsInt = 1, + multiplier: typing.SupportsInt = 2, + optional_interpolation_states: InterpolationStateList = None, + **kwargs + ): + from .M2M_arch import M2M_PWC + model_path = load_file_from_github_release(MODEL_TYPE, ckpt_name) + interpolation_model = M2M_PWC() + 
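+        # Editor's note (added comment): load_file_from_github_release resolves
+        # the checkpoint to a local path (downloading it from the project's
+        # GitHub release if needed); the weights are then loaded into a fresh
+        # M2M_PWC and the model is moved, in eval mode, onto ComfyUI's active
+        # torch device before any frames are processed.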
interpolation_model.load_state_dict(torch.load(model_path)) + interpolation_model.eval().to(get_torch_device()) + frames = preprocess_frames(frames) + + def return_middle_frame(frame_0, frame_1, int_timestep, model): + tenSteps = [ + torch.FloatTensor([int_timestep] * len(frame_0)).view(len(frame_0), 1, 1, 1).to(get_torch_device()) + ] + return model(frame_0, frame_1, tenSteps)[0] + + args = [interpolation_model] + out = postprocess_frames( + generic_frame_loop(type(self).__name__, frames, clear_cache_after_n_frames, multiplier, return_middle_frame, *args, + interpolation_states=optional_interpolation_states, dtype=torch.float32) + ) + return (out,) diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/m2m/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/m2m/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0dc0efc100f3c922ada0519ef80e9efc5595cd2c Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/m2m/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..25b13b405172ab46f9b45769520c58f5c098286d --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/__init__.py @@ -0,0 +1,22 @@ +import torch.multiprocessing as mp + +if mp.current_process().name == "MainProcess": + import yaml + import os + from pathlib import Path + + config_path = Path(Path(__file__).parent.parent.parent.resolve(), "config.yaml") + + if os.path.exists(config_path): + config = yaml.load(open(config_path, "r"), Loader=yaml.FullLoader) + ops_backend = config["ops_backend"] + else: + ops_backend = "taichi" + + assert ops_backend in ["taichi", "cupy"] + + if ops_backend == "taichi": + from .taichi_ops import softsplat, ModuleSoftsplat, FunctionSoftsplat, softsplat_func, costvol_func, sepconv_func, init, batch_edt, FunctionAdaCoF, ModuleCorrelation, FunctionCorrelation, _FunctionCorrelation + else: + from .cupy_ops import softsplat, ModuleSoftsplat, FunctionSoftsplat, softsplat_func, costvol_func, sepconv_func, init, batch_edt, FunctionAdaCoF, ModuleCorrelation, FunctionCorrelation, _FunctionCorrelation + diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..10090797e7a12b14bafe26d0c3235d6e80f82158 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/__init__.py @@ -0,0 +1,11 @@ +from .costvol import * +from .sepconv import * +from .softsplat import * +from .adacof import * +from .correlation import * +from comfy.model_management import is_nvidia, get_torch_device_name, get_torch_device + +def init(): + if not is_nvidia(): + raise NotImplementedError(f"CuPy ops backend only support CUDA device but found {get_torch_device_name(get_torch_device())} instead. 
Try Taichi ops backend by editing config.yaml") + return \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/adacof.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/adacof.py new file mode 100644 index 0000000000000000000000000000000000000000..378469045ab9dcb31df62080ba80b2cf5aa9b616 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/adacof.py @@ -0,0 +1,491 @@ +import torch +from .utils import cuda_kernel, cuda_launch, cuda_int32 +import math + +kernel_AdaCoF_updateOutput = """ + extern "C" __global__ void kernel_AdaCoF_updateOutput( + const int n, + const float* input, + const float* weight, + const float* offset_i, + const float* offset_j, + float* output + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + float dblOutput = 0.0; + + const int intSample = ( intIndex / SIZE_3(output) / SIZE_2(output) / SIZE_1(output) ) % SIZE_0(output); + const int c = ( intIndex / SIZE_3(output) / SIZE_2(output) ) % SIZE_1(output); + const int i = ( intIndex / SIZE_3(output) ) % SIZE_2(output); + const int j = ( intIndex ) % SIZE_3(output); + + for (int k = 0; k < F_SIZE; k += 1) { + for (int l = 0; l < F_SIZE; l += 1) { + float w = VALUE_4(weight, intSample, k*F_SIZE+l, i, j); + float alpha = VALUE_4(offset_i, intSample, k*F_SIZE+l, i, j); + float beta = VALUE_4(offset_j, intSample, k*F_SIZE+l, i, j); + int A = (int) alpha; + int B = (int) beta; + + int i_k_A = i+k*DILATION+A; + if(i_k_A < 0) + i_k_A = 0; + if(i_k_A > SIZE_2(input) - 1) + i_k_A = SIZE_2(input) - 1; + + int j_l_B = j+l*DILATION+B; + if(j_l_B < 0) + j_l_B = 0; + if(j_l_B > SIZE_3(input) - 1) + j_l_B = SIZE_3(input) - 1; + + int i_k_A_1 = i+k*DILATION+A+1; + if(i_k_A_1 < 0) + i_k_A_1 = 0; + if(i_k_A_1 > SIZE_2(input) - 1) + i_k_A_1 = SIZE_2(input) - 1; + + int j_l_B_1 = j+l*DILATION+B+1; + if(j_l_B_1 < 0) + j_l_B_1 = 0; + if(j_l_B_1 > SIZE_3(input) - 1) + j_l_B_1 = SIZE_3(input) - 1; + + dblOutput += w * ( + VALUE_4(input, intSample, c, i_k_A, j_l_B)*(1-(alpha-(float)A))*(1-(beta-(float)B)) + + VALUE_4(input, intSample, c, i_k_A_1, j_l_B)*(alpha-(float)A)*(1-(beta-(float)B)) + + VALUE_4(input, intSample, c, i_k_A, j_l_B_1)*(1-(alpha-(float)A))*(beta-(float)B) + + VALUE_4(input, intSample, c, i_k_A_1, j_l_B_1)*(alpha-(float)A)*(beta-(float)B) + ); + } + } + + output[intIndex] = dblOutput; + } } +""" + +kernel_AdaCoF_updateGradWeight = """ + extern "C" __global__ void kernel_AdaCoF_updateGradWeight( + const int n, + const float* gradLoss, + const float* input, + const float* offset_i, + const float* offset_j, + float* gradWeight + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + float floatOutput = 0.0; + + const int intSample = ( intIndex / SIZE_3(gradWeight) / SIZE_2(gradWeight) / SIZE_1(gradWeight) ) % SIZE_0(gradWeight); + const int intDepth = ( intIndex / SIZE_3(gradWeight) / SIZE_2(gradWeight) ) % SIZE_1(gradWeight); + const int i = ( intIndex / SIZE_3(gradWeight) ) % SIZE_2(gradWeight); + const int j = ( intIndex ) % SIZE_3(gradWeight); + + int k = intDepth / F_SIZE; + int l = intDepth % F_SIZE; + + for (int c = 0; c < 3; c++) + { + float delta = VALUE_4(gradLoss, intSample, c, i, j); + float alpha = VALUE_4(offset_i, intSample, k*F_SIZE+l, i, j); + float beta = VALUE_4(offset_j, intSample, k*F_SIZE+l, i, j); + int A = (int) alpha; + int B = (int) beta; + 
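+            // Editor's note (added comment): A/B are the truncated integer
+            // parts of the learned offsets (alpha, beta). The four clamped
+            // taps computed below (i_k_A, j_l_B and their +1 neighbours)
+            // realize bilinear sampling of the input at
+            // (i + k*DILATION + alpha, j + l*DILATION + beta), mirroring the
+            // forward kernel so the weight gradient uses the same interpolation.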
+ int i_k_A = i+k*DILATION+A; + if(i_k_A < 0) + i_k_A = 0; + if(i_k_A > SIZE_2(input) - 1) + i_k_A = SIZE_2(input) - 1; + + int j_l_B = j+l*DILATION+B; + if(j_l_B < 0) + j_l_B = 0; + if(j_l_B > SIZE_3(input) - 1) + j_l_B = SIZE_3(input) - 1; + + int i_k_A_1 = i+k*DILATION+A+1; + if(i_k_A_1 < 0) + i_k_A_1 = 0; + if(i_k_A_1 > SIZE_2(input) - 1) + i_k_A_1 = SIZE_2(input) - 1; + + int j_l_B_1 = j+l*DILATION+B+1; + if(j_l_B_1 < 0) + j_l_B_1 = 0; + if(j_l_B_1 > SIZE_3(input) - 1) + j_l_B_1 = SIZE_3(input) - 1; + + floatOutput += delta * ( + VALUE_4(input, intSample, c, i_k_A, j_l_B)*(1-(alpha-(float)A))*(1-(beta-(float)B)) + + VALUE_4(input, intSample, c, i_k_A_1, j_l_B)*(alpha-(float)A)*(1-(beta-(float)B)) + + VALUE_4(input, intSample, c, i_k_A, j_l_B_1)*(1-(alpha-(float)A))*(beta-(float)B) + + VALUE_4(input, intSample, c, i_k_A_1, j_l_B_1)*(alpha-(float)A)*(beta-(float)B) + ); + } + + gradWeight[intIndex] = floatOutput; + } } +""" + +kernel_AdaCoF_updateGradAlpha = """ + extern "C" __global__ void kernel_AdaCoF_updateGradAlpha( + const int n, + const float* gradLoss, + const float* input, + const float* weight, + const float* offset_i, + const float* offset_j, + float* gradOffset_i + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + float floatOutput = 0.0; + + const int intSample = ( intIndex / SIZE_3(gradOffset_i) / SIZE_2(gradOffset_i) / SIZE_1(gradOffset_i) ) % SIZE_0(gradOffset_i); + const int intDepth = ( intIndex / SIZE_3(gradOffset_i) / SIZE_2(gradOffset_i) ) % SIZE_1(gradOffset_i); + const int i = ( intIndex / SIZE_3(gradOffset_i) ) % SIZE_2(gradOffset_i); + const int j = ( intIndex ) % SIZE_3(gradOffset_i); + + int k = intDepth / F_SIZE; + int l = intDepth % F_SIZE; + + for (int c = 0; c < 3; c++) + { + float delta = VALUE_4(gradLoss, intSample, c, i, j); + float w = VALUE_4(weight, intSample, k*F_SIZE+l, i, j); + float alpha = VALUE_4(offset_i, intSample, k*F_SIZE+l, i, j); + float beta = VALUE_4(offset_j, intSample, k*F_SIZE+l, i, j); + int A = (int) alpha; + int B = (int) beta; + + int i_k_A = i+k*DILATION+A; + if(i_k_A < 0) + i_k_A = 0; + if(i_k_A > SIZE_2(input) - 1) + i_k_A = SIZE_2(input) - 1; + + int j_l_B = j+l*DILATION+B; + if(j_l_B < 0) + j_l_B = 0; + if(j_l_B > SIZE_3(input) - 1) + j_l_B = SIZE_3(input) - 1; + + int i_k_A_1 = i+k*DILATION+A+1; + if(i_k_A_1 < 0) + i_k_A_1 = 0; + if(i_k_A_1 > SIZE_2(input) - 1) + i_k_A_1 = SIZE_2(input) - 1; + + int j_l_B_1 = j+l*DILATION+B+1; + if(j_l_B_1 < 0) + j_l_B_1 = 0; + if(j_l_B_1 > SIZE_3(input) - 1) + j_l_B_1 = SIZE_3(input) - 1; + + floatOutput += delta * w * ( + - VALUE_4(input, intSample, c, i_k_A, j_l_B)*(1-(beta-(float)B)) + + VALUE_4(input, intSample, c, i_k_A_1, j_l_B)*(1-(beta-(float)B)) - + VALUE_4(input, intSample, c, i_k_A, j_l_B_1)*(beta-(float)B) + + VALUE_4(input, intSample, c, i_k_A_1, j_l_B_1)*(beta-(float)B) + ); + } + + gradOffset_i[intIndex] = floatOutput; + } } +""" + +kernel_AdaCoF_updateGradBeta = """ + extern "C" __global__ void kernel_AdaCoF_updateGradBeta( + const int n, + const float* gradLoss, + const float* input, + const float* weight, + const float* offset_i, + const float* offset_j, + float* gradOffset_j + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + float floatOutput = 0.0; + + const int intSample = ( intIndex / SIZE_3(gradOffset_j) / SIZE_2(gradOffset_j) / SIZE_1(gradOffset_j) ) % SIZE_0(gradOffset_j); + const int intDepth = ( intIndex / SIZE_3(gradOffset_j) 
/ SIZE_2(gradOffset_j) ) % SIZE_1(gradOffset_j); + const int i = ( intIndex / SIZE_3(gradOffset_j) ) % SIZE_2(gradOffset_j); + const int j = ( intIndex ) % SIZE_3(gradOffset_j); + + int k = intDepth / F_SIZE; + int l = intDepth % F_SIZE; + + for (int c = 0; c < 3; c++) + { + float delta = VALUE_4(gradLoss, intSample, c, i, j); + float w = VALUE_4(weight, intSample, k*F_SIZE+l, i, j); + float alpha = VALUE_4(offset_i, intSample, k*F_SIZE+l, i, j); + float beta = VALUE_4(offset_j, intSample, k*F_SIZE+l, i, j); + int A = (int) alpha; + int B = (int) beta; + + int i_k_A = i+k*DILATION+A; + if(i_k_A < 0) + i_k_A = 0; + if(i_k_A > SIZE_2(input) - 1) + i_k_A = SIZE_2(input) - 1; + + int j_l_B = j+l*DILATION+B; + if(j_l_B < 0) + j_l_B = 0; + if(j_l_B > SIZE_3(input) - 1) + j_l_B = SIZE_3(input) - 1; + + int i_k_A_1 = i+k*DILATION+A+1; + if(i_k_A_1 < 0) + i_k_A_1 = 0; + if(i_k_A_1 > SIZE_2(input) - 1) + i_k_A_1 = SIZE_2(input) - 1; + + int j_l_B_1 = j+l*DILATION+B+1; + if(j_l_B_1 < 0) + j_l_B_1 = 0; + if(j_l_B_1 > SIZE_3(input) - 1) + j_l_B_1 = SIZE_3(input) - 1; + + floatOutput += delta * w * ( + - VALUE_4(input, intSample, c, i_k_A, j_l_B)*(1-(alpha-(float)A)) - + VALUE_4(input, intSample, c, i_k_A_1, j_l_B)*(alpha-(float)A) + + VALUE_4(input, intSample, c, i_k_A, j_l_B_1)*(1-(alpha-(float)A)) + + VALUE_4(input, intSample, c, i_k_A_1, j_l_B_1)*(alpha-(float)A) + ); + } + + gradOffset_j[intIndex] = floatOutput; + } } +""" + +class FunctionAdaCoF(torch.autograd.Function): + # end + @staticmethod + def forward(ctx, input, weight, offset_i, offset_j, dilation): + ctx.save_for_backward(input, weight, offset_i, offset_j) + ctx.dilation = dilation + + intSample = input.size(0) + intInputDepth = input.size(1) + intInputHeight = input.size(2) + intInputWidth = input.size(3) + intFilterSize = int(math.sqrt(weight.size(1))) + intOutputHeight = weight.size(2) + intOutputWidth = weight.size(3) + + assert ( + intInputHeight - ((intFilterSize - 1) * dilation + 1) == intOutputHeight - 1 + ) + assert ( + intInputWidth - ((intFilterSize - 1) * dilation + 1) == intOutputWidth - 1 + ) + + assert input.is_contiguous() == True + assert weight.is_contiguous() == True + assert offset_i.is_contiguous() == True + assert offset_j.is_contiguous() == True + + output = input.new_zeros( + intSample, intInputDepth, intOutputHeight, intOutputWidth + ) + + if input.is_cuda == True: + + class Stream: + ptr = torch.cuda.current_stream().cuda_stream + + # end + + n = output.nelement() + cuda_launch( + cuda_kernel( + "kernel_AdaCoF_updateOutput", + kernel_AdaCoF_updateOutput, + { + "input": input, + "weight": weight, + "offset_i": offset_i, + "offset_j": offset_j, + "output": output, + }, + F_SIZE=str(intFilterSize), + DILATION=str(dilation) + ), + )( + grid=tuple([int((n + 512 - 1) / 512), 1, 1]), + block=tuple([512, 1, 1]), + args=[ + n, + input.data_ptr(), + weight.data_ptr(), + offset_i.data_ptr(), + offset_j.data_ptr(), + output.data_ptr(), + ], + stream=Stream, + ) + + elif input.is_cuda == False: + raise NotImplementedError() + + # end + + return output + + # end + @staticmethod + def backward(ctx, gradOutput): + input, weight, offset_i, offset_j = ctx.saved_tensors + dilation = ctx.dilation + + intSample = input.size(0) + intInputDepth = input.size(1) + intInputHeight = input.size(2) + intInputWidth = input.size(3) + intFilterSize = int(math.sqrt(weight.size(1))) + intOutputHeight = weight.size(2) + intOutputWidth = weight.size(3) + + assert ( + intInputHeight - ((intFilterSize - 1) * dilation + 1) == intOutputHeight - 1 + ) 
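+        # Editor's note (added comment): together with the height check above,
+        # the width check below pins the AdaCoF shape contract used by the
+        # kernels: in_size = out_size + (intFilterSize - 1) * dilation on each
+        # axis, so every dilated tap stays inside the input.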
+ assert ( + intInputWidth - ((intFilterSize - 1) * dilation + 1) == intOutputWidth - 1 + ) + + assert gradOutput.is_contiguous() == True + + gradInput = ( + input.new_zeros(intSample, intInputDepth, intInputHeight, intInputWidth) + if ctx.needs_input_grad[0] == True + else None + ) + gradWeight = ( + input.new_zeros( + intSample, intFilterSize**2, intOutputHeight, intOutputWidth + ) + if ctx.needs_input_grad[1] == True + else None + ) + gradOffset_i = ( + input.new_zeros( + intSample, intFilterSize**2, intOutputHeight, intOutputWidth + ) + if ctx.needs_input_grad[2] == True + else None + ) + gradOffset_j = ( + input.new_zeros( + intSample, intFilterSize**2, intOutputHeight, intOutputWidth + ) + if ctx.needs_input_grad[2] == True + else None + ) + + if input.is_cuda == True: + + class Stream: + ptr = torch.cuda.current_stream().cuda_stream + + # end + + # weight grad + n_w = gradWeight.nelement() + cuda_launch( + cuda_kernel( + "kernel_AdaCoF_updateGradWeight", + kernel_AdaCoF_updateGradWeight, + { + "gradLoss": gradOutput, + "input": input, + "offset_i": offset_i, + "offset_j": offset_j, + "gradWeight": gradWeight, + }, + F_SIZE=str(intFilterSize), + DILATION=str(dilation) + ), + )( + grid=tuple([int((n_w + 512 - 1) / 512), 1, 1]), + block=tuple([512, 1, 1]), + args=[ + n_w, + gradOutput.data_ptr(), + input.data_ptr(), + offset_i.data_ptr(), + offset_j.data_ptr(), + gradWeight.data_ptr(), + ], + stream=Stream, + ) + + # alpha grad + n_i = gradOffset_i.nelement() + cuda_launch( + cuda_kernel( + "kernel_AdaCoF_updateGradAlpha", + kernel_AdaCoF_updateGradAlpha, + { + "gradLoss": gradOutput, + "input": input, + "weight": weight, + "offset_i": offset_i, + "offset_j": offset_j, + "gradOffset_i": gradOffset_i, + }, + F_SIZE=str(intFilterSize), + DILATION=str(dilation) + ), + )( + grid=tuple([int((n_i + 512 - 1) / 512), 1, 1]), + block=tuple([512, 1, 1]), + args=[ + n_i, + gradOutput.data_ptr(), + input.data_ptr(), + weight.data_ptr(), + offset_i.data_ptr(), + offset_j.data_ptr(), + gradOffset_i.data_ptr(), + ], + stream=Stream, + ) + + # beta grad + n_j = gradOffset_j.nelement() + cuda_launch( + cuda_kernel( + "kernel_AdaCoF_updateGradBeta", + kernel_AdaCoF_updateGradBeta, + { + "gradLoss": gradOutput, + "input": input, + "weight": weight, + "offset_i": offset_i, + "offset_j": offset_j, + "gradOffset_j": gradOffset_j, + }, + F_SIZE=str(intFilterSize), + DILATION=str(dilation) + ), + )( + grid=tuple([int((n_j + 512 - 1) / 512), 1, 1]), + block=tuple([512, 1, 1]), + args=[ + n_j, + gradOutput.data_ptr(), + input.data_ptr(), + weight.data_ptr(), + offset_i.data_ptr(), + offset_j.data_ptr(), + gradOffset_j.data_ptr(), + ], + stream=Stream, + ) + + elif input.is_cuda == False: + raise NotImplementedError() + + # end + + return gradInput, gradWeight, gradOffset_i, gradOffset_j, None + +__all__ = ["FunctionAdaCoF"] diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/batch_edt.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/batch_edt.py new file mode 100644 index 0000000000000000000000000000000000000000..3c3542c7dc9332417ed898de2f7da5e3f750b298 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/batch_edt.py @@ -0,0 +1,119 @@ +############### DISTANCE TRANSFORM ############### +# img tensor: (bs,h,w) or (bs,1,h,w) +# returns same shape +# expects white lines, black whitespace +# defaults to diameter if empty image +from .utils import cuda_kernel, cuda_launch, cuda_int32, cuda_float32 
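+# Editor's note (added comment): kernel_dt below performs one axis pass: for
+# every pixel it takes min over j of data[row, j] + (pj - j)^2 along its row.
+# batch_edt() runs this pass over the rows, transposes, runs it again, then
+# takes a square root. That is the classic separable formulation of an exact
+# Euclidean distance transform, brute force O(w) per pixel rather than the
+# amortized O(1) lower-envelope method.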
+import torch + +_batch_edt_kernel = ( + "kernel_dt", + """ + extern "C" __global__ void kernel_dt( + const int bs, + const int h, + const int w, + const float diam2, + float* data, + float* output + ) { + int idx = blockIdx.x * blockDim.x + threadIdx.x; + if (idx >= bs*h*w) { + return; + } + int pb = idx / (h*w); + int pi = (idx - h*w*pb) / w; + int pj = (idx - h*w*pb - w*pi); + + float cost; + float mincost = diam2; + for (int j = 0; j < w; j++) { + cost = data[h*w*pb + w*pi + j] + (pj-j)*(pj-j); + if (cost < mincost) { + mincost = cost; + } + } + output[idx] = mincost; + return; + } +""", +) +_batch_edt = None + + +def batch_edt(img, block=1024): + # must initialize cuda/cupy after forking + global _batch_edt + if _batch_edt is None: + _batch_edt = cuda_launch(*_batch_edt_kernel) + + # bookkeeppingg + if len(img.shape) == 4: + assert img.shape[1] == 1 + img = img.squeeze(1) + expand = True + else: + expand = False + bs, h, w = img.shape + diam2 = h**2 + w**2 + odtype = img.dtype + grid = (img.nelement() + block - 1) // block + + # cupy implementation + if img.is_cuda: + # first pass, y-axis + data = ((1 - img.type(torch.float32)) * diam2).contiguous() + intermed = torch.zeros_like(data) + _batch_edt( + grid=(grid, 1, 1), + block=(block, 1, 1), # < 1024 + args=[ + cuda_int32(bs), + cuda_int32(h), + cuda_int32(w), + cuda_float32(diam2), + data.data_ptr(), + intermed.data_ptr(), + ], + ) + + # second pass, x-axis + intermed = intermed.permute(0, 2, 1).contiguous() + out = torch.zeros_like(intermed) + _batch_edt( + grid=(grid, 1, 1), + block=(block, 1, 1), + args=[ + cuda_int32(bs), + cuda_int32(w), + cuda_int32(h), + cuda_float32(diam2), + intermed.data_ptr(), + out.data_ptr(), + ], + ) + ans = out.permute(0, 2, 1).sqrt() + ans = ans.type(odtype) if odtype != ans.dtype else ans + + # default to scipy cpu implementation + else: + raise NotImplementedError() + """ sums = img.sum(dim=(1, 2)) + ans = torch.tensor( + np.stack( + [ + scipy.ndimage.morphology.distance_transform_edt(i) + if s != 0 + else np.ones_like(i) # change scipy behavior for empty image + * np.sqrt(diam2) + for i, s in zip(1 - img, sums) + ] + ), + dtype=odtype, + ) """ + + if expand: + ans = ans.unsqueeze(1) + return ans + +__all__ = ["batch_edt"] \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/correlation.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/correlation.py new file mode 100644 index 0000000000000000000000000000000000000000..d1e69e2dbbba453ab367dc76a1c3c566d2f5540c --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/correlation.py @@ -0,0 +1,413 @@ +import torch +from .utils import cuda_kernel, cuda_launch, cuda_int32 + +kernel_Correlation_rearrange = """ + extern "C" __global__ void kernel_Correlation_rearrange( + const int n, + const float* input, + float* output + ) { + int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; + + if (intIndex >= n) { + return; + } + + int intSample = blockIdx.z; + int intChannel = blockIdx.y; + + float fltValue = input[(((intSample * SIZE_1(input)) + intChannel) * SIZE_2(input) * SIZE_3(input)) + intIndex]; + + __syncthreads(); + + int intPaddedY = (intIndex / SIZE_3(input)) + 4; + int intPaddedX = (intIndex % SIZE_3(input)) + 4; + int intRearrange = ((SIZE_3(input) + 8) * intPaddedY) + intPaddedX; + + output[(((intSample * SIZE_1(output) * SIZE_2(output)) + intRearrange) * SIZE_1(input)) + intChannel] = fltValue; + } +""" + 
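+# Added note: a minimal pure-PyTorch sketch of what the kernels in this
+# file compute, kept for reference only -- `_correlation_reference` is an
+# added name and is not used by the CUDA path below.
+def _correlation_reference(first, second):
+    """Illustration only: 81-channel cost volume over displacements in
+    [-4, 4]^2, averaged over channels, with zero padding (much slower
+    than the kernels below)."""
+    import torch.nn.functional as F
+    n, c, h, w = first.shape
+    padded = F.pad(second, (4, 4, 4, 4))  # matches the 4-pixel rbot border
+    outs = []
+    for dy in range(9):          # channel index = dy * 9 + dx, matching the
+        for dx in range(9):      # (p + 4) * 9 + (o + 4) layout of the kernel
+            outs.append((first * padded[:, :, dy:dy + h, dx:dx + w]).mean(1))
+    return torch.stack(outs, 1)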
+kernel_Correlation_updateOutput = """ + extern "C" __global__ void kernel_Correlation_updateOutput( + const int n, + const float* rbot0, + const float* rbot1, + float* top + ) { + extern __shared__ char patch_data_char[]; + + float *patch_data = (float *)patch_data_char; + + // First (upper left) position of kernel upper-left corner in current center position of neighborhood in image 1 + int x1 = blockIdx.x + 4; + int y1 = blockIdx.y + 4; + int item = blockIdx.z; + int ch_off = threadIdx.x; + + // Load 3D patch into shared shared memory + for (int j = 0; j < 1; j++) { // HEIGHT + for (int i = 0; i < 1; i++) { // WIDTH + int ji_off = (j + i) * SIZE_3(rbot0); + for (int ch = ch_off; ch < SIZE_3(rbot0); ch += 32) { // CHANNELS + int idx1 = ((item * SIZE_1(rbot0) + y1+j) * SIZE_2(rbot0) + x1+i) * SIZE_3(rbot0) + ch; + int idxPatchData = ji_off + ch; + patch_data[idxPatchData] = rbot0[idx1]; + } + } + } + + __syncthreads(); + + __shared__ float sum[32]; + + // Compute correlation + for (int top_channel = 0; top_channel < SIZE_1(top); top_channel++) { + sum[ch_off] = 0; + + int s2o = top_channel % 9 - 4; + int s2p = top_channel / 9 - 4; + + for (int j = 0; j < 1; j++) { // HEIGHT + for (int i = 0; i < 1; i++) { // WIDTH + int ji_off = (j + i) * SIZE_3(rbot0); + for (int ch = ch_off; ch < SIZE_3(rbot0); ch += 32) { // CHANNELS + int x2 = x1 + s2o; + int y2 = y1 + s2p; + + int idxPatchData = ji_off + ch; + int idx2 = ((item * SIZE_1(rbot0) + y2+j) * SIZE_2(rbot0) + x2+i) * SIZE_3(rbot0) + ch; + + sum[ch_off] += patch_data[idxPatchData] * rbot1[idx2]; + } + } + } + + __syncthreads(); + + if (ch_off == 0) { + float total_sum = 0; + for (int idx = 0; idx < 32; idx++) { + total_sum += sum[idx]; + } + const int sumelems = SIZE_3(rbot0); + const int index = ((top_channel*SIZE_2(top) + blockIdx.y)*SIZE_3(top))+blockIdx.x; + top[index + item*SIZE_1(top)*SIZE_2(top)*SIZE_3(top)] = total_sum / (float)sumelems; + } + } + } +""" + +kernel_Correlation_updateGradFirst = """ + #define ROUND_OFF 50000 + + extern "C" __global__ void kernel_Correlation_updateGradFirst( + const int n, + const int intSample, + const float* rbot0, + const float* rbot1, + const float* gradOutput, + float* gradFirst, + float* gradSecond + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + int n = intIndex % SIZE_1(gradFirst); // channels + int l = (intIndex / SIZE_1(gradFirst)) % SIZE_3(gradFirst) + 4; // w-pos + int m = (intIndex / SIZE_1(gradFirst) / SIZE_3(gradFirst)) % SIZE_2(gradFirst) + 4; // h-pos + + // round_off is a trick to enable integer division with ceil, even for negative numbers + // We use a large offset, for the inner part not to become negative. 
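+      // Added note: C integer division truncates toward zero, so the usual
+      // ceil trick (a + b - 1) / b is wrong for negative numerators.  The
+      // large ROUND_OFF keeps every intermediate value positive so the
+      // ceil/floor expressions below are exact, and it cancels out again
+      // when it is subtracted.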
+ const int round_off = ROUND_OFF; + const int round_off_s1 = round_off; + + // We add round_off before_s1 the int division and subtract round_off after it, to ensure the formula matches ceil behavior: + int xmin = (l - 4 + round_off_s1 - 1) + 1 - round_off; // ceil (l - 4) + int ymin = (m - 4 + round_off_s1 - 1) + 1 - round_off; // ceil (l - 4) + + // Same here: + int xmax = (l - 4 + round_off_s1) - round_off; // floor (l - 4) + int ymax = (m - 4 + round_off_s1) - round_off; // floor (m - 4) + + float sum = 0; + if (xmax>=0 && ymax>=0 && (xmin<=SIZE_3(gradOutput)-1) && (ymin<=SIZE_2(gradOutput)-1)) { + xmin = max(0,xmin); + xmax = min(SIZE_3(gradOutput)-1,xmax); + + ymin = max(0,ymin); + ymax = min(SIZE_2(gradOutput)-1,ymax); + + for (int p = -4; p <= 4; p++) { + for (int o = -4; o <= 4; o++) { + // Get rbot1 data: + int s2o = o; + int s2p = p; + int idxbot1 = ((intSample * SIZE_1(rbot0) + (m+s2p)) * SIZE_2(rbot0) + (l+s2o)) * SIZE_3(rbot0) + n; + float bot1tmp = rbot1[idxbot1]; // rbot1[l+s2o,m+s2p,n] + + // Index offset for gradOutput in following loops: + int op = (p+4) * 9 + (o+4); // index[o,p] + int idxopoffset = (intSample * SIZE_1(gradOutput) + op); + + for (int y = ymin; y <= ymax; y++) { + for (int x = xmin; x <= xmax; x++) { + int idxgradOutput = (idxopoffset * SIZE_2(gradOutput) + y) * SIZE_3(gradOutput) + x; // gradOutput[x,y,o,p] + sum += gradOutput[idxgradOutput] * bot1tmp; + } + } + } + } + } + const int sumelems = SIZE_1(gradFirst); + const int bot0index = ((n * SIZE_2(gradFirst)) + (m-4)) * SIZE_3(gradFirst) + (l-4); + gradFirst[bot0index + intSample*SIZE_1(gradFirst)*SIZE_2(gradFirst)*SIZE_3(gradFirst)] = sum / (float)sumelems; + } } +""" + +kernel_Correlation_updateGradSecond = """ + #define ROUND_OFF 50000 + + extern "C" __global__ void kernel_Correlation_updateGradSecond( + const int n, + const int intSample, + const float* rbot0, + const float* rbot1, + const float* gradOutput, + float* gradFirst, + float* gradSecond + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + int n = intIndex % SIZE_1(gradSecond); // channels + int l = (intIndex / SIZE_1(gradSecond)) % SIZE_3(gradSecond) + 4; // w-pos + int m = (intIndex / SIZE_1(gradSecond) / SIZE_3(gradSecond)) % SIZE_2(gradSecond) + 4; // h-pos + + // round_off is a trick to enable integer division with ceil, even for negative numbers + // We use a large offset, for the inner part not to become negative. 
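+      // Added note: unlike the first-input kernel, this one gathers the
+      // gradient from the opposite direction -- output position
+      // (l - s2o, m - s2p) is the one that compared a first-image pixel
+      // against this second-image pixel at displacement (s2o, s2p), hence
+      // the window bounds below are shifted by -s2o and -s2p.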
+ const int round_off = ROUND_OFF; + const int round_off_s1 = round_off; + + float sum = 0; + for (int p = -4; p <= 4; p++) { + for (int o = -4; o <= 4; o++) { + int s2o = o; + int s2p = p; + + //Get X,Y ranges and clamp + // We add round_off before_s1 the int division and subtract round_off after it, to ensure the formula matches ceil behavior: + int xmin = (l - 4 - s2o + round_off_s1 - 1) + 1 - round_off; // ceil (l - 4 - s2o) + int ymin = (m - 4 - s2p + round_off_s1 - 1) + 1 - round_off; // ceil (l - 4 - s2o) + + // Same here: + int xmax = (l - 4 - s2o + round_off_s1) - round_off; // floor (l - 4 - s2o) + int ymax = (m - 4 - s2p + round_off_s1) - round_off; // floor (m - 4 - s2p) + + if (xmax>=0 && ymax>=0 && (xmin<=SIZE_3(gradOutput)-1) && (ymin<=SIZE_2(gradOutput)-1)) { + xmin = max(0,xmin); + xmax = min(SIZE_3(gradOutput)-1,xmax); + + ymin = max(0,ymin); + ymax = min(SIZE_2(gradOutput)-1,ymax); + + // Get rbot0 data: + int idxbot0 = ((intSample * SIZE_1(rbot0) + (m-s2p)) * SIZE_2(rbot0) + (l-s2o)) * SIZE_3(rbot0) + n; + float bot0tmp = rbot0[idxbot0]; // rbot1[l+s2o,m+s2p,n] + + // Index offset for gradOutput in following loops: + int op = (p+4) * 9 + (o+4); // index[o,p] + int idxopoffset = (intSample * SIZE_1(gradOutput) + op); + + for (int y = ymin; y <= ymax; y++) { + for (int x = xmin; x <= xmax; x++) { + int idxgradOutput = (idxopoffset * SIZE_2(gradOutput) + y) * SIZE_3(gradOutput) + x; // gradOutput[x,y,o,p] + sum += gradOutput[idxgradOutput] * bot0tmp; + } + } + } + } + } + const int sumelems = SIZE_1(gradSecond); + const int bot1index = ((n * SIZE_2(gradSecond)) + (m-4)) * SIZE_3(gradSecond) + (l-4); + gradSecond[bot1index + intSample*SIZE_1(gradSecond)*SIZE_2(gradSecond)*SIZE_3(gradSecond)] = sum / (float)sumelems; + } } +""" + + +class _FunctionCorrelation(torch.autograd.Function): + @staticmethod + def forward(self, first, second): + rbot0 = first.new_zeros( + [first.shape[0], first.shape[2] + 8, first.shape[3] + 8, first.shape[1]] + ) + rbot1 = first.new_zeros( + [first.shape[0], first.shape[2] + 8, first.shape[3] + 8, first.shape[1]] + ) + + self.save_for_backward(first, second, rbot0, rbot1) + + first = first.contiguous() + assert first.is_cuda == True + second = second.contiguous() + assert second.is_cuda == True + + output = first.new_zeros([first.shape[0], 81, first.shape[2], first.shape[3]]) + + if first.is_cuda == True: + n = first.shape[2] * first.shape[3] + cuda_launch( + cuda_kernel( + "kernel_Correlation_rearrange", kernel_Correlation_rearrange, {"input": first, "output": rbot0} + ), + )( + grid=tuple([int((n + 16 - 1) / 16), first.shape[1], first.shape[0]]), + block=tuple([16, 1, 1]), + args=[n, first.data_ptr(), rbot0.data_ptr()], + ) + + n = second.shape[2] * second.shape[3] + cuda_launch( + cuda_kernel( + "kernel_Correlation_rearrange", kernel_Correlation_rearrange, {"input": second, "output": rbot1} + ), + )( + grid=tuple([int((n + 16 - 1) / 16), second.shape[1], second.shape[0]]), + block=tuple([16, 1, 1]), + args=[n, second.data_ptr(), rbot1.data_ptr()], + ) + + n = output.shape[1] * output.shape[2] * output.shape[3] + cuda_launch( + cuda_kernel( + "kernel_Correlation_updateOutput", + kernel_Correlation_updateOutput, + {"rbot0": rbot0, "rbot1": rbot1, "top": output}, + ), + )( + grid=tuple([output.shape[3], output.shape[2], output.shape[0]]), + block=tuple([32, 1, 1]), + shared_mem=first.shape[1] * 4, + args=[n, rbot0.data_ptr(), rbot1.data_ptr(), output.data_ptr()], + ) + + elif first.is_cuda == False: + raise NotImplementedError() + + # end + + 
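+        # Added note: output[b, (dy + 4) * 9 + (dx + 4), y, x] holds the
+        # channel-averaged dot product of first[b, :, y, x] with
+        # second[b, :, y + dy, x + dx] for dy, dx in [-4, 4]; the 4-pixel
+        # zero border of rbot0/rbot1 supplies the padding.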
return output + + # end + + @staticmethod + def backward(self, gradOutput): + first, second, rbot0, rbot1 = self.saved_tensors + + gradOutput = gradOutput.contiguous() + assert gradOutput.is_cuda == True + + gradFirst = ( + first.new_zeros( + [first.shape[0], first.shape[1], first.shape[2], first.shape[3]] + ) + if self.needs_input_grad[0] == True + else None + ) + gradSecond = ( + first.new_zeros( + [first.shape[0], first.shape[1], first.shape[2], first.shape[3]] + ) + if self.needs_input_grad[1] == True + else None + ) + + if first.is_cuda == True: + if gradFirst is not None: + for intSample in range(first.shape[0]): + n = first.shape[1] * first.shape[2] * first.shape[3] + cuda_launch( + cuda_kernel( + "kernel_Correlation_updateGradFirst", + kernel_Correlation_updateGradFirst, + { + "rbot0": rbot0, + "rbot1": rbot1, + "gradOutput": gradOutput, + "gradFirst": gradFirst, + "gradSecond": None, + }, + ), + )( + grid=tuple([int((n + 512 - 1) / 512), 1, 1]), + block=tuple([512, 1, 1]), + args=[ + n, + intSample, + rbot0.data_ptr(), + rbot1.data_ptr(), + gradOutput.data_ptr(), + gradFirst.data_ptr(), + None, + ], + ) + # end + # end + + if gradSecond is not None: + for intSample in range(first.shape[0]): + n = first.shape[1] * first.shape[2] * first.shape[3] + cuda_launch( + cuda_kernel( + "kernel_Correlation_updateGradSecond", + kernel_Correlation_updateGradSecond, + { + "rbot0": rbot0, + "rbot1": rbot1, + "gradOutput": gradOutput, + "gradFirst": None, + "gradSecond": gradSecond, + }, + ), + )( + grid=tuple([int((n + 512 - 1) / 512), 1, 1]), + block=tuple([512, 1, 1]), + args=[ + n, + intSample, + rbot0.data_ptr(), + rbot1.data_ptr(), + gradOutput.data_ptr(), + None, + gradSecond.data_ptr(), + ], + ) + # end + # end + + elif first.is_cuda == False: + raise NotImplementedError() + + # end + + return gradFirst, gradSecond + + # end + + +# end + + +def FunctionCorrelation(tenFirst, tenSecond): + return _FunctionCorrelation.apply(tenFirst, tenSecond) + + +# end + + +class ModuleCorrelation(torch.nn.Module): + def __init__(self): + super(ModuleCorrelation, self).__init__() + + # end + + def forward(self, tenFirst, tenSecond): + return _FunctionCorrelation.apply(tenFirst, tenSecond) + + # end + +__all__ = ["_FunctionCorrelation", "FunctionCorrelation", "ModuleCorrelation"] diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/costvol.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/costvol.py new file mode 100644 index 0000000000000000000000000000000000000000..070dd483be1716de1a3986034eaaeb9cddda9ee9 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/costvol.py @@ -0,0 +1,317 @@ +from .utils import cuda_kernel, cuda_launch, cuda_int32 +import torch, collections + +costvol_out = """ + extern "C" __global__ void __launch_bounds__(512) costvol_out( + const int n, + const {{type}}* __restrict__ tenOne, + const {{type}}* __restrict__ tenTwo, + {{type}}* __restrict__ tenOut + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + const int intN = ( intIndex / SIZE_3(tenOut) / SIZE_2(tenOut) ) % SIZE_0(tenOut); + const int intC = -1; + const int intY = ( intIndex / SIZE_3(tenOut) ) % SIZE_2(tenOut); + const int intX = ( intIndex ) % SIZE_3(tenOut); + + {{type}} fltOne[{{intChans}}]; + + for (int intValue = 0; intValue < SIZE_1(tenOne); intValue += 1) { + fltOne[intValue] = VALUE_4(tenOne, intN, intValue, intY, intX); + } + + int 
intOffset = OFFSET_4(tenOut, intN, 0, intY, intX); + + for (int intOy = intY - 4; intOy <= intY + 4; intOy += 1) { + for (int intOx = intX - 4; intOx <= intX + 4; intOx += 1) { + {{type}} fltValue = 0.0f; + + if ((intOy >= 0) && (intOy < SIZE_2(tenOut)) && (intOx >= 0) && (intOx < SIZE_3(tenOut))) { + for (int intValue = 0; intValue < SIZE_1(tenOne); intValue += 1) { + fltValue += abs(fltOne[intValue] - VALUE_4(tenTwo, intN, intValue, intOy, intOx)); + } + } else { + for (int intValue = 0; intValue < SIZE_1(tenOne); intValue += 1) { + fltValue += abs(fltOne[intValue]); + } + } + + tenOut[intOffset] = fltValue / SIZE_1(tenOne); + intOffset += SIZE_2(tenOut) * SIZE_3(tenOut); + } + } + } } +""" + +costvol_onegrad = """ + extern "C" __global__ void __launch_bounds__(512) costvol_onegrad( + const int n, + const {{type}}* __restrict__ tenOne, + const {{type}}* __restrict__ tenTwo, + const {{type}}* __restrict__ tenOutgrad, + {{type}}* __restrict__ tenOnegrad, + {{type}}* __restrict__ tenTwograd + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + const int intN = ( intIndex / SIZE_3(tenOnegrad) / SIZE_2(tenOnegrad) ) % SIZE_0(tenOnegrad); + const int intC = -1; + const int intY = ( intIndex / SIZE_3(tenOnegrad) ) % SIZE_2(tenOnegrad); + const int intX = ( intIndex ) % SIZE_3(tenOnegrad); + + {{type}} fltOne[{{intChans}}]; + + for (int intValue = 0; intValue < SIZE_1(tenOne); intValue += 1) { + fltOne[intValue] = VALUE_4(tenOne, intN, intValue, intY, intX); + } + + int intOffset = OFFSET_4(tenOutgrad, intN, 0, intY, intX); + + for (int intOy = intY - 4; intOy <= intY + 4; intOy += 1) { + for (int intOx = intX - 4; intOx <= intX + 4; intOx += 1) { + if ((intOy >= 0) && (intOy < SIZE_2(tenOutgrad)) && (intOx >= 0) && (intOx < SIZE_3(tenOutgrad))) { + for (int intValue = 0; intValue < SIZE_1(tenOne); intValue += 1) { + if (fltOne[intValue] - VALUE_4(tenTwo, intN, intValue, intOy, intOx) >= 0.0f) { + tenOnegrad[OFFSET_4(tenOnegrad, intN, intValue, intY, intX)] += +tenOutgrad[intOffset] / SIZE_1(tenOne); + } else { + tenOnegrad[OFFSET_4(tenOnegrad, intN, intValue, intY, intX)] += -tenOutgrad[intOffset] / SIZE_1(tenOne); + } + } + } else { + for (int intValue = 0; intValue < SIZE_1(tenOne); intValue += 1) { + if (fltOne[intValue] >= 0.0f) { + tenOnegrad[OFFSET_4(tenOnegrad, intN, intValue, intY, intX)] += +tenOutgrad[intOffset] / SIZE_1(tenOne); + } else { + tenOnegrad[OFFSET_4(tenOnegrad, intN, intValue, intY, intX)] += -tenOutgrad[intOffset] / SIZE_1(tenOne); + } + } + } + + intOffset += SIZE_2(tenOutgrad) * SIZE_3(tenOutgrad); + } + } + } } +""" + +costvol_twograd = """ + extern "C" __global__ void __launch_bounds__(512) costvol_twograd( + const int n, + const {{type}}* __restrict__ tenOne, + const {{type}}* __restrict__ tenTwo, + const {{type}}* __restrict__ tenOutgrad, + {{type}}* __restrict__ tenOnegrad, + {{type}}* __restrict__ tenTwograd + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + const int intN = ( intIndex / SIZE_3(tenTwograd) / SIZE_2(tenTwograd) ) % SIZE_0(tenTwograd); + const int intC = -1; + const int intY = ( intIndex / SIZE_3(tenTwograd) ) % SIZE_2(tenTwograd); + const int intX = ( intIndex ) % SIZE_3(tenTwograd); + + {{type}} fltOne[{{intChans}}]; + + for (int intValue = 0; intValue < SIZE_1(tenOne); intValue += 1) { + fltOne[intValue] = VALUE_4(tenOne, intN, intValue, intY, intX); + } + + int intOffset = OFFSET_4(tenOutgrad, intN, 0, intY, 
intX); + + for (int intOy = intY - 4; intOy <= intY + 4; intOy += 1) { + for (int intOx = intX - 4; intOx <= intX + 4; intOx += 1) { + if ((intOy >= 0) && (intOy < SIZE_2(tenOutgrad)) && (intOx >= 0) && (intOx < SIZE_3(tenOutgrad))) { + for (int intValue = 0; intValue < SIZE_1(tenOne); intValue += 1) { + if (fltOne[intValue] - VALUE_4(tenTwo, intN, intValue, intOy, intOx) >= 0.0f) { + atomicAdd(&tenTwograd[OFFSET_4(tenTwograd, intN, intValue, intOy, intOx)], -tenOutgrad[intOffset] / SIZE_1(tenOne)); + } else { + atomicAdd(&tenTwograd[OFFSET_4(tenTwograd, intN, intValue, intOy, intOx)], +tenOutgrad[intOffset] / SIZE_1(tenOne)); + } + } + } else { + // ... + } + + intOffset += SIZE_2(tenOutgrad) * SIZE_3(tenOutgrad); + } + } + } } +""" + +class costvol_func(torch.autograd.Function): + @staticmethod + @torch.cuda.amp.custom_fwd(cast_inputs=torch.float32) + def forward(self, tenOne, tenTwo): + tenOut = tenOne.new_empty( + [tenOne.shape[0], 81, tenOne.shape[2], tenOne.shape[3]] + ) + + cuda_launch( + cuda_kernel( + "costvol_out", + costvol_out, + { + "intChans": tenOne.shape[1], + "tenOne": tenOne, + "tenTwo": tenTwo, + "tenOut": tenOut, + }, + ) + )( + grid=tuple( + [ + int( + ( + (tenOut.shape[0] * tenOut.shape[2] * tenOut.shape[3]) + + 512 + - 1 + ) + / 512 + ), + 1, + 1, + ] + ), + block=tuple([512, 1, 1]), + args=[ + cuda_int32(tenOut.shape[0] * tenOut.shape[2] * tenOut.shape[3]), + tenOne.data_ptr(), + tenTwo.data_ptr(), + tenOut.data_ptr(), + ], + stream=collections.namedtuple("Stream", "ptr")( + torch.cuda.current_stream().cuda_stream + ), + ) + + self.save_for_backward(tenOne, tenTwo) + + return tenOut + + # end + + @staticmethod + @torch.cuda.amp.custom_bwd + def backward(self, tenOutgrad): + tenOne, tenTwo = self.saved_tensors + + tenOutgrad = tenOutgrad.contiguous() + assert tenOutgrad.is_cuda == True + + tenOnegrad = ( + tenOne.new_zeros( + [tenOne.shape[0], tenOne.shape[1], tenOne.shape[2], tenOne.shape[3]] + ) + if self.needs_input_grad[0] == True + else None + ) + tenTwograd = ( + tenTwo.new_zeros( + [tenTwo.shape[0], tenTwo.shape[1], tenTwo.shape[2], tenTwo.shape[3]] + ) + if self.needs_input_grad[1] == True + else None + ) + + if tenOnegrad is not None: + cuda_launch( + cuda_kernel( + "costvol_onegrad", + costvol_onegrad, + { + "intChans": tenOne.shape[1], + "tenOne": tenOne, + "tenTwo": tenTwo, + "tenOutgrad": tenOutgrad, + "tenOnegrad": tenOnegrad, + "tenTwograd": tenTwograd, + }, + ) + )( + grid=tuple( + [ + int( + ( + ( + tenOnegrad.shape[0] + * tenOnegrad.shape[2] + * tenOnegrad.shape[3] + ) + + 512 + - 1 + ) + / 512 + ), + 1, + 1, + ] + ), + block=tuple([512, 1, 1]), + args=[ + cuda_int32( + tenOnegrad.shape[0] * tenOnegrad.shape[2] * tenOnegrad.shape[3] + ), + tenOne.data_ptr(), + tenTwo.data_ptr(), + tenOutgrad.data_ptr(), + tenOnegrad.data_ptr(), + tenTwograd.data_ptr(), + ], + stream=collections.namedtuple("Stream", "ptr")( + torch.cuda.current_stream().cuda_stream + ), + ) + # end + + if tenTwograd is not None: + cuda_launch( + cuda_kernel( + "costvol_twograd", + costvol_twograd, + { + "intChans": tenOne.shape[1], + "tenOne": tenOne, + "tenTwo": tenTwo, + "tenOutgrad": tenOutgrad, + "tenOnegrad": tenOnegrad, + "tenTwograd": tenTwograd, + }, + ) + )( + grid=tuple( + [ + int( + ( + ( + tenTwograd.shape[0] + * tenTwograd.shape[2] + * tenTwograd.shape[3] + ) + + 512 + - 1 + ) + / 512 + ), + 1, + 1, + ] + ), + block=tuple([512, 1, 1]), + args=[ + cuda_int32( + tenTwograd.shape[0] * tenTwograd.shape[2] * tenTwograd.shape[3] + ), + tenOne.data_ptr(), + tenTwo.data_ptr(), + 
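+                    # Added note: the kernel signature takes both grad buffers,
+                    # and tenOnegrad.data_ptr() below would fail if only the
+                    # second input required grad (tenOnegrad is None then);
+                    # callers here appear to always need both, so that path is
+                    # presumably never taken.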
tenOutgrad.data_ptr(), + tenOnegrad.data_ptr(), + tenTwograd.data_ptr(), + ], + stream=collections.namedtuple("Stream", "ptr")( + torch.cuda.current_stream().cuda_stream + ), + ) + # end + + return tenOnegrad, tenTwograd, None, None + + # end + + +# end + +__all__ = ["costvol_func"] \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/sepconv.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/sepconv.py new file mode 100644 index 0000000000000000000000000000000000000000..c334cdca77674f2566dd0075903e4d590e6d9eca --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/sepconv.py @@ -0,0 +1,332 @@ +import torch +from .utils import cuda_launch, cuda_kernel, cuda_int32 + +sepconv_vergrad = """ + extern "C" __global__ void __launch_bounds__(512) sepconv_vergrad( + const int n, + const {{type}}* __restrict__ tenIn, + const {{type}}* __restrict__ tenVer, + const {{type}}* __restrict__ tenHor, + const {{type}}* __restrict__ tenOutgrad, + {{type}}* __restrict__ tenIngrad, + {{type}}* __restrict__ tenVergrad, + {{type}}* __restrict__ tenHorgrad + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + const int intN = ( intIndex / SIZE_3(tenVergrad) / SIZE_2(tenVergrad) / SIZE_1(tenVergrad) ) % SIZE_0(tenVergrad); + const int intC = ( intIndex / SIZE_3(tenVergrad) / SIZE_2(tenVergrad) ) % SIZE_1(tenVergrad); + const int intY = ( intIndex / SIZE_3(tenVergrad) ) % SIZE_2(tenVergrad); + const int intX = ( intIndex ) % SIZE_3(tenVergrad); + + {{type}} fltVergrad = 0.0; + + {{type}} fltKahanc = 0.0; + {{type}} fltKahany = 0.0; + {{type}} fltKahant = 0.0; + + for (int intI = 0; intI < SIZE_1(tenIn); intI += 1) { + for (int intFx = 0; intFx < SIZE_1(tenHor); intFx += 1) { + fltKahany = VALUE_4(tenHor, intN, intFx, intY, intX) * VALUE_4(tenIn, intN, intI, intY + intC, intX + intFx) * VALUE_4(tenOutgrad, intN, intI, intY, intX); + fltKahany = fltKahany - fltKahanc; + fltKahant = fltVergrad + fltKahany; + fltKahanc = (fltKahant - fltVergrad) - fltKahany; + fltVergrad = fltKahant; + } + } + + tenVergrad[intIndex] = fltVergrad; + } } +""" + +sepconv_ingrad = """ + extern "C" __global__ void __launch_bounds__(512) sepconv_ingrad( + const int n, + const {{type}}* __restrict__ tenIn, + const {{type}}* __restrict__ tenVer, + const {{type}}* __restrict__ tenHor, + const {{type}}* __restrict__ tenOutgrad, + {{type}}* __restrict__ tenIngrad, + {{type}}* __restrict__ tenVergrad, + {{type}}* __restrict__ tenHorgrad + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + const int intN = ( intIndex / SIZE_3(tenIngrad) / SIZE_2(tenIngrad) / SIZE_1(tenIngrad) ) % SIZE_0(tenIngrad); + const int intC = ( intIndex / SIZE_3(tenIngrad) / SIZE_2(tenIngrad) ) % SIZE_1(tenIngrad); + const int intY = ( intIndex / SIZE_3(tenIngrad) ) % SIZE_2(tenIngrad); + const int intX = ( intIndex ) % SIZE_3(tenIngrad); + + {{type}} fltIngrad = 0.0; + + {{type}} fltKahanc = 0.0; + {{type}} fltKahany = 0.0; + {{type}} fltKahant = 0.0; + + for (int intFy = 0; intFy < SIZE_1(tenVer); intFy += 1) { + int intKy = intY + intFy - (SIZE_1(tenVer) - 1); + + if (intKy < 0) { continue; } + if (intKy >= SIZE_2(tenVer)) { continue; } + + for (int intFx = 0; intFx < SIZE_1(tenHor); intFx += 1) { + int intKx = intX + intFx - (SIZE_1(tenHor) - 1); + + if (intKx < 0) { continue; } + if (intKx >= 
SIZE_3(tenHor)) { continue; } + + fltKahany = VALUE_4(tenVer, intN, (SIZE_1(tenVer) - 1) - intFy, intKy, intKx) * VALUE_4(tenHor, intN, (SIZE_1(tenHor) - 1) - intFx, intKy, intKx) * VALUE_4(tenOutgrad, intN, intC, intKy, intKx); + fltKahany = fltKahany - fltKahanc; + fltKahant = fltIngrad + fltKahany; + fltKahanc = (fltKahant - fltIngrad) - fltKahany; + fltIngrad = fltKahant; + } + } + + tenIngrad[intIndex] = fltIngrad; + } } +""" + +sepconv_out = """ + extern "C" __global__ void __launch_bounds__(512) sepconv_out( + const int n, + const {{type}}* __restrict__ tenIn, + const {{type}}* __restrict__ tenVer, + const {{type}}* __restrict__ tenHor, + {{type}}* __restrict__ tenOut + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + const int intN = ( intIndex / SIZE_3(tenOut) / SIZE_2(tenOut) / SIZE_1(tenOut) ) % SIZE_0(tenOut); + const int intC = ( intIndex / SIZE_3(tenOut) / SIZE_2(tenOut) ) % SIZE_1(tenOut); + const int intY = ( intIndex / SIZE_3(tenOut) ) % SIZE_2(tenOut); + const int intX = ( intIndex ) % SIZE_3(tenOut); + + {{type}} fltOut = 0.0; + + {{type}} fltKahanc = 0.0; + {{type}} fltKahany = 0.0; + {{type}} fltKahant = 0.0; + + for (int intFy = 0; intFy < SIZE_1(tenVer); intFy += 1) { + for (int intFx = 0; intFx < SIZE_1(tenHor); intFx += 1) { + fltKahany = VALUE_4(tenIn, intN, intC, intY + intFy, intX + intFx) * VALUE_4(tenVer, intN, intFy, intY, intX) * VALUE_4(tenHor, intN, intFx, intY, intX); + fltKahany = fltKahany - fltKahanc; + fltKahant = fltOut + fltKahany; + fltKahanc = (fltKahant - fltOut) - fltKahany; + fltOut = fltKahant; + } + } + + tenOut[intIndex] = fltOut; + } } +""" + +sepconv_horgrad = """ + extern "C" __global__ void __launch_bounds__(512) sepconv_horgrad( + const int n, + const {{type}}* __restrict__ tenIn, + const {{type}}* __restrict__ tenVer, + const {{type}}* __restrict__ tenHor, + const {{type}}* __restrict__ tenOutgrad, + {{type}}* __restrict__ tenIngrad, + {{type}}* __restrict__ tenVergrad, + {{type}}* __restrict__ tenHorgrad + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + const int intN = ( intIndex / SIZE_3(tenHorgrad) / SIZE_2(tenHorgrad) / SIZE_1(tenHorgrad) ) % SIZE_0(tenHorgrad); + const int intC = ( intIndex / SIZE_3(tenHorgrad) / SIZE_2(tenHorgrad) ) % SIZE_1(tenHorgrad); + const int intY = ( intIndex / SIZE_3(tenHorgrad) ) % SIZE_2(tenHorgrad); + const int intX = ( intIndex ) % SIZE_3(tenHorgrad); + + {{type}} fltHorgrad = 0.0; + + {{type}} fltKahanc = 0.0; + {{type}} fltKahany = 0.0; + {{type}} fltKahant = 0.0; + + for (int intI = 0; intI < SIZE_1(tenIn); intI += 1) { + for (int intFy = 0; intFy < SIZE_1(tenVer); intFy += 1) { + fltKahany = VALUE_4(tenVer, intN, intFy, intY, intX) * VALUE_4(tenIn, intN, intI, intY + intFy, intX + intC) * VALUE_4(tenOutgrad, intN, intI, intY, intX); + fltKahany = fltKahany - fltKahanc; + fltKahant = fltHorgrad + fltKahany; + fltKahanc = (fltKahant - fltHorgrad) - fltKahany; + fltHorgrad = fltKahant; + } + } + + tenHorgrad[intIndex] = fltHorgrad; + } } +""" + +class sepconv_func(torch.autograd.Function): + @staticmethod + @torch.cuda.amp.custom_fwd(cast_inputs=torch.float32) + def forward(self, tenIn, tenVer, tenHor): + tenOut = tenIn.new_empty( + [ + tenIn.shape[0], + tenIn.shape[1], + tenVer.shape[2] and tenHor.shape[2], + tenVer.shape[3] and tenHor.shape[3], + ] + ) + + if tenIn.is_cuda == True: + cuda_launch( + cuda_kernel( + "sepconv_out", + sepconv_out, + { + "tenIn": 
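+                        # Added note: the sepconv kernels above accumulate with
+                        # Kahan (compensated) summation -- fltKahanc carries the
+                        # rounding error of each add so long separable-filter
+                        # sums stay accurate.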
tenIn, + "tenVer": tenVer, + "tenHor": tenHor, + "tenOut": tenOut, + }, + ) + )( + grid=tuple([int((tenOut.nelement() + 512 - 1) / 512), 1, 1]), + block=tuple([512, 1, 1]), + args=[ + cuda_int32(tenOut.nelement()), + tenIn.data_ptr(), + tenVer.data_ptr(), + tenHor.data_ptr(), + tenOut.data_ptr(), + ], + ) + + elif tenIn.is_cuda != True: + assert False + + # end + + self.save_for_backward(tenIn, tenVer, tenHor) + + return tenOut + + # end + + @staticmethod + @torch.cuda.amp.custom_bwd + def backward(self, tenOutgrad): + tenIn, tenVer, tenHor = self.saved_tensors + + tenOutgrad = tenOutgrad.contiguous() + assert tenOutgrad.is_cuda == True + + tenIngrad = ( + tenIn.new_empty( + [tenIn.shape[0], tenIn.shape[1], tenIn.shape[2], tenIn.shape[3]] + ) + if self.needs_input_grad[0] == True + else None + ) + tenVergrad = ( + tenVer.new_empty( + [tenVer.shape[0], tenVer.shape[1], tenVer.shape[2], tenVer.shape[3]] + ) + if self.needs_input_grad[1] == True + else None + ) + tenHorgrad = ( + tenHor.new_empty( + [tenHor.shape[0], tenHor.shape[1], tenHor.shape[2], tenHor.shape[3]] + ) + if self.needs_input_grad[2] == True + else None + ) + + if tenIngrad is not None: + cuda_launch( + cuda_kernel( + "sepconv_ingrad", + sepconv_ingrad, + { + "tenIn": tenIn, + "tenVer": tenVer, + "tenHor": tenHor, + "tenOutgrad": tenOutgrad, + "tenIngrad": tenIngrad, + "tenVergrad": tenVergrad, + "tenHorgrad": tenHorgrad, + }, + ) + )( + grid=tuple([int((tenIngrad.nelement() + 512 - 1) / 512), 1, 1]), + block=tuple([512, 1, 1]), + args=[ + cuda_int32(tenIngrad.nelement()), + tenIn.data_ptr(), + tenVer.data_ptr(), + tenHor.data_ptr(), + tenOutgrad.data_ptr(), + tenIngrad.data_ptr(), + None, + None, + ], + ) + # end + + if tenVergrad is not None: + cuda_launch( + cuda_kernel( + "sepconv_vergrad", + sepconv_vergrad, + { + "tenIn": tenIn, + "tenVer": tenVer, + "tenHor": tenHor, + "tenOutgrad": tenOutgrad, + "tenIngrad": tenIngrad, + "tenVergrad": tenVergrad, + "tenHorgrad": tenHorgrad, + }, + ) + )( + grid=tuple([int((tenVergrad.nelement() + 512 - 1) / 512), 1, 1]), + block=tuple([512, 1, 1]), + args=[ + cuda_int32(tenVergrad.nelement()), + tenIn.data_ptr(), + tenVer.data_ptr(), + tenHor.data_ptr(), + tenOutgrad.data_ptr(), + None, + tenVergrad.data_ptr(), + None, + ], + ) + # end + + if tenHorgrad is not None: + cuda_launch( + cuda_kernel( + "sepconv_horgrad", + sepconv_horgrad, + { + "tenIn": tenIn, + "tenVer": tenVer, + "tenHor": tenHor, + "tenOutgrad": tenOutgrad, + "tenIngrad": tenIngrad, + "tenVergrad": tenVergrad, + "tenHorgrad": tenHorgrad, + }, + ) + )( + grid=tuple([int((tenHorgrad.nelement() + 512 - 1) / 512), 1, 1]), + block=tuple([512, 1, 1]), + args=[ + cuda_int32(tenHorgrad.nelement()), + tenIn.data_ptr(), + tenVer.data_ptr(), + tenHor.data_ptr(), + tenOutgrad.data_ptr(), + None, + None, + tenHorgrad.data_ptr(), + ], + ) + # end + + return tenIngrad, tenVergrad, tenHorgrad + + # end + + +# end +__all__ = ["sepconv_func"] \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/softsplat.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/softsplat.py new file mode 100644 index 0000000000000000000000000000000000000000..4a2ae47a638c5d025e85169759b587947640e012 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/softsplat.py @@ -0,0 +1,440 @@ +import torch +from .utils import cuda_launch, cuda_kernel, cuda_int32 +import cupy +import collections + +softsplat_flowgrad = """ + extern "C" 
__global__ void __launch_bounds__(512) softsplat_flowgrad( + const int n, + const {{type}}* __restrict__ tenIn, + const {{type}}* __restrict__ tenFlow, + const {{type}}* __restrict__ tenOutgrad, + {{type}}* __restrict__ tenIngrad, + {{type}}* __restrict__ tenFlowgrad + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + const int intN = ( intIndex / SIZE_3(tenFlowgrad) / SIZE_2(tenFlowgrad) / SIZE_1(tenFlowgrad) ) % SIZE_0(tenFlowgrad); + const int intC = ( intIndex / SIZE_3(tenFlowgrad) / SIZE_2(tenFlowgrad) ) % SIZE_1(tenFlowgrad); + const int intY = ( intIndex / SIZE_3(tenFlowgrad) ) % SIZE_2(tenFlowgrad); + const int intX = ( intIndex ) % SIZE_3(tenFlowgrad); + + assert(SIZE_1(tenFlow) == 2); + + {{type}} fltFlowgrad = 0.0f; + + {{type}} fltX = ({{type}}) (intX) + VALUE_4(tenFlow, intN, 0, intY, intX); + {{type}} fltY = ({{type}}) (intY) + VALUE_4(tenFlow, intN, 1, intY, intX); + + if (isfinite(fltX) == false) { return; } + if (isfinite(fltY) == false) { return; } + + int intNorthwestX = (int) (floor(fltX)); + int intNorthwestY = (int) (floor(fltY)); + int intNortheastX = intNorthwestX + 1; + int intNortheastY = intNorthwestY; + int intSouthwestX = intNorthwestX; + int intSouthwestY = intNorthwestY + 1; + int intSoutheastX = intNorthwestX + 1; + int intSoutheastY = intNorthwestY + 1; + + {{type}} fltNorthwest = 0.0f; + {{type}} fltNortheast = 0.0f; + {{type}} fltSouthwest = 0.0f; + {{type}} fltSoutheast = 0.0f; + + if (intC == 0) { + fltNorthwest = (({{type}}) (-1.0f)) * (({{type}}) (intSoutheastY) - fltY); + fltNortheast = (({{type}}) (+1.0f)) * (({{type}}) (intSouthwestY) - fltY); + fltSouthwest = (({{type}}) (-1.0f)) * (fltY - ({{type}}) (intNortheastY)); + fltSoutheast = (({{type}}) (+1.0f)) * (fltY - ({{type}}) (intNorthwestY)); + + } else if (intC == 1) { + fltNorthwest = (({{type}}) (intSoutheastX) - fltX) * (({{type}}) (-1.0f)); + fltNortheast = (fltX - ({{type}}) (intSouthwestX)) * (({{type}}) (-1.0f)); + fltSouthwest = (({{type}}) (intNortheastX) - fltX) * (({{type}}) (+1.0f)); + fltSoutheast = (fltX - ({{type}}) (intNorthwestX)) * (({{type}}) (+1.0f)); + + } + + for (int intChannel = 0; intChannel < SIZE_1(tenOutgrad); intChannel += 1) { + {{type}} fltIn = VALUE_4(tenIn, intN, intChannel, intY, intX); + + if ((intNorthwestX >= 0) && (intNorthwestX < SIZE_3(tenOutgrad)) && (intNorthwestY >= 0) && (intNorthwestY < SIZE_2(tenOutgrad))) { + fltFlowgrad += VALUE_4(tenOutgrad, intN, intChannel, intNorthwestY, intNorthwestX) * fltIn * fltNorthwest; + } + + if ((intNortheastX >= 0) && (intNortheastX < SIZE_3(tenOutgrad)) && (intNortheastY >= 0) && (intNortheastY < SIZE_2(tenOutgrad))) { + fltFlowgrad += VALUE_4(tenOutgrad, intN, intChannel, intNortheastY, intNortheastX) * fltIn * fltNortheast; + } + + if ((intSouthwestX >= 0) && (intSouthwestX < SIZE_3(tenOutgrad)) && (intSouthwestY >= 0) && (intSouthwestY < SIZE_2(tenOutgrad))) { + fltFlowgrad += VALUE_4(tenOutgrad, intN, intChannel, intSouthwestY, intSouthwestX) * fltIn * fltSouthwest; + } + + if ((intSoutheastX >= 0) && (intSoutheastX < SIZE_3(tenOutgrad)) && (intSoutheastY >= 0) && (intSoutheastY < SIZE_2(tenOutgrad))) { + fltFlowgrad += VALUE_4(tenOutgrad, intN, intChannel, intSoutheastY, intSoutheastX) * fltIn * fltSoutheast; + } + } + + tenFlowgrad[intIndex] = fltFlowgrad; + } } +""" + +softsplat_ingrad = """ + extern "C" __global__ void __launch_bounds__(512) softsplat_ingrad( + const int n, + const {{type}}* __restrict__ tenIn, + const {{type}}* __restrict__ 
tenFlow, + const {{type}}* __restrict__ tenOutgrad, + {{type}}* __restrict__ tenIngrad, + {{type}}* __restrict__ tenFlowgrad + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + const int intN = ( intIndex / SIZE_3(tenIngrad) / SIZE_2(tenIngrad) / SIZE_1(tenIngrad) ) % SIZE_0(tenIngrad); + const int intC = ( intIndex / SIZE_3(tenIngrad) / SIZE_2(tenIngrad) ) % SIZE_1(tenIngrad); + const int intY = ( intIndex / SIZE_3(tenIngrad) ) % SIZE_2(tenIngrad); + const int intX = ( intIndex ) % SIZE_3(tenIngrad); + + assert(SIZE_1(tenFlow) == 2); + + {{type}} fltIngrad = 0.0f; + + {{type}} fltX = ({{type}}) (intX) + VALUE_4(tenFlow, intN, 0, intY, intX); + {{type}} fltY = ({{type}}) (intY) + VALUE_4(tenFlow, intN, 1, intY, intX); + + if (isfinite(fltX) == false) { return; } + if (isfinite(fltY) == false) { return; } + + int intNorthwestX = (int) (floor(fltX)); + int intNorthwestY = (int) (floor(fltY)); + int intNortheastX = intNorthwestX + 1; + int intNortheastY = intNorthwestY; + int intSouthwestX = intNorthwestX; + int intSouthwestY = intNorthwestY + 1; + int intSoutheastX = intNorthwestX + 1; + int intSoutheastY = intNorthwestY + 1; + + {{type}} fltNorthwest = (({{type}}) (intSoutheastX) - fltX) * (({{type}}) (intSoutheastY) - fltY); + {{type}} fltNortheast = (fltX - ({{type}}) (intSouthwestX)) * (({{type}}) (intSouthwestY) - fltY); + {{type}} fltSouthwest = (({{type}}) (intNortheastX) - fltX) * (fltY - ({{type}}) (intNortheastY)); + {{type}} fltSoutheast = (fltX - ({{type}}) (intNorthwestX)) * (fltY - ({{type}}) (intNorthwestY)); + + if ((intNorthwestX >= 0) && (intNorthwestX < SIZE_3(tenOutgrad)) && (intNorthwestY >= 0) && (intNorthwestY < SIZE_2(tenOutgrad))) { + fltIngrad += VALUE_4(tenOutgrad, intN, intC, intNorthwestY, intNorthwestX) * fltNorthwest; + } + + if ((intNortheastX >= 0) && (intNortheastX < SIZE_3(tenOutgrad)) && (intNortheastY >= 0) && (intNortheastY < SIZE_2(tenOutgrad))) { + fltIngrad += VALUE_4(tenOutgrad, intN, intC, intNortheastY, intNortheastX) * fltNortheast; + } + + if ((intSouthwestX >= 0) && (intSouthwestX < SIZE_3(tenOutgrad)) && (intSouthwestY >= 0) && (intSouthwestY < SIZE_2(tenOutgrad))) { + fltIngrad += VALUE_4(tenOutgrad, intN, intC, intSouthwestY, intSouthwestX) * fltSouthwest; + } + + if ((intSoutheastX >= 0) && (intSoutheastX < SIZE_3(tenOutgrad)) && (intSoutheastY >= 0) && (intSoutheastY < SIZE_2(tenOutgrad))) { + fltIngrad += VALUE_4(tenOutgrad, intN, intC, intSoutheastY, intSoutheastX) * fltSoutheast; + } + + tenIngrad[intIndex] = fltIngrad; + } } +""" + +softsplat_out = """ + extern "C" __global__ void __launch_bounds__(512) softsplat_out( + const int n, + const {{type}}* __restrict__ tenIn, + const {{type}}* __restrict__ tenFlow, + {{type}}* __restrict__ tenOut + ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) { + const int intN = ( intIndex / SIZE_3(tenOut) / SIZE_2(tenOut) / SIZE_1(tenOut) ) % SIZE_0(tenOut); + const int intC = ( intIndex / SIZE_3(tenOut) / SIZE_2(tenOut) ) % SIZE_1(tenOut); + const int intY = ( intIndex / SIZE_3(tenOut) ) % SIZE_2(tenOut); + const int intX = ( intIndex ) % SIZE_3(tenOut); + + assert(SIZE_1(tenFlow) == 2); + + {{type}} fltX = ({{type}}) (intX) + VALUE_4(tenFlow, intN, 0, intY, intX); + {{type}} fltY = ({{type}}) (intY) + VALUE_4(tenFlow, intN, 1, intY, intX); + + if (isfinite(fltX) == false) { return; } + if (isfinite(fltY) == false) { return; } + + {{type}} fltIn = VALUE_4(tenIn, intN, 
intC, intY, intX); + + int intNorthwestX = (int) (floor(fltX)); + int intNorthwestY = (int) (floor(fltY)); + int intNortheastX = intNorthwestX + 1; + int intNortheastY = intNorthwestY; + int intSouthwestX = intNorthwestX; + int intSouthwestY = intNorthwestY + 1; + int intSoutheastX = intNorthwestX + 1; + int intSoutheastY = intNorthwestY + 1; + + {{type}} fltNorthwest = (({{type}}) (intSoutheastX) - fltX) * (({{type}}) (intSoutheastY) - fltY); + {{type}} fltNortheast = (fltX - ({{type}}) (intSouthwestX)) * (({{type}}) (intSouthwestY) - fltY); + {{type}} fltSouthwest = (({{type}}) (intNortheastX) - fltX) * (fltY - ({{type}}) (intNortheastY)); + {{type}} fltSoutheast = (fltX - ({{type}}) (intNorthwestX)) * (fltY - ({{type}}) (intNorthwestY)); + + if ((intNorthwestX >= 0) && (intNorthwestX < SIZE_3(tenOut)) && (intNorthwestY >= 0) && (intNorthwestY < SIZE_2(tenOut))) { + atomicAdd(&tenOut[OFFSET_4(tenOut, intN, intC, intNorthwestY, intNorthwestX)], fltIn * fltNorthwest); + } + + if ((intNortheastX >= 0) && (intNortheastX < SIZE_3(tenOut)) && (intNortheastY >= 0) && (intNortheastY < SIZE_2(tenOut))) { + atomicAdd(&tenOut[OFFSET_4(tenOut, intN, intC, intNortheastY, intNortheastX)], fltIn * fltNortheast); + } + + if ((intSouthwestX >= 0) && (intSouthwestX < SIZE_3(tenOut)) && (intSouthwestY >= 0) && (intSouthwestY < SIZE_2(tenOut))) { + atomicAdd(&tenOut[OFFSET_4(tenOut, intN, intC, intSouthwestY, intSouthwestX)], fltIn * fltSouthwest); + } + + if ((intSoutheastX >= 0) && (intSoutheastX < SIZE_3(tenOut)) && (intSoutheastY >= 0) && (intSoutheastY < SIZE_2(tenOut))) { + atomicAdd(&tenOut[OFFSET_4(tenOut, intN, intC, intSoutheastY, intSoutheastX)], fltIn * fltSoutheast); + } + } } +""" + + +# end + +class softsplat_func(torch.autograd.Function): + @staticmethod + @torch.cuda.amp.custom_fwd(cast_inputs=torch.float32) + def forward(self, tenIn, tenFlow): + tenOut = tenIn.new_zeros( + [tenIn.shape[0], tenIn.shape[1], tenIn.shape[2], tenIn.shape[3]] + ) + + if tenIn.is_cuda == True: + cuda_launch( + cuda_kernel( + "softsplat_out", + softsplat_out, + {"tenIn": tenIn, "tenFlow": tenFlow, "tenOut": tenOut}, + ) + )( + grid=tuple([int((tenOut.nelement() + 512 - 1) / 512), 1, 1]), + block=tuple([512, 1, 1]), + args=[ + cuda_int32(tenOut.nelement()), + tenIn.data_ptr(), + tenFlow.data_ptr(), + tenOut.data_ptr(), + ], + stream=collections.namedtuple("Stream", "ptr")( + torch.cuda.current_stream().cuda_stream + ), + ) + + elif tenIn.is_cuda != True: + assert False + + # end + + self.save_for_backward(tenIn, tenFlow) + + return tenOut + + # end + + @staticmethod + @torch.cuda.amp.custom_bwd + def backward(self, tenOutgrad): + tenIn, tenFlow = self.saved_tensors + + tenOutgrad = tenOutgrad.contiguous() + assert tenOutgrad.is_cuda == True + + tenIngrad = ( + tenIn.new_zeros( + [tenIn.shape[0], tenIn.shape[1], tenIn.shape[2], tenIn.shape[3]] + ) + if self.needs_input_grad[0] == True + else None + ) + tenFlowgrad = ( + tenFlow.new_zeros( + [tenFlow.shape[0], tenFlow.shape[1], tenFlow.shape[2], tenFlow.shape[3]] + ) + if self.needs_input_grad[1] == True + else None + ) + + if tenIngrad is not None: + cuda_launch( + cuda_kernel( + "softsplat_ingrad", + softsplat_ingrad, + { + "tenIn": tenIn, + "tenFlow": tenFlow, + "tenOutgrad": tenOutgrad, + "tenIngrad": tenIngrad, + "tenFlowgrad": tenFlowgrad, + }, + ) + )( + grid=tuple([int((tenIngrad.nelement() + 512 - 1) / 512), 1, 1]), + block=tuple([512, 1, 1]), + args=[ + cuda_int32(tenIngrad.nelement()), + tenIn.data_ptr(), + tenFlow.data_ptr(), + tenOutgrad.data_ptr(), + 
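+                    # Added note: the ingrad kernel never dereferences the
+                    # tenFlowgrad argument, so a None placeholder is passed in
+                    # that slot below (the flowgrad launch mirrors this with
+                    # None for tenIngrad).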
tenIngrad.data_ptr(), + None, + ], + stream=collections.namedtuple("Stream", "ptr")( + torch.cuda.current_stream().cuda_stream + ), + ) + # end + + if tenFlowgrad is not None: + cuda_launch( + cuda_kernel( + "softsplat_flowgrad", + softsplat_flowgrad, + { + "tenIn": tenIn, + "tenFlow": tenFlow, + "tenOutgrad": tenOutgrad, + "tenIngrad": tenIngrad, + "tenFlowgrad": tenFlowgrad, + }, + ) + )( + grid=tuple([int((tenFlowgrad.nelement() + 512 - 1) / 512), 1, 1]), + block=tuple([512, 1, 1]), + args=[ + cuda_int32(tenFlowgrad.nelement()), + tenIn.data_ptr(), + tenFlow.data_ptr(), + tenOutgrad.data_ptr(), + None, + tenFlowgrad.data_ptr(), + ], + stream=collections.namedtuple("Stream", "ptr")( + torch.cuda.current_stream().cuda_stream + ), + ) + # end + + return tenIngrad, tenFlowgrad + + # end + + +def FunctionSoftsplat(tenInput, tenFlow, tenMetric, strType): + assert tenMetric is None or tenMetric.shape[1] == 1 + assert strType in ["summation", "average", "linear", "softmax"] + + if strType == "average": + tenInput = torch.cat( + [ + tenInput, + tenInput.new_ones( + tenInput.shape[0], 1, tenInput.shape[2], tenInput.shape[3] + ), + ], + 1, + ) + + elif strType == "linear": + tenInput = torch.cat([tenInput * tenMetric, tenMetric], 1) + + elif strType == "softmax": + tenInput = torch.cat([tenInput * tenMetric.exp(), tenMetric.exp()], 1) + + # end + + tenOutput = softsplat_func.apply(tenInput, tenFlow) + + if strType != "summation": + tenNormalize = tenOutput[:, -1:, :, :] + + tenNormalize[tenNormalize == 0.0] = 1.0 + + tenOutput = tenOutput[:, :-1, :, :] / tenNormalize + # end + + return tenOutput + + +# end + + +class ModuleSoftsplat(torch.nn.Module): + def __init__(self, strType): + super().__init__() + + self.strType = strType + + # end + + def forward(self, tenInput, tenFlow, tenMetric): + return FunctionSoftsplat(tenInput, tenFlow, tenMetric, self.strType) + + # end + + +# end + + + +def softsplat( + tenIn: torch.Tensor, tenFlow: torch.Tensor, tenMetric: torch.Tensor, strMode: str +): + assert strMode.split("-")[0] in ["sum", "avg", "linear", "soft"] + + if strMode == "sum": + assert tenMetric is None + if strMode == "avg": + assert tenMetric is None + if strMode.split("-")[0] == "linear": + assert tenMetric is not None + if strMode.split("-")[0] == "soft": + assert tenMetric is not None + + if strMode == "avg": + tenIn = torch.cat( + [ + tenIn, + tenIn.new_ones([tenIn.shape[0], 1, tenIn.shape[2], tenIn.shape[3]]), + ], + 1, + ) + + elif strMode.split("-")[0] == "linear": + tenIn = torch.cat([tenIn * tenMetric, tenMetric], 1) + + elif strMode.split("-")[0] == "soft": + tenIn = torch.cat([tenIn * tenMetric.exp(), tenMetric.exp()], 1) + + # end + + tenOut = softsplat_func.apply(tenIn, tenFlow) + + if strMode.split("-")[0] in ["avg", "linear", "soft"]: + tenNormalize = tenOut[:, -1:, :, :] + + if len(strMode.split("-")) == 1: + tenNormalize = tenNormalize + 0.0000001 + + elif strMode.split("-")[1] == "addeps": + tenNormalize = tenNormalize + 0.0000001 + + elif strMode.split("-")[1] == "zeroeps": + tenNormalize[tenNormalize == 0.0] = 1.0 + + elif strMode.split("-")[1] == "clipeps": + tenNormalize = tenNormalize.clip(0.0000001, None) + + # end + + tenOut = tenOut[:, :-1, :, :] / tenNormalize + # end + + return tenOut + + +# end + +__all__ = ["FunctionSoftsplat", "ModuleSoftsplat", "softsplat", "softsplat_func"] diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/utils.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/utils.py new 
file mode 100644 index 0000000000000000000000000000000000000000..ec29a48e11955eb9f9f8aa36b1542721dd362345 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/cupy_ops/utils.py @@ -0,0 +1,242 @@ +import cupy +import os +import re +import torch +import typing +from pathlib import Path +import platform + +########################################################## + + +objCudacache = {} + + +def cuda_int32(intIn: int): + return cupy.int32(intIn) + + +# end + + +def cuda_float32(fltIn: float): + return cupy.float32(fltIn) + + +# end + + +def cuda_kernel(strFunction: str, strKernel: str, objVariables: typing.Dict, **replace_kwargs): + if "device" not in objCudacache: + objCudacache["device"] = torch.cuda.get_device_name() + # end + + strKey = strFunction + + for strVariable in objVariables: + objValue = objVariables[strVariable] + + strKey += strVariable + + if objValue is None: + continue + + elif type(objValue) == int: + strKey += str(objValue) + + elif type(objValue) == float: + strKey += str(objValue) + + elif type(objValue) == bool: + strKey += str(objValue) + + elif type(objValue) == str: + strKey += objValue + + elif type(objValue) == torch.Tensor: + strKey += str(objValue.dtype) + strKey += str(objValue.shape) + strKey += str(objValue.stride()) + + elif True: + print(strVariable, type(objValue)) + assert False + + # end + # end + + strKey += objCudacache["device"] + + if strKey not in objCudacache: + for strVariable in objVariables: + objValue = objVariables[strVariable] + + if objValue is None: + continue + + elif type(objValue) == int: + strKernel = strKernel.replace("{{" + strVariable + "}}", str(objValue)) + + elif type(objValue) == float: + strKernel = strKernel.replace("{{" + strVariable + "}}", str(objValue)) + + elif type(objValue) == bool: + strKernel = strKernel.replace("{{" + strVariable + "}}", str(objValue)) + + elif type(objValue) == str: + strKernel = strKernel.replace("{{" + strVariable + "}}", objValue) + + elif type(objValue) == torch.Tensor and objValue.dtype == torch.uint8: + strKernel = strKernel.replace("{{type}}", "unsigned char") + + elif type(objValue) == torch.Tensor and objValue.dtype == torch.float16: + strKernel = strKernel.replace("{{type}}", "half") + + elif type(objValue) == torch.Tensor and objValue.dtype == torch.float32: + strKernel = strKernel.replace("{{type}}", "float") + + elif type(objValue) == torch.Tensor and objValue.dtype == torch.float64: + strKernel = strKernel.replace("{{type}}", "double") + + elif type(objValue) == torch.Tensor and objValue.dtype == torch.int32: + strKernel = strKernel.replace("{{type}}", "int") + + elif type(objValue) == torch.Tensor and objValue.dtype == torch.int64: + strKernel = strKernel.replace("{{type}}", "long") + + elif type(objValue) == torch.Tensor: + print(strVariable, objValue.dtype) + assert False + + elif True: + print(strVariable, type(objValue)) + assert False + + # end + # end + + while True: + objMatch = re.search("(SIZE_)([0-4])(\()([^\)]*)(\))", strKernel) + + if objMatch is None: + break + # end + + intArg = int(objMatch.group(2)) + + strTensor = objMatch.group(4) + intSizes = objVariables[strTensor].size() + + strKernel = strKernel.replace(objMatch.group(), str(intSizes[intArg])) + # end + + while True: + objMatch = re.search("(OFFSET_)([0-4])(\()([^\)]+)(\))", strKernel) + + if objMatch is None: + break + # end + + intArgs = int(objMatch.group(2)) + strArgs = objMatch.group(4).split(",") + + strTensor = strArgs[0] + intStrides = objVariables[strTensor].stride() + 
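+            # Added note: OFFSET_n(tensor, i0, ..., i{n-1}) expands to the flat
+            # element offset sum(i_k * stride_k) using the strides captured when
+            # the kernel was specialised; SIZE_n handled above becomes a literal
+            # dimension and VALUE_n below becomes a direct indexed read, which
+            # is why dtype, shape, and stride are all part of the cache key.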
strIndex = [ + "((" + + strArgs[intArg + 1].replace("{", "(").replace("}", ")").strip() + + ")*" + + str(intStrides[intArg]) + + ")" + for intArg in range(intArgs) + ] + + strKernel = strKernel.replace( + objMatch.group(0), "(" + str.join("+", strIndex) + ")" + ) + # end + + while True: + objMatch = re.search("(VALUE_)([0-4])(\()", strKernel) + + if objMatch is None: + break + # end + + intStart = objMatch.span()[1] + intStop = objMatch.span()[1] + intParentheses = 1 + + while True: + intParentheses += 1 if strKernel[intStop] == "(" else 0 + intParentheses -= 1 if strKernel[intStop] == ")" else 0 + + if intParentheses == 0: + break + # end + + intStop += 1 + # end + + intArgs = int(objMatch.group(2)) + strArgs = strKernel[intStart:intStop].split(",") + + assert intArgs == len(strArgs) - 1 + + strTensor = strArgs[0] + intStrides = objVariables[strTensor].stride() + + strIndex = [] + + for intArg in range(intArgs): + strIndex.append( + "((" + + strArgs[intArg + 1].replace("{", "(").replace("}", ")").strip() + + ")*" + + str(intStrides[intArg]) + + ")" + ) + # end + + strKernel = strKernel.replace( + "VALUE_" + str(intArgs) + "(" + strKernel[intStart:intStop] + ")", + strTensor + "[" + str.join("+", strIndex) + "]", + ) + # end + + for replace_key, value in replace_kwargs.items(): + strKernel = strKernel.replace(replace_key, value) + + objCudacache[strKey] = {"strFunction": strFunction, "strKernel": strKernel} + # end + + return strKey + + +# end +def get_cuda_home_path(): + if "CUDA_HOME" in os.environ: + return os.environ["CUDA_HOME"] + import torch + torch_lib_path = Path(torch.__file__).parent / "lib" + torch_lib_path = str(torch_lib_path.resolve()) + if os.path.exists(torch_lib_path): + nvrtc = filter(lambda lib_file: "nvrtc-builtins" in lib_file, os.listdir(torch_lib_path)) + nvrtc = list(nvrtc) + return torch_lib_path if len(nvrtc) > 0 else None + +@cupy.memoize(for_each_device=True) +def cuda_launch(strKey: str): + if True:#"CUDA_HOME" not in os.environ: + cuda_home = get_cuda_home_path() + if cuda_home is not None: + os.environ["CUDA_HOME"] = cuda_home + os.environ["CUDA_PATH"] = cuda_home + else: + os.environ["CUDA_HOME"] = "/usr/local/cuda/" + os.environ["CUDA_PATH"] = "/usr/local/cuda/" + # print(objCudacache[strKey]['strKernel']) + # return cupy.cuda.compile_with_cache(objCudacache[strKey]['strKernel'], tuple(['-I ' + os.environ['CUDA_HOME'], '-I ' + os.environ['CUDA_HOME'] + '/include'])).get_function(objCudacache[strKey]['strFunction']) + return cupy.RawModule(code=objCudacache[strKey]["strKernel"]).get_function( + objCudacache[strKey]["strFunction"] + ) diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e36bb6cf71d64b17637b96712784de777eef1b28 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/__init__.py @@ -0,0 +1,150 @@ +import comfy.model_management as model_management +import torch +import torch.multiprocessing as mp +from .worker_process import f +from .utils import to_shared_memory + +parent_conn, child_conn, process = None, None, None +device = model_management.get_torch_device() + +def req_to_taichi_process(op_name, *tensors): + global parent_conn, child_conn, process + if parent_conn is None: + mp.set_start_method('spawn', force=True) + parent_conn, child_conn = mp.Pipe() + process = mp.Process(target=f, 
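+        # Added note: the taichi worker runs in a freshly spawned process so
+        # its runtime initialisation cannot clash with the torch/CUDA state of
+        # the main ComfyUI process; tensors cross the pipe as shared memory
+        # (see to_shared_memory) and results are moved back to `device`.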
args=(child_conn, device)) + process.start() + + tensors = to_shared_memory(tensors) + parent_conn.send((op_name, tensors)) + result = parent_conn.recv() + del tensors + + if type(result) not in [tuple, list]: + raise Exception(result) + + return [tensor.to(device) for tensor in result] + +def softsplat( + tenIn: torch.Tensor, tenFlow: torch.Tensor, tenMetric: torch.Tensor, strMode: str +): + assert strMode.split("-")[0] in ["sum", "avg", "linear", "soft"] + + if strMode == "sum": + assert tenMetric is None + if strMode == "avg": + assert tenMetric is None + if strMode.split("-")[0] == "linear": + assert tenMetric is not None + if strMode.split("-")[0] == "soft": + assert tenMetric is not None + + if strMode == "avg": + tenIn = torch.cat( + [ + tenIn, + tenIn.new_ones([tenIn.shape[0], 1, tenIn.shape[2], tenIn.shape[3]]), + ], + 1, + ) + + elif strMode.split("-")[0] == "linear": + tenIn = torch.cat([tenIn * tenMetric, tenMetric], 1) + + elif strMode.split("-")[0] == "soft": + tenIn = torch.cat([tenIn * tenMetric.exp(), tenMetric.exp()], 1) + + # end + + tenOut = req_to_taichi_process("softsplat_out", tenIn, tenFlow)[0] + + if strMode.split("-")[0] in ["avg", "linear", "soft"]: + tenNormalize = tenOut[:, -1:, :, :] + + if len(strMode.split("-")) == 1: + tenNormalize = tenNormalize + 0.0000001 + + elif strMode.split("-")[1] == "addeps": + tenNormalize = tenNormalize + 0.0000001 + + elif strMode.split("-")[1] == "zeroeps": + tenNormalize[tenNormalize == 0.0] = 1.0 + + elif strMode.split("-")[1] == "clipeps": + tenNormalize = tenNormalize.clip(0.0000001, None) + + # end + + tenOut = tenOut[:, :-1, :, :] / tenNormalize + # end + + return tenOut + +def FunctionSoftsplat(tenInput, tenFlow, tenMetric, strType): + assert tenMetric is None or tenMetric.shape[1] == 1 + assert strType in ["summation", "average", "linear", "softmax"] + + if strType == "average": + tenInput = torch.cat( + [ + tenInput, + tenInput.new_ones( + tenInput.shape[0], 1, tenInput.shape[2], tenInput.shape[3] + ), + ], + 1, + ) + + elif strType == "linear": + tenInput = torch.cat([tenInput * tenMetric, tenMetric], 1) + + elif strType == "softmax": + tenInput = torch.cat([tenInput * tenMetric.exp(), tenMetric.exp()], 1) + + # end + + tenOutput = req_to_taichi_process("softsplat_out", tenInput, tenFlow)[0] + + if strType != "summation": + tenNormalize = tenOutput[:, -1:, :, :] + + tenNormalize[tenNormalize == 0.0] = 1.0 + + tenOutput = tenOutput[:, :-1, :, :] / tenNormalize + # end + + return tenOutput + + +# end + + +class ModuleSoftsplat(torch.nn.Module): + def __init__(self, strType): + super(self).__init__() + + self.strType = strType + + # end + + def forward(self, tenInput, tenFlow, tenMetric): + return FunctionSoftsplat(tenInput, tenFlow, tenMetric, self.strType) + +def softsplat_func(tenIn, tenFlow): + return req_to_taichi_process("softsplat_out", tenIn, tenFlow)[0] + +class costvol_func: + @staticmethod + def apply(tenOne, tenTwo): + return req_to_taichi_process("costvol_out", tenOne, tenTwo)[0] + +class sepconv_func: + @staticmethod + def apply(tenIn, tenVer, tenHor): + return req_to_taichi_process("sepconv_out", tenIn, tenVer, tenHor)[0] + +def init(): + one_sample = torch.ones(1, 3, 16, 16, dtype=torch.float32, device=device) + softsplat_func(one_sample, one_sample) + costvol_func.apply(one_sample, one_sample) + sepconv_func.apply(one_sample, one_sample, one_sample) diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/adacof.py 
b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/adacof.py new file mode 100644 index 0000000000000000000000000000000000000000..acf2672e84b32f843018ee617b7800b9b884d4b1 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/adacof.py @@ -0,0 +1,6 @@ +import torch +class FunctionAdaCoF(torch.autograd.Function): + # end + @staticmethod + def forward(ctx, input, weight, offset_i, offset_j, dilation): + raise NotImplementedError() diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/batch_edt.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/batch_edt.py new file mode 100644 index 0000000000000000000000000000000000000000..c1fe1fd4e2e6eb51cbad9d76d29cf2191c194d14 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/batch_edt.py @@ -0,0 +1,2 @@ +def batch_edt(img, block=1024): + raise NotImplementedError() \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/correlation.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/correlation.py new file mode 100644 index 0000000000000000000000000000000000000000..3782d1a930219681996d3e48361acf82b8866edc --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/correlation.py @@ -0,0 +1,15 @@ +import torch + +class _FunctionCorrelation(torch.autograd.Function): + @staticmethod + def forward(self, first, second): + raise NotImplementedError() + +def FunctionCorrelation(tenFirst, tenSecond): + raise NotImplementedError() + return _FunctionCorrelation.apply(tenFirst, tenSecond) + +class ModuleCorrelation(torch.nn.Module): + def __init__(self): + raise NotImplementedError() + super(ModuleCorrelation, self).__init__() \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/costvol.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/costvol.py new file mode 100644 index 0000000000000000000000000000000000000000..a82b7273cd38b7ea19c99356ea762a6e4b58ff99 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/costvol.py @@ -0,0 +1,26 @@ +import taichi as ti +import taichi.math as tm + +""" @ti.kernel +def costvol_out(tenOne: ti.types.ndarray(), tltOne: ti.types.ndarray(), tenTwo: ti.types.ndarray(), tenOut: ti.types.ndarray()): + N, C, H, W = tenOut.shape + for i, ch, y, x in ti.ndrange(N, C, H, W): + for intValue in range(tenOne.shape[1]): + tltOne[intValue] = tenOne[i, intValue, y, x] + + tenOut_ch = 0 + for intOy in range(y - 4, y + 4 + 1): + for intOx in range(x - 4, x + 4 + 1): + point = tm.ivec2(intOx, intOy) + fltValue = 0.0 + for intValue in range(ch): + if (point.y >= 0) and (point.y < H) and (point.x >= 0) and (point.x < W): + fltValue += ti.abs(tltOne[intValue] - tenTwo[i, intValue, point.y, point.x]) + else: + fltValue += ti.abs(tltOne[intValue]) + + tenOut[i, tenOut_ch, y, x] = fltValue / tenOne.shape[1] + tenOut_ch += 1 """ + +def worker_interface(op_name, tensors): + raise NotImplementedError(op_name) diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/raw_softsplat.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/raw_softsplat.py new file mode 100644 index 
0000000000000000000000000000000000000000..0cfd3fd52484afa930939541c10c94c7b270d7bf --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/raw_softsplat.py @@ -0,0 +1,126 @@
+# Separate Taichi kernels into another file so that comfy.model_management won't be called in the new process
+
+import taichi as ti
+import taichi.math as tm
+
+@ti.func
+def put_to_tenOut(tenOut: ti.types.ndarray(), fltIn: ti.f32, flt: ti.f32, pos: tm.ivec2, i: ti.i32, ch: ti.i32):
+    # fltIn (the input sample) and flt (the bilinear weight) are floats, and
+    # pos can be negative near the image border, so it must be a signed ivec2
+    N, C, H, W = tenOut.shape
+    if (pos.x >= 0) and (pos.x < W) and (pos.y >= 0) and (pos.y < H):
+        tenOut[i, ch, pos.y, pos.x] += fltIn * flt
+@ti.kernel
+def softsplat_out(tenIn: ti.types.ndarray(), tenFlow: ti.types.ndarray(), tenOut: ti.types.ndarray()):
+    N, C, H, W = tenIn.shape
+    for i, ch, y, x in ti.ndrange(N, C, H, W):
+        fltX = x + tenFlow[i, 0, y, x]
+        fltY = y + tenFlow[i, 1, y, x]
+        fltIn = tenIn[i, ch, y, x]
+
+        northWest = tm.ivec2(ti.floor(fltX), ti.floor(fltY))
+        northEast = northWest + tm.ivec2(1, 0)
+        southWest = northWest + tm.ivec2(0, 1)
+        southEast = northWest + tm.ivec2(1, 1)
+
+        fltNorthwest = (southEast.x - fltX) * (southEast.y - fltY)
+        fltNortheast = (fltX - southWest.x) * (southWest.y - fltY)
+        fltSouthwest = (northEast.x - fltX) * (fltY - northEast.y)
+        fltSoutheast = (fltX - northWest.x) * (fltY - northWest.y)
+
+        put_to_tenOut(tenOut, fltIn, fltNorthwest, northWest, i, ch)
+        put_to_tenOut(tenOut, fltIn, fltNortheast, northEast, i, ch)
+        put_to_tenOut(tenOut, fltIn, fltSouthwest, southWest, i, ch)
+        put_to_tenOut(tenOut, fltIn, fltSoutheast, southEast, i, ch)
+
+@ti.func
+def flowgrad_contrib(tenOutgrad, fltIn, flt, pos, i, ch):
+    # Taichi passes scalars by value, so accumulating into a scalar parameter
+    # inside a ti.func never reaches the caller; return the contribution and
+    # accumulate at the call site instead
+    fltContrib = 0.0
+    N, C, H, W = tenOutgrad.shape
+    if (pos.x >= 0) and (pos.x < W) and (pos.y >= 0) and (pos.y < H):
+        fltContrib = tenOutgrad[i, ch, pos.y, pos.x] * fltIn * flt
+    return fltContrib
+
+@ti.kernel
+def softsplat_flowgrad(
+    tenIn: ti.types.ndarray(),
+    tenFlow: ti.types.ndarray(),
+    tenOutgrad: ti.types.ndarray(),
+    tenIngrad: ti.types.ndarray(),
+    tenFlowgrad: ti.types.ndarray()
+):
+    N, C, H, W = tenFlowgrad.shape
+    for i, ch, y, x in ti.ndrange(N, C, H, W):
+        fltFlowgrad = 0.0
+        fltX = x + tenFlow[i, 0, y, x]
+        fltY = y + tenFlow[i, 1, y, x]
+
+        northWest = tm.ivec2(ti.floor(fltX, dtype=ti.i32), ti.floor(fltY, dtype=ti.i32))
+        northEast = northWest + tm.ivec2(1, 0)
+        southWest = northWest + tm.ivec2(0, 1)
+        southEast = northWest + tm.ivec2(1, 1)
+
+        # the weights must be defined on every branch path for Taichi to compile
+        fltNorthwest, fltNortheast, fltSouthwest, fltSoutheast = 0.0, 0.0, 0.0, 0.0
+        if ch == 0:
+            fltNorthwest = -1.0 * (southEast.y - fltY)
+            fltNortheast = +1.0 * (southWest.y - fltY)
+            fltSouthwest = -1.0 * (fltY - northEast.y)
+            fltSoutheast = +1.0 * (fltY - northWest.y)
+        elif ch == 1:
+            fltNorthwest = -1.0 * (southEast.x - fltX)
+            fltNortheast = -1.0 * (fltX - southWest.x)
+            fltSouthwest = +1.0 * (northEast.x - fltX)
+            fltSoutheast = +1.0 * (fltX - northWest.x)
+
+        for outgrad_ch in range(tenOutgrad.shape[1]):
+            fltIn = tenIn[i, outgrad_ch, y, x]
+            fltFlowgrad += flowgrad_contrib(tenOutgrad, fltIn, fltNorthwest, northWest, i, outgrad_ch)
+            fltFlowgrad += flowgrad_contrib(tenOutgrad, fltIn, fltNortheast, northEast, i, outgrad_ch)
+            fltFlowgrad += flowgrad_contrib(tenOutgrad, fltIn, fltSouthwest, southWest, i, outgrad_ch)
+            fltFlowgrad += flowgrad_contrib(tenOutgrad, fltIn, fltSoutheast, southEast, i, outgrad_ch)
+
+        tenFlowgrad[i, ch, y, x] = fltFlowgrad  # write the gradient for this exact (batch, channel, y, x) cell
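+
+# For reference, the bilinear forward splatting computed by softsplat_out above
+# can be written in plain (slow) PyTorch. This is an illustrative sketch only,
+# assuming NCHW input, a 2-channel flow, and that math/itertools are imported;
+# each source pixel is scattered into its four nearest target pixels with
+# bilinear weights:
+#
+#   def softsplat_out_reference(tenIn, tenFlow):
+#       tenOut = torch.zeros_like(tenIn)
+#       N, C, H, W = tenIn.shape
+#       for i, y, x in itertools.product(range(N), range(H), range(W)):
+#           fltX = x + float(tenFlow[i, 0, y, x])
+#           fltY = y + float(tenFlow[i, 1, y, x])
+#           x0, y0 = int(math.floor(fltX)), int(math.floor(fltY))
+#           for dx, dy in ((0, 0), (1, 0), (0, 1), (1, 1)):
+#               weight = (1.0 - abs(fltX - (x0 + dx))) * (1.0 - abs(fltY - (y0 + dy)))
+#               if 0 <= x0 + dx < W and 0 <= y0 + dy < H:
+#                   tenOut[i, :, y0 + dy, x0 + dx] += tenIn[i, :, y, x] * weight
+#       return tenOut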
+
+@ti.func
+def ingrad_contrib(tenOutgrad, flt, pos, i, ch):
+    # same by-value caveat as flowgrad_contrib: return the contribution
+    # rather than accumulating into a scalar parameter
+    fltContrib = 0.0
+    N, C, H, W = tenOutgrad.shape
+    if (pos.x >= 0) and (pos.x < W) and (pos.y >= 0) and (pos.y < H):
+        fltContrib = tenOutgrad[i, ch, pos.y, pos.x] * flt
+    return fltContrib
+@ti.kernel
+def softsplat_ingrad(
+    tenIn: ti.types.ndarray(),
+    tenFlow: ti.types.ndarray(),
+    tenOutgrad: ti.types.ndarray(),
+    tenIngrad: ti.types.ndarray(),
+    tenFlowgrad: ti.types.ndarray()
+):
+    N, C, H, W = tenIngrad.shape
+    for i, ch, y, x in ti.ndrange(N, C, H, W):
+        fltIngrad = 0.0
+        fltX = x + tenFlow[i, 0, y, x]
+        fltY = y + tenFlow[i, 1, y, x]
+
+        northWest = tm.ivec2(ti.floor(fltX, dtype=ti.i32), ti.floor(fltY, dtype=ti.i32))
+        northEast = northWest + tm.ivec2(1, 0)
+        southWest = northWest + tm.ivec2(0, 1)
+        southEast = northWest + tm.ivec2(1, 1)
+
+        fltNorthwest = (southEast.x - fltX) * (southEast.y - fltY)
+        fltNortheast = (fltX - southWest.x) * (southWest.y - fltY)
+        fltSouthwest = (northEast.x - fltX) * (fltY - northEast.y)
+        fltSoutheast = (fltX - northWest.x) * (fltY - northWest.y)
+
+        fltIngrad += ingrad_contrib(tenOutgrad, fltNorthwest, northWest, i, ch)
+        fltIngrad += ingrad_contrib(tenOutgrad, fltNortheast, northEast, i, ch)
+        fltIngrad += ingrad_contrib(tenOutgrad, fltSouthwest, southWest, i, ch)
+        fltIngrad += ingrad_contrib(tenOutgrad, fltSoutheast, southEast, i, ch)
+        tenIngrad[i, ch, y, x] = fltIngrad  # write the gradient for this exact (batch, channel, y, x) cell
+
+# end
+
+def worker_interface(op_name, tensors):
+    if op_name == "softsplat_out":
+        tenIn, tenFlow = tensors
+        tenOut = tenIn.new_zeros(tenIn.shape)
+        softsplat_out(tenIn, tenFlow, tenOut)
+        return (tenOut, )
+
+    raise NotImplementedError(op_name)
+
+__all__ = ["worker_interface"] \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/sepconv.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/sepconv.py new file mode 100644 index 0000000000000000000000000000000000000000..f9d8909befa072646f4077b10bd18f735c928218 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/sepconv.py @@ -0,0 +1,39 @@
+import taichi as ti
+import taichi.math as tm
+from functools import reduce
+
+@ti.kernel
+def sepconv_out(tenIn: ti.types.ndarray(), tenVer: ti.types.ndarray(), tenHor: ti.types.ndarray(), tenOut: ti.types.ndarray()):
+    N, C, H, W = tenIn.shape
+    H_out, W_out = tenVer.shape[2], tenVer.shape[3]
+    # iterate over output coordinates and derive the flat output index from
+    # them; a shared running counter would race across Taichi's parallelized
+    # outer loop and would also run over the (larger) padded input extent
+    for i, ch, y, x in ti.ndrange(N, C, H_out, W_out):
+        # Kahan-compensated summation keeps the long separable filter stable
+        fltOut, fltKahanc, fltKahany, fltKahant = 0.0, 0.0, 0.0, 0.0
+        for intFy, intFx in ti.ndrange(tenVer.shape[1], tenHor.shape[1]):
+            fltKahany = tenIn[i, ch, y + intFy, x + intFx] * tenVer[i, intFy, y, x] * tenHor[i, intFx, y, x]
+            fltKahany = fltKahany - fltKahanc
+            fltKahant = fltOut + fltKahany
+            fltKahanc = (fltKahant - fltOut) - fltKahany
+            fltOut = fltKahant
+        tenOut[((i * C + ch) * H_out + y) * W_out + x] = fltOut
+
+
+def worker_interface(op_name, tensors):
+    if op_name == "sepconv_out":
+        tenIn, tenVer, tenHor = tensors
+        # tenVer and tenHor share their spatial extent, which is the output size
+        real_tenOut_shape = [
+            tenIn.shape[0],
+            tenIn.shape[1],
+            tenVer.shape[2],
+            tenVer.shape[3],
+        ]
+        tenOut = tenIn.new_zeros([
+            int(reduce(lambda a, b: a * b, real_tenOut_shape))
+        ])
+        sepconv_out(tenIn, tenVer, tenHor, tenOut)
+        tenOut = tenOut.view(*real_tenOut_shape)
+        return (tenOut, )
+
+    raise NotImplementedError(op_name)
+
+__all__ = ["worker_interface"] \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/utils.py
b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..ee42b2fb05c2230930d0fe4557db361a47b54708 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/utils.py @@ -0,0 +1,11 @@
+import platform
+import torch
+def to_shared_memory(tensors: tuple[torch.Tensor, ...]):
+    return [tensor.cpu() for tensor in tensors if tensor is not None]
+    """ if platform.system() == "Windows":
+        return [tensor.cpu() for tensor in tensors if tensor is not None]
+
+    return [tensor.share_memory_() for tensor in tensors if tensor is not None] """
+
+def to_device(tensors: tuple[torch.Tensor, ...], device: torch.device):
+    return [tensor.to(device) for tensor in tensors if tensor is not None] \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/worker_process.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/worker_process.py new file mode 100644 index 0000000000000000000000000000000000000000..586d06a9e310247fcdd2368f8e362eeaaddac1fd --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/ops/taichi_ops/worker_process.py @@ -0,0 +1,26 @@
+import torch.multiprocessing as mp
+import torch
+from .raw_softsplat import worker_interface as raw_softsplat
+from .costvol import worker_interface as costvol
+from .sepconv import worker_interface as sepconv
+from .utils import to_shared_memory, to_device
+import taichi as ti
+import traceback
+
+def f(child_conn, device: torch.device):
+    ti.init(arch=ti.gpu)
+    while True:
+        op_name, tensors = child_conn.recv()
+        tensors = to_device(tensors, device)
+        try:
+            if "softsplat" in op_name:
+                result = raw_softsplat(op_name, tensors)
+            elif "costvol" in op_name:
+                result = costvol(op_name, tensors)
+            elif "sepconv" in op_name:
+                result = sepconv(op_name, tensors)
+            else:
+                raise NotImplementedError(op_name)
+            child_conn.send(to_shared_memory(result))
+        except Exception:
+            # report the formatted traceback back to the parent process
+            child_conn.send(traceback.format_exc()) diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/rife/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/rife/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9c9e82dbeebcd7dcd49acc16703fbcea110d0657 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/rife/__init__.py @@ -0,0 +1,107 @@
+import torch
+from torch.utils.data import DataLoader
+import pathlib
+from vfi_utils import load_file_from_github_release, preprocess_frames, postprocess_frames, generic_frame_loop, InterpolationStateList
+import typing
+from comfy.model_management import get_torch_device
+import re
+from functools import cmp_to_key
+from packaging import version
+
+MODEL_TYPE = pathlib.Path(__file__).parent.name
+CKPT_NAME_VER_DICT = {
+    "rife40.pth": "4.0",
+    "rife41.pth": "4.0",
+    "rife42.pth": "4.2",
+    "rife43.pth": "4.3",
+    "rife44.pth": "4.3",
+    "rife45.pth": "4.5",
+    "rife46.pth": "4.6",
+    "rife47.pth": "4.7",
+    "rife48.pth": "4.7",
+    "rife49.pth": "4.7",
+    "sudo_rife4_269.662_testV1_scale1.pth": "4.0"
+    #Arch 4.10 doesn't work due to a state dict mismatch
+    #TODO: Investigate and fix it
+    #"rife410.pth": "4.10",
+    #"rife411.pth": "4.10",
+    #"rife412.pth": "4.10"
+}
+
+class RIFE_VFI:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {
+            "required": {
+                "ckpt_name": (
+                    sorted(list(CKPT_NAME_VER_DICT.keys()), key=lambda ckpt_name:
version.parse(CKPT_NAME_VER_DICT[ckpt_name])), + {"default": "rife47.pth"} + ), + "frames": ("IMAGE", ), + "clear_cache_after_n_frames": ("INT", {"default": 10, "min": 1, "max": 1000}), + "multiplier": ("INT", {"default": 2, "min": 1}), + "fast_mode": ("BOOLEAN", {"default":True}), + "ensemble": ("BOOLEAN", {"default":True}), + "scale_factor": ([0.25, 0.5, 1.0, 2.0, 4.0], {"default": 1.0}) + }, + "optional": { + "optional_interpolation_states": ("INTERPOLATION_STATES", ) + } + } + + RETURN_TYPES = ("IMAGE", ) + FUNCTION = "vfi" + CATEGORY = "ComfyUI-Frame-Interpolation/VFI" + + def vfi( + self, + ckpt_name: typing.AnyStr, + frames: torch.Tensor, + clear_cache_after_n_frames = 10, + multiplier: typing.SupportsInt = 2, + fast_mode = False, + ensemble = False, + scale_factor = 1.0, + optional_interpolation_states: InterpolationStateList = None, + **kwargs + ): + """ + Perform video frame interpolation using a given checkpoint model. + + Args: + ckpt_name (str): The name of the checkpoint model to use. + frames (torch.Tensor): A tensor containing input video frames. + clear_cache_after_n_frames (int, optional): The number of frames to process before clearing CUDA cache + to prevent memory overflow. Defaults to 10. Lower numbers are safer but mean more processing time. + How high you should set it depends on how many input frames there are, input resolution (after upscaling), + how many times you want to multiply them, and how long you're willing to wait for the process to complete. + multiplier (int, optional): The multiplier for each input frame. 60 input frames * 2 = 120 output frames. Defaults to 2. + + Returns: + tuple: A tuple containing the output interpolated frames. + + Note: + This method interpolates frames in a video sequence using a specified checkpoint model. + It processes each frame sequentially, generating interpolated frames between them. + + To prevent memory overflow, it clears the CUDA cache after processing a specified number of frames. 
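+
+            Example (illustrative numbers only): 48 input frames with multiplier=3
+            yield 48 * 3 = 144 output frames, and clear_cache_after_n_frames=10
+            empties the CUDA cache after every 10 frames processed.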
+ """ + from .rife_arch import IFNet + model_path = load_file_from_github_release(MODEL_TYPE, ckpt_name) + arch_ver = CKPT_NAME_VER_DICT[ckpt_name] + interpolation_model = IFNet(arch_ver=arch_ver) + interpolation_model.load_state_dict(torch.load(model_path)) + interpolation_model.eval().to(get_torch_device()) + frames = preprocess_frames(frames) + + def return_middle_frame(frame_0, frame_1, timestep, model, scale_list, in_fast_mode, in_ensemble): + return model(frame_0, frame_1, timestep, scale_list, in_fast_mode, in_ensemble) + + scale_list = [8 / scale_factor, 4 / scale_factor, 2 / scale_factor, 1 / scale_factor] + + args = [interpolation_model, scale_list, fast_mode, ensemble] + out = postprocess_frames( + generic_frame_loop(type(self).__name__, frames, clear_cache_after_n_frames, multiplier, return_middle_frame, *args, + interpolation_states=optional_interpolation_states, dtype=torch.float32) + ) + return (out,) diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/rife/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/rife/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fc0d32f7395c63da01434ebc4f8ff0eab741b51e Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/rife/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/rife/rife_arch.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/rife/rife_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..01eeed84d00aade73a6f6eb6337b3d48770e5558 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/rife/rife_arch.py @@ -0,0 +1,581 @@ +""" +26-Dez-21 +https://github.com/hzwer/Practical-RIFE +https://github.com/hzwer/Practical-RIFE/blob/main/model/warplayer.py +https://github.com/HolyWu/vs-rife/blob/master/vsrife/__init__.py +""" +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.optim import AdamW +import torch +import torch.nn.functional as F +import torch.nn as nn +import torch.optim as optim +import warnings +from comfy.model_management import get_torch_device + +device = get_torch_device() +backwarp_tenGrid = {} + + +class ResConv(nn.Module): + def __init__(self, c, dilation=1): + super(ResConv, self).__init__() + self.conv = nn.Conv2d(c, c, 3, 1, dilation, dilation=dilation, groups=1) + self.beta = nn.Parameter(torch.ones((1, c, 1, 1)), requires_grad=True) + self.relu = nn.LeakyReLU(0.2, True) + + def forward(self, x): + return self.relu(self.conv(x) * self.beta + x) + + +def warp(tenInput, tenFlow): + k = (str(tenFlow.device), str(tenFlow.size())) + if k not in backwarp_tenGrid: + tenHorizontal = ( + torch.linspace(-1.0, 1.0, tenFlow.shape[3], device=device) + .view(1, 1, 1, tenFlow.shape[3]) + .expand(tenFlow.shape[0], -1, tenFlow.shape[2], -1) + ) + tenVertical = ( + torch.linspace(-1.0, 1.0, tenFlow.shape[2], device=device) + .view(1, 1, tenFlow.shape[2], 1) + .expand(tenFlow.shape[0], -1, -1, tenFlow.shape[3]) + ) + backwarp_tenGrid[k] = torch.cat([tenHorizontal, tenVertical], 1).to(device) + + tenFlow = torch.cat( + [ + tenFlow[:, 0:1, :, :] / ((tenInput.shape[3] - 1.0) / 2.0), + tenFlow[:, 1:2, :, :] / ((tenInput.shape[2] - 1.0) / 2.0), + ], + 1, + ) + + g = (backwarp_tenGrid[k] + tenFlow).permute(0, 2, 3, 1) + + if tenInput.type() == "torch.cuda.HalfTensor": + g = g.half() + + return 
torch.nn.functional.grid_sample( + input=tenInput, + grid=g, + mode="bilinear", + padding_mode="border", + align_corners=True, + ) + + +def conv( + in_planes, + out_planes, + kernel_size=3, + stride=1, + padding=1, + dilation=1, + arch_ver="4.0", +): + if arch_ver == "4.0": + return nn.Sequential( + nn.Conv2d( + in_planes, + out_planes, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + bias=True, + ), + nn.PReLU(out_planes), + ) + if arch_ver in ["4.2", "4.3", "4.5", "4.6", "4.7", "4.10"]: + return nn.Sequential( + nn.Conv2d( + in_planes, + out_planes, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + bias=True, + ), + nn.LeakyReLU(0.2, True), + ) + + +def conv_woact(in_planes, out_planes, kernel_size=3, stride=1, padding=1, dilation=1): + return nn.Sequential( + nn.Conv2d( + in_planes, + out_planes, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + bias=True, + ), + ) + + +def conv_woact(in_planes, out_planes, kernel_size=3, stride=1, padding=1, dilation=1): + return nn.Sequential( + nn.Conv2d( + in_planes, + out_planes, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + bias=True, + ) + ) + + +def deconv(in_planes, out_planes, kernel_size=4, stride=2, padding=1, arch_ver="4.0"): + if arch_ver == "4.0": + return nn.Sequential( + torch.nn.ConvTranspose2d( + in_channels=in_planes, + out_channels=out_planes, + kernel_size=4, + stride=2, + padding=1, + bias=True, + ), + nn.PReLU(out_planes), + ) + if arch_ver in ["4.2", "4.3", "4.5", "4.6", "4.7", "4.10"]: + return nn.Sequential( + torch.nn.ConvTranspose2d( + in_channels=in_planes, + out_channels=out_planes, + kernel_size=4, + stride=2, + padding=1, + bias=True, + ), + nn.LeakyReLU(0.2, True), + ) + + +class Conv2(nn.Module): + def __init__(self, in_planes, out_planes, stride=2, arch_ver="4.0"): + super(Conv2, self).__init__() + self.conv1 = conv(in_planes, out_planes, 3, stride, 1, arch_ver=arch_ver) + self.conv2 = conv(out_planes, out_planes, 3, 1, 1, arch_ver=arch_ver) + + def forward(self, x): + x = self.conv1(x) + x = self.conv2(x) + return x + + +class IFBlock(nn.Module): + def __init__(self, in_planes, c=64, arch_ver="4.0"): + super(IFBlock, self).__init__() + self.arch_ver = arch_ver + self.conv0 = nn.Sequential( + conv(in_planes, c // 2, 3, 2, 1, arch_ver=arch_ver), + conv(c // 2, c, 3, 2, 1, arch_ver=arch_ver), + ) + self.arch_ver = arch_ver + + if arch_ver in ["4.0", "4.2", "4.3"]: + self.convblock = nn.Sequential( + conv(c, c, arch_ver=arch_ver), + conv(c, c, arch_ver=arch_ver), + conv(c, c, arch_ver=arch_ver), + conv(c, c, arch_ver=arch_ver), + conv(c, c, arch_ver=arch_ver), + conv(c, c, arch_ver=arch_ver), + conv(c, c, arch_ver=arch_ver), + conv(c, c, arch_ver=arch_ver), + ) + self.lastconv = nn.ConvTranspose2d(c, 5, 4, 2, 1) + + if arch_ver in ["4.5", "4.6", "4.7", "4.10"]: + self.convblock = nn.Sequential( + ResConv(c), + ResConv(c), + ResConv(c), + ResConv(c), + ResConv(c), + ResConv(c), + ResConv(c), + ResConv(c), + ) + if arch_ver == "4.5": + self.lastconv = nn.Sequential( + nn.ConvTranspose2d(c, 4 * 5, 4, 2, 1), nn.PixelShuffle(2) + ) + if arch_ver in ["4.6", "4.7", "4.10"]: + self.lastconv = nn.Sequential( + nn.ConvTranspose2d(c, 4 * 6, 4, 2, 1), nn.PixelShuffle(2) + ) + + def forward(self, x, flow=None, scale=1): + x = F.interpolate( + x, scale_factor=1.0 / scale, mode="bilinear", align_corners=False + ) + if flow is not None: + flow = ( + F.interpolate( + flow, scale_factor=1.0 / 
scale, mode="bilinear", align_corners=False + ) + * 1.0 + / scale + ) + x = torch.cat((x, flow), 1) + feat = self.conv0(x) + if self.arch_ver == "4.0": + feat = self.convblock(feat) + feat + if self.arch_ver in ["4.2", "4.3", "4.5", "4.6", "4.7", "4.10"]: + feat = self.convblock(feat) + + tmp = self.lastconv(feat) + if self.arch_ver in ["4.0", "4.2", "4.3"]: + tmp = F.interpolate( + tmp, scale_factor=scale * 2, mode="bilinear", align_corners=False + ) + flow = tmp[:, :4] * scale * 2 + if self.arch_ver in ["4.5", "4.6", "4.7", "4.10"]: + tmp = F.interpolate( + tmp, scale_factor=scale, mode="bilinear", align_corners=False + ) + flow = tmp[:, :4] * scale + mask = tmp[:, 4:5] + return flow, mask + + +class Contextnet(nn.Module): + def __init__(self, arch_ver="4.0"): + super(Contextnet, self).__init__() + c = 16 + self.conv1 = Conv2(3, c, arch_ver=arch_ver) + self.conv2 = Conv2(c, 2 * c, arch_ver=arch_ver) + self.conv3 = Conv2(2 * c, 4 * c, arch_ver=arch_ver) + self.conv4 = Conv2(4 * c, 8 * c, arch_ver=arch_ver) + + def forward(self, x, flow): + x = self.conv1(x) + flow = ( + F.interpolate(flow, scale_factor=0.5, mode="bilinear", align_corners=False) + * 0.5 + ) + f1 = warp(x, flow) + x = self.conv2(x) + flow = ( + F.interpolate(flow, scale_factor=0.5, mode="bilinear", align_corners=False) + * 0.5 + ) + f2 = warp(x, flow) + x = self.conv3(x) + flow = ( + F.interpolate(flow, scale_factor=0.5, mode="bilinear", align_corners=False) + * 0.5 + ) + f3 = warp(x, flow) + x = self.conv4(x) + flow = ( + F.interpolate(flow, scale_factor=0.5, mode="bilinear", align_corners=False) + * 0.5 + ) + f4 = warp(x, flow) + return [f1, f2, f3, f4] + + +class Unet(nn.Module): + def __init__(self, arch_ver="4.0"): + super(Unet, self).__init__() + c = 16 + self.down0 = Conv2(17, 2 * c, arch_ver=arch_ver) + self.down1 = Conv2(4 * c, 4 * c, arch_ver=arch_ver) + self.down2 = Conv2(8 * c, 8 * c, arch_ver=arch_ver) + self.down3 = Conv2(16 * c, 16 * c, arch_ver=arch_ver) + self.up0 = deconv(32 * c, 8 * c, arch_ver=arch_ver) + self.up1 = deconv(16 * c, 4 * c, arch_ver=arch_ver) + self.up2 = deconv(8 * c, 2 * c, arch_ver=arch_ver) + self.up3 = deconv(4 * c, c, arch_ver=arch_ver) + self.conv = nn.Conv2d(c, 3, 3, 1, 1) + + def forward(self, img0, img1, warped_img0, warped_img1, mask, flow, c0, c1): + s0 = self.down0( + torch.cat((img0, img1, warped_img0, warped_img1, mask, flow), 1) + ) + s1 = self.down1(torch.cat((s0, c0[0], c1[0]), 1)) + s2 = self.down2(torch.cat((s1, c0[1], c1[1]), 1)) + s3 = self.down3(torch.cat((s2, c0[2], c1[2]), 1)) + x = self.up0(torch.cat((s3, c0[3], c1[3]), 1)) + x = self.up1(torch.cat((x, s2), 1)) + x = self.up2(torch.cat((x, s1), 1)) + x = self.up3(torch.cat((x, s0), 1)) + x = self.conv(x) + return torch.sigmoid(x) + + +""" +currently supports 4.0-4.12 + +4.0: 4.0, 4.1 +4.2: 4.2 +4.3: 4.3, 4.4 +4.5: 4.5 +4.6: 4.6 +4.7: 4.7, 4.8, 4.9 +4.10: 4.10 4.11 4.12 +""" + + +class IFNet(nn.Module): + def __init__(self, arch_ver="4.0"): + super(IFNet, self).__init__() + self.arch_ver = arch_ver + if arch_ver in ["4.0", "4.2", "4.3", "4.5", "4.6"]: + self.block0 = IFBlock(7, c=192, arch_ver=arch_ver) + self.block1 = IFBlock(8 + 4, c=128, arch_ver=arch_ver) + self.block2 = IFBlock(8 + 4, c=96, arch_ver=arch_ver) + self.block3 = IFBlock(8 + 4, c=64, arch_ver=arch_ver) + if arch_ver in ["4.7"]: + self.block0 = IFBlock(7 + 8, c=192, arch_ver=arch_ver) + self.block1 = IFBlock(8 + 4 + 8, c=128, arch_ver=arch_ver) + self.block2 = IFBlock(8 + 4 + 8, c=96, arch_ver=arch_ver) + self.block3 = IFBlock(8 + 4 + 8, c=64, 
arch_ver=arch_ver) + self.encode = nn.Sequential( + nn.Conv2d(3, 16, 3, 2, 1), nn.ConvTranspose2d(16, 4, 4, 2, 1) + ) + if arch_ver in ["4.10"]: + self.block0 = IFBlock(7 + 16, c=192) + self.block1 = IFBlock(8 + 4 + 16, c=128) + self.block2 = IFBlock(8 + 4 + 16, c=96) + self.block3 = IFBlock(8 + 4 + 16, c=64) + self.encode = nn.Sequential( + nn.Conv2d(3, 32, 3, 2, 1), + nn.LeakyReLU(0.2, True), + nn.Conv2d(32, 32, 3, 1, 1), + nn.LeakyReLU(0.2, True), + nn.Conv2d(32, 32, 3, 1, 1), + nn.LeakyReLU(0.2, True), + nn.ConvTranspose2d(32, 8, 4, 2, 1), + ) + + if arch_ver in ["4.0", "4.2", "4.3"]: + self.contextnet = Contextnet(arch_ver=arch_ver) + self.unet = Unet(arch_ver=arch_ver) + self.arch_ver = arch_ver + + def forward( + self, + img0, + img1, + timestep=0.5, + scale_list=[8, 4, 2, 1], + training=True, + fastmode=True, + ensemble=False, + return_flow=False, + ): + img0 = torch.clamp(img0, 0, 1) + img1 = torch.clamp(img1, 0, 1) + + n, c, h, w = img0.shape + ph = ((h - 1) // 64 + 1) * 64 + pw = ((w - 1) // 64 + 1) * 64 + padding = (0, pw - w, 0, ph - h) + img0 = F.pad(img0, padding) + img1 = F.pad(img1, padding) + x = torch.cat((img0, img1), 1) + + if training == False: + channel = x.shape[1] // 2 + img0 = x[:, :channel] + img1 = x[:, channel:] + if not torch.is_tensor(timestep): + timestep = (x[:, :1].clone() * 0 + 1) * timestep + else: + timestep = timestep.repeat(1, 1, img0.shape[2], img0.shape[3]) + + flow_list = [] + merged = [] + mask_list = [] + + if self.arch_ver in ["4.7", "4.10"]: + f0 = self.encode(img0[:, :3]) + f1 = self.encode(img1[:, :3]) + + warped_img0 = img0 + warped_img1 = img1 + flow = None + mask = None + block = [self.block0, self.block1, self.block2, self.block3] + + for i in range(4): + if flow is None: + # 4.0-4.6 + if self.arch_ver in ["4.0", "4.2", "4.3", "4.5", "4.6"]: + flow, mask = block[i]( + torch.cat((img0[:, :3], img1[:, :3], timestep), 1), + None, + scale=scale_list[i], + ) + if ensemble: + f1, m1 = block[i]( + torch.cat((img1[:, :3], img0[:, :3], 1 - timestep), 1), + None, + scale=scale_list[i], + ) + flow = (flow + torch.cat((f1[:, 2:4], f1[:, :2]), 1)) / 2 + mask = (mask + (-m1)) / 2 + + # 4.7+ + if self.arch_ver in ["4.7", "4.10"]: + flow, mask = block[i]( + torch.cat((img0[:, :3], img1[:, :3], f0, f1, timestep), 1), + None, + scale=scale_list[i], + ) + + if ensemble: + f_, m_ = block[i]( + torch.cat( + (img1[:, :3], img0[:, :3], f1, f0, 1 - timestep), 1 + ), + None, + scale=scale_list[i], + ) + flow = (flow + torch.cat((f_[:, 2:4], f_[:, :2]), 1)) / 2 + mask = (mask + (-m_)) / 2 + + else: + # 4.0-4.6 + if self.arch_ver in ["4.0", "4.2", "4.3", "4.5", "4.6"]: + f0, m0 = block[i]( + torch.cat( + (warped_img0[:, :3], warped_img1[:, :3], timestep, mask), 1 + ), + flow, + scale=scale_list[i], + ) + + if self.arch_ver in ["4.0"]: + if ( + i == 1 + and f0[:, :2].abs().max() > 32 + and f0[:, 2:4].abs().max() > 32 + and not training + ): + for k in range(4): + scale_list[k] *= 2 + flow, mask = block[0]( + torch.cat((img0[:, :3], img1[:, :3], timestep), 1), + None, + scale=scale_list[0], + ) + warped_img0 = warp(img0, flow[:, :2]) + warped_img1 = warp(img1, flow[:, 2:4]) + f0, m0 = block[i]( + torch.cat( + ( + warped_img0[:, :3], + warped_img1[:, :3], + timestep, + mask, + ), + 1, + ), + flow, + scale=scale_list[i], + ) + + # 4.7+ + if self.arch_ver in ["4.7", "4.10"]: + fd, m0 = block[i]( + torch.cat( + ( + warped_img0[:, :3], + warped_img1[:, :3], + warp(f0, flow[:, :2]), + warp(f1, flow[:, 2:4]), + timestep, + mask, + ), + 1, + ), + flow, + scale=scale_list[i], 
+ ) + flow = flow + fd + + # 4.0-4.6 ensemble + if ensemble and self.arch_ver in [ + "4.0", + "4.2", + "4.3", + "4.5", + "4.6", + ]: + f1, m1 = block[i]( + torch.cat( + ( + warped_img1[:, :3], + warped_img0[:, :3], + 1 - timestep, + -mask, + ), + 1, + ), + torch.cat((flow[:, 2:4], flow[:, :2]), 1), + scale=scale_list[i], + ) + f0 = (f0 + torch.cat((f1[:, 2:4], f1[:, :2]), 1)) / 2 + m0 = (m0 + (-m1)) / 2 + + # 4.7+ ensemble + if ensemble and self.arch_ver in ["4.7", "4.10"]: + wf0 = warp(f0, flow[:, :2]) + wf1 = warp(f1, flow[:, 2:4]) + + f_, m_ = block[i]( + torch.cat( + ( + warped_img1[:, :3], + warped_img0[:, :3], + wf1, + wf0, + 1 - timestep, + -mask, + ), + 1, + ), + torch.cat((flow[:, 2:4], flow[:, :2]), 1), + scale=scale_list[i], + ) + fd = (fd + torch.cat((f_[:, 2:4], f_[:, :2]), 1)) / 2 + mask = (m0 + (-m_)) / 2 + + if self.arch_ver in ["4.0", "4.2", "4.3", "4.5", "4.6"]: + flow = flow + f0 + mask = mask + m0 + + if not ensemble and self.arch_ver in ["4.7", "4.10"]: + mask = m0 + + mask_list.append(mask) + flow_list.append(flow) + warped_img0 = warp(img0, flow[:, :2]) + warped_img1 = warp(img1, flow[:, 2:4]) + merged.append((warped_img0, warped_img1)) + + if self.arch_ver in ["4.0", "4.1", "4.2", "4.3", "4.4", "4.5", "4.6"]: + mask_list[3] = torch.sigmoid(mask_list[3]) + merged[3] = merged[3][0] * mask_list[3] + merged[3][1] * (1 - mask_list[3]) + + if self.arch_ver in ["4.7", "4.10"]: + mask = torch.sigmoid(mask) + merged[3] = warped_img0 * mask + warped_img1 * (1 - mask) + + if not fastmode and self.arch_ver in ["4.0", "4.2", "4.3"]: + c0 = self.contextnet(img0, flow[:, :2]) + c1 = self.contextnet(img1, flow[:, 2:4]) + tmp = self.unet(img0, img1, warped_img0, warped_img1, mask, flow, c0, c1) + res = tmp[:, :3] * 2 - 1 + merged[3] = torch.clamp(merged[3] + res, 0, 1) + return merged[3][:, :, :h, :w] \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/sepconv/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/sepconv/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..eb62e76289052ff8a0bc7c9260b664c62e2e033a --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/sepconv/__init__.py @@ -0,0 +1,56 @@ +import torch +from torch.utils.data import DataLoader +import pathlib +from vfi_utils import load_file_from_github_release, preprocess_frames, postprocess_frames +import typing +from comfy.model_management import soft_empty_cache, get_torch_device +from vfi_utils import InterpolationStateList, generic_frame_loop + +MODEL_TYPE = pathlib.Path(__file__).parent.name +CKPT_NAMES = ["sepconv.pth"] + + +class SepconvVFI: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "ckpt_name": (CKPT_NAMES, ), + "frames": ("IMAGE", ), + "clear_cache_after_n_frames": ("INT", {"default": 10, "min": 1, "max": 1000}), + "multiplier": ("INT", {"default": 2, "min": 2, "max": 1000}) + }, + "optional": { + "optional_interpolation_states": ("INTERPOLATION_STATES", ) + } + } + + RETURN_TYPES = ("IMAGE", ) + FUNCTION = "vfi" + CATEGORY = "ComfyUI-Frame-Interpolation/VFI" + + def vfi( + self, + ckpt_name: typing.AnyStr, + frames: torch.Tensor, + clear_cache_after_n_frames = 10, + multiplier: typing.SupportsInt = 2, + optional_interpolation_states: InterpolationStateList = None, + **kwargs + ): + from .sepconv_enhanced import Network + model_path = load_file_from_github_release(MODEL_TYPE, ckpt_name) + interpolation_model = Network() + 
interpolation_model.load_state_dict(torch.load(model_path)) + interpolation_model.eval().to(get_torch_device()) + frames = preprocess_frames(frames) + + def return_middle_frame(frame_0, frame_1, timestep, model): + return model(frame_0, frame_1) + + args = [interpolation_model] + out = postprocess_frames( + generic_frame_loop(type(self).__name__, frames, clear_cache_after_n_frames, multiplier, return_middle_frame, *args, + interpolation_states=optional_interpolation_states, use_timestep=False, dtype=torch.float32) + ) + return (out,) diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/sepconv/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/sepconv/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b15c5457524c322af358c5ba4b865b443e19d62d Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/sepconv/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/sepconv/sepconv_enhanced.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/sepconv/sepconv_enhanced.py new file mode 100644 index 0000000000000000000000000000000000000000..e5a747ee5053bd7b1053e1a432cf86a0164a2223 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/sepconv/sepconv_enhanced.py @@ -0,0 +1,748 @@ +""" +23-nov-21 +https://github.com/sniklaus/revisiting-sepconv/blob/fea509d98157170df1fb35bf615bd41d98858e1a/run.py +https://github.com/sniklaus/revisiting-sepconv/blob/fea509d98157170df1fb35bf615bd41d98858e1a/sepconv/sepconv.py +Deleted stuffs about arguments_strModel and getopt +""" +#!/usr/bin/env python +import torch +import typing +from comfy.model_management import get_torch_device + +########################################################## +from vfi_models.ops import sepconv_func +########################################################## + + + + + +import torch + +import math +import numpy +import os +import PIL +import PIL.Image +import sys +import typing + +########################################################## + +assert ( + int(str("").join(torch.__version__.split(".")[0:2])) >= 13 +) # requires at least pytorch version 1.3.0 + +torch.set_grad_enabled( + False +) # make sure to not compute gradients for computational performance + +torch.backends.cudnn.enabled = ( + True # make sure to use cudnn for computational performance +) + +########################################################## + +########################################################## + + +class Basic(torch.nn.Module): + def __init__( + self, + strType: str, + intChans: typing.List[int], + objScratch: typing.Optional[typing.Dict] = None, + ): + super().__init__() + + self.strType = strType + self.netEvenize = None + self.netMain = None + self.netShortcut = None + + intIn = intChans[0] + intOut = intChans[-1] + netMain = [] + intChans = intChans.copy() + fltStride = 1.0 + + for intPart, strPart in enumerate(self.strType.split("+")[0].split("-")): + if strPart.startswith("conv") == True: + intKsize = 3 + intPad = 1 + strPad = "zeros" + + if "(" in strPart: + intKsize = int(strPart.split("(")[1].split(")")[0].split(",")[0]) + intPad = int(math.floor(0.5 * (intKsize - 1))) + + if "replpad" in strPart.split("(")[1].split(")")[0].split(","): + strPad = "replicate" + if "reflpad" in strPart.split("(")[1].split(")")[0].split(","): + strPad = "reflect" + # end + + if "nopad" in 
self.strType.split("+"): + intPad = 0 + # end + + netMain += [ + torch.nn.Conv2d( + in_channels=intChans[0], + out_channels=intChans[1], + kernel_size=intKsize, + stride=1, + padding=intPad, + padding_mode=strPad, + bias="nobias" not in self.strType.split("+"), + ) + ] + intChans = intChans[1:] + fltStride *= 1.0 + + elif strPart.startswith("sconv") == True: + intKsize = 3 + intPad = 1 + strPad = "zeros" + + if "(" in strPart: + intKsize = int(strPart.split("(")[1].split(")")[0].split(",")[0]) + intPad = int(math.floor(0.5 * (intKsize - 1))) + + if "replpad" in strPart.split("(")[1].split(")")[0].split(","): + strPad = "replicate" + if "reflpad" in strPart.split("(")[1].split(")")[0].split(","): + strPad = "reflect" + # end + + if "nopad" in self.strType.split("+"): + intPad = 0 + # end + + netMain += [ + torch.nn.Conv2d( + in_channels=intChans[0], + out_channels=intChans[1], + kernel_size=intKsize, + stride=2, + padding=intPad, + padding_mode=strPad, + bias="nobias" not in self.strType.split("+"), + ) + ] + intChans = intChans[1:] + fltStride *= 2.0 + + elif strPart.startswith("up") == True: + + class Up(torch.nn.Module): + def __init__(self, strType): + super().__init__() + + self.strType = strType + + # end + + def forward(self, tenIn: torch.Tensor) -> torch.Tensor: + if self.strType == "nearest": + return torch.nn.functional.interpolate( + input=tenIn, + scale_factor=2.0, + mode="nearest", + align_corners=False, + ) + + elif self.strType == "bilinear": + return torch.nn.functional.interpolate( + input=tenIn, + scale_factor=2.0, + mode="bilinear", + align_corners=False, + ) + + elif self.strType == "pyramid": + return pyramid(tenIn, None, "up") + + elif self.strType == "shuffle": + return torch.nn.functional.pixel_shuffle( + tenIn, upscale_factor=2 + ) # https://github.com/pytorch/pytorch/issues/62854 + + # end + + assert False # to make torchscript happy + + # end + + # end + + strType = "bilinear" + + if "(" in strPart: + if "nearest" in strPart.split("(")[1].split(")")[0].split(","): + strType = "nearest" + if "pyramid" in strPart.split("(")[1].split(")")[0].split(","): + strType = "pyramid" + if "shuffle" in strPart.split("(")[1].split(")")[0].split(","): + strType = "shuffle" + # end + + netMain += [Up(strType)] + fltStride *= 0.5 + + elif strPart.startswith("prelu") == True: + netMain += [ + torch.nn.PReLU( + num_parameters=1, + init=float(strPart.split("(")[1].split(")")[0].split(",")[0]), + ) + ] + fltStride *= 1.0 + + elif True: + assert False + + # end + # end + + self.netMain = torch.nn.Sequential(*netMain) + + for strPart in self.strType.split("+")[1:]: + if strPart.startswith("skip") == True: + if intIn == intOut and fltStride == 1.0: + self.netShortcut = torch.nn.Identity() + + elif intIn != intOut and fltStride == 1.0: + self.netShortcut = torch.nn.Conv2d( + in_channels=intIn, + out_channels=intOut, + kernel_size=1, + stride=1, + padding=0, + bias="nobias" not in self.strType.split("+"), + ) + + elif intIn == intOut and fltStride != 1.0: + + class Down(torch.nn.Module): + def __init__(self, fltScale): + super().__init__() + + self.fltScale = fltScale + + # end + + def forward(self, tenIn: torch.Tensor) -> torch.Tensor: + return torch.nn.functional.interpolate( + input=tenIn, + scale_factor=self.fltScale, + mode="bilinear", + align_corners=False, + ) + + # end + + # end + + self.netShortcut = Down(1.0 / fltStride) + + elif intIn != intOut and fltStride != 1.0: + + class Down(torch.nn.Module): + def __init__(self, fltScale): + super().__init__() + + self.fltScale = fltScale 
+ + # end + + def forward(self, tenIn: torch.Tensor) -> torch.Tensor: + return torch.nn.functional.interpolate( + input=tenIn, + scale_factor=self.fltScale, + mode="bilinear", + align_corners=False, + ) + + # end + + # end + + self.netShortcut = torch.nn.Sequential( + Down(1.0 / fltStride), + torch.nn.Conv2d( + in_channels=intIn, + out_channels=intOut, + kernel_size=1, + stride=1, + padding=0, + bias="nobias" not in self.strType.split("+"), + ), + ) + + # end + + elif strPart.startswith("...") == True: + pass + + # end + # end + + assert len(intChans) == 1 + + # end + + def forward(self, tenIn: torch.Tensor) -> torch.Tensor: + if self.netEvenize is not None: + tenIn = self.netEvenize(tenIn) + # end + + tenOut = self.netMain(tenIn) + + if self.netShortcut is not None: + tenOut = tenOut + self.netShortcut(tenIn) + # end + + return tenOut + + # end + + +# end + + +class Encode(torch.nn.Module): + objScratch: typing.Dict[str, typing.List[int]] = None + + def __init__( + self, + intIns: typing.List[int], + intOuts: typing.List[int], + strHor: str, + strVer: str, + objScratch: typing.Dict[str, typing.List[int]], + ): + super().__init__() + + assert len(intIns) == len(intOuts) + assert len(intOuts) == len(intIns) + + self.intRows = len(intIns) and len(intOuts) + self.intIns = intIns.copy() + self.intOuts = intOuts.copy() + self.strHor = strHor + self.strVer = strVer + self.objScratch = objScratch + + self.netHor = torch.nn.ModuleList() + self.netVer = torch.nn.ModuleList() + + for intRow in range(self.intRows): + netHor = torch.nn.Identity() + netVer = torch.nn.Identity() + + if self.intOuts[intRow] != 0: + if self.intIns[intRow] != 0: + netHor = Basic( + self.strHor, + [ + self.intIns[intRow], + self.intOuts[intRow], + self.intOuts[intRow], + ], + objScratch, + ) + # end + + if intRow != 0: + netVer = Basic( + self.strVer, + [ + self.intOuts[intRow - 1], + self.intOuts[intRow], + self.intOuts[intRow], + ], + objScratch, + ) + # end + # end + + self.netHor.append(netHor) + self.netVer.append(netVer) + # end + + # end + + def forward(self, tenIns: typing.List[torch.Tensor]) -> typing.List[torch.Tensor]: + intRow = 0 + for netHor in self.netHor: + if self.intOuts[intRow] != 0: + if self.intIns[intRow] != 0: + tenIns[intRow] = netHor(tenIns[intRow]) + # end + # end + intRow += 1 + # end + + intRow = 0 + for netVer in self.netVer: + if self.intOuts[intRow] != 0: + if intRow != 0: + tenIns[intRow] = tenIns[intRow] + netVer(tenIns[intRow - 1]) + # end + # end + intRow += 1 + # end + + for intRow, tenIn in enumerate(tenIns): + self.objScratch["levelshape" + str(intRow)] = tenIn.shape + # end + + return tenIns + + # end + + +# end + + +class Decode(torch.nn.Module): + objScratch: typing.Dict[str, typing.List[int]] = None + + def __init__( + self, + intIns: typing.List[int], + intOuts: typing.List[int], + strHor: str, + strVer: str, + objScratch: typing.Dict[str, typing.List[int]], + ): + super().__init__() + + assert len(intIns) == len(intOuts) + assert len(intOuts) == len(intIns) + + self.intRows = len(intIns) and len(intOuts) + self.intIns = intIns.copy() + self.intOuts = intOuts.copy() + self.strHor = strHor + self.strVer = strVer + self.objScratch = objScratch + + self.netHor = torch.nn.ModuleList() + self.netVer = torch.nn.ModuleList() + + for intRow in range(self.intRows - 1, -1, -1): + netHor = torch.nn.Identity() + netVer = torch.nn.Identity() + + if self.intOuts[intRow] != 0: + if self.intIns[intRow] != 0: + netHor = Basic( + self.strHor, + [ + self.intIns[intRow], + self.intOuts[intRow], + 
self.intOuts[intRow], + ], + objScratch, + ) + # end + + if intRow != self.intRows - 1: + netVer = Basic( + self.strVer, + [ + self.intOuts[intRow + 1], + self.intOuts[intRow], + self.intOuts[intRow], + ], + objScratch, + ) + # end + # end + + self.netHor.append(netHor) + self.netVer.append(netVer) + # end + + # end + + def forward(self, tenIns: typing.List[torch.Tensor]) -> typing.List[torch.Tensor]: + intRow = self.intRows - 1 + for netHor in self.netHor: + if self.intOuts[intRow] != 0: + if self.intIns[intRow] != 0: + tenIns[intRow] = netHor(tenIns[intRow]) + # end + # end + intRow -= 1 + # end + + intRow = self.intRows - 1 + for netVer in self.netVer: + if self.intOuts[intRow] != 0: + if intRow != self.intRows - 1: + tenVer = netVer(tenIns[intRow + 1]) + + if "levelshape" + str(intRow) in self.objScratch: + if ( + tenVer.shape[2] + == self.objScratch["levelshape" + str(intRow)][2] + 1 + ): + tenVer = torch.nn.functional.pad( + input=tenVer, + pad=[0, 0, 0, -1], + mode="constant", + value=0.0, + ) + if ( + tenVer.shape[3] + == self.objScratch["levelshape" + str(intRow)][3] + 1 + ): + tenVer = torch.nn.functional.pad( + input=tenVer, + pad=[0, -1, 0, 0], + mode="constant", + value=0.0, + ) + # end + + tenIns[intRow] = tenIns[intRow] + tenVer + # end + # end + intRow -= 1 + # end + + return tenIns + + # end + + +# end + +########################################################## + + +class Network(torch.nn.Module): + def __init__(self): + super().__init__() + + self.intEncdec = [1, 1] + self.intChannels = [32, 64, 128, 256, 512] + + self.objScratch = {} + + self.netInput = torch.nn.Conv2d( + in_channels=3, + out_channels=int(round(0.5 * self.intChannels[0])), + kernel_size=3, + stride=1, + padding=1, + padding_mode="zeros", + ) + + self.netEncode = torch.nn.Sequential( + *( + [ + Encode( + [0] * len(self.intChannels), + self.intChannels, + "prelu(0.25)-conv(3)-prelu(0.25)-conv(3)+skip", + "prelu(0.25)-sconv(3)-prelu(0.25)-conv(3)", + self.objScratch, + ) + ] + + [ + Encode( + self.intChannels, + self.intChannels, + "prelu(0.25)-conv(3)-prelu(0.25)-conv(3)+skip", + "prelu(0.25)-sconv(3)-prelu(0.25)-conv(3)", + self.objScratch, + ) + for intEncdec in range(1, self.intEncdec[0]) + ] + ) + ) + + self.netDecode = torch.nn.Sequential( + *( + [ + Decode( + [0] + self.intChannels[1:], + [0] + self.intChannels[1:], + "prelu(0.25)-conv(3)-prelu(0.25)-conv(3)+skip", + "prelu(0.25)-up(bilinear)-conv(3)-prelu(0.25)-conv(3)", + self.objScratch, + ) + for intEncdec in range(0, self.intEncdec[1]) + ] + ) + ) + + self.netVerone = Basic( + "up(bilinear)-conv(3)-prelu(0.25)-conv(3)", + [self.intChannels[1], self.intChannels[1], 51], + ) + self.netVertwo = Basic( + "up(bilinear)-conv(3)-prelu(0.25)-conv(3)", + [self.intChannels[1], self.intChannels[1], 51], + ) + self.netHorone = Basic( + "up(bilinear)-conv(3)-prelu(0.25)-conv(3)", + [self.intChannels[1], self.intChannels[1], 51], + ) + self.netHortwo = Basic( + "up(bilinear)-conv(3)-prelu(0.25)-conv(3)", + [self.intChannels[1], self.intChannels[1], 51], + ) + + # self.load_state_dict(torch.hub.load_state_dict_from_url(url='http://content.sniklaus.com/resepconv/network-' + arguments_strModel + '.pytorch', file_name='resepconv-' + arguments_strModel)) + + # end + + def forward(self, x1, x2): + # padding if needed + intWidth = x1.shape[3] + intHeight = x1.shape[2] + + intPadr = (2 - (intWidth % 2)) % 2 + intPadb = (2 - (intHeight % 2)) % 2 + + tenOne = torch.nn.functional.pad( + input=x1, pad=[0, intPadr, 0, intPadb], mode="replicate" + ) + tenTwo = 
torch.nn.functional.pad( + input=x2, pad=[0, intPadr, 0, intPadb], mode="replicate" + ) + #### + + tenSeq = [tenOne, tenTwo] + + with torch.set_grad_enabled(False): + tenStack = torch.stack(tenSeq, 1) + tenMean = ( + tenStack.view(tenStack.shape[0], -1) + .mean(1, True) + .view(tenStack.shape[0], 1, 1, 1) + ) + tenStd = ( + tenStack.view(tenStack.shape[0], -1) + .std(1, True) + .view(tenStack.shape[0], 1, 1, 1) + ) + tenSeq = [ + (tenFrame - tenMean) / (tenStd + 0.0000001) for tenFrame in tenSeq + ] + tenSeq = [tenFrame.detach() for tenFrame in tenSeq] + # end + + tenOut = self.netDecode( + self.netEncode( + [torch.cat([self.netInput(tenSeq[0]), self.netInput(tenSeq[1])], 1)] + + ([0.0] * (len(self.intChannels) - 1)) + ) + )[1] + + tenOne = torch.nn.functional.pad( + input=tenOne, + pad=[ + int(math.floor(0.5 * 51)), + int(math.floor(0.5 * 51)), + int(math.floor(0.5 * 51)), + int(math.floor(0.5 * 51)), + ], + mode="replicate", + ) + tenTwo = torch.nn.functional.pad( + input=tenTwo, + pad=[ + int(math.floor(0.5 * 51)), + int(math.floor(0.5 * 51)), + int(math.floor(0.5 * 51)), + int(math.floor(0.5 * 51)), + ], + mode="replicate", + ) + + tenOne = torch.cat( + [ + tenOne, + tenOne.new_ones([tenOne.shape[0], 1, tenOne.shape[2], tenOne.shape[3]]), + ], + 1, + ).detach() + tenTwo = torch.cat( + [ + tenTwo, + tenTwo.new_ones([tenTwo.shape[0], 1, tenTwo.shape[2], tenTwo.shape[3]]), + ], + 1, + ).detach() + + tenVerone = self.netVerone(tenOut) + tenVertwo = self.netVertwo(tenOut) + tenHorone = self.netHorone(tenOut) + tenHortwo = self.netHortwo(tenOut) + + tenOut = sepconv_func.apply(tenOne, tenVerone, tenHorone) + sepconv_func.apply( + tenTwo, tenVertwo, tenHortwo + ) + + tenNormalize = tenOut[:, -1:, :, :] + tenNormalize[tenNormalize.abs() < 0.01] = 1.0 + tenOut = tenOut[:, :-1, :, :] / tenNormalize + + # crop if needed + return tenOut[:, :, :intHeight, :intWidth] + + # end + + +# end + +netNetwork = None + +########################################################## + + +def estimate(tenOne, tenTwo): + global netNetwork + + if netNetwork is None: + netNetwork = Network().to(get_torch_device()).eval() + # end + + assert tenOne.shape[1] == tenTwo.shape[1] + assert tenOne.shape[2] == tenTwo.shape[2] + + intWidth = tenOne.shape[2] + intHeight = tenOne.shape[1] + + assert ( + intWidth <= 1280 + ) # while our approach works with larger images, we do not recommend it unless you are aware of the implications + assert ( + intHeight <= 720 + ) # while our approach works with larger images, we do not recommend it unless you are aware of the implications + + tenPreprocessedOne = tenOne.to(get_torch_device()).view(1, 3, intHeight, intWidth) + tenPreprocessedTwo = tenTwo.to(get_torch_device()).view(1, 3, intHeight, intWidth) + + intPadr = (2 - (intWidth % 2)) % 2 + intPadb = (2 - (intHeight % 2)) % 2 + + tenPreprocessedOne = torch.nn.functional.pad( + input=tenPreprocessedOne, pad=[0, intPadr, 0, intPadb], mode="replicate" + ) + tenPreprocessedTwo = torch.nn.functional.pad( + input=tenPreprocessedTwo, pad=[0, intPadr, 0, intPadb], mode="replicate" + ) + + return netNetwork([tenPreprocessedOne, tenPreprocessedTwo])[ + 0, :, :intHeight, :intWidth + ].cpu() + + +# end diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/stmfnet/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/stmfnet/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1613ab97bf4dffa7ececaf0df97ea2335297cff5 --- /dev/null +++ 
b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/stmfnet/__init__.py @@ -0,0 +1,100 @@
+import torch
+from comfy.model_management import get_torch_device, soft_empty_cache
+import numpy as np
+import typing
+from vfi_utils import InterpolationStateList, load_file_from_github_release, preprocess_frames, postprocess_frames, assert_batch_size
+import pathlib
+import warnings
+import gc
+
+MODEL_TYPE = pathlib.Path(__file__).parent.name
+device = get_torch_device()
+
+class STMFNet_VFI:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {
+            "required": {
+                "ckpt_name": (["stmfnet.pth"], ),
+                "frames": ("IMAGE", ),
+                "clear_cache_after_n_frames": ("INT", {"default": 10, "min": 1, "max": 1000}),
+                "multiplier": ("INT", {"default": 2, "min": 2, "max": 2}), #TODO: Implement recursively invoking the interpolator for multi-frame interpolation
+                "duplicate_first_last_frames": ("BOOLEAN", {"default": False})
+            },
+            "optional": {
+                "optional_interpolation_states": ("INTERPOLATION_STATES", )
+            }
+        }
+
+    RETURN_TYPES = ("IMAGE", )
+    FUNCTION = "vfi"
+    CATEGORY = "ComfyUI-Frame-Interpolation/VFI"
+
+    #Reference: https://github.com/danier97/ST-MFNet/blob/main/interpolate_yuv.py#L93
+    def vfi(
+        self,
+        ckpt_name: typing.AnyStr,
+        frames: torch.Tensor,
+        clear_cache_after_n_frames = 10,
+        multiplier: typing.SupportsInt = 2,
+        duplicate_first_last_frames: bool = False,
+        optional_interpolation_states: InterpolationStateList = None,
+        **kwargs
+    ):
+        from .stmfnet_arch import STMFNet_Model
+        if multiplier != 2:
+            warnings.warn("ST-MFNet currently supports only 2x interpolation; the node will proceed with 2x regardless, so please set multiplier=2")
+
+        assert_batch_size(frames, batch_size=4, vfi_name="ST-MFNet")
+        interpolation_states = optional_interpolation_states
+        model_path = load_file_from_github_release(MODEL_TYPE, ckpt_name)
+        model = STMFNet_Model()
+        model.load_state_dict(torch.load(model_path))
+        model = model.eval().to(device)
+
+        frames = preprocess_frames(frames)
+        number_of_frames_processed_since_last_cleared_cuda_cache = 0
+        output_frames = []
+        for frame_itr in range(len(frames) - 3):
+            #Does skipping frame i+1 make sense in this case?
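+            # ST-MFNet consumes a sliding four-frame window (i, i+1, i+2, i+3)
+            # and synthesizes one new frame between i+1 and i+2; the window is
+            # skipped only when frames i and i+1 are both marked as skipped.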
+ if interpolation_states is not None and interpolation_states.is_frame_skipped(frame_itr) and interpolation_states.is_frame_skipped(frame_itr + 1): + continue + + #Ensure that input frames are in fp32 - the same dtype as model + frame0, frame1, frame2, frame3 = ( + frames[frame_itr:frame_itr+1].float(), + frames[frame_itr+1:frame_itr+2].float(), + frames[frame_itr+2:frame_itr+3].float(), + frames[frame_itr+3:frame_itr+4].float() + ) + new_frame = model(frame0.to(device), frame1.to(device), frame2.to(device), frame3.to(device)).detach().cpu() + number_of_frames_processed_since_last_cleared_cuda_cache += 2 + + if frame_itr == 0: + output_frames.append(frame0) + if duplicate_first_last_frames: + output_frames.append(frame0) # repeat the first frame + output_frames.append(frame1) + output_frames.append(new_frame) + output_frames.append(frame2) + if frame_itr == len(frames) - 4: + output_frames.append(frame3) + if duplicate_first_last_frames: + output_frames.append(frame3) # repeat the last frame + + # Try to avoid a memory overflow by clearing cuda cache regularly + if number_of_frames_processed_since_last_cleared_cuda_cache >= clear_cache_after_n_frames: + print("Comfy-VFI: Clearing cache...", end = ' ') + soft_empty_cache() + number_of_frames_processed_since_last_cleared_cuda_cache = 0 + print("Done cache clearing") + gc.collect() + + dtype = torch.float32 + output_frames = [frame.cpu().to(dtype=dtype) for frame in output_frames] #Ensure all frames are in cpu + out = torch.cat(output_frames, dim=0) + # clear cache for courtesy + print("Comfy-VFI: Final clearing cache...", end = ' ') + soft_empty_cache() + print("Done cache clearing") + return (postprocess_frames(out), ) \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/stmfnet/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/stmfnet/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9080455b64dc7728a39988e03fb757b2ab3f4f0d Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/stmfnet/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/stmfnet/stmfnet_arch.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/stmfnet/stmfnet_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..1e495e0eabf68a39af6bc7eceef554c3777d9d5f --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/stmfnet/stmfnet_arch.py @@ -0,0 +1,2858 @@ +# https://github.com/danielism97/ST-MFNet/blob/main/models/stmfnet.py +# https://github.com/danielism97/ST-MFNet/blob/main/models/misc/pwcnet.py +# https://github.com/danielism97/ST-MFNet/blob/main/models/misc/correlation/correlation.py +# https://github.com/danielism97/ST-MFNet/blob/main/models/misc/gridnet.py +# https://github.com/danielism97/ST-MFNet/blob/main/models/feature.py +# https://github.com/danielism97/ST-MFNet/blob/main/utility.py +# https://github.com/danielism97/ST-MFNet/blob/main/models/misc/resnet_3D.py +# https://github.com/danielism97/ST-MFNet/blob/main/cupy_module/adacof.py +# https://github.com/danielism97/ST-MFNet/blob/main/cupy_module/softsplat.py +# https://github.com/danielism97/ST-MFNet/blob/main/models/misc/__init__.py +# https://github.com/danielism97/ST-MFNet/blob/main/models/misc/pwcnet.py +# 
https://github.com/danielism97/ST-MFNet/blob/main/models/misc/correlation/correlation.py +from torch.nn import functional as F +from torch.utils.model_zoo import load_url as load_state_dict_from_url +import cv2 +import math +import numpy +import numpy as np +import PIL +import PIL.Image +import re +import sys +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.optim as optim +import torch.optim.lr_scheduler as lrs +from vfi_models.ops import FunctionCorrelation, FunctionAdaCoF, ModuleSoftsplat +from vfi_utils import get_ckpt_container_path +import pathlib +MODEL_TYPE = pathlib.Path(__file__).parent.name + +#Simple way to reduce oranges on VSCode bar +def identity(x): + return x + + +def backwarp(tenInput, tenFlow): + backwarp_tenGrid = {} + backwarp_tenPartial = {} + if str(tenFlow.shape) not in backwarp_tenGrid: + tenHor = ( + torch.linspace( + -1.0 + (1.0 / tenFlow.shape[3]), + 1.0 - (1.0 / tenFlow.shape[3]), + tenFlow.shape[3], + ) + .view(1, 1, 1, -1) + .expand(-1, -1, tenFlow.shape[2], -1) + ) + tenVer = ( + torch.linspace( + -1.0 + (1.0 / tenFlow.shape[2]), + 1.0 - (1.0 / tenFlow.shape[2]), + tenFlow.shape[2], + ) + .view(1, 1, -1, 1) + .expand(-1, -1, -1, tenFlow.shape[3]) + ) + + backwarp_tenGrid[str(tenFlow.shape)] = torch.cat([tenHor, tenVer], 1).cuda() + # end + + if str(tenFlow.shape) not in backwarp_tenPartial: + backwarp_tenPartial[str(tenFlow.shape)] = tenFlow.new_ones( + [tenFlow.shape[0], 1, tenFlow.shape[2], tenFlow.shape[3]] + ) + # end + + tenFlow = torch.cat( + [ + tenFlow[:, 0:1, :, :] / ((tenInput.shape[3] - 1.0) / 2.0), + tenFlow[:, 1:2, :, :] / ((tenInput.shape[2] - 1.0) / 2.0), + ], + 1, + ) + tenInput = torch.cat([tenInput, backwarp_tenPartial[str(tenFlow.shape)]], 1) + + tenOutput = torch.nn.functional.grid_sample( + input=tenInput, + grid=(backwarp_tenGrid[str(tenFlow.shape)] + tenFlow).permute(0, 2, 3, 1), + mode="bilinear", + padding_mode="zeros", + align_corners=False, + ) + + tenMask = tenOutput[:, -1:, :, :] + tenMask[tenMask > 0.999] = 1.0 + tenMask[tenMask < 1.0] = 0.0 + + return tenOutput[:, :-1, :, :] * tenMask + + +# end + +########################################################## + + +class PWCNet(torch.nn.Module): + def __init__(self): + super(PWCNet, self).__init__() + + class Extractor(torch.nn.Module): + def __init__(self): + super(Extractor, self).__init__() + + self.netOne = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=3, + out_channels=16, + kernel_size=3, + stride=2, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=16, + out_channels=16, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=16, + out_channels=16, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + self.netTwo = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=16, + out_channels=32, + kernel_size=3, + stride=2, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=32, + out_channels=32, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=32, + out_channels=32, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + self.netThr = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=32, + out_channels=64, + kernel_size=3, + stride=2, + 
padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=64, + out_channels=64, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=64, + out_channels=64, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + self.netFou = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=64, + out_channels=96, + kernel_size=3, + stride=2, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=96, + out_channels=96, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=96, + out_channels=96, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + self.netFiv = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=96, + out_channels=128, + kernel_size=3, + stride=2, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=128, + out_channels=128, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=128, + out_channels=128, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + self.netSix = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=128, + out_channels=196, + kernel_size=3, + stride=2, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=196, + out_channels=196, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=196, + out_channels=196, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + # end + + def forward(self, tenInput): + tenOne = self.netOne(tenInput) + tenTwo = self.netTwo(tenOne) + tenThr = self.netThr(tenTwo) + tenFou = self.netFou(tenThr) + tenFiv = self.netFiv(tenFou) + tenSix = self.netSix(tenFiv) + + return [tenOne, tenTwo, tenThr, tenFou, tenFiv, tenSix] + + # end + + # end + + class Decoder(torch.nn.Module): + def __init__(self, intLevel): + super(Decoder, self).__init__() + + intPrevious = [ + None, + None, + 81 + 32 + 2 + 2, + 81 + 64 + 2 + 2, + 81 + 96 + 2 + 2, + 81 + 128 + 2 + 2, + 81, + None, + ][intLevel + 1] + intCurrent = [ + None, + None, + 81 + 32 + 2 + 2, + 81 + 64 + 2 + 2, + 81 + 96 + 2 + 2, + 81 + 128 + 2 + 2, + 81, + None, + ][intLevel + 0] + + if intLevel < 6: + self.netUpflow = torch.nn.ConvTranspose2d( + in_channels=2, + out_channels=2, + kernel_size=4, + stride=2, + padding=1, + ) + if intLevel < 6: + self.netUpfeat = torch.nn.ConvTranspose2d( + in_channels=intPrevious + 128 + 128 + 96 + 64 + 32, + out_channels=2, + kernel_size=4, + stride=2, + padding=1, + ) + if intLevel < 6: + self.fltBackwarp = [None, None, None, 5.0, 2.5, 1.25, 0.625, None][ + intLevel + 1 + ] + + self.netOne = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=intCurrent, + out_channels=128, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + self.netTwo = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=intCurrent + 128, + out_channels=128, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + 
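# The three dense stages that follow keep shrinking the channel width
+ # (96, then 64, then 32) before netSix maps the concatenated features
+ # to a 2-channel flow field.
+ 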
self.netThr = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=intCurrent + 128 + 128, + out_channels=96, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + self.netFou = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=intCurrent + 128 + 128 + 96, + out_channels=64, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + self.netFiv = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=intCurrent + 128 + 128 + 96 + 64, + out_channels=32, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + self.netSix = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=intCurrent + 128 + 128 + 96 + 64 + 32, + out_channels=2, + kernel_size=3, + stride=1, + padding=1, + ) + ) + + # end + + def forward(self, tenFirst, tenSecond, objPrevious): + tenFlow = None + tenFeat = None + + if objPrevious is None: + tenFlow = None + tenFeat = None + + tenVolume = torch.nn.functional.leaky_relu( + input=FunctionCorrelation( + tenFirst=tenFirst, tenSecond=tenSecond + ), + negative_slope=0.1, + inplace=False, + ) + + tenFeat = torch.cat([tenVolume], 1) + + elif objPrevious is not None: + tenFlow = self.netUpflow(objPrevious["tenFlow"]) + tenFeat = self.netUpfeat(objPrevious["tenFeat"]) + + tenVolume = torch.nn.functional.leaky_relu( + input=FunctionCorrelation( + tenFirst=tenFirst, + tenSecond=backwarp( + tenInput=tenSecond, tenFlow=tenFlow * self.fltBackwarp + ), + ), + negative_slope=0.1, + inplace=False, + ) + + tenFeat = torch.cat([tenVolume, tenFirst, tenFlow, tenFeat], 1) + + # end + + tenFeat = torch.cat([self.netOne(tenFeat), tenFeat], 1) + tenFeat = torch.cat([self.netTwo(tenFeat), tenFeat], 1) + tenFeat = torch.cat([self.netThr(tenFeat), tenFeat], 1) + tenFeat = torch.cat([self.netFou(tenFeat), tenFeat], 1) + tenFeat = torch.cat([self.netFiv(tenFeat), tenFeat], 1) + + tenFlow = self.netSix(tenFeat) + + return {"tenFlow": tenFlow, "tenFeat": tenFeat} + + # end + + # end + + class Refiner(torch.nn.Module): + def __init__(self): + super(Refiner, self).__init__() + + self.netMain = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=81 + 32 + 2 + 2 + 128 + 128 + 96 + 64 + 32, + out_channels=128, + kernel_size=3, + stride=1, + padding=1, + dilation=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=128, + out_channels=128, + kernel_size=3, + stride=1, + padding=2, + dilation=2, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=128, + out_channels=128, + kernel_size=3, + stride=1, + padding=4, + dilation=4, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=128, + out_channels=96, + kernel_size=3, + stride=1, + padding=8, + dilation=8, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=96, + out_channels=64, + kernel_size=3, + stride=1, + padding=16, + dilation=16, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=64, + out_channels=32, + kernel_size=3, + stride=1, + padding=1, + dilation=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=32, + out_channels=2, + kernel_size=3, + stride=1, + padding=1, + dilation=1, + ), + ) + + # end + + def forward(self, tenInput): + return self.netMain(tenInput) + + # end + + # end + + self.netExtractor = Extractor() + + 
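# Decoders run coarse-to-fine: netSix starts from the raw correlation
+ # volume at the coarsest level, and each finer decoder refines the
+ # upsampled flow and features from the level above (see forward).
+ 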
self.netTwo = Decoder(2) + self.netThr = Decoder(3) + self.netFou = Decoder(4) + self.netFiv = Decoder(5) + self.netSix = Decoder(6) + + self.netRefiner = Refiner() + + self.load_state_dict( + { + strKey.replace("module", "net"): tenWeight + for strKey, tenWeight in torch.hub.load_state_dict_from_url( + url="http://content.sniklaus.com/github/pytorch-pwc/network-" + + "default" + + ".pytorch", + model_dir=get_ckpt_container_path(MODEL_TYPE) + ).items() + } + ) + + # end + + def forward(self, tenFirst, tenSecond, *args): + # optionally pass pre-extracted feature pyramid in as args + if len(args) == 0: + tenFirst = self.netExtractor(tenFirst) + tenSecond = self.netExtractor(tenSecond) + else: + tenFirst, tenSecond = args + + objEstimate = self.netSix(tenFirst[-1], tenSecond[-1], None) + objEstimate = self.netFiv(tenFirst[-2], tenSecond[-2], objEstimate) + objEstimate = self.netFou(tenFirst[-3], tenSecond[-3], objEstimate) + objEstimate = self.netThr(tenFirst[-4], tenSecond[-4], objEstimate) + objEstimate = self.netTwo(tenFirst[-5], tenSecond[-5], objEstimate) + + return objEstimate["tenFlow"] + self.netRefiner(objEstimate["tenFeat"]) + + # end + + def extract_pyramid(self, tenFirst, tenSecond): + return self.netExtractor(tenFirst), self.netExtractor(tenSecond) + + def extract_pyramid_single(self, tenFirst): + return self.netExtractor(tenFirst) + + +# end + +netNetwork = None + +########################################################## + + +def estimate(tenFirst, tenSecond): + global netNetwork + + if netNetwork is None: + netNetwork = Network().cuda().eval() + # end + + assert tenFirst.shape[1] == tenSecond.shape[1] + assert tenFirst.shape[2] == tenSecond.shape[2] + + intWidth = tenFirst.shape[2] + intHeight = tenFirst.shape[1] + + assert ( + intWidth == 1024 + ) # remember that there is no guarantee for correctness, comment this line out if you acknowledge this and want to continue + assert ( + intHeight == 436 + ) # remember that there is no guarantee for correctness, comment this line out if you acknowledge this and want to continue + + tenPreprocessedFirst = tenFirst.cuda().view(1, 3, intHeight, intWidth) + tenPreprocessedSecond = tenSecond.cuda().view(1, 3, intHeight, intWidth) + + intPreprocessedWidth = int(math.floor(math.ceil(intWidth / 64.0) * 64.0)) + intPreprocessedHeight = int(math.floor(math.ceil(intHeight / 64.0) * 64.0)) + + tenPreprocessedFirst = torch.nn.functional.interpolate( + input=tenPreprocessedFirst, + size=(intPreprocessedHeight, intPreprocessedWidth), + mode="bilinear", + align_corners=False, + ) + tenPreprocessedSecond = torch.nn.functional.interpolate( + input=tenPreprocessedSecond, + size=(intPreprocessedHeight, intPreprocessedWidth), + mode="bilinear", + align_corners=False, + ) + + tenFlow = 20.0 * torch.nn.functional.interpolate( + input=netNetwork(tenPreprocessedFirst, tenPreprocessedSecond), + size=(intHeight, intWidth), + mode="bilinear", + align_corners=False, + ) + + tenFlow[:, 0, :, :] *= float(intWidth) / float(intPreprocessedWidth) + tenFlow[:, 1, :, :] *= float(intHeight) / float(intPreprocessedHeight) + + return tenFlow[0, :, :, :].cpu() + + +# end + + +class Upsampler_8tap(nn.Module): + def __init__(self): + super(Upsampler_8tap, self).__init__() + filt_8tap = torch.tensor([[-1, 4, -11, 40, 40, -11, 4, -1]]).div(64) + self.filter = nn.Parameter(filt_8tap.repeat(3, 1, 1, 1), requires_grad=False) + + def forward(self, im): + b, c, h, w = im.shape + im_up = torch.zeros(b, c, h * 2, w * 2).to(im.device) + im_up[:, :, ::2, ::2] = im + + p = (8 - 1) // 2 
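+ # The three missing sub-grids are filled with the same 8-tap half-pel
+ # filter: row-wise for (even row, odd col) samples, column-wise for
+ # (odd row, even col), then row-wise again over the new column samples
+ # for the diagonal (odd row, odd col) positions; reflect padding of
+ # (p, p + 1) keeps each filtered map at the input width.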
+ im_up_row = F.conv2d( + F.pad(im, pad=(p, p + 1, 0, 0), mode="reflect"), self.filter, groups=3 + ) + im_up[:, :, 0::2, 1::2] = im_up_row + im_up_col = torch.transpose( + F.conv2d( + F.pad(torch.transpose(im, 2, 3), pad=(p, p + 1, 0, 0), mode="reflect"), + self.filter, + groups=3, + ), + 2, + 3, + ) + im_up[:, :, 1::2, 0::2] = im_up_col + im_up_cross = F.conv2d( + F.pad(im_up[:, :, 1::2, ::2], pad=(p, p + 1, 0, 0), mode="reflect"), + self.filter, + groups=3, + ) + im_up[:, :, 1::2, 1::2] = im_up_cross + return im_up + +# end + + +model_urls = { + "r3d_18": "https://download.pytorch.org/models/r3d_18-b3b3357e.pth", + "mc3_18": "https://download.pytorch.org/models/mc3_18-a90a0ba3.pth", + "r2plus1d_18": "https://download.pytorch.org/models/r2plus1d_18-91a641e6.pth", +} + + +class Conv3DSimple(nn.Conv3d): + def __init__(self, in_planes, out_planes, midplanes=None, stride=1, padding=1): + super(Conv3DSimple, self).__init__( + in_channels=in_planes, + out_channels=out_planes, + kernel_size=(3, 3, 3), + stride=stride, + padding=padding, + bias=False, + ) + + @staticmethod + def get_downsample_stride(stride, temporal_stride): + if temporal_stride: + return (temporal_stride, stride, stride) + else: + return (stride, stride, stride) + + +class Conv2Plus1D(nn.Sequential): + def __init__(self, in_planes, out_planes, midplanes, stride=1, padding=1): + super(Conv2Plus1D, self).__init__( + nn.Conv3d( + in_planes, + midplanes, + kernel_size=(1, 3, 3), + stride=(1, stride, stride), + padding=(0, padding, padding), + bias=False, + ), + batchnorm(midplanes), + nn.ReLU(inplace=True), + nn.Conv3d( + midplanes, + out_planes, + kernel_size=(3, 1, 1), + stride=(stride, 1, 1), + padding=(padding, 0, 0), + bias=False, + ), + ) + + @staticmethod + def get_downsample_stride(stride): + return stride, stride, stride + + +class Conv3DNoTemporal(nn.Conv3d): + def __init__(self, in_planes, out_planes, midplanes=None, stride=1, padding=1): + super(Conv3DNoTemporal, self).__init__( + in_channels=in_planes, + out_channels=out_planes, + kernel_size=(1, 3, 3), + stride=(1, stride, stride), + padding=(0, padding, padding), + bias=False, + ) + + @staticmethod + def get_downsample_stride(stride): + return 1, stride, stride + + +class SEGating(nn.Module): + def __init__(self, inplanes, reduction=16): + super().__init__() + + self.pool = nn.AdaptiveAvgPool3d(1) + self.attn_layer = nn.Sequential( + nn.Conv3d(inplanes, inplanes, kernel_size=1, stride=1, bias=True), + nn.Sigmoid(), + ) + + def forward(self, x): + out = self.pool(x) + y = self.attn_layer(out) + return x * y + + +class BasicBlock(nn.Module): + expansion = 1 + + def __init__(self, inplanes, planes, conv_builder, stride=1, downsample=None): + midplanes = (inplanes * planes * 3 * 3 * 3) // (inplanes * 3 * 3 + 3 * planes) + + super(BasicBlock, self).__init__() + self.conv1 = nn.Sequential( + conv_builder(inplanes, planes, midplanes, stride), + batchnorm(planes), + nn.ReLU(inplace=True), + ) + self.conv2 = nn.Sequential( + conv_builder(planes, planes, midplanes), batchnorm(planes) + ) + self.fg = SEGating(planes) ## Feature Gating, from FLAVR + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + out = self.conv1(x) + out = self.conv2(out) + out = self.fg(out) + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + + +class Bottleneck(nn.Module): + expansion = 4 + + def __init__(self, inplanes, planes, conv_builder, stride=1, 
downsample=None): + super(Bottleneck, self).__init__() + midplanes = (inplanes * planes * 3 * 3 * 3) // (inplanes * 3 * 3 + 3 * planes) + + # 1x1x1 + self.conv1 = nn.Sequential( + nn.Conv3d(inplanes, planes, kernel_size=1, bias=False), + batchnorm(planes), + nn.ReLU(inplace=True), + ) + # Second kernel + self.conv2 = nn.Sequential( + conv_builder(planes, planes, midplanes, stride), + batchnorm(planes), + nn.ReLU(inplace=True), + ) + + # 1x1x1 + self.conv3 = nn.Sequential( + nn.Conv3d(planes, planes * self.expansion, kernel_size=1, bias=False), + batchnorm(planes * self.expansion), + ) + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.conv2(out) + out = self.conv3(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + + +class BasicStem(nn.Sequential): + """The default conv-batchnorm-relu stem""" + + def __init__(self, outplanes=32): + super(BasicStem, self).__init__( + nn.Conv3d( + 3, + outplanes, + kernel_size=(3, 7, 7), + stride=(1, 2, 2), + padding=(1, 3, 3), + bias=False, + ), + batchnorm(outplanes), + nn.ReLU(inplace=True), + ) + + +class R2Plus1dStem(nn.Sequential): + """R(2+1)D stem is different than the default one as it uses separated 3D convolution""" + + def __init__(self): + super(R2Plus1dStem, self).__init__( + nn.Conv3d( + 3, + 45, + kernel_size=(1, 7, 7), + stride=(1, 2, 2), + padding=(0, 3, 3), + bias=False, + ), + batchnorm(45), + nn.ReLU(inplace=True), + nn.Conv3d( + 45, + 64, + kernel_size=(3, 1, 1), + stride=(1, 1, 1), + padding=(1, 0, 0), + bias=False, + ), + batchnorm(64), + nn.ReLU(inplace=True), + ) + + +class VideoResNet(nn.Module): + def __init__( + self, + block, + conv_makers, + layers, + stem, + zero_init_residual=False, + channels=[32, 64, 96, 128], + ): + """Generic resnet video generator. + + Args: + block (nn.Module): resnet building block + conv_makers (list(functions)): generator function for each layer + layers (List[int]): number of blocks per layer + stem (nn.Module, optional): Resnet stem, if None, defaults to conv-bn-relu. Defaults to None. + zero_init_residual (bool, optional): Zero init bottleneck residual BN. Defaults to False. 
+ """ + super(VideoResNet, self).__init__() + self.inplanes = channels[0] # output channel of first stem + + self.stem = stem() + + self.layer1 = self._make_layer( + block, conv_makers[0], channels[0], layers[0], stride=1 + ) + self.layer2 = self._make_layer( + block, conv_makers[1], channels[1], layers[1], stride=2, temporal_stride=1 + ) + self.layer3 = self._make_layer( + block, conv_makers[2], channels[2], layers[2], stride=2, temporal_stride=1 + ) + self.layer4 = self._make_layer( + block, conv_makers[3], channels[3], layers[3], stride=1, temporal_stride=1 + ) + + # init weights + self._initialize_weights() + + if zero_init_residual: + for m in self.modules(): + if isinstance(m, Bottleneck): + nn.init.constant_(m.bn3.weight, 0) + + def forward(self, x): + tensorConv0 = self.stem(x) + tensorConv1 = self.layer1(tensorConv0) + tensorConv2 = self.layer2(tensorConv1) + tensorConv3 = self.layer3(tensorConv2) + tensorConv4 = self.layer4(tensorConv3) + return tensorConv0, tensorConv1, tensorConv2, tensorConv3, tensorConv4 + + def _make_layer( + self, block, conv_builder, planes, blocks, stride=1, temporal_stride=None + ): + downsample = None + + if stride != 1 or self.inplanes != planes * block.expansion: + ds_stride = conv_builder.get_downsample_stride(stride, temporal_stride) + downsample = nn.Sequential( + nn.Conv3d( + self.inplanes, + planes * block.expansion, + kernel_size=1, + stride=ds_stride, + bias=False, + ), + batchnorm(planes * block.expansion), + ) + stride = ds_stride + + layers = [] + layers.append(block(self.inplanes, planes, conv_builder, stride, downsample)) + + self.inplanes = planes * block.expansion + for i in range(1, blocks): + layers.append(block(self.inplanes, planes, conv_builder)) + + return nn.Sequential(*layers) + + def _initialize_weights(self): + for m in self.modules(): + if isinstance(m, nn.Conv3d): + nn.init.kaiming_normal_(m.weight, mode="fan_out", nonlinearity="relu") + if m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.BatchNorm3d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, 0.01) + nn.init.constant_(m.bias, 0) + + +def _video_resnet(arch, pretrained=False, progress=True, **kwargs): + model = VideoResNet(**kwargs) + + if pretrained: + state_dict = load_state_dict_from_url(model_urls[arch], progress=progress, model_dir=get_ckpt_container_path(MODEL_TYPE)) + model.load_state_dict(state_dict) + return model + + +def r3d_18(bn=False, pretrained=False, progress=True, **kwargs): + """Construct 18 layer Resnet3D model as in + https://arxiv.org/abs/1711.11248 + + Args: + pretrained (bool): If True, returns a model pre-trained on Kinetics-400 + progress (bool): If True, displays a progress bar of the download to stderr + + Returns: + nn.Module: R3D-18 network + """ + + global batchnorm + if bn: + batchnorm = nn.BatchNorm3d + else: + batchnorm = identity + + return _video_resnet( + "r3d_18", + pretrained, + progress, + block=BasicBlock, + conv_makers=[Conv3DSimple] * 4, + layers=[2, 2, 2, 2], + stem=BasicStem, + **kwargs, + ) + + +def mc3_18(bn=False, pretrained=False, progress=True, **kwargs): + """Constructor for 18 layer Mixed Convolution network as in + https://arxiv.org/abs/1711.11248 + + Args: + pretrained (bool): If True, returns a model pre-trained on Kinetics-400 + progress (bool): If True, displays a progress bar of the download to stderr + + Returns: + nn.Module: MC3 Network definition + """ + global batchnorm + if bn: + batchnorm = 
nn.BatchNorm3d + else: + batchnorm = identity + + return _video_resnet( + "mc3_18", + pretrained, + progress, + block=BasicBlock, + conv_makers=[Conv3DSimple] + [Conv3DNoTemporal] * 3, + layers=[2, 2, 2, 2], + stem=BasicStem, + **kwargs, + ) + + +def r2plus1d_18(bn=False, pretrained=False, progress=True, **kwargs): + """Constructor for the 18 layer deep R(2+1)D network as in + https://arxiv.org/abs/1711.11248 + + Args: + pretrained (bool): If True, returns a model pre-trained on Kinetics-400 + progress (bool): If True, displays a progress bar of the download to stderr + + Returns: + nn.Module: R(2+1)D-18 network + """ + + global batchnorm + if bn: + batchnorm = nn.BatchNorm3d + else: + batchnorm = identity + + return _video_resnet( + "r2plus1d_18", + pretrained, + progress, + block=BasicBlock, + conv_makers=[Conv2Plus1D] * 4, + layers=[2, 2, 2, 2], + stem=R2Plus1dStem, + **kwargs, + ) + + +class upConv3D(nn.Module): + def __init__(self, in_ch, out_ch, kernel_size, stride, padding, upmode="transpose"): + super().__init__() + self.upmode = upmode + if self.upmode == "transpose": + self.upconv = nn.ModuleList( + [ + nn.ConvTranspose3d( + in_ch, + out_ch, + kernel_size=kernel_size, + stride=stride, + padding=padding, + ), + SEGating(out_ch), + batchnorm(out_ch), + ] + ) + else: + self.upconv = nn.ModuleList( + [ + nn.Upsample( + mode="trilinear", scale_factor=(1, 2, 2), align_corners=False + ), + nn.Conv3d(in_ch, out_ch, kernel_size=1, stride=1), + SEGating(out_ch), + batchnorm(out_ch), + ] + ) + self.upconv = nn.Sequential(*self.upconv) + + def forward(self, x): + return self.upconv(x) + + +class Conv_3d(nn.Module): + def __init__(self, in_ch, out_ch, kernel_size, stride=1, padding=0, bias=True): + super().__init__() + self.conv = nn.Sequential( + nn.Conv3d( + in_ch, + out_ch, + kernel_size=kernel_size, + stride=stride, + padding=padding, + bias=bias, + ), + SEGating(out_ch), + batchnorm(out_ch), + ) + + def forward(self, x): + return self.conv(x) + + +def make_optimizer(args, my_model): + trainable = filter(lambda x: x.requires_grad, my_model.parameters()) + + if args.optimizer == "SGD": + optimizer_function = optim.SGD + kwargs = {"momentum": 0.9} + elif args.optimizer == "ADAM": + optimizer_function = optim.Adam + kwargs = {"betas": (0.9, 0.999), "eps": 1e-08} + elif args.optimizer == "ADAMax": + optimizer_function = optim.Adamax + kwargs = {"betas": (0.9, 0.999), "eps": 1e-08} + elif args.optimizer == "RMSprop": + optimizer_function = optim.RMSprop + kwargs = {"eps": 1e-08} + + kwargs["lr"] = args.lr + kwargs["weight_decay"] = args.weight_decay + + return optimizer_function(trainable, **kwargs) + + +def make_scheduler(args, my_optimizer): + if args.decay_type == "step": + scheduler = lrs.StepLR(my_optimizer, step_size=args.lr_decay, gamma=args.gamma) + elif args.decay_type.find("step") >= 0: + milestones = args.decay_type.split("_") + milestones.pop(0) + milestones = list(map(lambda x: int(x), milestones)) + scheduler = lrs.MultiStepLR( + my_optimizer, milestones=milestones, gamma=args.gamma + ) + elif args.decay_type == "plateau": + scheduler = lrs.ReduceLROnPlateau( + my_optimizer, + mode="max", + factor=args.gamma, + patience=args.patience, + threshold=0.01, # metric to be used is psnr + threshold_mode="abs", + verbose=True, + ) + + return scheduler + + +def gaussian_kernel(sz, sigma): + k = torch.arange(-(sz - 1) / 2, (sz + 1) / 2) + k = torch.exp(-1.0 / (2 * sigma**2) * k**2) + k = k.reshape(-1, 1) * k.reshape(1, -1) + k = k / torch.sum(k) + return k + + +def moduleNormalize(frame): 
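+    # Zero-centre each RGB channel by subtracting fixed means (0.4631, 0.4352,
+    # 0.3990); presumably per-channel statistics of the training data, as in
+    # the original AdaCoF/ST-MFNet code.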
+    return torch.cat(
+        [
+            (frame[:, 0:1, :, :] - 0.4631),
+            (frame[:, 1:2, :, :] - 0.4352),
+            (frame[:, 2:3, :, :] - 0.3990),
+        ],
+        1,
+    )
+
+
+class FoldUnfold:
+    """
+    Class to handle folding a tensor frame into a batch of patches and back into a frame again.
+    Thanks to Charlie Tan (charlie.tan.2019@bristol.ac.uk) for the earlier version.
+    """
+
+    def __init__(self, height, width, patch_size, overlap):
+        if height % 2 or width % 2 or patch_size % 2 or overlap % 2:
+            print(
+                "only defined for even values of height, width, patch_size and overlap; odd values will reconstruct incorrectly"
+            )
+            return
+
+        self.height = height
+        self.width = width
+
+        self.patch_size = patch_size
+        self.overlap = overlap
+        self.stride = patch_size - overlap
+
+    def fold_to_patches(self, *frames):
+        """
+        args: frames -- list of (1,3,H,W) tensors
+        returns: list of (B,3,h,w) image patches
+        """
+
+        # number of blocks in each direction
+        n_blocks_h = (self.height // (self.stride)) + 1
+        n_blocks_w = (self.width // (self.stride)) + 1
+
+        # how much to pad each edge by
+        self.pad_h = (self.stride * n_blocks_h + self.overlap - self.height) // 2
+        self.pad_w = (self.stride * n_blocks_w + self.overlap - self.width) // 2
+        self.height_pad = self.height + 2 * self.pad_h
+        self.width_pad = self.width + 2 * self.pad_w
+
+        # pad the frames and unfold into patches
+        patches_list = []
+        for i in range(len(frames)):
+            padded = F.pad(
+                frames[i],
+                (self.pad_w, self.pad_w, self.pad_h, self.pad_h),
+                mode="reflect",
+            )
+            unfolded = F.unfold(padded, self.patch_size, stride=self.stride)
+            patches = unfolded.permute(2, 1, 0).reshape(
+                -1, 3, self.patch_size, self.patch_size
+            )
+            patches_list.append(patches)
+
+        return patches_list
+
+    def unfold_to_frame(self, patches):
+        """
+        args: patches -- tensor of shape (B,3,h,w)
+        returns: frame -- tensor of shape (1,3,H,W)
+        """
+
+        # reshape and permute back into [frames, chans * patch_size ** 2, num_patches] as expected by fold
+        frame_unfold = patches.reshape(-1, 3 * self.patch_size**2, 1).permute(2, 1, 0)
+
+        # fold into tensor of shape pad_shape
+        frame_fold = F.fold(
+            frame_unfold,
+            (self.height_pad, self.width_pad),
+            self.patch_size,
+            stride=self.stride,
+        )
+
+        # unfold sums overlaps instead of averaging, so a tensor of ones is unfolded and
+        # folded to track overlaps and take the mean of overlapping pixels
+        ones = torch.ones_like(frame_fold)
+        ones_unfold = F.unfold(ones, self.patch_size, stride=self.stride)
+
+        # divisor is a tensor of shape pad_shape where each element is the number of values that have overlapped
+        # (1 = no overlaps)
+        divisor = F.fold(
+            ones_unfold,
+            (self.height_pad, self.width_pad),
+            self.patch_size,
+            stride=self.stride,
+        )
+
+        # divide reconstructed frame by divisor
+        frame_div = frame_fold / divisor
+
+        # crop frame to remove the padded areas
+        frame_crop = frame_div[
+            :, :, self.pad_h : -self.pad_h, self.pad_w : -self.pad_w
+        ].clone()
+
+        return frame_crop
+
+
+def read_frame_yuv2rgb(stream, width, height, iFrame, bit_depth, pix_fmt="420"):
+    if pix_fmt == "420":
+        multiplier = 1
+        uv_factor = 2
+    elif pix_fmt == "444":
+        multiplier = 2
+        uv_factor = 1
+    else:
+        print("Pixel format {} is not supported".format(pix_fmt))
+        return
+
+    if bit_depth == 8:
+        datatype = np.uint8
+        # seek offset must be an int (1.5 * width * height is a float)
+        stream.seek(int(iFrame * 1.5 * width * height * multiplier))
+        Y = np.fromfile(stream, dtype=datatype, count=width * height).reshape(
+            (height, width)
+        )
+
+        # read chroma samples and upsample since original is 4:2:0 sampling
+        U = np.fromfile(
+            stream, dtype=datatype, count=(width 
// uv_factor) * (height // uv_factor) + ).reshape((height // uv_factor, width // uv_factor)) + V = np.fromfile( + stream, dtype=datatype, count=(width // uv_factor) * (height // uv_factor) + ).reshape((height // uv_factor, width // uv_factor)) + + else: + datatype = np.uint16 + stream.seek(iFrame * 3 * width * height * multiplier) + Y = np.fromfile(stream, dtype=datatype, count=width * height).reshape( + (height, width) + ) + + U = np.fromfile( + stream, dtype=datatype, count=(width // uv_factor) * (height // uv_factor) + ).reshape((height // uv_factor, width // uv_factor)) + V = np.fromfile( + stream, dtype=datatype, count=(width // uv_factor) * (height // uv_factor) + ).reshape((height // uv_factor, width // uv_factor)) + + if pix_fmt == "420": + yuv = np.empty((height * 3 // 2, width), dtype=datatype) + yuv[0:height, :] = Y + + yuv[height : height + height // 4, :] = U.reshape(-1, width) + yuv[height + height // 4 :, :] = V.reshape(-1, width) + + if bit_depth != 8: + yuv = (yuv / (2**bit_depth - 1) * 255).astype(np.uint8) + + # convert to rgb + rgb = cv2.cvtColor(yuv, cv2.COLOR_YUV2RGB_I420) + + else: + yvu = np.stack([Y, V, U], axis=2) + if bit_depth != 8: + yvu = (yvu / (2**bit_depth - 1) * 255).astype(np.uint8) + rgb = cv2.cvtColor(yvu, cv2.COLOR_YCrCb2RGB) + + return rgb + + +def quantize(imTensor): + return imTensor.clamp(0.0, 1.0).mul(255).round() + + +def tensor2rgb(tensor): + """ + Convert GPU Tensor to RGB image (numpy array) + """ + out = [] + for b in range(tensor.shape[0]): + out.append( + np.moveaxis(quantize(tensor[b]).cpu().detach().numpy(), 0, 2).astype( + np.uint8 + ) + ) + return np.array(out) # (B,H,W,C) + + +class Identity(nn.Module): + def __init__(self, *args): + super(Identity, self).__init__() + + def forward(self, x): + return x + + +class SEBlock(nn.Module): + def __init__(self, input_dim, reduction=16): + super(SEBlock, self).__init__() + mid = int(input_dim / reduction) + self.avg_pool = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Sequential( + nn.Linear(input_dim, mid), + nn.ReLU(inplace=True), + nn.Linear(mid, input_dim), + nn.Sigmoid(), + ) + + def forward(self, x): + b, c, _, _ = x.size() + y = self.avg_pool(x).view(b, c) + y = self.fc(y).view(b, c, 1, 1) + return x * y + + +class ResNextBlock(nn.Module): + def __init__( + self, down, cin, cout, ks, stride=1, groups=32, base_width=4, norm_layer=None + ): + super(ResNextBlock, self).__init__() + if norm_layer is None or norm_layer == "batch": + norm_layer = nn.BatchNorm2d + elif norm_layer == "identity": + norm_layer = Identity + width = int(cout * (base_width / 64.0)) * groups + # Both self.conv2 and self.downsample layers downsample the input when stride != 1 + self.conv1 = nn.Conv2d(cin, width, kernel_size=1, stride=1, bias=False) + self.bn1 = norm_layer(width) + if down: + self.conv2 = nn.Conv2d( + width, + width, + kernel_size=ks, + stride=stride, + padding=(ks - 1) // 2, + groups=groups, + bias=False, + ) + else: + self.conv2 = nn.ConvTranspose2d( + width, + width, + kernel_size=ks, + stride=stride, + padding=(ks - stride) // 2, + groups=groups, + bias=False, + ) + self.bn2 = norm_layer(width) + self.conv3 = nn.Conv2d(width, cout, kernel_size=1, stride=1, bias=False) + self.bn3 = norm_layer(cout) + self.relu = nn.ReLU(inplace=True) + self.downsample = None + if stride != 1 or cin != cout: + if down: + self.downsample = nn.Sequential( + nn.Conv2d(cin, cout, kernel_size=1, stride=stride, bias=False), + norm_layer(cout), + ) + else: + self.downsample = nn.Sequential( + # ks = stride here s.t. 
resolution can be kept + nn.ConvTranspose2d( + cin, cout, kernel_size=2, stride=stride, bias=False + ), + norm_layer(cout), + ) + self.stride = stride + + def forward(self, x): + identity = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + out = self.relu(out) + + return out + + +class MultiScaleResNextBlock(nn.Module): + def __init__(self, down, cin, cout, ks_s, ks_l, stride, norm_layer): + super(MultiScaleResNextBlock, self).__init__() + self.resnext_small = ResNextBlock( + down, cin, cout // 2, ks_s, stride, norm_layer=norm_layer + ) + self.resnext_large = ResNextBlock( + down, cin, cout // 2, ks_l, stride, norm_layer=norm_layer + ) + self.attention = SEBlock(cout) + + def forward(self, tensorCombine): + out_small = self.resnext_small(tensorCombine) + out_large = self.resnext_large(tensorCombine) + out = torch.cat([out_small, out_large], 1) + out = self.attention(out) + return out + + +class UMultiScaleResNext(nn.Module): + def __init__( + self, channels=[64, 128, 256, 512], norm_layer="batch", inplanes=6, **kwargs + ): + super(UMultiScaleResNext, self).__init__() + self.conv1 = MultiScaleResNextBlock( + True, inplanes, channels[0], ks_s=3, ks_l=7, stride=2, norm_layer=norm_layer + ) + self.conv2 = MultiScaleResNextBlock( + True, + channels[0], + channels[1], + ks_s=3, + ks_l=7, + stride=2, + norm_layer=norm_layer, + ) + self.conv3 = MultiScaleResNextBlock( + True, + channels[1], + channels[2], + ks_s=3, + ks_l=5, + stride=2, + norm_layer=norm_layer, + ) + self.conv4 = MultiScaleResNextBlock( + True, + channels[2], + channels[3], + ks_s=3, + ks_l=5, + stride=2, + norm_layer=norm_layer, + ) + + self.deconv4 = MultiScaleResNextBlock( + True, + channels[3], + channels[3], + ks_s=3, + ks_l=5, + stride=1, + norm_layer=norm_layer, + ) + self.deconv3 = MultiScaleResNextBlock( + False, + channels[3], + channels[2], + ks_s=4, + ks_l=6, + stride=2, + norm_layer=norm_layer, + ) + self.deconv2 = MultiScaleResNextBlock( + False, + channels[2], + channels[1], + ks_s=4, + ks_l=8, + stride=2, + norm_layer=norm_layer, + ) + self.deconv1 = MultiScaleResNextBlock( + False, + channels[1], + channels[0], + ks_s=4, + ks_l=8, + stride=2, + norm_layer=norm_layer, + ) + + def forward(self, im0, im2): + tensorJoin = torch.cat([im0, im2], 1) # (B,6,H,W) + + tensorConv1 = self.conv1(tensorJoin) + tensorConv2 = self.conv2(tensorConv1) + tensorConv3 = self.conv3(tensorConv2) + tensorConv4 = self.conv4(tensorConv3) + + tensorDeconv4 = self.deconv4(tensorConv4) + tensorDeconv3 = self.deconv3(tensorDeconv4 + tensorConv4) + tensorDeconv2 = self.deconv2(tensorDeconv3 + tensorConv3) + tensorDeconv1 = self.deconv1(tensorDeconv2 + tensorConv2) + + return tensorDeconv1 + + +class MultiInputGridNet(nn.Module): + def __init__(self, in_chs, out_chs, grid_chs=(32, 64, 96), n_row=3, n_col=6): + super(MultiInputGridNet, self).__init__() + + self.n_row = n_row + self.n_col = n_col + self.n_chs = grid_chs + assert ( + len(grid_chs) == self.n_row + ), "should give num channels for each row (scale stream)" + assert ( + len(in_chs) == self.n_row + ), "should give input channels for each row (scale stream)" + + for r, n_ch in enumerate(self.n_chs): + setattr(self, f"lateral_{r}_0", LateralBlock(in_chs[r], n_ch)) + for c in range(1, self.n_col): + setattr(self, f"lateral_{r}_{c}", LateralBlock(n_ch, n_ch)) + + 
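# Down/up-sampling blocks connect adjacent scale rows: in forward, the
+ # first half of the grid columns only passes information downward to
+ # coarser rows, and the second half only upward, GridNet-style.
+ 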
for r, (in_ch, out_ch) in enumerate(zip(self.n_chs[:-1], self.n_chs[1:])): + for c in range(int(self.n_col / 2)): + setattr(self, f"down_{r}_{c}", DownSamplingBlock(in_ch, out_ch)) + + for r, (in_ch, out_ch) in enumerate(zip(self.n_chs[1:], self.n_chs[:-1])): + for c in range(int(self.n_col / 2)): + setattr(self, f"up_{r}_{c}", UpSamplingBlock(in_ch, out_ch)) + + self.lateral_final = LateralBlock(self.n_chs[0], out_chs) + + def forward(self, *args): + assert len(args) == self.n_row + + # extensible, memory-efficient + cur_col = list(args) + for c in range(int(self.n_col / 2)): + for r in range(self.n_row): + cur_col[r] = getattr(self, f"lateral_{r}_{c}")(cur_col[r]) + if r != 0: + cur_col[r] += getattr(self, f"down_{r-1}_{c}")(cur_col[r - 1]) + + for c in range(int(self.n_col / 2), self.n_col): + for r in range(self.n_row - 1, -1, -1): + cur_col[r] = getattr(self, f"lateral_{r}_{c}")(cur_col[r]) + if r != self.n_row - 1: + cur_col[r] += getattr(self, f"up_{r}_{c-int(self.n_col/2)}")( + cur_col[r + 1] + ) + + return self.lateral_final(cur_col[0]) + + +class MIMOGridNet(nn.Module): + def __init__( + self, in_chs, out_chs, grid_chs=(32, 64, 96), n_row=3, n_col=6, outrow=(0, 1, 2) + ): + super(MIMOGridNet, self).__init__() + + self.n_row = n_row + self.n_col = n_col + self.n_chs = grid_chs + self.outrow = outrow + assert ( + len(grid_chs) == self.n_row + ), "should give num channels for each row (scale stream)" + assert ( + len(in_chs) == self.n_row + ), "should give input channels for each row (scale stream)" + assert len(out_chs) == len( + self.outrow + ), "should give out channels for each output row (scale stream)" + + for r, n_ch in enumerate(self.n_chs): + setattr(self, f"lateral_{r}_0", LateralBlock(in_chs[r], n_ch)) + for c in range(1, self.n_col): + setattr(self, f"lateral_{r}_{c}", LateralBlock(n_ch, n_ch)) + + for r, (in_ch, out_ch) in enumerate(zip(self.n_chs[:-1], self.n_chs[1:])): + for c in range(int(self.n_col / 2)): + setattr(self, f"down_{r}_{c}", DownSamplingBlock(in_ch, out_ch)) + + for r, (in_ch, out_ch) in enumerate(zip(self.n_chs[1:], self.n_chs[:-1])): + for c in range(int(self.n_col / 2)): + setattr(self, f"up_{r}_{c}", UpSamplingBlock(in_ch, out_ch)) + + for i, r in enumerate(outrow): + setattr(self, f"lateral_final_{r}", LateralBlock(self.n_chs[r], out_chs[i])) + + def forward(self, *args): + assert len(args) == self.n_row + + # extensible, memory-efficient + cur_col = list(args) + for c in range(int(self.n_col / 2)): + for r in range(self.n_row): + cur_col[r] = getattr(self, f"lateral_{r}_{c}")(cur_col[r]) + if r != 0: + cur_col[r] += getattr(self, f"down_{r-1}_{c}")(cur_col[r - 1]) + + for c in range(int(self.n_col / 2), self.n_col): + for r in range(self.n_row - 1, -1, -1): + cur_col[r] = getattr(self, f"lateral_{r}_{c}")(cur_col[r]) + if r != self.n_row - 1: + cur_col[r] += getattr(self, f"up_{r}_{c-int(self.n_col/2)}")( + cur_col[r + 1] + ) + + out = [] + for r in self.outrow: + out.append(getattr(self, f"lateral_final_{r}")(cur_col[r])) + + return out + + +class GeneralGridNet(nn.Module): + def __init__(self, in_chs, out_chs, grid_chs=(32, 64, 96), n_row=3, n_col=6): + super(GeneralGridNet, self).__init__() + + self.n_row = n_row + self.n_col = n_col + self.n_chs = grid_chs + assert ( + len(grid_chs) == self.n_row + ), "should give num channels for each row (scale stream)" + + for r, n_ch in enumerate(self.n_chs): + if r == 0: + setattr(self, f"lateral_{r}_0", LateralBlock(in_chs, n_ch)) + for c in range(1, self.n_col): + setattr(self, f"lateral_{r}_{c}", 
LateralBlock(n_ch, n_ch)) + + for r, (in_ch, out_ch) in enumerate(zip(self.n_chs[:-1], self.n_chs[1:])): + for c in range(int(self.n_col / 2)): + setattr(self, f"down_{r}_{c}", DownSamplingBlock(in_ch, out_ch)) + + for r, (in_ch, out_ch) in enumerate(zip(self.n_chs[1:], self.n_chs[:-1])): + for c in range(int(self.n_col / 2)): + setattr(self, f"up_{r}_{c}", UpSamplingBlock(in_ch, out_ch)) + + self.lateral_final = LateralBlock(self.n_chs[0], out_chs) + + def forward(self, x): + cur_col = [x] + [None] * (self.n_row - 1) + for c in range(int(self.n_col / 2)): + for r in range(self.n_row): + if cur_col[r] != None: + cur_col[r] = getattr(self, f"lateral_{r}_{c}")(cur_col[r]) + else: + cur_col[r] = 0.0 + if r != 0: + cur_col[r] += getattr(self, f"down_{r-1}_{c}")(cur_col[r - 1]) + + for c in range(int(self.n_col / 2), self.n_col): + for r in range(self.n_row - 1, -1, -1): + cur_col[r] = getattr(self, f"lateral_{r}_{c}")(cur_col[r]) + if r != self.n_row - 1: + cur_col[r] += getattr(self, f"up_{r}_{c-int(self.n_col/2)}")( + cur_col[r + 1] + ) + + return self.lateral_final(cur_col[0]) + + +class GridNet(nn.Module): + def __init__(self, in_chs, out_chs, grid_chs=(32, 64, 96)): + super(GridNet, self).__init__() + + self.n_row = 3 + self.n_col = 6 + self.n_chs = grid_chs + assert ( + len(grid_chs) == self.n_row + ), "should give num channels for each row (scale stream)" + + self.lateral_init = LateralBlock(in_chs, self.n_chs[0]) + + for r, n_ch in enumerate(self.n_chs): + for c in range(self.n_col - 1): + setattr(self, f"lateral_{r}_{c}", LateralBlock(n_ch, n_ch)) + + for r, (in_ch, out_ch) in enumerate(zip(self.n_chs[:-1], self.n_chs[1:])): + for c in range(int(self.n_col / 2)): + setattr(self, f"down_{r}_{c}", DownSamplingBlock(in_ch, out_ch)) + + for r, (in_ch, out_ch) in enumerate(zip(self.n_chs[1:], self.n_chs[:-1])): + for c in range(int(self.n_col / 2)): + setattr(self, f"up_{r}_{c}", UpSamplingBlock(in_ch, out_ch)) + + self.lateral_final = LateralBlock(self.n_chs[0], out_chs) + + def forward(self, x): + state_00 = self.lateral_init(x) + state_10 = self.down_0_0(state_00) + state_20 = self.down_1_0(state_10) + + state_01 = self.lateral_0_0(state_00) + state_11 = self.down_0_1(state_01) + self.lateral_1_0(state_10) + state_21 = self.down_1_1(state_11) + self.lateral_2_0(state_20) + + state_02 = self.lateral_0_1(state_01) + state_12 = self.down_0_2(state_02) + self.lateral_1_1(state_11) + state_22 = self.down_1_2(state_12) + self.lateral_2_1(state_21) + + state_23 = self.lateral_2_2(state_22) + state_13 = self.up_1_0(state_23) + self.lateral_1_2(state_12) + state_03 = self.up_0_0(state_13) + self.lateral_0_2(state_02) + + state_24 = self.lateral_2_3(state_23) + state_14 = self.up_1_1(state_24) + self.lateral_1_3(state_13) + state_04 = self.up_0_1(state_14) + self.lateral_0_3(state_03) + + state_25 = self.lateral_2_4(state_24) + state_15 = self.up_1_2(state_25) + self.lateral_1_4(state_14) + state_05 = self.up_0_2(state_15) + self.lateral_0_4(state_04) + + return self.lateral_final(state_05) + + +class LateralBlock(nn.Module): + def __init__(self, ch_in, ch_out): + super(LateralBlock, self).__init__() + self.f = nn.Sequential( + nn.PReLU(), + nn.Conv2d(ch_in, ch_out, kernel_size=3, padding=1), + nn.PReLU(), + nn.Conv2d(ch_out, ch_out, kernel_size=3, padding=1), + ) + if ch_in != ch_out: + self.conv = nn.Conv2d(ch_in, ch_out, kernel_size=3, padding=1) + + def forward(self, x): + fx = self.f(x) + if fx.shape[1] != x.shape[1]: + x = self.conv(x) + return fx + x + + +class 
DownSamplingBlock(nn.Module): + def __init__(self, ch_in, ch_out): + super(DownSamplingBlock, self).__init__() + self.f = nn.Sequential( + nn.PReLU(), + nn.Conv2d(ch_in, ch_out, kernel_size=3, stride=2, padding=1), + nn.PReLU(), + nn.Conv2d(ch_out, ch_out, kernel_size=3, padding=1), + ) + + def forward(self, x): + return self.f(x) + + +class UpSamplingBlock(nn.Module): + def __init__(self, ch_in, ch_out): + super(UpSamplingBlock, self).__init__() + self.f = nn.Sequential( + nn.Upsample(scale_factor=2, mode="bilinear", align_corners=False), + nn.PReLU(), + nn.Conv2d(ch_in, ch_out, kernel_size=3, padding=1), + nn.PReLU(), + nn.Conv2d(ch_out, ch_out, kernel_size=3, padding=1), + ) + + def forward(self, x): + return self.f(x) + +# end + + +class Network(torch.nn.Module): + def __init__(self): + super(Network, self).__init__() + + class Extractor(torch.nn.Module): + def __init__(self): + super(Extractor, self).__init__() + + self.netOne = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=3, + out_channels=16, + kernel_size=3, + stride=2, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=16, + out_channels=16, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=16, + out_channels=16, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + self.netTwo = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=16, + out_channels=32, + kernel_size=3, + stride=2, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=32, + out_channels=32, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=32, + out_channels=32, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + self.netThr = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=32, + out_channels=64, + kernel_size=3, + stride=2, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=64, + out_channels=64, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=64, + out_channels=64, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + self.netFou = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=64, + out_channels=96, + kernel_size=3, + stride=2, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=96, + out_channels=96, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=96, + out_channels=96, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + self.netFiv = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=96, + out_channels=128, + kernel_size=3, + stride=2, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=128, + out_channels=128, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=128, + out_channels=128, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + 
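# netSix is the coarsest pyramid level: a sixth stride-2 stage, giving
+ # features at 1/64 of the input resolution with 196 channels.
+ 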
self.netSix = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=128, + out_channels=196, + kernel_size=3, + stride=2, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=196, + out_channels=196, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=196, + out_channels=196, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + # end + + def forward(self, tenInput): + tenOne = self.netOne(tenInput) + tenTwo = self.netTwo(tenOne) + tenThr = self.netThr(tenTwo) + tenFou = self.netFou(tenThr) + tenFiv = self.netFiv(tenFou) + tenSix = self.netSix(tenFiv) + + return [tenOne, tenTwo, tenThr, tenFou, tenFiv, tenSix] + + # end + + # end + + class Decoder(torch.nn.Module): + def __init__(self, intLevel): + super(Decoder, self).__init__() + + intPrevious = [ + None, + None, + 81 + 32 + 2 + 2, + 81 + 64 + 2 + 2, + 81 + 96 + 2 + 2, + 81 + 128 + 2 + 2, + 81, + None, + ][intLevel + 1] + intCurrent = [ + None, + None, + 81 + 32 + 2 + 2, + 81 + 64 + 2 + 2, + 81 + 96 + 2 + 2, + 81 + 128 + 2 + 2, + 81, + None, + ][intLevel + 0] + + if intLevel < 6: + self.netUpflow = torch.nn.ConvTranspose2d( + in_channels=2, + out_channels=2, + kernel_size=4, + stride=2, + padding=1, + ) + if intLevel < 6: + self.netUpfeat = torch.nn.ConvTranspose2d( + in_channels=intPrevious + 128 + 128 + 96 + 64 + 32, + out_channels=2, + kernel_size=4, + stride=2, + padding=1, + ) + if intLevel < 6: + self.fltBackwarp = [None, None, None, 5.0, 2.5, 1.25, 0.625, None][ + intLevel + 1 + ] + + self.netOne = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=intCurrent, + out_channels=128, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + self.netTwo = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=intCurrent + 128, + out_channels=128, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + self.netThr = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=intCurrent + 128 + 128, + out_channels=96, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + self.netFou = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=intCurrent + 128 + 128 + 96, + out_channels=64, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + self.netFiv = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=intCurrent + 128 + 128 + 96 + 64, + out_channels=32, + kernel_size=3, + stride=1, + padding=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + ) + + self.netSix = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=intCurrent + 128 + 128 + 96 + 64 + 32, + out_channels=2, + kernel_size=3, + stride=1, + padding=1, + ) + ) + + # end + + def forward(self, tenFirst, tenSecond, objPrevious): + tenFlow = None + tenFeat = None + + if objPrevious is None: + tenFlow = None + tenFeat = None + + tenVolume = torch.nn.functional.leaky_relu( + input=FunctionCorrelation( + tenFirst=tenFirst, tenSecond=tenSecond + ), + negative_slope=0.1, + inplace=False, + ) + + tenFeat = torch.cat([tenVolume], 1) + + elif objPrevious is not None: + tenFlow = self.netUpflow(objPrevious["tenFlow"]) + tenFeat = self.netUpfeat(objPrevious["tenFeat"]) + + tenVolume = torch.nn.functional.leaky_relu( + input=FunctionCorrelation( + 
tenFirst=tenFirst, + tenSecond=backwarp( + tenInput=tenSecond, tenFlow=tenFlow * self.fltBackwarp + ), + ), + negative_slope=0.1, + inplace=False, + ) + + tenFeat = torch.cat([tenVolume, tenFirst, tenFlow, tenFeat], 1) + + # end + + tenFeat = torch.cat([self.netOne(tenFeat), tenFeat], 1) + tenFeat = torch.cat([self.netTwo(tenFeat), tenFeat], 1) + tenFeat = torch.cat([self.netThr(tenFeat), tenFeat], 1) + tenFeat = torch.cat([self.netFou(tenFeat), tenFeat], 1) + tenFeat = torch.cat([self.netFiv(tenFeat), tenFeat], 1) + + tenFlow = self.netSix(tenFeat) + + return {"tenFlow": tenFlow, "tenFeat": tenFeat} + + # end + + # end + + class Refiner(torch.nn.Module): + def __init__(self): + super(Refiner, self).__init__() + + self.netMain = torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=81 + 32 + 2 + 2 + 128 + 128 + 96 + 64 + 32, + out_channels=128, + kernel_size=3, + stride=1, + padding=1, + dilation=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=128, + out_channels=128, + kernel_size=3, + stride=1, + padding=2, + dilation=2, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=128, + out_channels=128, + kernel_size=3, + stride=1, + padding=4, + dilation=4, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=128, + out_channels=96, + kernel_size=3, + stride=1, + padding=8, + dilation=8, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=96, + out_channels=64, + kernel_size=3, + stride=1, + padding=16, + dilation=16, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=64, + out_channels=32, + kernel_size=3, + stride=1, + padding=1, + dilation=1, + ), + torch.nn.LeakyReLU(inplace=False, negative_slope=0.1), + torch.nn.Conv2d( + in_channels=32, + out_channels=2, + kernel_size=3, + stride=1, + padding=1, + dilation=1, + ), + ) + + # end + + def forward(self, tenInput): + return self.netMain(tenInput) + + # end + + # end + + self.netExtractor = Extractor() + + self.netTwo = Decoder(2) + self.netThr = Decoder(3) + self.netFou = Decoder(4) + self.netFiv = Decoder(5) + self.netSix = Decoder(6) + + self.netRefiner = Refiner() + + self.load_state_dict( + { + strKey.replace("module", "net"): tenWeight + for strKey, tenWeight in torch.hub.load_state_dict_from_url( + url="http://content.sniklaus.com/github/pytorch-pwc/network-" + + "default" + + ".pytorch", + model_dir=get_ckpt_container_path(MODEL_TYPE) + ).items() + } + ) + + # end + + def forward(self, tenFirst, tenSecond, *args): + # optionally pass pre-extracted feature pyramid in as args + if len(args) == 0: + tenFirst = self.netExtractor(tenFirst) + tenSecond = self.netExtractor(tenSecond) + else: + tenFirst, tenSecond = args + + objEstimate = self.netSix(tenFirst[-1], tenSecond[-1], None) + objEstimate = self.netFiv(tenFirst[-2], tenSecond[-2], objEstimate) + objEstimate = self.netFou(tenFirst[-3], tenSecond[-3], objEstimate) + objEstimate = self.netThr(tenFirst[-4], tenSecond[-4], objEstimate) + objEstimate = self.netTwo(tenFirst[-5], tenSecond[-5], objEstimate) + + return objEstimate["tenFlow"] + self.netRefiner(objEstimate["tenFeat"]) + + # end + + def extract_pyramid(self, tenFirst, tenSecond): + return self.netExtractor(tenFirst), self.netExtractor(tenSecond) + + def extract_pyramid_single(self, tenFirst): + return self.netExtractor(tenFirst) + + +# end + +netNetwork = None + 
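+# Note: `estimate` below is a module-level convenience wrapper that lazily
+# builds a single global Network on first call; the hard-coded 1024x436
+# asserts appear to target Sintel-sized inputs, as in the original PWC-Net
+# demo script. A minimal sketch of intended use (assuming CHW float tensors
+# in [0, 1]):
+#     flow = estimate(frame_a, frame_b)  # -> (2, H, W) flow in pixels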
+########################################################## + + +def estimate(tenFirst, tenSecond): + global netNetwork + + if netNetwork is None: + netNetwork = Network().cuda().eval() + # end + + assert tenFirst.shape[1] == tenSecond.shape[1] + assert tenFirst.shape[2] == tenSecond.shape[2] + + intWidth = tenFirst.shape[2] + intHeight = tenFirst.shape[1] + + assert ( + intWidth == 1024 + ) # remember that there is no guarantee for correctness, comment this line out if you acknowledge this and want to continue + assert ( + intHeight == 436 + ) # remember that there is no guarantee for correctness, comment this line out if you acknowledge this and want to continue + + tenPreprocessedFirst = tenFirst.cuda().view(1, 3, intHeight, intWidth) + tenPreprocessedSecond = tenSecond.cuda().view(1, 3, intHeight, intWidth) + + intPreprocessedWidth = int(math.floor(math.ceil(intWidth / 64.0) * 64.0)) + intPreprocessedHeight = int(math.floor(math.ceil(intHeight / 64.0) * 64.0)) + + tenPreprocessedFirst = torch.nn.functional.interpolate( + input=tenPreprocessedFirst, + size=(intPreprocessedHeight, intPreprocessedWidth), + mode="bilinear", + align_corners=False, + ) + tenPreprocessedSecond = torch.nn.functional.interpolate( + input=tenPreprocessedSecond, + size=(intPreprocessedHeight, intPreprocessedWidth), + mode="bilinear", + align_corners=False, + ) + + tenFlow = 20.0 * torch.nn.functional.interpolate( + input=netNetwork(tenPreprocessedFirst, tenPreprocessedSecond), + size=(intHeight, intWidth), + mode="bilinear", + align_corners=False, + ) + + tenFlow[:, 0, :, :] *= float(intWidth) / float(intPreprocessedWidth) + tenFlow[:, 1, :, :] *= float(intHeight) / float(intPreprocessedHeight) + + return tenFlow[0, :, :, :].cpu() + + +# end + + +class UNet3d_18(nn.Module): + def __init__(self, channels=[32, 64, 96, 128], bn=True): + super(UNet3d_18, self).__init__() + growth = 2 # since concatenating previous outputs + upmode = "transpose" # use transposeConv to upsample + + self.channels = channels + + self.lrelu = nn.LeakyReLU(0.2, True) + + self.encoder = r3d_18(bn=bn, channels=channels) + + self.decoder = nn.Sequential( + Conv_3d( + channels[::-1][0], + channels[::-1][1], + kernel_size=3, + padding=1, + bias=True, + ), + upConv3D( + channels[::-1][1] * growth, + channels[::-1][2], + kernel_size=(3, 4, 4), + stride=(1, 2, 2), + padding=(1, 1, 1), + upmode=upmode, + ), + upConv3D( + channels[::-1][2] * growth, + channels[::-1][3], + kernel_size=(3, 4, 4), + stride=(1, 2, 2), + padding=(1, 1, 1), + upmode=upmode, + ), + Conv_3d( + channels[::-1][3] * growth, + channels[::-1][3], + kernel_size=3, + padding=1, + bias=True, + ), + upConv3D( + channels[::-1][3] * growth, + channels[::-1][3], + kernel_size=(3, 4, 4), + stride=(1, 2, 2), + padding=(1, 1, 1), + upmode=upmode, + ), + ) + + self.feature_fuse = nn.Sequential( + *( + [ + nn.Conv2d( + channels[::-1][3] * 5, + channels[::-1][3], + kernel_size=1, + stride=1, + bias=False, + ) + ] + + [nn.BatchNorm2d(channels[::-1][3]) if bn else Identity] + ) + ) + + self.outconv = nn.Sequential( + nn.ReflectionPad2d(3), + nn.Conv2d(channels[::-1][3], 3, kernel_size=7, stride=1, padding=0), + ) + + def forward(self, im1, im3, im5, im7, im4_tilde): + images = torch.stack((im1, im3, im4_tilde, im5, im7), dim=2) + + x_0, x_1, x_2, x_3, x_4 = self.encoder(images) + + dx_3 = self.lrelu(self.decoder[0](x_4)) + dx_3 = torch.cat([dx_3, x_3], dim=1) + + dx_2 = self.lrelu(self.decoder[1](dx_3)) + dx_2 = torch.cat([dx_2, x_2], dim=1) + + dx_1 = self.lrelu(self.decoder[2](dx_2)) + dx_1 
= torch.cat([dx_1, x_1], dim=1) + + dx_0 = self.lrelu(self.decoder[3](dx_1)) + dx_0 = torch.cat([dx_0, x_0], dim=1) + + dx_out = self.lrelu(self.decoder[4](dx_0)) + dx_out = torch.cat(torch.unbind(dx_out, 2), 1) + + out = self.lrelu(self.feature_fuse(dx_out)) + out = self.outconv(out) + + return out + + +class KernelEstimation(torch.nn.Module): + def __init__(self, kernel_size): + super(KernelEstimation, self).__init__() + self.kernel_size = kernel_size + + def Subnet_offset(ks): + return torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1 + ), + torch.nn.ReLU(inplace=False), + torch.nn.Conv2d( + in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1 + ), + torch.nn.ReLU(inplace=False), + torch.nn.Conv2d( + in_channels=64, out_channels=ks, kernel_size=3, stride=1, padding=1 + ), + torch.nn.ReLU(inplace=False), + torch.nn.Upsample(scale_factor=2, mode="bilinear", align_corners=True), + torch.nn.Conv2d( + in_channels=ks, out_channels=ks, kernel_size=3, stride=1, padding=1 + ), + ) + + def Subnet_weight(ks): + return torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1 + ), + torch.nn.ReLU(inplace=False), + torch.nn.Conv2d( + in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1 + ), + torch.nn.ReLU(inplace=False), + torch.nn.Conv2d( + in_channels=64, out_channels=ks, kernel_size=3, stride=1, padding=1 + ), + torch.nn.ReLU(inplace=False), + torch.nn.Upsample(scale_factor=2, mode="bilinear", align_corners=True), + torch.nn.Conv2d( + in_channels=ks, out_channels=ks, kernel_size=3, stride=1, padding=1 + ), + torch.nn.Softmax(dim=1), + ) + + def Subnet_offset_ds(ks): + return torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1 + ), + torch.nn.ReLU(inplace=False), + torch.nn.Conv2d( + in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1 + ), + torch.nn.ReLU(inplace=False), + torch.nn.Conv2d( + in_channels=64, out_channels=ks, kernel_size=3, stride=1, padding=1 + ), + ) + + def Subnet_weight_ds(ks): + return torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1 + ), + torch.nn.ReLU(inplace=False), + torch.nn.Conv2d( + in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1 + ), + torch.nn.ReLU(inplace=False), + torch.nn.Conv2d( + in_channels=64, out_channels=ks, kernel_size=3, stride=1, padding=1 + ), + torch.nn.Softmax(dim=1), + ) + + def Subnet_offset_us(ks): + return torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1 + ), + torch.nn.ReLU(inplace=False), + torch.nn.Conv2d( + in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1 + ), + torch.nn.ReLU(inplace=False), + torch.nn.Conv2d( + in_channels=64, out_channels=ks, kernel_size=3, stride=1, padding=1 + ), + torch.nn.ReLU(inplace=False), + torch.nn.Upsample(scale_factor=4, mode="bilinear", align_corners=True), + torch.nn.Conv2d( + in_channels=ks, out_channels=ks, kernel_size=3, stride=1, padding=1 + ), + ) + + def Subnet_weight_us(ks): + return torch.nn.Sequential( + torch.nn.Conv2d( + in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1 + ), + torch.nn.ReLU(inplace=False), + torch.nn.Conv2d( + in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1 + ), + torch.nn.ReLU(inplace=False), + torch.nn.Conv2d( + in_channels=64, out_channels=ks, kernel_size=3, stride=1, 
padding=1 + ), + torch.nn.ReLU(inplace=False), + torch.nn.Upsample(scale_factor=4, mode="bilinear", align_corners=True), + torch.nn.Conv2d( + in_channels=ks, out_channels=ks, kernel_size=3, stride=1, padding=1 + ), + torch.nn.Softmax(dim=1), + ) + + self.moduleWeight1_ds = Subnet_weight_ds(self.kernel_size**2) + self.moduleAlpha1_ds = Subnet_offset_ds(self.kernel_size**2) + self.moduleBeta1_ds = Subnet_offset_ds(self.kernel_size**2) + self.moduleWeight2_ds = Subnet_weight_ds(self.kernel_size**2) + self.moduleAlpha2_ds = Subnet_offset_ds(self.kernel_size**2) + self.moduleBeta2_ds = Subnet_offset_ds(self.kernel_size**2) + + self.moduleWeight1 = Subnet_weight(self.kernel_size**2) + self.moduleAlpha1 = Subnet_offset(self.kernel_size**2) + self.moduleBeta1 = Subnet_offset(self.kernel_size**2) + self.moduleWeight2 = Subnet_weight(self.kernel_size**2) + self.moduleAlpha2 = Subnet_offset(self.kernel_size**2) + self.moduleBeta2 = Subnet_offset(self.kernel_size**2) + + self.moduleWeight1_us = Subnet_weight_us(self.kernel_size**2) + self.moduleAlpha1_us = Subnet_offset_us(self.kernel_size**2) + self.moduleBeta1_us = Subnet_offset_us(self.kernel_size**2) + self.moduleWeight2_us = Subnet_weight_us(self.kernel_size**2) + self.moduleAlpha2_us = Subnet_offset_us(self.kernel_size**2) + self.moduleBeta2_us = Subnet_offset_us(self.kernel_size**2) + + def forward(self, tensorCombine): + # Frame 0 + Weight1_ds = self.moduleWeight1_ds(tensorCombine) + Weight1 = self.moduleWeight1(tensorCombine) + Weight1_us = self.moduleWeight1_us(tensorCombine) + Alpha1_ds = self.moduleAlpha1_ds(tensorCombine) + Alpha1 = self.moduleAlpha1(tensorCombine) + Alpha1_us = self.moduleAlpha1_us(tensorCombine) + Beta1_ds = self.moduleBeta1_ds(tensorCombine) + Beta1 = self.moduleBeta1(tensorCombine) + Beta1_us = self.moduleBeta1_us(tensorCombine) + + # Frame 2 + Weight2_ds = self.moduleWeight2_ds(tensorCombine) + Weight2 = self.moduleWeight2(tensorCombine) + Weight2_us = self.moduleWeight2_us(tensorCombine) + Alpha2_ds = self.moduleAlpha2_ds(tensorCombine) + Alpha2 = self.moduleAlpha2(tensorCombine) + Alpha2_us = self.moduleAlpha2_us(tensorCombine) + Beta2_ds = self.moduleBeta2_ds(tensorCombine) + Beta2 = self.moduleBeta2(tensorCombine) + Beta2_us = self.moduleBeta2_us(tensorCombine) + + return ( + Weight1_ds, + Alpha1_ds, + Beta1_ds, + Weight2_ds, + Alpha2_ds, + Beta2_ds, + Weight1, + Alpha1, + Beta1, + Weight2, + Alpha2, + Beta2, + Weight1_us, + Alpha1_us, + Beta1_us, + Weight2_us, + Alpha2_us, + Beta2_us, + ) + + +class STMFNet_Model(torch.nn.Module): + def __init__(self): + super(STMFNet_Model, self).__init__() + + class Metric(torch.nn.Module): + def __init__(self): + super(Metric, self).__init__() + self.paramScale = torch.nn.Parameter(-torch.ones(1, 1, 1, 1)) + + def forward(self, tenFirst, tenSecond, tenFlow): + return self.paramScale * F.l1_loss( + input=tenFirst, + target=backwarp(tenSecond, tenFlow), + reduction="none", + ).mean(1, True) + + self.kernel_size = 5 + self.dilation = 1 + self.featc = [64, 128, 256, 512] + self.featnorm = "batch" + self.finetune_pwc = False + + self.kernel_pad = int(((self.kernel_size - 1) * self.dilation) / 2.0) + + self.feature_extractor = UMultiScaleResNext( + self.featc, norm_layer=self.featnorm + ) + + self.get_kernel = KernelEstimation(self.kernel_size) + + self.modulePad = torch.nn.ReplicationPad2d( + [self.kernel_pad, self.kernel_pad, self.kernel_pad, self.kernel_pad] + ) + + self.moduleAdaCoF = FunctionAdaCoF.apply + + self.gauss_kernel = torch.nn.Parameter( + gaussian_kernel(5, 
0.5).repeat(3, 1, 1, 1), requires_grad=False + ) + + self.upsampler = Upsampler_8tap() + + self.scale_synthesis = MIMOGridNet( + (6, 6 + 6, 6), (3,), grid_chs=(32, 64, 96), n_row=3, n_col=4, outrow=(1,) + ) + + self.flow_estimator = PWCNet() + + self.softsplat = ModuleSoftsplat(strType="softmax") + + self.metric = Metric() + + self.dyntex_generator = UNet3d_18(bn=self.featnorm) + + # freeze weights of PWCNet if not finetuning it + if not self.finetune_pwc: + for param in self.flow_estimator.parameters(): + param.requires_grad = False + + def forward(self, I0, I1, I2, I3): + h0 = int(list(I1.size())[2]) + w0 = int(list(I1.size())[3]) + h2 = int(list(I2.size())[2]) + w2 = int(list(I2.size())[3]) + if h0 != h2 or w0 != w2: + sys.exit("Frame sizes do not match") + + h_padded = False + w_padded = False + if h0 % 128 != 0: + pad_h = 128 - (h0 % 128) + I0 = F.pad(I0, (0, 0, 0, pad_h), mode="reflect") + I1 = F.pad(I1, (0, 0, 0, pad_h), mode="reflect") + I2 = F.pad(I2, (0, 0, 0, pad_h), mode="reflect") + I3 = F.pad(I3, (0, 0, 0, pad_h), mode="reflect") + h_padded = True + + if w0 % 128 != 0: + pad_w = 128 - (w0 % 128) + I0 = F.pad(I0, (0, pad_w, 0, 0), mode="reflect") + I1 = F.pad(I1, (0, pad_w, 0, 0), mode="reflect") + I2 = F.pad(I2, (0, pad_w, 0, 0), mode="reflect") + I3 = F.pad(I3, (0, pad_w, 0, 0), mode="reflect") + w_padded = True + + feats = self.feature_extractor(moduleNormalize(I1), moduleNormalize(I2)) + kernelest = self.get_kernel(feats) + Weight1_ds, Alpha1_ds, Beta1_ds, Weight2_ds, Alpha2_ds, Beta2_ds = kernelest[:6] + Weight1, Alpha1, Beta1, Weight2, Alpha2, Beta2 = kernelest[6:12] + Weight1_us, Alpha1_us, Beta1_us, Weight2_us, Alpha2_us, Beta2_us = kernelest[ + 12: + ] + + # Original scale + tensorAdaCoF1 = ( + self.moduleAdaCoF(self.modulePad(I1), Weight1, Alpha1, Beta1, self.dilation) + * 1.0 + ) + tensorAdaCoF2 = ( + self.moduleAdaCoF(self.modulePad(I2), Weight2, Alpha2, Beta2, self.dilation) + * 1.0 + ) + + # 1/2 downsampled version + c, h, w = I1.shape[1:] + p = (self.gauss_kernel.shape[-1] - 1) // 2 + I1_blur = F.conv2d( + F.pad(I1, pad=(p, p, p, p), mode="reflect"), self.gauss_kernel, groups=c + ) + I2_blur = F.conv2d( + F.pad(I2, pad=(p, p, p, p), mode="reflect"), self.gauss_kernel, groups=c + ) + I1_ds = F.interpolate( + I1_blur, size=(h // 2, w // 2), mode="bilinear", align_corners=False + ) + I2_ds = F.interpolate( + I2_blur, size=(h // 2, w // 2), mode="bilinear", align_corners=False + ) + tensorAdaCoF1_ds = ( + self.moduleAdaCoF( + self.modulePad(I1_ds), Weight1_ds, Alpha1_ds, Beta1_ds, self.dilation + ) + * 1.0 + ) + tensorAdaCoF2_ds = ( + self.moduleAdaCoF( + self.modulePad(I2_ds), Weight2_ds, Alpha2_ds, Beta2_ds, self.dilation + ) + * 1.0 + ) + + # x2 upsampled version + I1_us = self.upsampler(I1) + I2_us = self.upsampler(I2) + tensorAdaCoF1_us = ( + self.moduleAdaCoF( + self.modulePad(I1_us), Weight1_us, Alpha1_us, Beta1_us, self.dilation + ) + * 1.0 + ) + tensorAdaCoF2_us = ( + self.moduleAdaCoF( + self.modulePad(I2_us), Weight2_us, Alpha2_us, Beta2_us, self.dilation + ) + * 1.0 + ) + + # use softsplat for refinement + pyramid0, pyramid2 = self.flow_estimator.extract_pyramid(I1, I2) + flow_0_2 = 20 * self.flow_estimator(I1, I2, pyramid0, pyramid2) + flow_0_2 = F.interpolate( + flow_0_2, size=(h, w), mode="bilinear", align_corners=False + ) + flow_2_0 = 20 * self.flow_estimator(I2, I1, pyramid2, pyramid0) + flow_2_0 = F.interpolate( + flow_2_0, size=(h, w), mode="bilinear", align_corners=False + ) + metric_0_2 = self.metric(I1, I2, flow_0_2) + metric_2_0 = 
self.metric(I2, I1, flow_2_0)
+        tensorSoftsplat0 = self.softsplat(I1, 0.5 * flow_0_2, metric_0_2)
+        tensorSoftsplat2 = self.softsplat(I2, 0.5 * flow_2_0, metric_2_0)
+
+        # synthesize multiple scales
+        tensorCombine_us = torch.cat([tensorAdaCoF1_us, tensorAdaCoF2_us], dim=1)
+        tensorCombine = torch.cat(
+            [tensorAdaCoF1, tensorAdaCoF2, tensorSoftsplat0, tensorSoftsplat2], dim=1
+        )
+        tensorCombine_ds = torch.cat([tensorAdaCoF1_ds, tensorAdaCoF2_ds], dim=1)
+        output_tilde = self.scale_synthesis(
+            tensorCombine_us, tensorCombine, tensorCombine_ds
+        )[0]
+
+        # generate dynamic texture
+        dyntex = self.dyntex_generator(I0, I1, I2, I3, output_tilde)
+        output = output_tilde + dyntex
+
+        if h_padded:
+            output = output[:, :, 0:h0, :]
+        if w_padded:
+            output = output[:, :, :, 0:w0]
+
+        if self.training:
+            return {"frame1": output}
+        else:
+            return output
\ No newline at end of file
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/xvfi/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/xvfi/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e52fc1a85dfadc84dd517875abd7d592e3c087e2
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/xvfi/__init__.py
@@ -0,0 +1,115 @@
+import argparse
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+import einops
+from torch.utils.data import DataLoader
+import pathlib
+from vfi_utils import load_file_from_github_release, preprocess_frames, postprocess_frames, InterpolationStateList
+import typing
+from comfy.model_management import get_torch_device
+
+CKPT_CONFIGS = {
+    "XVFInet_X4K1000FPS_exp1_latest.pt": {
+        "module_scale_factor": 4,
+        "S_trn": 3,
+        "S_tst": 5
+    },
+    "XVFInet_Vimeo_exp1_latest.pt": {
+        "module_scale_factor": 2,
+        "S_trn": 1,
+        "S_tst": 1
+    }
+}
+
+class XVFI_Inference(nn.Module):
+    def __init__(self, model_path, model_config) -> None:
+        super(XVFI_Inference, self).__init__()
+        from .xvfi_arch import XVFInet, weights_init
+        args = argparse.Namespace(
+            gpu=get_torch_device(),
+            nf=64,
+            **model_config,
+            img_ch=3,
+        )
+        self.model = XVFInet(args).apply(weights_init).to(get_torch_device())
+        self.model.load_state_dict(torch.load(model_path, map_location=get_torch_device())["state_dict_Model"])
+
+    def forward(self, I0, I1, timestep):
+        # "Real" inference is called "test_custom" in the original repo
+        # https://github.com/JihyongOh/XVFI/blob/main/utils.py#L434
+        # https://github.com/JihyongOh/XVFI/blob/main/main.py#L336
+
+        x = torch.stack([I0, I1], dim=0)
+        x = einops.rearrange(x, "t b c h w -> b c t h w")
+        return self.model(x, timestep, is_training=False)
+
+MODEL_TYPE = pathlib.Path(__file__).parent.name
+
+class XVFI:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {
+            "required": {
+                "ckpt_name": (list(CKPT_CONFIGS.keys()), ),
+                "frames": ("IMAGE", ),
+                "batch_size": ("INT", {"default": 1, "min": 1, "max": 100}),
+                "multiplier": ("INT", {"default": 2, "min": 2, "max": 1000}),
+            },
+            "optional": {
+                "optional_interpolation_states": ("INTERPOLATION_STATES", ),
+            }
+        }
+
+    RETURN_TYPES = ("IMAGE", )
+    FUNCTION = "vfi"
+    CATEGORY = "ComfyUI-Frame-Interpolation/VFI"
+
+    def vfi(
+        self,
+        ckpt_name: typing.AnyStr,
+        frames: torch.Tensor,
+        batch_size: typing.SupportsInt = 1,
+        multiplier: typing.SupportsInt = 2,
+        optional_interpolation_states: InterpolationStateList = None
+    ):
+        model_path = load_file_from_github_release(MODEL_TYPE, ckpt_name)
+        ckpt_config = CKPT_CONFIGS[ckpt_name]
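+        # Each checkpoint carries its own pyramid settings (module_scale_factor,
+        # S_trn, S_tst); these determine how far the frames must be padded below.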
+        model = XVFI_Inference(model_path, ckpt_config)
+
+        frames = preprocess_frames(frames)
+        # https://github.com/JihyongOh/XVFI/blob/main/main.py#L314
+        divide = 2 ** (ckpt_config["S_tst"]) * ckpt_config["module_scale_factor"] * 4
+        B, C, H, W = frames.size()
+        H_padding = (divide - H % divide) % divide
+        W_padding = (divide - W % divide) % divide
+        if H_padding != 0 or W_padding != 0:
+            frames = F.pad(frames, (0, W_padding, 0, H_padding), "constant")
+
+        frame_dict = {
+            str(i): frames[i].unsqueeze(0) for i in range(frames.shape[0])
+        }
+
+        if optional_interpolation_states is None:
+            interpolation_states = [True] * (frames.shape[0] - 1)
+        else:
+            interpolation_states = optional_interpolation_states
+
+        enabled_former_idxs = [i for i, state in enumerate(interpolation_states) if state]
+        former_idxs_loader = DataLoader(enabled_former_idxs, batch_size=batch_size)
+
+        for former_idxs_batch in former_idxs_loader:
+            for middle_i in range(1, multiplier):
+                _middle_frames = model(
+                    frames[former_idxs_batch],
+                    frames[former_idxs_batch + 1],
+                    timestep=torch.tensor([middle_i/multiplier]).repeat(len(former_idxs_batch)).unsqueeze(1).to(get_torch_device())
+                )
+                for i, former_idx in enumerate(former_idxs_batch):
+                    frame_dict[f'{former_idx}.{middle_i}'] = _middle_frames[i].unsqueeze(0)
+
+        # Sort keys numerically; a plain string sort would misorder indices >= 10.
+        sorted_keys = sorted(frame_dict.keys(), key=lambda k: tuple(map(int, k.split('.'))))
+        out_frames = torch.cat([frame_dict[key] for key in sorted_keys], dim=0)[:, :, :H, :W]
+        return (postprocess_frames(out_frames), )
+
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/xvfi/xvfi_arch.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/xvfi/xvfi_arch.py
new file mode 100644
index 0000000000000000000000000000000000000000..dae013e749616c9b6b2e2767db08b8c8dab6046f
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_models/xvfi/xvfi_arch.py
@@ -0,0 +1,506 @@
+import functools, random
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from torch.autograd import Variable
+import numpy as np
+from torch.nn import init
+from comfy.model_management import get_torch_device
+
+
+
+class XVFInet(nn.Module):
+
+    def __init__(self, args):
+        super(XVFInet, self).__init__()
+        self.args = args
+        self.device = get_torch_device()
+        self.nf = args.nf
+        self.scale = args.module_scale_factor
+        self.vfinet = VFInet(args)
+        self.lrelu = nn.ReLU()
+        self.in_channels = 3
+        self.channel_converter = nn.Sequential(
+            nn.Conv3d(self.in_channels, self.nf, [1, 3, 3], [1, 1, 1], [0, 1, 1]),
+            nn.ReLU())
+
+        self.rec_ext_ds_module = [self.channel_converter]
+        self.rec_ext_ds = nn.Conv3d(self.nf, self.nf, [1, 3, 3], [1, 2, 2], [0, 1, 1])
+        for _ in range(int(np.log2(self.scale))):
+            self.rec_ext_ds_module.append(self.rec_ext_ds)
+            self.rec_ext_ds_module.append(nn.ReLU())
+        self.rec_ext_ds_module.append(nn.Conv3d(self.nf, self.nf, [1, 3, 3], 1, [0, 1, 1]))
+        self.rec_ext_ds_module.append(RResBlock2D_3D(args, T_reduce_flag=False))
+        self.rec_ext_ds_module = nn.Sequential(*self.rec_ext_ds_module)
+
+        self.rec_ctx_ds = nn.Conv3d(self.nf, self.nf, [1, 3, 3], [1, 2, 2], [0, 1, 1])
+
+        print("The lowest scale depth for training (S_trn): ", self.args.S_trn)
+        print("The lowest scale depth for test (S_tst): ", self.args.S_tst)
+
+    def forward(self, x, t_value, is_training=True):
+        '''
+        x shape : [B,C,T,H,W]
+        t_value shape : [B,1]
+        '''
+        B, C, T, H, W = x.size()
+        B2, C2 = t_value.size()
+        assert C2 == 1, "t_value shape must be [B,1]"
+        assert T % 2 == 0, "T must be an even number"
+        t_value = t_value.view(B, 1, 1, 1)
+
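+        # Coarse-to-fine: build a feature pyramid down to depth S_trn (training)
+        # or S_tst (testing), then run VFInet from the coarsest level upward,
+        # refining the estimated flow at each level; level 0 synthesizes the frame.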
flow_l = None + feat_x = self.rec_ext_ds_module(x) + feat_x_list = [feat_x] + self.lowest_depth_level = self.args.S_trn if is_training else self.args.S_tst + for level in range(1, self.lowest_depth_level+1): + feat_x = self.rec_ctx_ds(feat_x) + feat_x_list.append(feat_x) + + if is_training: + out_l_list = [] + flow_refine_l_list = [] + out_l, flow_l, flow_refine_l = self.vfinet(x, feat_x_list[self.args.S_trn], flow_l, t_value, level=self.args.S_trn, is_training=True) + out_l_list.append(out_l) + flow_refine_l_list.append(flow_refine_l) + for level in range(self.args.S_trn-1, 0, -1): ## self.args.S_trn, self.args.S_trn-1, ..., 1. level 0 is not included + out_l, flow_l = self.vfinet(x, feat_x_list[level], flow_l, t_value, level=level, is_training=True) + out_l_list.append(out_l) + out_l, flow_l, flow_refine_l, occ_0_l0 = self.vfinet(x, feat_x_list[0], flow_l, t_value, level=0, is_training=True) + out_l_list.append(out_l) + flow_refine_l_list.append(flow_refine_l) + return out_l_list[::-1], flow_refine_l_list[::-1], occ_0_l0, torch.mean(x, dim=2) # out_l_list should be reversed. [out_l0, out_l1, ...] + + else: # Testing + for level in range(self.args.S_tst, 0, -1): ## self.args.S_tst, self.args.S_tst-1, ..., 1. level 0 is not included + flow_l = self.vfinet(x, feat_x_list[level], flow_l, t_value, level=level, is_training=False) + out_l = self.vfinet(x, feat_x_list[0], flow_l, t_value, level=0, is_training=False) + return out_l + + +class VFInet(nn.Module): + + def __init__(self, args): + super(VFInet, self).__init__() + self.args = args + self.device = get_torch_device() + self.nf = args.nf + self.scale = args.module_scale_factor + self.in_channels = 3 + + self.conv_flow_bottom = nn.Sequential( + nn.Conv2d(2*self.nf, 2*self.nf, [4,4], 2, [1,1]), + nn.ReLU(), + nn.Conv2d(2*self.nf, 4*self.nf, [4,4], 2, [1,1]), + nn.ReLU(), + nn.UpsamplingNearest2d(scale_factor=2), + nn.Conv2d(4 * self.nf, 2 * self.nf, [3, 3], 1, [1, 1]), + nn.ReLU(), + nn.UpsamplingNearest2d(scale_factor=2), + nn.Conv2d(2 * self.nf, self.nf, [3, 3], 1, [1, 1]), + nn.ReLU(), + nn.Conv2d(self.nf, 6, [3,3], 1, [1,1]), + ) + + self.conv_flow1 = nn.Conv2d(2*self.nf, self.nf, [3, 3], 1, [1, 1]) + + self.conv_flow2 = nn.Sequential( + nn.Conv2d(2*self.nf + 4, 2 * self.nf, [4, 4], 2, [1, 1]), + nn.ReLU(), + nn.Conv2d(2 * self.nf, 4 * self.nf, [4, 4], 2, [1, 1]), + nn.ReLU(), + nn.UpsamplingNearest2d(scale_factor=2), + nn.Conv2d(4 * self.nf, 2 * self.nf, [3, 3], 1, [1, 1]), + nn.ReLU(), + nn.UpsamplingNearest2d(scale_factor=2), + nn.Conv2d(2 * self.nf, self.nf, [3, 3], 1, [1, 1]), + nn.ReLU(), + nn.Conv2d(self.nf, 6, [3, 3], 1, [1, 1]), + ) + + self.conv_flow3 = nn.Sequential( + nn.Conv2d(4 + self.nf * 4, self.nf, [1, 1], 1, [0, 0]), + nn.ReLU(), + nn.Conv2d(self.nf, 2 * self.nf, [4, 4], 2, [1, 1]), + nn.ReLU(), + nn.Conv2d(2 * self.nf, 4 * self.nf, [4, 4], 2, [1, 1]), + nn.ReLU(), + nn.UpsamplingNearest2d(scale_factor=2), + nn.Conv2d(4 * self.nf, 2 * self.nf, [3, 3], 1, [1, 1]), + nn.ReLU(), + nn.UpsamplingNearest2d(scale_factor=2), + nn.Conv2d(2 * self.nf, self.nf, [3, 3], 1, [1, 1]), + nn.ReLU(), + nn.Conv2d(self.nf, 4, [3, 3], 1, [1, 1]), + ) + + self.refine_unet = RefineUNet(args) + self.lrelu = nn.ReLU() + + def forward(self, x, feat_x, flow_l_prev, t_value, level, is_training): + ''' + x shape : [B,C,T,H,W] + t_value shape : [B,1] ############### + ''' + B, C, T, H, W = x.size() + assert T % 2 == 0, "T must be an even number" + + ####################### For a single level + l = 2 ** level + x_l = x.permute(0,2,1,3,4) + x_l = 
x_l.contiguous().view(B * T, C, H, W) + + if level == 0: + pass + else: + x_l = F.interpolate(x_l, scale_factor=(1.0 / l, 1.0 / l), mode='bicubic', align_corners=False) + ''' + Down pixel-shuffle + ''' + x_l = x_l.view(B, T, C, H//l, W//l) + x_l = x_l.permute(0,2,1,3,4) + + B, C, T, H, W = x_l.size() + + ## Feature extraction + feat0_l = feat_x[:,:,0,:,:] + feat1_l = feat_x[:,:,1,:,:] + + ## Flow estimation + if flow_l_prev is None: + flow_l_tmp = self.conv_flow_bottom(torch.cat((feat0_l, feat1_l), dim=1)) + flow_l = flow_l_tmp[:,:4,:,:] + else: + up_flow_l_prev = 2.0*F.interpolate(flow_l_prev.detach(), scale_factor=(2,2), mode='bilinear', align_corners=False) + warped_feat1_l = self.bwarp(feat1_l, up_flow_l_prev[:,:2,:,:]) + warped_feat0_l = self.bwarp(feat0_l, up_flow_l_prev[:,2:,:,:]) + flow_l_tmp = self.conv_flow2(torch.cat([self.conv_flow1(torch.cat([feat0_l, warped_feat1_l],dim=1)), self.conv_flow1(torch.cat([feat1_l, warped_feat0_l],dim=1)), up_flow_l_prev],dim=1)) + flow_l = flow_l_tmp[:,:4,:,:] + up_flow_l_prev + + if not is_training and level!=0: + return flow_l + + flow_01_l = flow_l[:,:2,:,:] + flow_10_l = flow_l[:,2:,:,:] + z_01_l = torch.sigmoid(flow_l_tmp[:,4:5,:,:]) + z_10_l = torch.sigmoid(flow_l_tmp[:,5:6,:,:]) + + ## Complementary Flow Reversal (CFR) + flow_forward, norm0_l = self.z_fwarp(flow_01_l, t_value * flow_01_l, z_01_l) ## Actually, F (t) -> (t+1). Translation only. Not normalized yet + flow_backward, norm1_l = self.z_fwarp(flow_10_l, (1-t_value) * flow_10_l, z_10_l) ## Actually, F (1-t) -> (-t). Translation only. Not normalized yet + + flow_t0_l = -(1-t_value) * ((t_value)*flow_forward) + (t_value) * ((t_value)*flow_backward) # The numerator of Eq.(1) in the paper. + flow_t1_l = (1-t_value) * ((1-t_value)*flow_forward) - (t_value) * ((1-t_value)*flow_backward) # The numerator of Eq.(2) in the paper. 
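+        # Complete Eqs.(1)/(2) below: divide by the shared normalization term;
+        # where the accumulated warping weight is zero, mask_ keeps the raw value.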
+ + norm_l = (1-t_value)*norm0_l + t_value*norm1_l + mask_ = (norm_l.detach() > 0).type(norm_l.type()) + flow_t0_l = (1-mask_) * flow_t0_l + mask_ * (flow_t0_l.clone() / (norm_l.clone() + (1-mask_))) # Divide the numerator with denominator in Eq.(1) + flow_t1_l = (1-mask_) * flow_t1_l + mask_ * (flow_t1_l.clone() / (norm_l.clone() + (1-mask_))) # Divide the numerator with denominator in Eq.(2) + + ## Feature warping + warped0_l = self.bwarp(feat0_l, flow_t0_l) + warped1_l = self.bwarp(feat1_l, flow_t1_l) + + ## Flow refinement + flow_refine_l = torch.cat([feat0_l, warped0_l, warped1_l, feat1_l, flow_t0_l, flow_t1_l], dim=1) + flow_refine_l = self.conv_flow3(flow_refine_l) + torch.cat([flow_t0_l, flow_t1_l], dim=1) + flow_t0_l = flow_refine_l[:, :2, :, :] + flow_t1_l = flow_refine_l[:, 2:4, :, :] + + warped0_l = self.bwarp(feat0_l, flow_t0_l) + warped1_l = self.bwarp(feat1_l, flow_t1_l) + + ## Flow upscale + flow_t0_l = self.scale * F.interpolate(flow_t0_l, scale_factor=(self.scale, self.scale), mode='bilinear',align_corners=False) + flow_t1_l = self.scale * F.interpolate(flow_t1_l, scale_factor=(self.scale, self.scale), mode='bilinear',align_corners=False) + + ## Image warping and blending + warped_img0_l = self.bwarp(x_l[:,:,0,:,:], flow_t0_l) + warped_img1_l = self.bwarp(x_l[:,:,1,:,:], flow_t1_l) + + refine_out = self.refine_unet(torch.cat([F.pixel_shuffle(torch.cat([feat0_l, feat1_l, warped0_l, warped1_l],dim=1), self.scale), x_l[:,:,0,:,:], x_l[:,:,1,:,:], warped_img0_l, warped_img1_l, flow_t0_l, flow_t1_l],dim=1)) + occ_0_l = torch.sigmoid(refine_out[:, 0:1, :, :]) + occ_1_l = 1-occ_0_l + + out_l = (1-t_value)*occ_0_l*warped_img0_l + t_value*occ_1_l*warped_img1_l + out_l = out_l / ( (1-t_value)*occ_0_l + t_value*occ_1_l ) + refine_out[:, 1:4, :, :] + + if not is_training and level==0: + return out_l + + if is_training: + if flow_l_prev is None: + # if level == self.args.S_trn: + return out_l, flow_l, flow_refine_l[:, 0:4, :, :] + elif level != 0: + return out_l, flow_l + else: # level==0 + return out_l, flow_l, flow_refine_l[:, 0:4, :, :], occ_0_l + + def bwarp(self, x, flo): + ''' + x: [B, C, H, W] (im2) + flo: [B, 2, H, W] flow + ''' + B, C, H, W = x.size() + # mesh grid + xx = torch.arange(0, W).view(1, 1, 1, W).expand(B, 1, H, W) + yy = torch.arange(0, H).view(1, 1, H, 1).expand(B, 1, H, W) + grid = torch.cat((xx, yy), 1).float() + + grid = grid.to(self.device) + vgrid = torch.autograd.Variable(grid) + flo + + # scale grid to [-1,1] + vgrid[:, 0, :, :] = 2.0 * vgrid[:, 0, :, :].clone() / max(W - 1, 1) - 1.0 + vgrid[:, 1, :, :] = 2.0 * vgrid[:, 1, :, :].clone() / max(H - 1, 1) - 1.0 + + vgrid = vgrid.permute(0, 2, 3, 1) # [B,H,W,2] + output = nn.functional.grid_sample(x, vgrid, align_corners=True) + mask = torch.autograd.Variable(torch.ones(x.size())).to(self.device) + mask = nn.functional.grid_sample(mask, vgrid, align_corners=True) + + # mask[mask<0.9999] = 0 + # mask[mask>0] = 1 + mask = mask.masked_fill_(mask < 0.999, 0) + mask = mask.masked_fill_(mask > 0, 1) + + return output * mask + + def fwarp(self, img, flo): + + """ + -img: image (N, C, H, W) + -flo: optical flow (N, 2, H, W) + elements of flo is in [0, H] and [0, W] for dx, dy + https://github.com/lyh-18/EQVI/blob/EQVI-master/models/forward_warp_gaussian.py + """ + + # (x1, y1) (x1, y2) + # +---------------+ + # | | + # | o(x, y) | + # | | + # | | + # | | + # | | + # +---------------+ + # (x2, y1) (x2, y2) + + N, C, _, _ = img.size() + + # translate start-point optical flow to end-point optical flow + y = flo[:, 0:1:, 
:]
+        x = flo[:, 1:2, :, :]
+
+        x = x.repeat(1, C, 1, 1)
+        y = y.repeat(1, C, 1, 1)
+
+        # Four corners of the square: (x1, y1), (x1, y2), (x2, y1), (x2, y2)
+        x1 = torch.floor(x)
+        x2 = x1 + 1
+        y1 = torch.floor(y)
+        y2 = y1 + 1
+
+        # firstly, get gaussian weights
+        w11, w12, w21, w22 = self.get_gaussian_weights(x, y, x1, x2, y1, y2)
+
+        # secondly, sample each weighted corner
+        img11, o11 = self.sample_one(img, x1, y1, w11)
+        img12, o12 = self.sample_one(img, x1, y2, w12)
+        img21, o21 = self.sample_one(img, x2, y1, w21)
+        img22, o22 = self.sample_one(img, x2, y2, w22)
+
+        imgw = img11 + img12 + img21 + img22
+        o = o11 + o12 + o21 + o22
+
+        return imgw, o
+
+
+    def z_fwarp(self, img, flo, z):
+        """
+        -img: image (N, C, H, W)
+        -flo: optical flow (N, 2, H, W)
+        elements of flo are in [0, H] and [0, W] for dx, dy
+        modified from https://github.com/lyh-18/EQVI/blob/EQVI-master/models/forward_warp_gaussian.py
+        """
+
+        # (x1, y1)        (x1, y2)
+        # +---------------+
+        # |               |
+        # |   o(x, y)     |
+        # |               |
+        # |               |
+        # |               |
+        # |               |
+        # +---------------+
+        # (x2, y1)        (x2, y2)
+
+        N, C, _, _ = img.size()
+
+        # translate start-point optical flow to end-point optical flow
+        y = flo[:, 0:1:, :]
+        x = flo[:, 1:2, :, :]
+
+        x = x.repeat(1, C, 1, 1)
+        y = y.repeat(1, C, 1, 1)
+
+        # Four corners of the square: (x1, y1), (x1, y2), (x2, y1), (x2, y2)
+        x1 = torch.floor(x)
+        x2 = x1 + 1
+        y1 = torch.floor(y)
+        y2 = y1 + 1
+
+        # firstly, get gaussian weights (scaled by the importance map z)
+        w11, w12, w21, w22 = self.get_gaussian_weights(x, y, x1, x2, y1, y2, z+1e-5)
+
+        # secondly, sample each weighted corner
+        img11, o11 = self.sample_one(img, x1, y1, w11)
+        img12, o12 = self.sample_one(img, x1, y2, w12)
+        img21, o21 = self.sample_one(img, x2, y1, w21)
+        img22, o22 = self.sample_one(img, x2, y2, w22)
+
+        imgw = img11 + img12 + img21 + img22
+        o = o11 + o12 + o21 + o22
+
+        return imgw, o
+
+
+    def get_gaussian_weights(self, x, y, x1, x2, y1, y2, z=1.0):
+        # z 0.0 ~ 1.0
+        w11 = z * torch.exp(-((x - x1) ** 2 + (y - y1) ** 2))
+        w12 = z * torch.exp(-((x - x1) ** 2 + (y - y2) ** 2))
+        w21 = z * torch.exp(-((x - x2) ** 2 + (y - y1) ** 2))
+        w22 = z * torch.exp(-((x - x2) ** 2 + (y - y2) ** 2))
+
+        return w11, w12, w21, w22
+
+    def sample_one(self, img, shiftx, shifty, weight):
+        """
+        Input:
+        -img (N, C, H, W)
+        -shiftx, shifty (N, c, H, W)
+        """
+
+        N, C, H, W = img.size()
+
+        # flatten all (all restored as Tensors)
+        flat_shiftx = shiftx.view(-1)
+        flat_shifty = shifty.view(-1)
+        flat_basex = torch.arange(0, H, requires_grad=False).view(-1, 1)[None, None].to(self.device).long().repeat(N, C, 1, W).view(-1)
+        flat_basey = torch.arange(0, W, requires_grad=False).view(1, -1)[None, None].to(self.device).long().repeat(N, C, H, 1).view(-1)
+        flat_weight = weight.view(-1)
+        flat_img = img.contiguous().view(-1)
+
+        # The corresponding positions in I1
+        idxn = torch.arange(0, N, requires_grad=False).view(N, 1, 1, 1).to(self.device).long().repeat(1, C, H, W).view(-1)
+        idxc = torch.arange(0, C, requires_grad=False).view(1, C, 1, 1).to(self.device).long().repeat(N, 1, H, W).view(-1)
+        idxx = flat_shiftx.long() + flat_basex
+        idxy = flat_shifty.long() + flat_basey
+
+        # record which shifted points land inside the image
+        mask = idxx.ge(0) & idxx.lt(H) & idxy.ge(0) & idxy.lt(W)
+
+        # Mask off points out of boundaries
+        ids = (idxn * C * H * W + idxc * H * W + idxx * W + idxy)
+        ids_mask = torch.masked_select(ids, mask).clone().to(self.device)
+
+        # Note: put_ must use accumulate=True for proper backprop.
+        img_warp = torch.zeros([N * C * H * W, ]).to(self.device)
+        img_warp.put_(ids_mask, torch.masked_select(flat_img * flat_weight, mask), accumulate=True)
+
+        one_warp = torch.zeros([N * C * H * W, ]).to(self.device)
+        one_warp.put_(ids_mask, torch.masked_select(flat_weight, mask), accumulate=True)
+
+        return img_warp.view(N, C, H, W), one_warp.view(N, C, H, W)
+
+class RefineUNet(nn.Module):
+    def __init__(self, args):
+        super(RefineUNet, self).__init__()
+        self.args = args
+        self.scale = args.module_scale_factor
+        self.nf = args.nf
+        self.conv1 = nn.Conv2d(self.nf, self.nf, [3,3], 1, [1,1])
+        self.conv2 = nn.Conv2d(self.nf, self.nf, [3,3], 1, [1,1])
+        self.lrelu = nn.ReLU()
+        self.NN = nn.UpsamplingNearest2d(scale_factor=2)
+
+        self.enc1 = nn.Conv2d((4*self.nf)//self.scale//self.scale + 4*args.img_ch + 4, self.nf, [4, 4], 2, [1, 1])
+        self.enc2 = nn.Conv2d(self.nf, 2*self.nf, [4, 4], 2, [1, 1])
+        self.enc3 = nn.Conv2d(2*self.nf, 4*self.nf, [4, 4], 2, [1, 1])
+        self.dec0 = nn.Conv2d(4*self.nf, 4*self.nf, [3, 3], 1, [1, 1])
+        self.dec1 = nn.Conv2d(4*self.nf + 2*self.nf, 2*self.nf, [3, 3], 1, [1, 1]) ## input concatenated with enc2
+        self.dec2 = nn.Conv2d(2*self.nf + self.nf, self.nf, [3, 3], 1, [1, 1]) ## input concatenated with enc1
+        self.dec3 = nn.Conv2d(self.nf, 1+args.img_ch, [3, 3], 1, [1, 1]) ## input added with warped image
+
+    def forward(self, concat):
+        enc1 = self.lrelu(self.enc1(concat))
+        enc2 = self.lrelu(self.enc2(enc1))
+        out = self.lrelu(self.enc3(enc2))
+
+        out = self.lrelu(self.dec0(out))
+        out = self.NN(out)
+
+        out = torch.cat((out,enc2),dim=1)
+        out = self.lrelu(self.dec1(out))
+
+        out = self.NN(out)
+        out = torch.cat((out,enc1),dim=1)
+        out = self.lrelu(self.dec2(out))
+
+        out = self.NN(out)
+        out = self.dec3(out)
+        return out
+
+class ResBlock2D_3D(nn.Module):
+    ## Shape of input [B,C,T,H,W]
+    ## Shape of output [B,C,T,H,W]
+    def __init__(self, args):
+        super(ResBlock2D_3D, self).__init__()
+        self.args = args
+        self.nf = args.nf
+
+        self.conv3x3_1 = nn.Conv3d(self.nf, self.nf, [1,3,3], 1, [0,1,1])
+        self.conv3x3_2 = nn.Conv3d(self.nf, self.nf, [1,3,3], 1, [0,1,1])
+        self.lrelu = nn.ReLU()
+
+    def forward(self, x):
+        '''
+        x shape : [B,C,T,H,W]
+        '''
+        B, C, T, H, W = x.size()
+
+        out = self.conv3x3_2(self.lrelu(self.conv3x3_1(x)))
+
+        return x + out
+
+class RResBlock2D_3D(nn.Module):
+
+    def __init__(self, args, T_reduce_flag=False):
+        super(RResBlock2D_3D, self).__init__()
+        self.args = args
+        self.nf = args.nf
+        self.T_reduce_flag = T_reduce_flag
+        self.resblock1 = ResBlock2D_3D(self.args)
+        self.resblock2 = ResBlock2D_3D(self.args)
+        if T_reduce_flag:
+            self.reduceT_conv = nn.Conv3d(self.nf, self.nf, [3,1,1], 1, [0,0,0])
+
+    def forward(self, x):
+        '''
+        x shape : [B,C,T,H,W]
+        '''
+        out = self.resblock1(x)
+        out = self.resblock2(out)
+        if self.T_reduce_flag:
+            return self.reduceT_conv(out + x)
+        else:
+            return out + x
+
+def weights_init(m):
+    classname = m.__class__.__name__
+    if (classname.find('Conv2d') != -1) or (classname.find('Conv3d') != -1):
+        init.xavier_normal_(m.weight)
+        # init.kaiming_normal_(m.weight, nonlinearity='relu')
+        if hasattr(m, 'bias') and m.bias is not None:
+            init.zeros_(m.bias)
\ No newline at end of file
diff --git a/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_utils.py b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..7504637e63abfc8b826a36f11456e8a47d2dbf5d
--- /dev/null
+++ 
b/src/comfyui/custom_nodes/ComfyUI-Frame-Interpolation/vfi_utils.py
@@ -0,0 +1,295 @@
+import yaml
+import os
+from torch.hub import download_url_to_file, get_dir
+from urllib.parse import urlparse
+import torch
+import typing
+import traceback
+import einops
+import gc
+import torchvision.transforms.functional as transform
+from comfy.model_management import soft_empty_cache, get_torch_device
+import numpy as np
+
+BASE_MODEL_DOWNLOAD_URLS = [
+    "https://github.com/styler00dollar/VSGAN-tensorrt-docker/releases/download/models/",
+    "https://github.com/Fannovel16/ComfyUI-Frame-Interpolation/releases/download/models/",
+    "https://github.com/dajes/frame-interpolation-pytorch/releases/download/v1.0.0/"
+]
+
+config_path = os.path.join(os.path.dirname(__file__), "./config.yaml")
+if os.path.exists(config_path):
+    config = yaml.load(open(config_path, "r"), Loader=yaml.FullLoader)
+else:
+    raise Exception("The config.yaml file is necessary; please recreate it by downloading it from https://github.com/Fannovel16/ComfyUI-Frame-Interpolation")
+DEVICE = get_torch_device()
+
+class InterpolationStateList():
+
+    def __init__(self, frame_indices: typing.List[int], is_skip_list: bool):
+        self.frame_indices = frame_indices
+        self.is_skip_list = is_skip_list
+
+    def is_frame_skipped(self, frame_index):
+        is_frame_in_list = frame_index in self.frame_indices
+        return (self.is_skip_list and is_frame_in_list) or (not self.is_skip_list and not is_frame_in_list)
+
+
+class MakeInterpolationStateList:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {
+            "required": {
+                "frame_indices": ("STRING", {"multiline": True, "default": "1,2,3"}),
+                "is_skip_list": ("BOOLEAN", {"default": True},),
+            },
+        }
+
+    RETURN_TYPES = ("INTERPOLATION_STATES",)
+    FUNCTION = "create_options"
+    CATEGORY = "ComfyUI-Frame-Interpolation/VFI"
+
+    def create_options(self, frame_indices: str, is_skip_list: bool):
+        frame_indices_list = [int(item) for item in frame_indices.split(',')]
+
+        interpolation_state_list = InterpolationStateList(
+            frame_indices=frame_indices_list,
+            is_skip_list=is_skip_list,
+        )
+        return (interpolation_state_list,)
+
+
+def get_ckpt_container_path(model_type):
+    return os.path.abspath(os.path.join(os.path.dirname(__file__), config["ckpts_path"], model_type))
+
+def load_file_from_url(url, model_dir=None, progress=True, file_name=None):
+    """Load a file from an http url, downloading the model if necessary.
+
+    Ref: https://github.com/1adrianb/face-alignment/blob/master/face_alignment/utils.py
+
+    Args:
+        url (str): URL to be downloaded.
+        model_dir (str): The path to save the downloaded model. Should be a full path. If None, use pytorch hub_dir.
+            Default: None.
+        progress (bool): Whether to show the download progress. Default: True.
+        file_name (str): The downloaded file name. If None, use the file name in the url. Default: None.
+
+    Returns:
+        str: The path to the downloaded file.
+ """ + if model_dir is None: # use the pytorch hub_dir + hub_dir = get_dir() + model_dir = os.path.join(hub_dir, 'checkpoints') + + os.makedirs(model_dir, exist_ok=True) + + parts = urlparse(url) + file_name = os.path.basename(parts.path) + if file_name is not None: + file_name = file_name + cached_file = os.path.abspath(os.path.join(model_dir, file_name)) + if not os.path.exists(cached_file): + print(f'Downloading: "{url}" to {cached_file}\n') + download_url_to_file(url, cached_file, hash_prefix=None, progress=progress) + return cached_file + +def load_file_from_github_release(model_type, ckpt_name): + error_strs = [] + for i, base_model_download_url in enumerate(BASE_MODEL_DOWNLOAD_URLS): + try: + return load_file_from_url(base_model_download_url + ckpt_name, get_ckpt_container_path(model_type)) + except Exception: + traceback_str = traceback.format_exc() + if i < len(BASE_MODEL_DOWNLOAD_URLS) - 1: + print("Failed! Trying another endpoint.") + error_strs.append(f"Error when downloading from: {base_model_download_url + ckpt_name}\n\n{traceback_str}") + + error_str = '\n\n'.join(error_strs) + raise Exception(f"Tried all GitHub base urls to download {ckpt_name} but no suceess. Below is the error log:\n\n{error_str}") + + +def load_file_from_direct_url(model_type, url): + return load_file_from_url(url, get_ckpt_container_path(model_type)) + +def preprocess_frames(frames): + return einops.rearrange(frames[..., :3], "n h w c -> n c h w") + +def postprocess_frames(frames): + return einops.rearrange(frames, "n c h w -> n h w c")[..., :3].cpu() + +def assert_batch_size(frames, batch_size=2, vfi_name=None): + subject_verb = "Most VFI models require" if vfi_name is None else f"VFI model {vfi_name} requires" + assert len(frames) >= batch_size, f"{subject_verb} at least {batch_size} frames to work with, only found {frames.shape[0]}. Please check the frame input using PreviewImage." 
+ +def _generic_frame_loop( + frames, + clear_cache_after_n_frames, + multiplier: typing.Union[typing.SupportsInt, typing.List], + return_middle_frame_function, + *return_middle_frame_function_args, + interpolation_states: InterpolationStateList = None, + use_timestep=True, + dtype=torch.float16, + final_logging=True): + + #https://github.com/hzwer/Practical-RIFE/blob/main/inference_video.py#L169 + def non_timestep_inference(frame0, frame1, n): + middle = return_middle_frame_function(frame0, frame1, None, *return_middle_frame_function_args) + if n == 1: + return [middle] + first_half = non_timestep_inference(frame0, middle, n=n//2) + second_half = non_timestep_inference(middle, frame1, n=n//2) + if n%2: + return [*first_half, middle, *second_half] + else: + return [*first_half, *second_half] + + output_frames = torch.zeros(multiplier*frames.shape[0], *frames.shape[1:], dtype=dtype, device="cpu") + out_len = 0 + + number_of_frames_processed_since_last_cleared_cuda_cache = 0 + + for frame_itr in range(len(frames) - 1): # Skip the final frame since there are no frames after it + frame0 = frames[frame_itr:frame_itr+1] + output_frames[out_len] = frame0 # Start with first frame + out_len += 1 + # Ensure that input frames are in fp32 - the same dtype as model + frame0 = frame0.to(dtype=torch.float32) + frame1 = frames[frame_itr+1:frame_itr+2].to(dtype=torch.float32) + + if interpolation_states is not None and interpolation_states.is_frame_skipped(frame_itr): + continue + + # Generate and append a batch of middle frames + middle_frame_batches = [] + + if use_timestep: + for middle_i in range(1, multiplier): + timestep = middle_i/multiplier + + middle_frame = return_middle_frame_function( + frame0.to(DEVICE), + frame1.to(DEVICE), + timestep, + *return_middle_frame_function_args + ).detach().cpu() + middle_frame_batches.append(middle_frame.to(dtype=dtype)) + else: + middle_frames = non_timestep_inference(frame0.to(DEVICE), frame1.to(DEVICE), multiplier - 1) + middle_frame_batches.extend(torch.cat(middle_frames, dim=0).detach().cpu().to(dtype=dtype)) + + # Copy middle frames to output + for middle_frame in middle_frame_batches: + output_frames[out_len] = middle_frame + out_len += 1 + + number_of_frames_processed_since_last_cleared_cuda_cache += 1 + # Try to avoid a memory overflow by clearing cuda cache regularly + if number_of_frames_processed_since_last_cleared_cuda_cache >= clear_cache_after_n_frames: + print("Comfy-VFI: Clearing cache...", end=' ') + soft_empty_cache() + number_of_frames_processed_since_last_cleared_cuda_cache = 0 + print("Done cache clearing") + + gc.collect() + + if final_logging: + print(f"Comfy-VFI done! 
{len(output_frames)} frames generated at resolution: {output_frames[0].shape}") + # Append final frame + output_frames[out_len] = frames[-1:] + out_len += 1 + # clear cache for courtesy + if final_logging: + print("Comfy-VFI: Final clearing cache...", end = ' ') + soft_empty_cache() + if final_logging: + print("Done cache clearing") + return output_frames[:out_len] + +def generic_frame_loop( + model_name, + frames, + clear_cache_after_n_frames, + multiplier: typing.Union[typing.SupportsInt, typing.List], + return_middle_frame_function, + *return_middle_frame_function_args, + interpolation_states: InterpolationStateList = None, + use_timestep=True, + dtype=torch.float32): + + assert_batch_size(frames, vfi_name=model_name.replace('_', ' ').replace('VFI', '')) + if type(multiplier) == int: + return _generic_frame_loop( + frames, + clear_cache_after_n_frames, + multiplier, + return_middle_frame_function, + *return_middle_frame_function_args, + interpolation_states=interpolation_states, + use_timestep=use_timestep, + dtype=dtype + ) + if type(multiplier) == list: + multipliers = list(map(int, multiplier)) + multipliers += [2] * (len(frames) - len(multipliers) - 1) + frame_batches = [] + for frame_itr in range(len(frames) - 1): + multiplier = multipliers[frame_itr] + if multiplier == 0: continue + frame_batch = _generic_frame_loop( + frames[frame_itr:frame_itr+2], + clear_cache_after_n_frames, + multiplier, + return_middle_frame_function, + *return_middle_frame_function_args, + interpolation_states=interpolation_states, + use_timestep=use_timestep, + dtype=dtype, + final_logging=False + ) + if frame_itr != len(frames) - 2: # Not append last frame unless this batch is the last one + frame_batch = frame_batch[:-1] + frame_batches.append(frame_batch) + output_frames = torch.cat(frame_batches) + print(f"Comfy-VFI done! 
{len(output_frames)} frames generated at resolution: {output_frames[0].shape}")
+        return output_frames
+    raise NotImplementedError(f"multiplier of type {type(multiplier)} is not supported")
+
+class FloatToInt:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {
+            "required": {
+                "float": ("FLOAT", {"default": 0, 'min': 0, 'step': 0.01})
+            }
+        }
+
+    RETURN_TYPES = ("INT",)
+    FUNCTION = "convert"
+    CATEGORY = "ComfyUI-Frame-Interpolation"
+
+    def convert(self, float):
+        if hasattr(float, "__iter__"):
+            return (list(map(int, float)),)
+        return (int(float),)
+
+""" def generic_4frame_loop(
+    frames,
+    clear_cache_after_n_frames,
+    multiplier: typing.SupportsInt,
+    return_middle_frame_function,
+    *return_middle_frame_function_args,
+    interpolation_states: InterpolationStateList = None,
+    use_timestep=False):
+
+    if use_timestep: raise NotImplementedError("Timestep 4 frame VFI model")
+    def non_timestep_inference(frame_0, frame_1, frame_2, frame_3, n):
+        middle = return_middle_frame_function(frame_0, frame_1, None, *return_middle_frame_function_args)
+        if n == 1:
+            return [middle]
+        first_half = non_timestep_inference(frame_0, middle, n=n//2)
+        second_half = non_timestep_inference(middle, frame_1, n=n//2)
+        if n%2:
+            return [*first_half, middle, *second_half]
+        else:
+            return [*first_half, *second_half] """
\ No newline at end of file
diff --git a/src/comfyui/custom_nodes/ComfyUI-GGUF/.gitignore b/src/comfyui/custom_nodes/ComfyUI-GGUF/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..691fb419f4ade324b6b9bc2a6e7437d3105c951b
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-GGUF/.gitignore
@@ -0,0 +1,167 @@
+*.bin
+*.gguf
+*.safetensors
+tools/llama.cpp/
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/latest/usage/project/#working-with-version-control +.pdm.toml +.pdm-python +.pdm-build/ + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ diff --git a/src/comfyui/custom_nodes/ComfyUI-GGUF/LICENSE b/src/comfyui/custom_nodes/ComfyUI-GGUF/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-GGUF/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/src/comfyui/custom_nodes/ComfyUI-GGUF/README.md b/src/comfyui/custom_nodes/ComfyUI-GGUF/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..e19aa88aaa702b337f179a80da86c7bae8500ee4
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-GGUF/README.md
@@ -0,0 +1,44 @@
+# ComfyUI-GGUF
+GGUF Quantization support for native ComfyUI models
+
+This is currently very much WIP. These custom nodes provide support for model files stored in the GGUF format popularized by [llama.cpp](https://github.com/ggerganov/llama.cpp).
+
+While quantization wasn't feasible for regular UNET models (conv2d), transformer/DiT models such as flux seem less affected by quantization. This makes it possible to run flux at much lower bits per weight, in variable-bitrate quants, on low-end GPUs. For further VRAM savings, a node to load a quantized version of the T5 text encoder is also included.
+ +![Comfy_Flux1_dev_Q4_0_GGUF_1024](https://github.com/user-attachments/assets/70d16d97-c522-4ef4-9435-633f128644c8) + +Note: The "Force/Set CLIP Device" is **NOT** part of this node pack. Do not install it if you only have one GPU. Do not set it to cuda:0 and then complain about OOM errors if you do not understand what it is for. There is no need to copy the workflow above; just use your own workflow and replace the stock "Load Diffusion Model" with the "Unet Loader (GGUF)" node. + +## Installation + +> [!IMPORTANT] +> Make sure your ComfyUI is on a recent-enough version to support custom ops when loading the UNET only. + +To install the custom node normally, git clone this repository into your custom nodes folder (`ComfyUI/custom_nodes`) and install the only dependency for inference (`pip install --upgrade gguf`): + +``` +git clone https://github.com/city96/ComfyUI-GGUF +``` + +To install the custom node on a standalone ComfyUI release, open a CMD inside the "ComfyUI_windows_portable" folder (where your `run_nvidia_gpu.bat` file is) and use the following commands: + +``` +git clone https://github.com/city96/ComfyUI-GGUF ComfyUI/custom_nodes/ComfyUI-GGUF +.\python_embeded\python.exe -s -m pip install -r .\ComfyUI\custom_nodes\ComfyUI-GGUF\requirements.txt +``` + +## Usage + +Simply use the GGUF Unet loader found under the `bootleg` category. Place the .gguf model files in your `ComfyUI/models/unet` folder. + +LoRA loading is experimental, but it should work with just the built-in LoRA loader node(s). + +Pre-quantized models: + +- [flux1-dev GGUF](https://huggingface.co/city96/FLUX.1-dev-gguf) +- [flux1-schnell GGUF](https://huggingface.co/city96/FLUX.1-schnell-gguf) + +Initial support for quantizing T5 has also been added recently; these models can be loaded with the various `*CLIPLoader (gguf)` nodes in place of the regular ones. For the CLIP model, use whatever model you were using before for CLIP. The loader can handle both types of files - `gguf` and regular `safetensors`/`bin`.
+ +- [t5_v1.1-xxl GGUF](https://huggingface.co/city96/t5-v1_1-xxl-encoder-gguf) + diff --git a/src/comfyui/custom_nodes/ComfyUI-GGUF/__init__.py b/src/comfyui/custom_nodes/ComfyUI-GGUF/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a03726e3b0a08957ded67cdd21beb9544a3f6e4d --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-GGUF/__init__.py @@ -0,0 +1,9 @@ +# only import if running as a custom node +try: + import comfy.utils +except ImportError: + pass +else: + from .nodes import NODE_CLASS_MAPPINGS + NODE_DISPLAY_NAME_MAPPINGS = {k:v.TITLE for k,v in NODE_CLASS_MAPPINGS.items()} + __all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS'] diff --git a/src/comfyui/custom_nodes/ComfyUI-GGUF/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-GGUF/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3154806bf8d2fe0036384d506a6278586d26e43e Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-GGUF/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-GGUF/__pycache__/dequant.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-GGUF/__pycache__/dequant.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8f48e9457d9ce8342f955ae2b87b6983000bd872 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-GGUF/__pycache__/dequant.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-GGUF/__pycache__/nodes.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-GGUF/__pycache__/nodes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e0d2a26b6ee3004f008b4e720b2bd4244d660738 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-GGUF/__pycache__/nodes.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-GGUF/__pycache__/ops.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-GGUF/__pycache__/ops.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a30382d16b0c11b50a29f6b2792c7bacbd3d94ac Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-GGUF/__pycache__/ops.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-GGUF/dequant.py b/src/comfyui/custom_nodes/ComfyUI-GGUF/dequant.py new file mode 100644 index 0000000000000000000000000000000000000000..8232500cb57400f2e4176d79809d2ebc509ebcc1 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-GGUF/dequant.py @@ -0,0 +1,248 @@ +# (c) City96 || Apache-2.0 (apache.org/licenses/LICENSE-2.0) +import gguf +import torch +from tqdm import tqdm + + +TORCH_COMPATIBLE_QTYPES = {None, gguf.GGMLQuantizationType.F32, gguf.GGMLQuantizationType.F16} + +def is_torch_compatible(tensor): + return tensor is None or getattr(tensor, "tensor_type", None) in TORCH_COMPATIBLE_QTYPES + +def is_quantized(tensor): + return not is_torch_compatible(tensor) + +def dequantize_tensor(tensor, dtype=None, dequant_dtype=None): + qtype = getattr(tensor, "tensor_type", None) + oshape = getattr(tensor, "tensor_shape", tensor.shape) + + if qtype in TORCH_COMPATIBLE_QTYPES: + return tensor.to(dtype) + elif qtype in dequantize_functions: + dequant_dtype = dtype if dequant_dtype == "target" else dequant_dtype + return dequantize(tensor.data, qtype, oshape, dtype=dequant_dtype).to(dtype) + else: + # this is incredibly slow + tqdm.write(f"Falling back to numpy dequant for qtype: {qtype}") + new = gguf.quants.dequantize(tensor.cpu().numpy(), qtype) + return 
torch.from_numpy(new).to(tensor.device, dtype=dtype) + +def dequantize(data, qtype, oshape, dtype=None): + """ + Dequantize tensor back to usable shape/dtype + """ + block_size, type_size = gguf.GGML_QUANT_SIZES[qtype] + dequantize_blocks = dequantize_functions[qtype] + + rows = data.reshape( + (-1, data.shape[-1]) + ).view(torch.uint8) + + n_blocks = rows.numel() // type_size + blocks = rows.reshape((n_blocks, type_size)) + blocks = dequantize_blocks(blocks, block_size, type_size, dtype) + return blocks.reshape(oshape) + +def to_uint32(x): + # no uint32 :( + x = x.view(torch.uint8).to(torch.int32) + return (x[:, 0] | x[:, 1] << 8 | x[:, 2] << 16 | x[:, 3] << 24).unsqueeze(1) + +def split_block_dims(blocks, *args): + n_max = blocks.shape[1] + dims = list(args) + [n_max - sum(args)] + return torch.split(blocks, dims, dim=1) + +# Full weights # +def dequantize_blocks_BF16(blocks, block_size, type_size, dtype=None): + return (blocks.view(torch.int16).to(torch.int32) << 16).view(torch.float32) + +# Legacy Quants # +def dequantize_blocks_Q8_0(blocks, block_size, type_size, dtype=None): + d, x = split_block_dims(blocks, 2) + d = d.view(torch.float16).to(dtype) + x = x.view(torch.int8) + return (d * x) + +def dequantize_blocks_Q5_1(blocks, block_size, type_size, dtype=None): + n_blocks = blocks.shape[0] + + d, m, qh, qs = split_block_dims(blocks, 2, 2, 4) + d = d.view(torch.float16).to(dtype) + m = m.view(torch.float16).to(dtype) + qh = to_uint32(qh) + + qh = qh.reshape((n_blocks, 1)) >> torch.arange(32, device=d.device, dtype=torch.int32).reshape(1, 32) + ql = qs.reshape((n_blocks, -1, 1, block_size // 2)) >> torch.tensor([0, 4], device=d.device, dtype=torch.uint8).reshape(1, 1, 2, 1) + qh = (qh & 1).to(torch.uint8) + ql = (ql & 0x0F).reshape((n_blocks, -1)) + + qs = (ql | (qh << 4)) + return (d * qs) + m + +def dequantize_blocks_Q5_0(blocks, block_size, type_size, dtype=None): + n_blocks = blocks.shape[0] + + d, qh, qs = split_block_dims(blocks, 2, 4) + d = d.view(torch.float16).to(dtype) + qh = to_uint32(qh) + + qh = qh.reshape(n_blocks, 1) >> torch.arange(32, device=d.device, dtype=torch.int32).reshape(1, 32) + ql = qs.reshape(n_blocks, -1, 1, block_size // 2) >> torch.tensor([0, 4], device=d.device, dtype=torch.uint8).reshape(1, 1, 2, 1) + + qh = (qh & 1).to(torch.uint8) + ql = (ql & 0x0F).reshape(n_blocks, -1) + + qs = (ql | (qh << 4)).to(torch.int8) - 16 + return (d * qs) + +def dequantize_blocks_Q4_1(blocks, block_size, type_size, dtype=None): + n_blocks = blocks.shape[0] + + d, m, qs = split_block_dims(blocks, 2, 2) + d = d.view(torch.float16).to(dtype) + m = m.view(torch.float16).to(dtype) + + qs = qs.reshape((n_blocks, -1, 1, block_size // 2)) >> torch.tensor([0, 4], device=d.device, dtype=torch.uint8).reshape(1, 1, 2, 1) + qs = (qs & 0x0F).reshape(n_blocks, -1) + + return (d * qs) + m + +def dequantize_blocks_Q4_0(blocks, block_size, type_size, dtype=None): + n_blocks = blocks.shape[0] + + d, qs = split_block_dims(blocks, 2) + d = d.view(torch.float16).to(dtype) + + qs = qs.reshape((n_blocks, -1, 1, block_size // 2)) >> torch.tensor([0, 4], device=d.device, dtype=torch.uint8).reshape((1, 1, 2, 1)) + qs = (qs & 0x0F).reshape((n_blocks, -1)).to(torch.int8) - 8 + return (d * qs) + +# K Quants # +QK_K = 256 +K_SCALE_SIZE = 12 + +def get_scale_min(scales): + n_blocks = scales.shape[0] + scales = scales.view(torch.uint8) + scales = scales.reshape((n_blocks, 3, 4)) + + d, m, m_d = torch.split(scales, scales.shape[-2] // 3, dim=-2) + + sc = torch.cat([d & 0x3F, (m_d & 0x0F) | ((d >> 2) & 
0x30)], dim=-1) + min = torch.cat([m & 0x3F, (m_d >> 4) | ((m >> 2) & 0x30)], dim=-1) + + return (sc.reshape((n_blocks, 8)), min.reshape((n_blocks, 8))) + +def dequantize_blocks_Q6_K(blocks, block_size, type_size, dtype=None): + n_blocks = blocks.shape[0] + + ql, qh, scales, d, = split_block_dims(blocks, QK_K // 2, QK_K // 4, QK_K // 16) + + scales = scales.view(torch.int8).to(dtype) + d = d.view(torch.float16).to(dtype) + d = (d * scales).reshape((n_blocks, QK_K // 16, 1)) + + ql = ql.reshape((n_blocks, -1, 1, 64)) >> torch.tensor([0, 4], device=d.device, dtype=torch.uint8).reshape((1, 1, 2, 1)) + ql = (ql & 0x0F).reshape((n_blocks, -1, 32)) + qh = qh.reshape((n_blocks, -1, 1, 32)) >> torch.tensor([0, 2, 4, 6], device=d.device, dtype=torch.uint8).reshape((1, 1, 4, 1)) + qh = (qh & 0x03).reshape((n_blocks, -1, 32)) + q = (ql | (qh << 4)).to(torch.int8) - 32 + q = q.reshape((n_blocks, QK_K // 16, -1)) + + return (d * q).reshape((n_blocks, QK_K)) + +def dequantize_blocks_Q5_K(blocks, block_size, type_size, dtype=None): + n_blocks = blocks.shape[0] + + d, dmin, scales, qh, qs = split_block_dims(blocks, 2, 2, K_SCALE_SIZE, QK_K // 8) + + d = d.view(torch.float16).to(dtype) + dmin = dmin.view(torch.float16).to(dtype) + + sc, m = get_scale_min(scales) + + d = (d * sc).reshape((n_blocks, -1, 1)) + dm = (dmin * m).reshape((n_blocks, -1, 1)) + + ql = qs.reshape((n_blocks, -1, 1, 32)) >> torch.tensor([0, 4], device=d.device, dtype=torch.uint8).reshape((1, 1, 2, 1)) + qh = qh.reshape((n_blocks, -1, 1, 32)) >> torch.tensor([i for i in range(8)], device=d.device, dtype=torch.uint8).reshape((1, 1, 8, 1)) + ql = (ql & 0x0F).reshape((n_blocks, -1, 32)) + qh = (qh & 0x01).reshape((n_blocks, -1, 32)) + q = (ql | (qh << 4)) + + return (d * q - dm).reshape((n_blocks, QK_K)) + +def dequantize_blocks_Q4_K(blocks, block_size, type_size, dtype=None): + n_blocks = blocks.shape[0] + + d, dmin, scales, qs = split_block_dims(blocks, 2, 2, K_SCALE_SIZE) + d = d.view(torch.float16).to(dtype) + dmin = dmin.view(torch.float16).to(dtype) + + sc, m = get_scale_min(scales) + + d = (d * sc).reshape((n_blocks, -1, 1)) + dm = (dmin * m).reshape((n_blocks, -1, 1)) + + qs = qs.reshape((n_blocks, -1, 1, 32)) >> torch.tensor([0, 4], device=d.device, dtype=torch.uint8).reshape((1, 1, 2, 1)) + qs = (qs & 0x0F).reshape((n_blocks, -1, 32)) + + return (d * qs - dm).reshape((n_blocks, QK_K)) + +def dequantize_blocks_Q3_K(blocks, block_size, type_size, dtype=None): + n_blocks = blocks.shape[0] + + hmask, qs, scales, d = split_block_dims(blocks, QK_K // 8, QK_K // 4, 12) + d = d.view(torch.float16).to(dtype) + + lscales, hscales = scales[:, :8], scales[:, 8:] + lscales = lscales.reshape((n_blocks, 1, 8)) >> torch.tensor([0, 4], device=d.device, dtype=torch.uint8).reshape((1, 2, 1)) + lscales = lscales.reshape((n_blocks, 16)) + hscales = hscales.reshape((n_blocks, 1, 4)) >> torch.tensor([0, 2, 4, 6], device=d.device, dtype=torch.uint8).reshape((1, 4, 1)) + hscales = hscales.reshape((n_blocks, 16)) + scales = (lscales & 0x0F) | ((hscales & 0x03) << 4) + scales = (scales.to(torch.int8) - 32) + + dl = (d * scales).reshape((n_blocks, 16, 1)) + + ql = qs.reshape((n_blocks, -1, 1, 32)) >> torch.tensor([0, 2, 4, 6], device=d.device, dtype=torch.uint8).reshape((1, 1, 4, 1)) + qh = hmask.reshape(n_blocks, -1, 1, 32) >> torch.tensor([i for i in range(8)], device=d.device, dtype=torch.uint8).reshape((1, 1, 8, 1)) + ql = ql.reshape((n_blocks, 16, QK_K // 16)) & 3 + qh = (qh.reshape((n_blocks, 16, QK_K // 16)) & 1) ^ 1 + q = (ql.to(torch.int8) - (qh << 
2).to(torch.int8)) + + return (dl * q).reshape((n_blocks, QK_K)) + +def dequantize_blocks_Q2_K(blocks, block_size, type_size, dtype=None): + n_blocks = blocks.shape[0] + + scales, qs, d, dmin = split_block_dims(blocks, QK_K // 16, QK_K // 4, 2) + d = d.view(torch.float16).to(dtype) + dmin = dmin.view(torch.float16).to(dtype) + + # (n_blocks, 16, 1) + dl = (d * (scales & 0xF)).reshape((n_blocks, QK_K // 16, 1)) + ml = (dmin * (scales >> 4)).reshape((n_blocks, QK_K // 16, 1)) + + shift = torch.tensor([0, 2, 4, 6], device=d.device, dtype=torch.uint8).reshape((1, 1, 4, 1)) + + qs = (qs.reshape((n_blocks, -1, 1, 32)) >> shift) & 3 + qs = qs.reshape((n_blocks, QK_K // 16, 16)) + qs = dl * qs - ml + + return qs.reshape((n_blocks, -1)) + +dequantize_functions = { + gguf.GGMLQuantizationType.BF16: dequantize_blocks_BF16, + gguf.GGMLQuantizationType.Q8_0: dequantize_blocks_Q8_0, + gguf.GGMLQuantizationType.Q5_1: dequantize_blocks_Q5_1, + gguf.GGMLQuantizationType.Q5_0: dequantize_blocks_Q5_0, + gguf.GGMLQuantizationType.Q4_1: dequantize_blocks_Q4_1, + gguf.GGMLQuantizationType.Q4_0: dequantize_blocks_Q4_0, + gguf.GGMLQuantizationType.Q6_K: dequantize_blocks_Q6_K, + gguf.GGMLQuantizationType.Q5_K: dequantize_blocks_Q5_K, + gguf.GGMLQuantizationType.Q4_K: dequantize_blocks_Q4_K, + gguf.GGMLQuantizationType.Q3_K: dequantize_blocks_Q3_K, + gguf.GGMLQuantizationType.Q2_K: dequantize_blocks_Q2_K, +} diff --git a/src/comfyui/custom_nodes/ComfyUI-GGUF/nodes.py b/src/comfyui/custom_nodes/ComfyUI-GGUF/nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..3533b895c4cfd8bd9a5f9b701a6fa6f8c7559358 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-GGUF/nodes.py @@ -0,0 +1,402 @@ +# (c) City96 || Apache-2.0 (apache.org/licenses/LICENSE-2.0) +import torch +import gguf +import copy +import logging + +import comfy.sd +import comfy.utils +import comfy.model_management +import comfy.model_patcher +import folder_paths + +from .ops import GGMLTensor, GGMLOps, move_patch_to_device +from .dequant import is_quantized, is_torch_compatible + +# Add custom keys for files ending in .gguf +if "unet_gguf" not in folder_paths.folder_names_and_paths: + orig = folder_paths.folder_names_and_paths.get("diffusion_models", folder_paths.folder_names_and_paths.get("unet", [[], set()])) + folder_paths.folder_names_and_paths["unet_gguf"] = (orig[0], {".gguf"}) + +if "clip_gguf" not in folder_paths.folder_names_and_paths: + orig = folder_paths.folder_names_and_paths.get("clip", [[], set()]) + folder_paths.folder_names_and_paths["clip_gguf"] = (orig[0], {".gguf"}) + +def gguf_sd_loader_get_orig_shape(reader, tensor_name): + field_key = f"comfy.gguf.orig_shape.{tensor_name}" + field = reader.get_field(field_key) + if field is None: + return None + # Has original shape metadata, so we try to decode it.
+ if len(field.types) != 2 or field.types[0] != gguf.GGUFValueType.ARRAY or field.types[1] != gguf.GGUFValueType.INT32: + raise TypeError(f"Bad original shape metadata for {field_key}: Expected ARRAY of INT32, got {field.types}") + return torch.Size(tuple(int(field.parts[part_idx][0]) for part_idx in field.data)) + +def gguf_sd_loader(path, handle_prefix="model.diffusion_model."): + """ + Read state dict as fake tensors + """ + reader = gguf.GGUFReader(path) + + # filter and strip prefix + has_prefix = False + if handle_prefix is not None: + prefix_len = len(handle_prefix) + tensor_names = set(tensor.name for tensor in reader.tensors) + has_prefix = any(s.startswith(handle_prefix) for s in tensor_names) + + tensors = [] + for tensor in reader.tensors: + sd_key = tensor_name = tensor.name + if has_prefix: + if not tensor_name.startswith(handle_prefix): + continue + sd_key = tensor_name[prefix_len:] + tensors.append((sd_key, tensor)) + + # detect and verify architecture + compat = None + arch_str = None + arch_field = reader.get_field("general.architecture") + if arch_field is not None: + if len(arch_field.types) != 1 or arch_field.types[0] != gguf.GGUFValueType.STRING: + raise TypeError(f"Bad type for GGUF general.architecture key: expected string, got {arch_field.types!r}") + arch_str = str(arch_field.parts[arch_field.data[-1]], encoding="utf-8") + if arch_str not in {"flux", "sd1", "sdxl", "t5", "t5encoder"}: + raise ValueError(f"Unexpected architecture type in GGUF file, expected one of flux, sd1, sdxl, t5encoder but got {arch_str!r}") + else: # stable-diffusion.cpp + # import here to avoid changes to convert.py breaking regular models + from .tools.convert import detect_arch + arch_str = detect_arch(set(val[0] for val in tensors)) + compat = "sd.cpp" + + # main loading loop + state_dict = {} + qtype_dict = {} + for sd_key, tensor in tensors: + tensor_name = tensor.name + tensor_type_str = str(tensor.tensor_type) + torch_tensor = torch.from_numpy(tensor.data) # mmap + + shape = gguf_sd_loader_get_orig_shape(reader, tensor_name) + if shape is None: + shape = torch.Size(tuple(int(v) for v in reversed(tensor.shape))) + # Workaround for stable-diffusion.cpp SDXL detection. 
+ if compat == "sd.cpp" and arch_str == "sdxl": + if any([tensor_name.endswith(x) for x in (".proj_in.weight", ".proj_out.weight")]): + while len(shape) > 2 and shape[-1] == 1: + shape = shape[:-1] + + # add to state dict + if tensor.tensor_type in {gguf.GGMLQuantizationType.F32, gguf.GGMLQuantizationType.F16}: + torch_tensor = torch_tensor.view(*shape) + state_dict[sd_key] = GGMLTensor(torch_tensor, tensor_type=tensor.tensor_type, tensor_shape=shape) + qtype_dict[tensor_type_str] = qtype_dict.get(tensor_type_str, 0) + 1 + + # sanity check debug print + print("\nggml_sd_loader:") + for k,v in qtype_dict.items(): + print(f" {k:30}{v:3}") + + return state_dict + +# for remapping llama.cpp -> original key names +clip_sd_map = { + "enc.": "encoder.", + ".blk.": ".block.", + "token_embd": "shared", + "output_norm": "final_layer_norm", + "attn_q": "layer.0.SelfAttention.q", + "attn_k": "layer.0.SelfAttention.k", + "attn_v": "layer.0.SelfAttention.v", + "attn_o": "layer.0.SelfAttention.o", + "attn_norm": "layer.0.layer_norm", + "attn_rel_b": "layer.0.SelfAttention.relative_attention_bias", + "ffn_up": "layer.1.DenseReluDense.wi_1", + "ffn_down": "layer.1.DenseReluDense.wo", + "ffn_gate": "layer.1.DenseReluDense.wi_0", + "ffn_norm": "layer.1.layer_norm", +} + +def gguf_clip_loader(path): + raw_sd = gguf_sd_loader(path) + assert "enc.blk.23.ffn_up.weight" in raw_sd, "Invalid Text Encoder!" + sd = {} + for k,v in raw_sd.items(): + for s,d in clip_sd_map.items(): + k = k.replace(s,d) + sd[k] = v + return sd + +# TODO: Temporary fix for now +import collections +class GGUFModelPatcher(comfy.model_patcher.ModelPatcher): + patch_on_device = False + + def patch_weight_to_device(self, key, device_to=None, inplace_update=False): + if key not in self.patches: + return + weight = comfy.utils.get_attr(self.model, key) + + try: + from comfy.lora import calculate_weight + except Exception: + calculate_weight = self.calculate_weight + + patches = self.patches[key] + if is_quantized(weight): + out_weight = weight.to(device_to) + patches = move_patch_to_device(patches, self.load_device if self.patch_on_device else self.offload_device) + # TODO: do we ever have legitimate duplicate patches? (i.e. 
patch on top of patched weight) + out_weight.patches = [(calculate_weight, patches, key)] + else: + inplace_update = self.weight_inplace_update or inplace_update + if key not in self.backup: + self.backup[key] = collections.namedtuple('Dimension', ['weight', 'inplace_update'])( + weight.to(device=self.offload_device, copy=inplace_update), inplace_update + ) + + if device_to is not None: + temp_weight = comfy.model_management.cast_to_device(weight, device_to, torch.float32, copy=True) + else: + temp_weight = weight.to(torch.float32, copy=True) + + out_weight = calculate_weight(patches, temp_weight, key) + out_weight = comfy.float.stochastic_rounding(out_weight, weight.dtype) + + if inplace_update: + comfy.utils.copy_to_param(self.model, key, out_weight) + else: + comfy.utils.set_attr_param(self.model, key, out_weight) + + def unpatch_model(self, device_to=None, unpatch_weights=True): + if unpatch_weights: + for p in self.model.parameters(): + if is_torch_compatible(p): + continue + patches = getattr(p, "patches", []) + if len(patches) > 0: + p.patches = [] + # TODO: Find another way to not unload after patches + return super().unpatch_model(device_to=device_to, unpatch_weights=unpatch_weights) + + mmap_released = False + def load(self, *args, force_patch_weights=False, **kwargs): + # always call `patch_weight_to_device` even for lowvram + super().load(*args, force_patch_weights=True, **kwargs) + + # make sure nothing stays linked to mmap after first load + if not self.mmap_released: + linked = [] + if kwargs.get("lowvram_model_memory", 0) > 0: + for n, m in self.model.named_modules(): + if hasattr(m, "weight"): + device = getattr(m.weight, "device", None) + if device == self.offload_device: + linked.append((n, m)) + continue + if hasattr(m, "bias"): + device = getattr(m.bias, "device", None) + if device == self.offload_device: + linked.append((n, m)) + continue + if linked: + print(f"Attempting to release mmap ({len(linked)})") + for n, m in linked: + # TODO: possible to OOM, find better way to detach + m.to(self.load_device).to(self.offload_device) + self.mmap_released = True + + def clone(self, *args, **kwargs): + n = GGUFModelPatcher(self.model, self.load_device, self.offload_device, self.size, weight_inplace_update=self.weight_inplace_update) + n.patches = {} + for k in self.patches: + n.patches[k] = self.patches[k][:] + n.patches_uuid = self.patches_uuid + + n.object_patches = self.object_patches.copy() + n.model_options = copy.deepcopy(self.model_options) + n.backup = self.backup + n.object_patches_backup = self.object_patches_backup + n.patch_on_device = getattr(self, "patch_on_device", False) + return n + +class UnetLoaderGGUF: + @classmethod + def INPUT_TYPES(s): + unet_names = [x for x in folder_paths.get_filename_list("unet_gguf")] + return { + "required": { + "unet_name": (unet_names,), + } + } + + RETURN_TYPES = ("MODEL",) + FUNCTION = "load_unet" + CATEGORY = "bootleg" + TITLE = "Unet Loader (GGUF)" + + def load_unet(self, unet_name, dequant_dtype=None, patch_dtype=None, patch_on_device=None): + ops = GGMLOps() + + if dequant_dtype in ("default", None): + ops.Linear.dequant_dtype = None + elif dequant_dtype in ["target"]: + ops.Linear.dequant_dtype = dequant_dtype + else: + ops.Linear.dequant_dtype = getattr(torch, dequant_dtype) + + if patch_dtype in ("default", None): + ops.Linear.patch_dtype = None + elif patch_dtype in ["target"]: + ops.Linear.patch_dtype = patch_dtype + else: + ops.Linear.patch_dtype = getattr(torch, patch_dtype) + + # init model + unet_path = 
folder_paths.get_full_path("unet", unet_name) + sd = gguf_sd_loader(unet_path) + model = comfy.sd.load_diffusion_model_state_dict( + sd, model_options={"custom_operations": ops} + ) + if model is None: + logging.error("ERROR UNSUPPORTED UNET {}".format(unet_path)) + raise RuntimeError("ERROR: Could not detect model type of: {}".format(unet_path)) + model = GGUFModelPatcher.clone(model) + model.patch_on_device = patch_on_device + return (model,) + +class UnetLoaderGGUFAdvanced(UnetLoaderGGUF): + @classmethod + def INPUT_TYPES(s): + unet_names = [x for x in folder_paths.get_filename_list("unet_gguf")] + return { + "required": { + "unet_name": (unet_names,), + "dequant_dtype": (["default", "target", "float32", "float16", "bfloat16"], {"default": "default"}), + "patch_dtype": (["default", "target", "float32", "float16", "bfloat16"], {"default": "default"}), + "patch_on_device": ("BOOLEAN", {"default": False}), + } + } + TITLE = "Unet Loader (GGUF/Advanced)" + +clip_name_dict = { + "stable_diffusion": comfy.sd.CLIPType.STABLE_DIFFUSION, + "stable_cascade": comfy.sd.CLIPType.STABLE_CASCADE, + "stable_audio": comfy.sd.CLIPType.STABLE_AUDIO, + "sdxl": comfy.sd.CLIPType.STABLE_DIFFUSION, + "sd3": comfy.sd.CLIPType.SD3, + "flux": comfy.sd.CLIPType.FLUX, +} + +class CLIPLoaderGGUF: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "clip_name": (s.get_filename_list(),), + "type": (["stable_diffusion", "stable_cascade", "sd3", "stable_audio"],), + } + } + + RETURN_TYPES = ("CLIP",) + FUNCTION = "load_clip" + CATEGORY = "bootleg" + TITLE = "CLIPLoader (GGUF)" + + @classmethod + def get_filename_list(s): + files = [] + files += folder_paths.get_filename_list("clip") + files += folder_paths.get_filename_list("clip_gguf") + return sorted(files) + + def load_data(self, ckpt_paths): + clip_data = [] + for p in ckpt_paths: + if p.endswith(".gguf"): + clip_data.append(gguf_clip_loader(p)) + else: + sd = comfy.utils.load_torch_file(p, safe_load=True) + clip_data.append( + {k:GGMLTensor(v, tensor_type=gguf.GGMLQuantizationType.F16, tensor_shape=v.shape) for k,v in sd.items()} + ) + return clip_data + + def load_patcher(self, clip_paths, clip_type, clip_data): + clip = comfy.sd.load_text_encoder_state_dicts( + clip_type = clip_type, + state_dicts = clip_data, + model_options = { + "custom_operations": GGMLOps, + "initial_device": comfy.model_management.text_encoder_offload_device() + }, + embedding_directory = folder_paths.get_folder_paths("embeddings"), + ) + clip.patcher = GGUFModelPatcher.clone(clip.patcher) + + # for some reason this is just missing in some SAI checkpoints + if getattr(clip.cond_stage_model, "clip_l", None) is not None: + if getattr(clip.cond_stage_model.clip_l.transformer.text_projection.weight, "tensor_shape", None) is None: + clip.cond_stage_model.clip_l.transformer.text_projection = comfy.ops.manual_cast.Linear(768, 768) + if getattr(clip.cond_stage_model, "clip_g", None) is not None: + if getattr(clip.cond_stage_model.clip_g.transformer.text_projection.weight, "tensor_shape", None) is None: + clip.cond_stage_model.clip_g.transformer.text_projection = comfy.ops.manual_cast.Linear(1280, 1280) + + return clip + + def load_clip(self, clip_name, type="stable_diffusion"): + clip_path = folder_paths.get_full_path("clip", clip_name) + clip_type = clip_name_dict.get(type, comfy.sd.CLIPType.STABLE_DIFFUSION) + return (self.load_patcher([clip_path], clip_type, self.load_data([clip_path])),) + +class DualCLIPLoaderGGUF(CLIPLoaderGGUF): + @classmethod + def INPUT_TYPES(s): + 
file_options = (s.get_filename_list(), ) + return { + "required": { + "clip_name1": file_options, + "clip_name2": file_options, + "type": (("sdxl", "sd3", "flux"), ), + } + } + + TITLE = "DualCLIPLoader (GGUF)" + + def load_clip(self, clip_name1, clip_name2, type): + clip_path1 = folder_paths.get_full_path("clip", clip_name1) + clip_path2 = folder_paths.get_full_path("clip", clip_name2) + clip_paths = (clip_path1, clip_path2) + clip_type = clip_name_dict.get(type, comfy.sd.CLIPType.STABLE_DIFFUSION) + return (self.load_patcher(clip_paths, clip_type, self.load_data(clip_paths)),) + +class TripleCLIPLoaderGGUF(CLIPLoaderGGUF): + @classmethod + def INPUT_TYPES(s): + file_options = (s.get_filename_list(), ) + return { + "required": { + "clip_name1": file_options, + "clip_name2": file_options, + "clip_name3": file_options, + } + } + + TITLE = "TripleCLIPLoader (GGUF)" + + def load_clip(self, clip_name1, clip_name2, clip_name3, type="sd3"): + clip_path1 = folder_paths.get_full_path("clip", clip_name1) + clip_path2 = folder_paths.get_full_path("clip", clip_name2) + clip_path3 = folder_paths.get_full_path("clip", clip_name3) + clip_paths = (clip_path1, clip_path2, clip_path3) + clip_type = clip_name_dict.get(type, comfy.sd.CLIPType.STABLE_DIFFUSION) + return (self.load_patcher(clip_paths, clip_type, self.load_data(clip_paths)),) + +NODE_CLASS_MAPPINGS = { + "UnetLoaderGGUF": UnetLoaderGGUF, + "CLIPLoaderGGUF": CLIPLoaderGGUF, + "DualCLIPLoaderGGUF": DualCLIPLoaderGGUF, + "TripleCLIPLoaderGGUF": TripleCLIPLoaderGGUF, + "UnetLoaderGGUFAdvanced": UnetLoaderGGUFAdvanced, +} diff --git a/src/comfyui/custom_nodes/ComfyUI-GGUF/ops.py b/src/comfyui/custom_nodes/ComfyUI-GGUF/ops.py new file mode 100644 index 0000000000000000000000000000000000000000..2b9be4415f903cc07618b0178fdb0254dc206173 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-GGUF/ops.py @@ -0,0 +1,212 @@ +# (c) City96 || Apache-2.0 (apache.org/licenses/LICENSE-2.0) +import gguf +import torch + +import comfy.ops +import comfy.model_management +from .dequant import dequantize_tensor, is_quantized + +class GGMLTensor(torch.Tensor): + """ + Main tensor-like class for storing quantized weights + """ + def __init__(self, *args, tensor_type, tensor_shape, patches=[], **kwargs): + super().__init__() + self.tensor_type = tensor_type + self.tensor_shape = tensor_shape + self.patches = patches + + def __new__(cls, *args, tensor_type, tensor_shape, patches=[], **kwargs): + return super().__new__(cls, *args, **kwargs) + + def to(self, *args, **kwargs): + new = super().to(*args, **kwargs) + new.tensor_type = getattr(self, "tensor_type", None) + new.tensor_shape = getattr(self, "tensor_shape", new.data.shape) + new.patches = getattr(self, "patches", []).copy() + return new + + def clone(self, *args, **kwargs): + return self + + def detach(self, *args, **kwargs): + return self + + def copy_(self, *args, **kwargs): + # fixes .weight.copy_ in comfy/clip_model/CLIPTextModel + try: + return super().copy_(*args, **kwargs) + except Exception as e: + print(f"ignoring 'copy_' on tensor: {e}") + + def __deepcopy__(self, *args, **kwargs): + # Intel Arc fix, ref#50 + new = super().__deepcopy__(*args, **kwargs) + new.tensor_type = getattr(self, "tensor_type", None) + new.tensor_shape = getattr(self, "tensor_shape", new.data.shape) + new.patches = getattr(self, "patches", []).copy() + return new + + @property + def shape(self): + if not hasattr(self, "tensor_shape"): + self.tensor_shape = self.size() + return self.tensor_shape + +class GGMLLayer(torch.nn.Module): + 
""" + This (should) be responsible for de-quantizing on the fly + """ + comfy_cast_weights = True + dequant_dtype = None + patch_dtype = None + torch_compatible_tensor_types = {None, gguf.GGMLQuantizationType.F32, gguf.GGMLQuantizationType.F16} + + def is_ggml_quantized(self, *, weight=None, bias=None): + if weight is None: + weight = self.weight + if bias is None: + bias = self.bias + return is_quantized(weight) or is_quantized(bias) + + def _load_from_state_dict(self, state_dict, prefix, *args, **kwargs): + weight, bias = state_dict.get(f"{prefix}weight"), state_dict.get(f"{prefix}bias") + # NOTE: using modified load for linear due to not initializing on creation, see GGMLOps todo + if self.is_ggml_quantized(weight=weight, bias=bias) or isinstance(self, torch.nn.Linear): + return self.ggml_load_from_state_dict(state_dict, prefix, *args, **kwargs) + return super()._load_from_state_dict(state_dict, prefix, *args, **kwargs) + + def ggml_load_from_state_dict(self, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs): + prefix_len = len(prefix) + for k,v in state_dict.items(): + if k[prefix_len:] == "weight": + self.weight = torch.nn.Parameter(v, requires_grad=False) + elif k[prefix_len:] == "bias" and v is not None: + self.bias = torch.nn.Parameter(v, requires_grad=False) + else: + missing_keys.append(k) + + def _save_to_state_dict(self, *args, **kwargs): + if self.is_ggml_quantized(): + return self.ggml_save_to_state_dict(*args, **kwargs) + return super()._save_to_state_dict(*args, **kwargs) + + def ggml_save_to_state_dict(self, destination, prefix, keep_vars): + # This is a fake state dict for vram estimation + weight = torch.zeros_like(self.weight, device=torch.device("meta")) + destination[prefix + "weight"] = weight + if self.bias is not None: + bias = torch.zeros_like(self.bias, device=torch.device("meta")) + destination[prefix + "bias"] = bias + return + + # This would return the actual state dict + destination[prefix + "weight"] = self.get_weight(self.weight) + if bias is not None: + destination[prefix + "bias"] = self.get_weight(self.bias) + + def get_weight(self, tensor, dtype): + if tensor is None: + return + + # consolidate and load patches to GPU in async + patch_list = [] + device = tensor.device + for function, patches, key in getattr(tensor, "patches", []): + patch_list += move_patch_to_device(patches, device) + + # dequantize tensor while patches load + weight = dequantize_tensor(tensor, dtype, self.dequant_dtype) + + # apply patches + if patch_list: + if self.patch_dtype is None: + weight = function(patch_list, weight, key) + else: + # for testing, may degrade image quality + patch_dtype = dtype if self.patch_dtype == "target" else self.patch_dtype + weight = function(patch_list, weight, key, patch_dtype) + return weight + + def cast_bias_weight(s, input=None, dtype=None, device=None, bias_dtype=None): + if input is not None: + if dtype is None: + dtype = getattr(input, "dtype", torch.float32) + if bias_dtype is None: + bias_dtype = dtype + if device is None: + device = input.device + + bias = None + non_blocking = comfy.model_management.device_supports_non_blocking(device) + if s.bias is not None: + bias = s.get_weight(s.bias.to(device), dtype) + bias = comfy.ops.cast_to(bias, bias_dtype, device, non_blocking=non_blocking, copy=False) + + weight = s.get_weight(s.weight.to(device), dtype) + weight = comfy.ops.cast_to(weight, dtype, device, non_blocking=non_blocking, copy=False) + return weight, bias + + def forward_comfy_cast_weights(self, 
input, *args, **kwargs): + if self.is_ggml_quantized(): + return self.forward_ggml_cast_weights(input, *args, **kwargs) + return super().forward_comfy_cast_weights(input, *args, **kwargs) + + def forward_ggml_cast_weights(self, input): + raise NotImplementedError + +class GGMLOps(comfy.ops.manual_cast): + """ + Dequantize weights on the fly before doing the compute + """ + class Linear(GGMLLayer, comfy.ops.manual_cast.Linear): + def __init__(self, in_features, out_features, bias=True, device=None, dtype=None): + torch.nn.Module.__init__(self) + # TODO: better workaround for reserved memory spike on windows + # Issue is with `torch.empty` still reserving the full memory for the layer + # Windows doesn't over-commit memory so without this 24GB+ of pagefile is used + self.in_features = in_features + self.out_features = out_features + self.weight = None + self.bias = None + + def forward_ggml_cast_weights(self, input): + weight, bias = self.cast_bias_weight(input) + return torch.nn.functional.linear(input, weight, bias) + + class Conv2d(GGMLLayer, comfy.ops.manual_cast.Conv2d): + def forward_ggml_cast_weights(self, input): + weight, bias = self.cast_bias_weight(input) + return self._conv_forward(input, weight, bias) + + class Embedding(GGMLLayer, comfy.ops.manual_cast.Embedding): + def forward_ggml_cast_weights(self, input, out_dtype=None): + output_dtype = out_dtype + if self.weight.dtype == torch.float16 or self.weight.dtype == torch.bfloat16: + out_dtype = None + weight, _bias = self.cast_bias_weight(self, device=input.device, dtype=out_dtype) + return torch.nn.functional.embedding( + input, weight, self.padding_idx, self.max_norm, self.norm_type, self.scale_grad_by_freq, self.sparse + ).to(dtype=output_dtype) + + class LayerNorm(GGMLLayer, comfy.ops.manual_cast.LayerNorm): + def forward_ggml_cast_weights(self, input): + if self.weight is None: + return super().forward_comfy_cast_weights(input) + weight, bias = self.cast_bias_weight(input) + return torch.nn.functional.layer_norm(input, self.normalized_shape, weight, bias, self.eps) + + class GroupNorm(GGMLLayer, comfy.ops.manual_cast.GroupNorm): + def forward_ggml_cast_weights(self, input): + weight, bias = self.cast_bias_weight(input) + return torch.nn.functional.group_norm(input, self.num_groups, weight, bias, self.eps) + +def move_patch_to_device(item, device): + if isinstance(item, torch.Tensor): + return item.to(device, non_blocking=True) + elif isinstance(item, tuple): + return tuple(move_patch_to_device(x, device) for x in item) + elif isinstance(item, list): + return [move_patch_to_device(x, device) for x in item] + else: + return item diff --git a/src/comfyui/custom_nodes/ComfyUI-GGUF/requirements.txt b/src/comfyui/custom_nodes/ComfyUI-GGUF/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..1c762673141849ccbe784c9134dfb732182632ef --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-GGUF/requirements.txt @@ -0,0 +1,2 @@ +gguf>=0.9.1 +numpy<2.0.0 diff --git a/src/comfyui/custom_nodes/ComfyUI-GGUF/tools/README.md b/src/comfyui/custom_nodes/ComfyUI-GGUF/tools/README.md new file mode 100644 index 0000000000000000000000000000000000000000..23c40d77c1251bd54ba75949478df99bc240dde2 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-GGUF/tools/README.md @@ -0,0 +1,49 @@ +This needs the llama.cpp version of gguf-py to work at the moment, not the pip one as that one does not have the python quantization code yet. 
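If you are unsure which copy of gguf-py is installed, the check below is a minimal sketch; it relies on the fact that `convert.py` in this folder calls `gguf.quants.quantize` and falls back to F16 when that call raises `AttributeError`:

```
# Sketch: detect whether the installed gguf-py ships the Python-side
# quantization helpers (the llama.cpp copy does; pip builds may not yet).
import gguf
print("can quantize:", hasattr(getattr(gguf, "quants", None), "quantize"))
```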
+ +``` +git clone https://github.com/ggerganov/llama.cpp +pip install llama.cpp/gguf-py +``` + + +To convert your initial source model to FP16 (or BF16), run the following command: +``` +python convert.py --src E:\models\unet\flux1-dev.safetensors +``` + + +To quantize the model, first apply the provided patch to the llama.cpp repo you've just cloned. If you get a "corrupt patch" error, you may have to [change the line endings in the patch file](https://github.com/city96/ComfyUI-GGUF/issues/90#issuecomment-2323011648). +``` +cd llama.cpp +git checkout tags/b3600 +git apply ..\lcpp.patch +``` + + +Then, compile the llama-quantize binary. This example uses cmake; on Linux you can just use make. +``` +mkdir build +cd build +cmake .. +cmake --build . --config Debug -j10 --target llama-quantize +cd .. +cd .. +``` + + +Now you can use the newly built binary to quantize your model to the desired format: +``` +llama.cpp\build\bin\Debug\llama-quantize.exe E:\models\unet\flux1-dev-BF16.gguf E:\models\unet\flux1-dev-Q4_K_S.gguf Q4_K_S +``` + + +You can extract the patch again with `git diff src\llama.cpp > lcpp.patch` if you wish to change something and contribute back. + + +> [!WARNING] +> Do not use the diffusers UNET for flux; it won't work. Use the default/reference checkpoint format. This is due to q/k/v being merged into one qkv key. You can convert it by loading it in ComfyUI and saving it using the built-in "ModelSave" node. + + +> [!WARNING] +> Do not quantize SDXL / SD1 / other Conv2D heavy models. There's little to no benefit with these models. If you do, make sure to **extract the UNET model first**. +> This should be obvious, but also don't use the resulting llama-quantize binary with LLMs. diff --git a/src/comfyui/custom_nodes/ComfyUI-GGUF/tools/__pycache__/convert.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-GGUF/tools/__pycache__/convert.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7242624759f10c2cd4c7ebded644cd8e52192ecc Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-GGUF/tools/__pycache__/convert.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-GGUF/tools/convert.py b/src/comfyui/custom_nodes/ComfyUI-GGUF/tools/convert.py new file mode 100644 index 0000000000000000000000000000000000000000..63bded913fa4f84ad52590673990ade4a50c4193 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-GGUF/tools/convert.py @@ -0,0 +1,199 @@ +# (c) City96 || Apache-2.0 (apache.org/licenses/LICENSE-2.0) +import os +import torch +import gguf # This needs to be the llama.cpp one specifically! +import argparse +from tqdm import tqdm + +from safetensors.torch import load_file + +QUANTIZATION_THRESHOLD = 1024 +REARRANGE_THRESHOLD = 512 +MAX_TENSOR_NAME_LENGTH = 127 + +# Tuple of arch_name, match_lists. +# Each item in match_lists is a tuple of keys that must match. +# All keys in a match_lists item must exist for the architecture to match. +# The architectures are checked in order and the first successful match terminates the search.
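+# For example, a reference flux checkpoint contains "double_blocks.0.img_attn.proj.weight", +# which satisfies the second "flux" match_list below, so the search stops at "flux".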
+MODEL_DETECTION = ( + ("flux", ( + ("transformer_blocks.0.attn.norm_added_k.weight",), + ("double_blocks.0.img_attn.proj.weight",), + )), + ("sd3", ( + ("transformer_blocks.0.attn.add_q_proj.weight",), + )), + ("sdxl", ( + ("down_blocks.0.downsamplers.0.conv.weight", "add_embedding.linear_1.weight",), + ( + "input_blocks.3.0.op.weight", "input_blocks.6.0.op.weight", + "output_blocks.2.2.conv.weight", "output_blocks.5.2.conv.weight", + ), # Non-diffusers + ("label_emb.0.0.weight",), + )), + ("sd1", ( + ("down_blocks.0.downsamplers.0.conv.weight",), + ( + "input_blocks.3.0.op.weight", "input_blocks.6.0.op.weight", "input_blocks.9.0.op.weight", + "output_blocks.2.1.conv.weight", "output_blocks.5.2.conv.weight", "output_blocks.8.2.conv.weight" + ), # Non-diffusers + )), +) + + +def parse_args(): + parser = argparse.ArgumentParser(description="Generate F16 GGUF files from single UNET") + parser.add_argument("--src", required=True, help="Source model ckpt file.") + parser.add_argument("--dst", help="Output unet gguf file.") + args = parser.parse_args() + + if not os.path.isfile(args.src): + parser.error("Input file does not exist!") + + return args + +def load_state_dict(path): + if any(path.endswith(x) for x in [".ckpt", ".pt", ".bin", ".pth"]): + state_dict = torch.load(path, map_location="cpu", weights_only=True) + state_dict = state_dict.get("model", state_dict) + else: + state_dict = load_file(path) + + # only keep unet with no prefix! + sd = {} + has_prefix = any(["model.diffusion_model." in x for x in state_dict.keys()]) + for k, v in state_dict.items(): + if has_prefix and "model.diffusion_model." not in k: + continue + if has_prefix: + k = k.replace("model.diffusion_model.", "") + sd[k] = v + + return sd + +def detect_arch(state_dict): + for arch, match_lists in MODEL_DETECTION: + for match_list in match_lists: + if all(key in state_dict for key in match_list): + return arch + raise ValueError("Unknown model architecture!") + + +def load_model(path): + state_dict = load_state_dict(path) + arch = detect_arch(state_dict) + print(f"* Architecture detected from input: {arch}") + if arch == "flux" and "transformer_blocks.0.attn.norm_added_k.weight" in state_dict: + raise ValueError("The Diffusers UNET cannot be used for this!") + writer = gguf.GGUFWriter(path=None, arch=arch) + return (writer, state_dict) + +def handle_tensors(args, writer, state_dict): + # TODO list: + # - do something about this being awful and hacky + + name_lengths = tuple(sorted( + ((key, len(key)) for key in state_dict.keys()), + key=lambda item: item[1], + reverse=True, + )) + if not name_lengths: + return + max_name_len = name_lengths[0][1] + if max_name_len > MAX_TENSOR_NAME_LENGTH: + bad_list = ", ".join(f"{key!r} ({namelen})" for key, namelen in name_lengths if namelen > MAX_TENSOR_NAME_LENGTH) + raise ValueError(f"Can only handle tensor names up to {MAX_TENSOR_NAME_LENGTH} characters.
Tensors exceeding the limit: {bad_list}") + for key, data in tqdm(state_dict.items()): + old_dtype = data.dtype + + if data.dtype == torch.bfloat16: + data = data.to(torch.float32).numpy() + # this is so we don't break torch 2.0.X + elif data.dtype in [getattr(torch, "float8_e4m3fn", "_invalid"), getattr(torch, "float8_e5m2", "_invalid")]: + data = data.to(torch.float16).numpy() + else: + data = data.numpy() + + n_dims = len(data.shape) + data_shape = data.shape + data_qtype = getattr( + gguf.GGMLQuantizationType, + "BF16" if old_dtype == torch.bfloat16 else "F16" + ) + + # get number of parameters (AKA elements) in this tensor + n_params = 1 + for dim_size in data_shape: + n_params *= dim_size + + # keys to keep as max precision + blacklist = { + "time_embedding.", + "add_embedding.", + "time_in.", + "txt_in.", + "vector_in.", + "img_in.", + "guidance_in.", + "final_layer.", + } + + if old_dtype in (torch.float32, torch.bfloat16): + if n_dims == 1: + # one-dimensional tensors should be kept in F32 + # also speeds up inference due to not dequantizing + data_qtype = gguf.GGMLQuantizationType.F32 + + elif n_params <= QUANTIZATION_THRESHOLD: + # very small tensors + data_qtype = gguf.GGMLQuantizationType.F32 + + elif ".weight" in key and any(x in key for x in blacklist): + data_qtype = gguf.GGMLQuantizationType.F32 + + if ( n_dims > 1 # Skip one-dimensional tensors + and n_params >= REARRANGE_THRESHOLD # Only rearrange tensors meeting the size requirement + and (n_params / 256).is_integer() # Rearranging only makes sense if total elements is divisible by 256 + and not (data.shape[-1] / 256).is_integer() # Only need to rearrange if the last dimension is not divisible by 256 + ): + orig_shape = data.shape + data = data.reshape(n_params // 256, 256) + writer.add_array(f"comfy.gguf.orig_shape.{key}", tuple(int(dim) for dim in orig_shape)) + + try: + data = gguf.quants.quantize(data, data_qtype) + except (AttributeError, gguf.QuantError) as e: + tqdm.write(f"falling back to F16: {e}") + data_qtype = gguf.GGMLQuantizationType.F16 + data = gguf.quants.quantize(data, data_qtype) + + new_name = key # do we need to rename? 
+ + shape_str = f"{{{', '.join(str(n) for n in reversed(data.shape))}}}" + tqdm.write(f"{f'%-{max_name_len + 4}s' % f'{new_name}'} {old_dtype} --> {data_qtype.name}, shape = {shape_str}") + + writer.add_tensor(new_name, data, raw_dtype=data_qtype) + +if __name__ == "__main__": + args = parse_args() + path = args.src + writer, state_dict = load_model(path) + + writer.add_quantization_version(gguf.GGML_QUANT_VERSION) + if next(iter(state_dict.values())).dtype == torch.bfloat16: + out_path = f"{os.path.splitext(path)[0]}-BF16.gguf" + writer.add_file_type(gguf.LlamaFileType.MOSTLY_BF16) + else: + out_path = f"{os.path.splitext(path)[0]}-F16.gguf" + writer.add_file_type(gguf.LlamaFileType.MOSTLY_F16) + + out_path = args.dst or out_path + if os.path.isfile(out_path): + input("Output exists, press enter to continue or ctrl+c to abort!") + + handle_tensors(args, writer, state_dict) + writer.write_header_to_file(path=out_path) + writer.write_kv_data_to_file() + writer.write_tensors_to_file(progress=True) + writer.close() diff --git a/src/comfyui/custom_nodes/ComfyUI-GGUF/tools/lcpp.patch b/src/comfyui/custom_nodes/ComfyUI-GGUF/tools/lcpp.patch new file mode 100644 index 0000000000000000000000000000000000000000..e7bfbffafaace3077bc52b35bafdea3542367e4c --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-GGUF/tools/lcpp.patch @@ -0,0 +1,223 @@ +diff --git a/ggml/include/ggml.h b/ggml/include/ggml.h +index 1d2a3540..b1a9ee96 100644 +--- a/ggml/include/ggml.h ++++ b/ggml/include/ggml.h +@@ -230,7 +230,7 @@ + #define GGML_MAX_CONTEXTS 64 + #define GGML_MAX_SRC 10 + #ifndef GGML_MAX_NAME +-#define GGML_MAX_NAME 64 ++#define GGML_MAX_NAME 128 + #endif + #define GGML_MAX_OP_PARAMS 64 + #define GGML_DEFAULT_N_THREADS 4 +diff --git a/src/llama.cpp b/src/llama.cpp +index 5ab65ea9..35580d9d 100644 +--- a/src/llama.cpp ++++ b/src/llama.cpp +@@ -212,6 +212,9 @@ enum llm_arch { + LLM_ARCH_JAIS, + LLM_ARCH_NEMOTRON, + LLM_ARCH_EXAONE, ++ LLM_ARCH_FLUX, ++ LLM_ARCH_SD1, ++ LLM_ARCH_SDXL, + LLM_ARCH_UNKNOWN, + }; + +@@ -259,6 +262,9 @@ static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = { + { LLM_ARCH_JAIS, "jais" }, + { LLM_ARCH_NEMOTRON, "nemotron" }, + { LLM_ARCH_EXAONE, "exaone" }, ++ { LLM_ARCH_FLUX, "flux" }, ++ { LLM_ARCH_SD1, "sd1" }, ++ { LLM_ARCH_SDXL, "sdxl" }, + { LLM_ARCH_UNKNOWN, "(unknown)" }, + }; + +@@ -1337,6 +1343,9 @@ static const std::map<llm_arch, std::map<llm_tensor, std::string>> LLM_TENSOR_NA + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + }, + }, ++ { LLM_ARCH_FLUX, {}}, ++ { LLM_ARCH_SD1, {}}, ++ { LLM_ARCH_SDXL, {}}, + { + LLM_ARCH_UNKNOWN, + { +@@ -4629,6 +4638,12 @@ static void llm_load_hparams( + // get general kv + ml.get_key(LLM_KV_GENERAL_NAME, model.name, false); + ++ // Disable LLM metadata for image models ++ if (model.arch == LLM_ARCH_FLUX || model.arch == LLM_ARCH_SD1 || model.arch == LLM_ARCH_SDXL) { ++ model.ftype = ml.ftype; ++ return; ++ } ++ + // get hparams kv + ml.get_key(LLM_KV_VOCAB_SIZE, hparams.n_vocab, false) || ml.get_arr_n(LLM_KV_TOKENIZER_LIST, hparams.n_vocab); + +@@ -15827,11 +15842,162 @@ static void llama_tensor_dequantize_internal( + workers.clear(); + } + ++static ggml_type img_tensor_get_type(quantize_state_internal & qs, ggml_type new_type, const ggml_tensor * tensor, llama_ftype ftype) { ++ // Special function for quantizing image model tensors ++ const std::string name = ggml_get_name(tensor); ++ const llm_arch arch = qs.model.arch; ++ ++ // Sanity check ++ if ( ++ (name.find("model.diffusion_model.") != std::string::npos) || ++ (name.find("first_stage_model.") != std::string::npos) || ++
(name.find("single_transformer_blocks.") != std::string::npos) ++ ) { ++ throw std::runtime_error("Invalid input GGUF file. This is not a supported UNET model"); ++ } ++ ++ // Unsupported quant types - exclude all IQ quants for now ++ if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || ++ ftype == LLAMA_FTYPE_MOSTLY_IQ2_S || ftype == LLAMA_FTYPE_MOSTLY_IQ2_M || ++ ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S || ++ ftype == LLAMA_FTYPE_MOSTLY_IQ1_M || ftype == LLAMA_FTYPE_MOSTLY_IQ4_NL || ++ ftype == LLAMA_FTYPE_MOSTLY_IQ4_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ3_S || ++ ftype == LLAMA_FTYPE_MOSTLY_IQ3_M || ftype == LLAMA_FTYPE_MOSTLY_Q4_0_4_4 || ++ ftype == LLAMA_FTYPE_MOSTLY_Q4_0_4_8 || ftype == LLAMA_FTYPE_MOSTLY_Q4_0_8_8) { ++ throw std::runtime_error("Invalid quantization type for image model (Not supported)"); ++ } ++ ++ if ( // Tensors to keep in FP32 precision ++ (arch == LLM_ARCH_FLUX) && ( ++ (name.find("img_in.") != std::string::npos) || ++ (name.find("time_in.in_layer.") != std::string::npos) || ++ (name.find("vector_in.in_layer.") != std::string::npos) || ++ (name.find("guidance_in.in_layer.") != std::string::npos) || ++ (name.find("final_layer.linear.") != std::string::npos) ++ ) || (arch == LLM_ARCH_SD1 || arch == LLM_ARCH_SDXL) && ( ++ (name.find("conv_in.") != std::string::npos) || ++ (name.find("conv_out.") != std::string::npos) || ++ (name == "input_blocks.0.0.weight") || ++ (name == "out.2.weight") ++ )) { ++ new_type = GGML_TYPE_F32; ++ } else if ( // Tensors to keep in FP16 precision ++ (arch == LLM_ARCH_FLUX) && ( ++ (name.find("txt_in.") != std::string::npos) || ++ (name.find("time_in.") != std::string::npos) || ++ (name.find("vector_in.") != std::string::npos) || ++ (name.find("guidance_in.") != std::string::npos) || ++ (name.find("final_layer.") != std::string::npos) ++ ) || (arch == LLM_ARCH_SD1 || arch == LLM_ARCH_SDXL) && ( ++ (name.find("class_embedding.") != std::string::npos) || ++ (name.find("time_embedding.") != std::string::npos) || ++ (name.find("add_embedding.") != std::string::npos) || ++ (name.find("time_embed.") != std::string::npos) || ++ (name.find("label_emb.") != std::string::npos) || ++ (name.find("proj_in.") != std::string::npos) || ++ (name.find("proj_out.") != std::string::npos) ++ // (name.find("conv_shortcut.") != std::string::npos) // marginal improvement ++ )) { ++ new_type = GGML_TYPE_F16; ++ } else if ( // Rules for to_v attention ++ (name.find("attn_v.weight") != std::string::npos) || ++ (name.find(".to_v.weight") != std::string::npos) ++ ){ ++ if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) { ++ new_type = GGML_TYPE_Q3_K; ++ } ++ else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) { ++ new_type = qs.i_attention_wv < 2 ? 
GGML_TYPE_Q5_K : GGML_TYPE_Q4_K; ++ } ++ else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) { ++ new_type = GGML_TYPE_Q5_K; ++ } ++ else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M || ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M) { ++ new_type = GGML_TYPE_Q6_K; ++ } ++ else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && qs.i_attention_wv < 4) { ++ new_type = GGML_TYPE_Q5_K; ++ } ++ ++qs.i_attention_wv; ++ } else if ( // Rules for fused qkv attention ++ (name.find("attn_qkv.weight") != std::string::npos) || ++ (name.find("attn.qkv.weight") != std::string::npos) ++ ) { ++ if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) { ++ new_type = GGML_TYPE_Q4_K; ++ } ++ else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M) { ++ new_type = GGML_TYPE_Q5_K; ++ } ++ else if (ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M) { ++ new_type = GGML_TYPE_Q6_K; ++ } ++ } else if ( // Rules for ffn ++ (name.find("ffn_down") != std::string::npos) || ++ (name.find("DenseReluDense.wo") != std::string::npos) ++ ) { ++ // TODO: add back `layer_info` with some model specific logic + logic further down ++ if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) { ++ new_type = GGML_TYPE_Q4_K; ++ } ++ else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) { ++ new_type = GGML_TYPE_Q5_K; ++ } ++ else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S) { ++ new_type = GGML_TYPE_Q5_K; ++ } ++ else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M) { ++ new_type = GGML_TYPE_Q6_K; ++ } ++ else if (ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M) { ++ new_type = GGML_TYPE_Q6_K; ++ } ++ else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_0) { ++ new_type = GGML_TYPE_Q4_1; ++ } ++ else if (ftype == LLAMA_FTYPE_MOSTLY_Q5_0) { ++ new_type = GGML_TYPE_Q5_1; ++ } ++ ++qs.i_ffn_down; ++ } ++ ++ // Sanity check for row shape ++ bool convert_incompatible_tensor = false; ++ if (new_type == GGML_TYPE_Q2_K || new_type == GGML_TYPE_Q3_K || new_type == GGML_TYPE_Q4_K || ++ new_type == GGML_TYPE_Q5_K || new_type == GGML_TYPE_Q6_K) { ++ int nx = tensor->ne[0]; ++ int ny = tensor->ne[1]; ++ if (nx % QK_K != 0) { ++ LLAMA_LOG_WARN("\n\n%s : tensor cols %d x %d are not divisible by %d, required for %s", __func__, nx, ny, QK_K, ggml_type_name(new_type)); ++ convert_incompatible_tensor = true; ++ } else { ++ ++qs.n_k_quantized; ++ } ++ } ++ if (convert_incompatible_tensor) { ++ // TODO: Possibly reenable this in the future ++ // switch (new_type) { ++ // case GGML_TYPE_Q2_K: ++ // case GGML_TYPE_Q3_K: ++ // case GGML_TYPE_Q4_K: new_type = GGML_TYPE_Q5_0; break; ++ // case GGML_TYPE_Q5_K: new_type = GGML_TYPE_Q5_1; break; ++ // case GGML_TYPE_Q6_K: new_type = GGML_TYPE_Q8_0; break; ++ // default: throw std::runtime_error("\nUnsupported tensor size encountered\n"); ++ // } ++ new_type = GGML_TYPE_F16; ++ LLAMA_LOG_WARN(" - using fallback quantization %s\n", ggml_type_name(new_type)); ++ ++qs.n_fallback; ++ } ++ return new_type; ++} ++ ++ + static ggml_type llama_tensor_get_type(quantize_state_internal & qs, ggml_type new_type, const ggml_tensor * tensor, llama_ftype ftype) { + const std::string name = ggml_get_name(tensor); + + // TODO: avoid hardcoded tensor names - use the TN_* constants + const llm_arch arch = qs.model.arch; ++ if (arch == LLM_ARCH_FLUX || arch == LLM_ARCH_SD1 || arch == LLM_ARCH_SDXL) { return img_tensor_get_type(qs, new_type, tensor, ftype); }; + const auto tn = LLM_TN(arch); + + auto use_more_bits = [](int i_layer, int n_layers) -> bool { diff --git a/src/comfyui/custom_nodes/ComfyUI-GGUF/tools/read_tensors.py b/src/comfyui/custom_nodes/ComfyUI-GGUF/tools/read_tensors.py new file mode 100644 index 
0000000000000000000000000000000000000000..1bdff028a787c09b38e5616ef75a2f070c672445 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-GGUF/tools/read_tensors.py @@ -0,0 +1,21 @@ +#!/usr/bin/python3 +import os +import sys +import gguf + +def read_tensors(path): + reader = gguf.GGUFReader(path) + for tensor in reader.tensors: + if tensor.tensor_type == gguf.GGMLQuantizationType.F32: + continue + print(f"{str(tensor.tensor_type):32}: {tensor.name}") + +try: + path = sys.argv[1] + assert os.path.isfile(path), "Invalid path" + print(f"input: {path}") +except Exception as e: + input(f"failed: {e}") +else: + read_tensors(path) + input() diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/.gitignore b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..2dc53ca34ca78153451a0c01f67518ec2e81ac36 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/.gitignore @@ -0,0 +1,160 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +.idea/ diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/LICENSE b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f288702d2fa16d3cdf0035b15a9fcbc552cd88e7 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. 
For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. 
+ + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 
+ + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..362230d3b8645380c9d1ac5347eec0890914bc56 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__init__.py @@ -0,0 +1,8 @@ +from ._mappings import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS + +__version__ = '0.0.1a' +__author__ = 'Thomas Ward ' +__copyright__ = 'Copyright 2024' +__license__ = "GPL-3.0" + +__all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS'] \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6f5b0e846a47ed0cb4c1be52678ffe602e981fb9 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/_image_util.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/_image_util.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ce2c89cfa73044bb452c59beadd4fd26cb6217ae Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/_image_util.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/_mappings.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/_mappings.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c6837a057366249f8ee44d92376a99b201cd08da Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/_mappings.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/base.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/base.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3e5a2a5b22fa69a41ebc888ab43ce9abfa241009 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/base.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/conditioning.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/conditioning.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c3947881c0c4486e371dbdd918415d5c6c7e0714 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/conditioning.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/image.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/image.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3bb9ca5d7d4d44e162d6926cf6d650b592e4824e Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/image.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/models.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/models.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fa2720964e65237e6d1d9c87af3b4e5ae35bd7d5 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/models.cpython-310.pyc differ diff --git 
a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/prompt.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/prompt.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dd820ac822c302044c4bfbd7d5a43bd49b82129a Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/prompt.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/sampler.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/sampler.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e2bdea37aef1bd40b1665cb689172bf4c273310b Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/sampler.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/scheduler.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/scheduler.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..83e90942eb386a3d084928e5225996b39449f9a5 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/scheduler.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/sdxl.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/sdxl.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4b1c989963312363aa96a3a3b5da16d1d6a278cc Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/sdxl.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/util.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/util.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8b6fcc6971e412c539e4daa9a242c28729ccb829 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/__pycache__/util.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/_image_util.py b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/_image_util.py new file mode 100644 index 0000000000000000000000000000000000000000..ffb6066cacf257e59d755837ca0cfe5c0fc3f861 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/_image_util.py @@ -0,0 +1,50 @@ +from datetime import datetime, timezone +import hashlib + + +def make_pathname(filename: str, seed: int, modelname: str, counter: int, time_format: str): + filename = filename.replace("%date", get_timestamp("%Y-%m-%d")) + filename = filename.replace("%time", get_timestamp(time_format)) + filename = filename.replace("%model", modelname) + filename = filename.replace("%seed", str(seed)) + filename = filename.replace("%counter", str(counter)) + return filename + + +def make_filename(filename: str, seed, modelname, counter, time_format): + filename = make_pathname(filename, seed, modelname, counter, time_format) + + return get_timestamp(time_format) if filename == "" else filename + + +def parse_name(ckpt_name): + path = ckpt_name + filename = path.split("/")[-1] + filename = filename.split(".")[:-1] + filename = ".".join(filename) + return filename + + +def get_timestamp(time_format): + now = datetime.now(tz=timezone.utc) + try: + timestamp = now.strftime(time_format) + except: + timestamp = now.strftime("%Y-%m-%d-%H%M%SUTC") + + return timestamp + + +def calculate_sha256(file_path): + sha256_hash = hashlib.sha256() + + with open(file_path, "rb") as f: + # Read the file in chunks to 
avoid loading the entire file into memory
+        for byte_block in iter(lambda: f.read(4096), b""):
+            sha256_hash.update(byte_block)
+
+    return sha256_hash.hexdigest()
+
+
+def handle_whitespace(string: str):
+    return string.strip().replace("\n", " ").replace("\r", " ").replace("\t", " ")
\ No newline at end of file
diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/_mappings.py b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/_mappings.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b59c7e15232397cbb13b3d912a84e24997953ad
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/_mappings.py
@@ -0,0 +1,49 @@
+from .util import HelperNodes_MultilineStringLiteral, HelperNodes_StringLiteral
+from .util import HelperNodes_Steps, HelperNodes_CfgScale, HelperNodes_WidthHeight
+
+from .scheduler import HelperNodes_SchedulerSelector
+
+from .sampler import HelperNodes_SamplerSelector, HelperNodes_SeedSelector
+
+from .conditioning import HelperNodes_CLIPSkip  # Not currently provided while we refine this.
+
+from .models import HelperNodes_VAESelector, HelperNodes_CheckpointSelector
+
+from .prompt import HelperNodes_Prompt
+
+from .sdxl import HelperNodes_SDXLCommonResolutions
+
+from .image import HelperNodes_SaveImage
+
+
+NODE_CLASS_MAPPINGS = {
+    "HelperNodes_MultilineStringLiteral": HelperNodes_MultilineStringLiteral,
+    "HelperNodes_StringLiteral": HelperNodes_StringLiteral,
+    "HelperNodes_Steps": HelperNodes_Steps,
+    "HelperNodes_CfgScale": HelperNodes_CfgScale,
+    "HelperNodes_WidthHeight": HelperNodes_WidthHeight,
+    "HelperNodes_SchedulerSelector": HelperNodes_SchedulerSelector,
+    "HelperNodes_SamplerSelector": HelperNodes_SamplerSelector,
+    "HelperNodes_SeedSelector": HelperNodes_SeedSelector,
+    "HelperNodes_CheckpointSelector": HelperNodes_CheckpointSelector,
+    "HelperNodes_VAESelector": HelperNodes_VAESelector,
+    "HelperNodes_Prompt": HelperNodes_Prompt,
+    "HelperNodes_SDXLCommonResolutions": HelperNodes_SDXLCommonResolutions,
+    "HelperNodes_SaveImage": HelperNodes_SaveImage,
+}
+
+NODE_DISPLAY_NAME_MAPPINGS = {
+    "HelperNodes_MultilineStringLiteral": "String Literal (multi-line)",
+    "HelperNodes_StringLiteral": "String Literal",
+    "HelperNodes_Steps": "Steps",
+    "HelperNodes_CfgScale": "CFG Scale",
+    "HelperNodes_WidthHeight": "Image Dimensions",
+    "HelperNodes_SchedulerSelector": "Scheduler Selector",
+    "HelperNodes_SamplerSelector": "Sampler Selector",
+    "HelperNodes_SeedSelector": "Seed",
+    "HelperNodes_CheckpointSelector": "Checkpoint Selector",
+    "HelperNodes_VAESelector": "VAE Selector",
+    "HelperNodes_Prompt": "Positive/Negative Prompts",
+    "HelperNodes_SDXLCommonResolutions": "Common SDXL Resolutions",
+    "HelperNodes_SaveImage": "Save Image",
+}
\ No newline at end of file
diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/base.py b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..5c7b487dfeeb43c0b383b48ba4dcec5685389f88
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/base.py
@@ -0,0 +1,34 @@
+"""Base functionality and nodes are incorporated here.
+
+Primarily contains base class declarations inherited elsewhere.
+
+Also has some global declarations."""
+
+
+GLOBAL_CATEGORY = "ComfyUI-Helper-Nodes"
+
+
+class BaseNode:
+    """
+    Base class for all custom ComfyUI nodes in this repository.
+
+    Mostly done to make sure that things are defined properly in
+    any inherited functions during development.
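(Aside, referring back to _image_util.py above: the %-token templating is easy to sanity-check on its own. The snippet below is illustrative only; the values are made up, and the bare import assumes the helpers can be imported directly, e.g. from within the package directory.)

```python
from _image_util import make_filename, parse_name

# %time and %seed are substituted by make_filename; an empty template
# falls back to a plain timestamp.
name = make_filename("%time_%seed", seed=1234, modelname="sdxl_base",
                     counter=7, time_format="%Y-%m-%d-%H%M%S")
# -> e.g. "2024-06-01-120000_1234"

parse_name("checkpoints/sdxl_base.safetensors")  # -> "sdxl_base"
```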
+ """ + def __init__(self, **kwargs) -> None: + pass + + # noinspection PyPep8Naming + @classmethod + def INPUT_TYPES(cls) -> dict: + raise NotImplementedError + + RETURN_TYPES: tuple = None + RETURN_NAMES: tuple = None + + CATEGORY: str = GLOBAL_CATEGORY + + FUNCTION: str = "process" + + def process(self, **kwargs) -> tuple: + raise NotImplementedError diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/conditioning.py b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/conditioning.py new file mode 100644 index 0000000000000000000000000000000000000000..8752200972dc03e8f3a3a712d8462065f62efeb8 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/conditioning.py @@ -0,0 +1,42 @@ +from .base import GLOBAL_CATEGORY, BaseNode + +# noinspection PyUnresolvedReferences,PyPackageRequirements +import comfy +# noinspection PyUnresolvedReferences,PyPackageRequirements +import comfy.samplers + +MODULE_CATEGORY = f"{GLOBAL_CATEGORY}/conditioning" + + +class HelperNodes_CLIPSkip(BaseNode): + """ + Core implementation of this is basically the same as ComfyUI's node + for CLIPSetLastLayer, but making it a positive number like A1111 does + for user understanding. + + Functions otherwise identical, converting the positive number to negative + before passing into CLIP object. + """ + @classmethod + def INPUT_TYPES(cls) -> dict: + return { + "required": { + "clip": ("CLIP", ), + "skip_layers": ("INT", { + "default": 1, + "min": 1, + "max": 24, + "step": 1, + "display": "number" + }) + } + } + + RETURN_TYPES = ("CLIP",) + + CATEGORY = f"{MODULE_CATEGORY}" + + def process(self, clip, skip_layers) -> tuple: + clip = clip.clone() + clip.clip_layer(skip_layers * -1) + return (clip,) diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/image.py b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/image.py new file mode 100644 index 0000000000000000000000000000000000000000..94fb91b14e193917d03b9cd18b4cbea6c2c81838 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/image.py @@ -0,0 +1,189 @@ +import os +import json + +from .base import BaseNode, GLOBAL_CATEGORY + +# noinspection PyUnresolvedReferences +import folder_paths +# noinspection PyUnresolvedReferences,PyPackageRequirements +from nodes import MAX_RESOLUTION + +# noinspection PyUnresolvedReferences,PyPackageRequirements +import comfy.samplers + +from PIL import Image, ExifTags +from PIL.PngImagePlugin import PngInfo +import piexif +import piexif.helper +import numpy as np + +from ._image_util import * + +MODULE_CATEGORY = f"{GLOBAL_CATEGORY}/image" + + +class HelperNodes_SaveImage(BaseNode): + """ + Essentially, this does the same function as ImageSaveWithMetadata from + https://github.com/giriss/comfy-image-saver.git but allows us to + greatly REDUCE runtime by not serializing the ComfyUI workflow into the metadata. + + That is controlled by multiple boolean values to control whether we output metadata + and if we do what we include. + + This is designed to work with other nodes in this library, which provide aspect_ratio and orientation. + + Orientation can be calculated based on width and height, if not provided otherwise. 
+ """ + def __init__(self): + super().__init__() + self.output_dir = folder_paths.output_directory + + @classmethod + def INPUT_TYPES(cls) -> dict: + # This relies heavily on components that're from comfy-image-saver by + # https://github.com/giriss/comfy-image-saver.git - but with differences + # in actual implementation + # + # We decide whether to write to individual files for metadata, or add to image, + # or both, or neither. Yes, we allow just saving the image directly. + + inputs = { + "required": { + "images": ("IMAGE", {"forceInput": True}), + "filename": ("STRING", {"default": f'%time_%seed', "multiline": False}), + "path": ("STRING", {"default": '', "multiline": False}), + "extension": (['png', 'jpeg', 'webp'], {"default": "png"}), + "steps": ("INT", {"forceInput": True}), + "cfg": ("FLOAT", {"forceInput": True}), + "model_name": (folder_paths.get_filename_list("checkpoints"), {"forceInput": True}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, {"forceInput": True}), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS, {"forceInput": True}), + }, + "optional": { + "positive_prompt": ("STRING", {"default": "unknown", "multiline": True, "forceInput": True}), + "negative_prompt": ("STRING", {"default": "unknown", "multiline": True, "forceInput": True}), + "seed_value": ("INT", {"default": 0, "min": 0, "max": 18446744073709551615, "step": 1}), + "width": ("INT", {"default": 1024, "min": 8, "max": MAX_RESOLUTION, "step": 8}), + "height": ("INT", {"default": 1024, "min": 8, "max": MAX_RESOLUTION, "step": 8}), + "aspect_ratio": ("STRING", {"default": "unknown", "forceInput": True}), + "orientation": ("STRING", {"default": "unknown", "forceInput": True}), + "lossless_webp": ("BOOLEAN", {"default": True}), + "quality_jpeg_or_webp": ("INT", {"default": 100, "min": 1, "max": 100}), + "counter": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "time_format": ("STRING", {"default": "%Y-%m-%d-%H%M%S", "multiline": False}), + "include_metadata": ("BOOLEAN", {"default": True}), + "save_prompt_with_metadata": ("BOOLEAN", {"default": False}), + "save_extra_pnginfo_with_metadata": ("BOOLEAN", {"default": True}) + }, + "hidden": { + "prompt": "PROMPT", + "extra_pnginfo": "EXTRA_PNGINFO" + } + } + + return inputs + + RETURN_TYPES = () # We don't return anything here + OUTPUT_NODE = True # We are an OUTPUT node. 
+ + FUNCTION = "save_files" + + # noinspection PyShadowingNames + def save_files(self, images, filename: str, path: str, extension: str, steps: int, cfg: float, + model_name: str, sampler_name: str, scheduler: str, positive_prompt: str, negative_prompt: str, + seed_value: int, width: int, height: int, lossless_webp: bool, + quality_jpeg_or_webp: str, counter: int, time_format: str, include_metadata: bool, + save_prompt_with_metadata: bool, save_extra_pnginfo_with_metadata: bool, prompt=None, + extra_pnginfo=None, aspect_ratio: str = None, orientation: str = None): + filename = make_filename(filename, seed_value, model_name, counter, time_format) + path = make_pathname(path, seed_value, model_name, counter, time_format) + ckpt_path = folder_paths.get_full_path("checkpoints", model_name) + basemodelname = parse_name(model_name) + modelhash = calculate_sha256(ckpt_path)[:10] + comment = (f"Prompt: {handle_whitespace(positive_prompt)} || \n" + f"Negative prompt: {handle_whitespace(negative_prompt)} || \n" + f"Steps: {steps}, Sampler: {sampler_name} \n") + + if scheduler != "normal": + comment += f"Scheduler: {scheduler}, " + + comment += f"CFG Scale: {cfg}, Seed: {seed_value}, Size: {width}x{height}, " + + if aspect_ratio: + comment += f"Aspect Ratio: {aspect_ratio}, " + + if orientation: + comment += f"Orientation: {orientation}, " + else: + if width == height: + comment += f"Orientation: square, " + elif width > height: + comment += f"Orientation: landscape, " + else: + comment += f"Orientation: portrait, " + + comment += f"Model: {basemodelname}, Model Hash: {modelhash}, Version: ComfyUI" + + output_path = os.path.join(self.output_dir, path) + + if output_path.strip() != '': + if not os.path.exists(output_path.strip()): + print(f"The specified path `{output_path.strip()}` does not exist. Creating directory.") + os.makedirs(output_path, exist_ok=True) + + filenames = self.save_images(images, output_path, filename, comment, extension, quality_jpeg_or_webp, + lossless_webp, prompt=prompt, extra_pnginfo=extra_pnginfo, + include_metadata=include_metadata, + include_prompt_in_metadata=save_prompt_with_metadata, + include_extra_pnginfo=save_extra_pnginfo_with_metadata) + + subfolder = os.path.normpath(path) + return {"ui": {"images": map( + lambda filename: {"filename": filename, "subfolder": subfolder if subfolder != '.' else '', + "type": 'output'}, filenames)}} + + @staticmethod + def save_images(images, output_path, filename_prefix, comment, extension, quality_jpeg_or_webp, lossless_webp, + prompt=None, extra_pnginfo=None, include_metadata=True, include_prompt_in_metadata=True, + include_extra_pnginfo=False) -> list[str]: + img_count = 1 + paths = [] + + for image in images: + i = 255. * image.cpu().numpy() + img = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8)) + if images.size()[0] > 1: + filename_prefix += "{:02d}".format(img_count) + + if extension == "png": + metadata = PngInfo() + if include_metadata: + metadata.add_text("parameters", comment) + + if prompt is not None and include_prompt_in_metadata: + metadata.add_text("prompt", json.dumps(prompt)) + if extra_pnginfo is not None and include_extra_pnginfo: + for x in extra_pnginfo: + if x.lower() == 'workflow' and not include_prompt_in_metadata: + continue # skip adding the workflow. 
+                        metadata.add_text(x, json.dumps(extra_pnginfo[x]))
+
+                filename = f"{filename_prefix}.png"
+                img.save(os.path.join(output_path, filename), pnginfo=metadata, optimize=True)
+            else:
+                filename = f"{filename_prefix}.{extension}"
+                file = os.path.join(output_path, filename)
+                img.save(file, optimize=True, quality=quality_jpeg_or_webp, lossless=lossless_webp)
+                if include_metadata:
+                    exif_bytes = piexif.dump({
+                        "Exif": {
+                            piexif.ExifIFD.UserComment: piexif.helper.UserComment.dump(comment, encoding="unicode")
+                        },
+                    })
+                    piexif.insert(exif_bytes, file)
+
+            paths.append(filename)
+            img_count += 1
+
+        return paths
diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/models.py b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/models.py
new file mode 100644
index 0000000000000000000000000000000000000000..0a75c4e233dad5c40bd5e3f0fd9a074ce203a7bc
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/models.py
@@ -0,0 +1,115 @@
+from .base import BaseNode, GLOBAL_CATEGORY
+
+# noinspection PyUnresolvedReferences,PyPackageRequirements
+import comfy.sd
+# noinspection PyUnresolvedReferences,PyPackageRequirements
+import comfy.utils
+# noinspection PyUnresolvedReferences,PyPackageRequirements
+import folder_paths
+# torch is required below: load_taesd() builds the vae_scale tensors with it
+import torch
+
+MODULE_CATEGORY = f"{GLOBAL_CATEGORY}/models"
+
+
+class HelperNodes_CheckpointSelector(BaseNode):
+    """
+    Simple selector node that allows the selection of a Checkpoint/Model.
+
+    This should then be passed into either a conditioner or a LoRA loader.
+
+    Does not include LoRA selection, which is done in the standard Load LoRA nodes.
+    """
+    @classmethod
+    def INPUT_TYPES(cls) -> dict:
+        return {
+            "required": {
+                "chkpt_name": (folder_paths.get_filename_list("checkpoints"),)
+            }
+        }
+
+    CATEGORY = MODULE_CATEGORY
+
+    RETURN_TYPES = (folder_paths.get_filename_list("checkpoints"),)
+    RETURN_NAMES = ("chkpt_name",)
+
+    def process(self, chkpt_name) -> tuple:
+        return (chkpt_name,)
+
+
+class HelperNodes_VAESelector(BaseNode):
+    """
+    Simple selector node that allows the selection of VAEs.
+
+    This should then be passed to a VAE decoder node as it returns a VAE.
+    """
+
+    @staticmethod
+    def vae_list():
+        # Borrowed verbatim from comfyui's implementations.
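(Aside: once saved, the metadata embedded above can be read back for a quick sanity check with Pillow; the output filename here is hypothetical.)

```python
from PIL import Image

img = Image.open("output/2024-06-01-120000_1234.png")
# PNG text chunks written via PngInfo land in img.info.
print(img.info.get("parameters"))  # the A1111-style summary string
print(img.info.get("prompt"))      # JSON API prompt, only if it was included
```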
+        vaes = folder_paths.get_filename_list("vae")
+        approx_vaes = folder_paths.get_filename_list("vae_approx")
+        sdxl_taesd_enc = False
+        sdxl_taesd_dec = False
+        sd1_taesd_enc = False
+        sd1_taesd_dec = False
+
+        for v in approx_vaes:
+            if v.startswith("taesd_decoder."):
+                sd1_taesd_dec = True
+            elif v.startswith("taesd_encoder."):
+                sd1_taesd_enc = True
+            elif v.startswith("taesdxl_decoder."):
+                sdxl_taesd_dec = True
+            elif v.startswith("taesdxl_encoder."):
+                sdxl_taesd_enc = True
+        if sd1_taesd_dec and sd1_taesd_enc:
+            vaes.append("taesd")
+        if sdxl_taesd_dec and sdxl_taesd_enc:
+            vaes.append("taesdxl")
+        return vaes
+
+    @staticmethod
+    def load_taesd(name):
+        # Borrowed verbatim from comfyui's implementations
+        sd = {}
+        approx_vaes = folder_paths.get_filename_list("vae_approx")
+
+        encoder = next(filter(lambda a: a.startswith("{}_encoder.".format(name)), approx_vaes))
+        decoder = next(filter(lambda a: a.startswith("{}_decoder.".format(name)), approx_vaes))
+
+        enc = comfy.utils.load_torch_file(folder_paths.get_full_path("vae_approx", encoder))
+        for k in enc:
+            sd["taesd_encoder.{}".format(k)] = enc[k]
+
+        dec = comfy.utils.load_torch_file(folder_paths.get_full_path("vae_approx", decoder))
+        for k in dec:
+            sd["taesd_decoder.{}".format(k)] = dec[k]
+
+        if name == "taesd":
+            sd["vae_scale"] = torch.tensor(0.18215)
+        elif name == "taesdxl":
+            sd["vae_scale"] = torch.tensor(0.13025)
+        return sd
+
+    @classmethod
+    def INPUT_TYPES(cls) -> dict:
+        return {
+            "required": {
+                "vae_name": (cls.vae_list(),)
+            }
+        }
+
+    CATEGORY = f"{MODULE_CATEGORY}"
+
+    RETURN_TYPES = ("VAE",)
+    RETURN_NAMES = ("VAE",)
+
+    def process(self, vae_name) -> tuple:
+        if vae_name in ["taesd", "taesdxl"]:
+            sd = self.load_taesd(vae_name)
+        else:
+            vae_path = folder_paths.get_full_path("vae", vae_name)
+            sd = comfy.utils.load_torch_file(vae_path)
+        vae = comfy.sd.VAE(sd=sd)
+        return (vae,)
diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/prompt.py b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/prompt.py
new file mode 100644
index 0000000000000000000000000000000000000000..cb731b7776e6e7c7b2c1589582983aa57ffc05fb
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/prompt.py
@@ -0,0 +1,34 @@
+from .base import GLOBAL_CATEGORY, BaseNode
+
+MODULE_CATEGORY = f"{GLOBAL_CATEGORY}"
+
+
+class HelperNodes_Prompt(BaseNode):
+    """
+    This is a multi-field TEXT node that allows entering a positive and a negative
+    prompt and passes them both out.
+
+    Contains two multiline text input fields; neg_prompt is optional.
+ """ + + RETURN_TYPES = ("STRING", "STRING",) + RETURN_NAMES = ("PROMPT", "NEGPROMPT") + + CATEGORY = MODULE_CATEGORY + + @classmethod + def INPUT_TYPES(cls) -> dict: + return { + "required": { + "prompt": ("STRING", {"multiline": True}) + }, + "optional": { + "neg_prompt": ("STRING", {"multiline": True}) + } + } + + def process(self, prompt, neg_prompt) -> tuple: + if not neg_prompt: + neg_prompt = "" + + return prompt, neg_prompt diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/requirements.txt b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..f5e95acba8cccc9b58baf45a69ff73fe2aeb9836 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/requirements.txt @@ -0,0 +1,4 @@ +numpy +pillow +piexif +whratio \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/sampler.py b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..72d76a069a43b5fca6cc61251973bcddb518a8be --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/sampler.py @@ -0,0 +1,87 @@ +from datetime import timezone, datetime +import random + +from .base import GLOBAL_CATEGORY, BaseNode + +# noinspection PyUnresolvedReferences,PyPackageRequirements +import comfy +# noinspection PyUnresolvedReferences,PyPackageRequirements +import comfy.samplers + +MODULE_CATEGORY = f"{GLOBAL_CATEGORY}/sampler" + +# Initialize the random system anew. This is because some extensions may alter +# this seed generation process and cause problems. +initial_random_state = random.getstate() +random.seed(datetime.now().timestamp()) +seed_random_state = random.getstate() +random.setstate(initial_random_state) + + +def new_random_seed(): + """ Gets a new random seed from the rgthree_seed_random_state and resetting the previous state.""" + global seed_random_state + prev_random_state = random.getstate() + random.setstate(seed_random_state) + seed = random.randint(1, 18446744073709551615) + seed_random_state = random.getstate() + random.setstate(prev_random_state) + return seed + + +# noinspection PyUnresolvedReferences +class HelperNodes_SamplerSelector(BaseNode): + """ + Simple Selector node that allows selection of a Sampler from + known samplers in the environment. + """ + + @classmethod + def INPUT_TYPES(cls) -> dict: + return { + "required": { + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,) + } + } + + RETURN_TYPES = (comfy.samplers.KSampler.SAMPLERS,) + RETURN_NAMES = ("sampler",) + + CATEGORY = f"{MODULE_CATEGORY}" + + def process(self, sampler_name) -> tuple: + return (sampler_name,) + + +class HelperNodes_SeedSelector(BaseNode): + """ + Integer value node that has a Random Number Generator component in it. + + -1 makes a new random seed every time. + """ + + @classmethod + def INPUT_TYPES(cls) -> dict: + return { + "required": { + "seed": ("INT", { + "default": -1, + "min": -1, + "max": 18446744073709551615, # equivalent to 0xffffffffffffffff - 64-bit integer max + "step": 1 + }), + }, + } + + RETURN_TYPES = ("INT",) + RETURN_NAMES = ("seed",) + + CATEGORY = f"{MODULE_CATEGORY}" + + def process(self, seed) -> tuple: + if seed == -1: + # When seed value is -1, we generate a random value. 
+
+
+# noinspection PyUnresolvedReferences
+class HelperNodes_SamplerSelector(BaseNode):
+    """
+    Simple Selector node that allows selection of a Sampler from
+    known samplers in the environment.
+    """
+
+    @classmethod
+    def INPUT_TYPES(cls) -> dict:
+        return {
+            "required": {
+                "sampler_name": (comfy.samplers.KSampler.SAMPLERS,)
+            }
+        }
+
+    RETURN_TYPES = (comfy.samplers.KSampler.SAMPLERS,)
+    RETURN_NAMES = ("sampler",)
+
+    CATEGORY = f"{MODULE_CATEGORY}"
+
+    def process(self, sampler_name) -> tuple:
+        return (sampler_name,)
+
+
+class HelperNodes_SeedSelector(BaseNode):
+    """
+    Integer value node that has a Random Number Generator component in it.
+
+    -1 makes a new random seed every time.
+    """
+
+    @classmethod
+    def INPUT_TYPES(cls) -> dict:
+        return {
+            "required": {
+                "seed": ("INT", {
+                    "default": -1,
+                    "min": -1,
+                    "max": 18446744073709551615,  # equivalent to 0xffffffffffffffff - 64-bit integer max
+                    "step": 1
+                }),
+            },
+        }
+
+    RETURN_TYPES = ("INT",)
+    RETURN_NAMES = ("seed",)
+
+    CATEGORY = f"{MODULE_CATEGORY}"
+
+    def process(self, seed) -> tuple:
+        if seed == -1:
+            # When the seed value is -1, generate a new random seed instead.
+            seed = new_random_seed()
+
+        return (seed,)
diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/scheduler.py b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/scheduler.py
new file mode 100644
index 0000000000000000000000000000000000000000..ee38779471e8cb8199ab6b0c6fadd76366bc02af
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/scheduler.py
@@ -0,0 +1,28 @@
+from .base import GLOBAL_CATEGORY, BaseNode
+
+import comfy
+import comfy.samplers
+
+MODULE_CATEGORY = f"{GLOBAL_CATEGORY}/scheduler"
+
+
+class HelperNodes_SchedulerSelector(BaseNode):
+    """
+    Simple Selector node that allows selection of a Scheduler from
+    known schedulers in the environment.
+    """
+    @classmethod
+    def INPUT_TYPES(cls) -> dict:
+        return {
+            "required": {
+                "scheduler_name": (comfy.samplers.KSampler.SCHEDULERS,)
+            }
+        }
+
+    RETURN_TYPES = (comfy.samplers.KSampler.SCHEDULERS,)
+    RETURN_NAMES = ("scheduler",)
+
+    CATEGORY = f"{MODULE_CATEGORY}"
+
+    def process(self, scheduler_name):
+        return (scheduler_name,)
diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/sdxl.py b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/sdxl.py
new file mode 100644
index 0000000000000000000000000000000000000000..66d66d3be9297b2db183b41af8e68da01dcd1889
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/sdxl.py
@@ -0,0 +1,41 @@
+from .base import BaseNode, GLOBAL_CATEGORY
+
+MODULE_CATEGORY = f"{GLOBAL_CATEGORY}/sdxl"
+
+
+class HelperNodes_SDXLCommonResolutions(BaseNode):
+    @classmethod
+    def INPUT_TYPES(cls) -> dict:
+        return {
+            "required": {
+                "dimensions": ([
+                    "640 x 1536 (5:12 Portrait)",
+                    "768 x 1344 (4:7 Portrait)",
+                    "832 x 1216 (13:19 Portrait)",
+                    "896 x 1152 (7:9 Portrait)",
+                    "1024 x 1024 (1:1 Square)",
+                    "1152 x 896 (9:7 Landscape)",
+                    "1216 x 832 (19:13 Landscape)",
+                    "1344 x 768 (7:4 Landscape)",
+                    "1536 x 640 (12:5 Landscape)"
+                ],)
+            }
+        }
+
+    RETURN_TYPES = ("INT", "INT", "STRING", "STRING",)
+    RETURN_NAMES = ("width", "height", "aspect ratio", "orientation",)
+
+    CATEGORY = MODULE_CATEGORY
+
+    def process(self, dimensions: str) -> tuple:
+        dim, asp = dimensions.split(' (', 1)
+        sasp: str = asp.strip('()')
+        aspect = sasp.split(' ', 1)[0]
+        orient = sasp.split(' ', 1)[1]
+        dims: list[str] = dim.lower().split(' x ')
+        fwidth = float(dims[0])
+        fheight = float(dims[1])
+        width = int(fwidth) if fwidth.is_integer() else round(fwidth, 2)
+        height = int(fheight) if fheight.is_integer() else round(fheight, 2)
+
+        return width, height, aspect, orient.lower()
diff --git a/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/util.py b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/util.py
new file mode 100644
index 0000000000000000000000000000000000000000..54b0594d5f1ec269abf9135d8c59c63c099c27df
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Helper-Nodes/util.py
@@ -0,0 +1,179 @@
+import math
+
+import whratio
+
+from .base import GLOBAL_CATEGORY, BaseNode
+
+# noinspection PyUnresolvedReferences,PyPackageRequirements
+import comfy
+# noinspection PyUnresolvedReferences,PyPackageRequirements
+import comfy.samplers
+
+# noinspection PyUnresolvedReferences,PyPackageRequirements
+from nodes import MAX_RESOLUTION
+
+MODULE_CATEGORY = f"{GLOBAL_CATEGORY}/util"
+
+
+class HelperNodes_WidthHeight(BaseNode):
+    """
+    Simple integer values node that allows definition of the
+    image height and width for passing into empty latents
+    as Integers.
+
+    Permits custom values between 8 and MAX_RESOLUTION, in steps of 8.
+ """ + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "width": ("INT", { + "default": 1024, + "min": 8, + "max": MAX_RESOLUTION, + "step": 8, + "display": "number" + }), + "height": ("INT", { + "default": 1024, + "min": 8, + "max": MAX_RESOLUTION, + "step": 8, + "display": "number" + }), + } + } + + RETURN_TYPES = ("INT", "INT", "STRING", "STRING",) + RETURN_NAMES = ("width", "height", "aspect ratio", "orientation",) + + CATEGORY = f"{MODULE_CATEGORY}" + + FUNCTION = "process" + + def process(self, width, height): + if width > height: + orientation = "landscape" + elif width < height: + orientation = "portrait" + else: + orientation = "square" + + aspect = whratio.as_int(width, height) + aspect_ratio = f"{aspect[0]}:{aspect[1]}" + + return width, height, aspect_ratio, orientation + + +class HelperNodes_CfgScale(BaseNode): + """ + Simple integer value node that allows you to specify the CFG scale + for how strictly to the prompt the AI is. + + Permits values between 0 and 10, defaults at 8.0, and permits + revisions as small as 0.25 on CFG scale selection. Rounds to two + decimal points. + """ + @classmethod + def INPUT_TYPES(cls) -> dict: + return { + "required": { + "scale": ("FLOAT", { + "default": 8.00, + "min": 0.00, + "max": 10.00, + "step": 0.25, + "round": 0.00, + "display": "number" + }), + } + } + + RETURN_TYPES = ("FLOAT", ) + RETURN_NAMES = ("CFG",) + + CATEGORY = f"{MODULE_CATEGORY}" + + FUNCTION = "process" + + def process(self, scale) -> tuple: + return (scale,) + + +class HelperNodes_Steps(BaseNode): + """ + Simple integer value node that allows you to specify the number of + sample steps to make in KSampler. + + Permits you to select between 1 and 100, but defaults at 25. + """ + @classmethod + def INPUT_TYPES(cls) -> dict: + return { + "required": { + "steps": ("INT", { + "default": 25, + "min": 1, + "max": 100, + "step": 1, + "display": "number" + }), + } + } + + RETURN_TYPES = ("INT",) + RETURN_NAMES = ("steps",) + + CATEGORY = f"{MODULE_CATEGORY}" + + def process(self, steps) -> tuple: + return (steps,) + + +class HelperNodes_StringLiteral(BaseNode): + """ + Simple String value node that allows you to specify a string to pass + into other nodes. + + Does not permit multiline text. See HelperNodes_MultilineStringLiteral + for multiline text values. + """ + @classmethod + def INPUT_TYPES(cls) -> dict: + return { + "required": { + "string": ("STRING", {"multiline": False}) + } + } + + RETURN_TYPES = ("STRING",) + RETURN_NAMES = ("STRING",) + + CATEGORY = f"{MODULE_CATEGORY}" + + def process(self, string) -> tuple: + return (string,) + + +class HelperNodes_MultilineStringLiteral(BaseNode): + """ + Simple String value node that allows you to specify a string to pass + into other nodes. + + This node permits multiline text. 
+ """ + @classmethod + def INPUT_TYPES(cls) -> dict: + return { + "required": { + "string": ("STRING", {"multiline": True}) + } + } + + RETURN_TYPES = ("STRING",) + RETURN_NAMES = ("STRING",) + + CATEGORY = f"{MODULE_CATEGORY}" + + def process(self, string) -> tuple: + return (string,) diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/.github/workflows/publish.yml b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/.github/workflows/publish.yml new file mode 100644 index 0000000000000000000000000000000000000000..828f300203c628d32100495bfc3c1f2a94eead55 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/.github/workflows/publish.yml @@ -0,0 +1,21 @@ +name: Publish to Comfy registry +on: + workflow_dispatch: + push: + branches: + - main + paths: + - "pyproject.toml" + +jobs: + publish-node: + name: Publish Custom Node to registry + runs-on: ubuntu-latest + steps: + - name: Check out code + uses: actions/checkout@v4 + - name: Publish Custom Node + uses: Comfy-Org/publish-node-action@main + with: + ## Add your own personal access token to your Github Repository secrets and reference it here. + personal_access_token: ${{ secrets.REGISTRY_ACCESS_TOKEN }} \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/.gitignore b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..43aa4d524041f5277ba65c3d513eea5326fcb9cc --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/.gitignore @@ -0,0 +1,9 @@ +__pycache__ +*.ini +wildcards/** +.vscode/ +.idea/ +subpack +impact_subpack +*.txt +*.yaml diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/.gitmodules b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/.gitmodules new file mode 100644 index 0000000000000000000000000000000000000000..9180e6465120d9a6e7de990c99b7517e29cda2e3 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/.gitmodules @@ -0,0 +1,3 @@ +[submodule "subpack"] + path = subpack + url = https://github.com/ltdrdata/ComfyUI-Impact-Subpack diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/LICENSE.txt b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/LICENSE.txt new file mode 100644 index 0000000000000000000000000000000000000000..f288702d2fa16d3cdf0035b15a9fcbc552cd88e7 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/LICENSE.txt @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. 
+ + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. 
This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. 
+ + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. 
+ + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. 
+ + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. 
If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. 
Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. 
diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/README.md b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..c372fc651a82ac1690d9094ed8000c1a5fe740b0
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/README.md
@@ -0,0 +1,490 @@
+[![Youtube Badge](https://img.shields.io/badge/Youtube-FF0000?style=for-the-badge&logo=Youtube&logoColor=white&link=https://www.youtube.com/watch?v=AccoxDZIg3Y&list=PL_Ej2RDzjQLGfEeizq4GISeY3FtVyFmGP)](https://www.youtube.com/watch?v=AccoxDZIg3Y&list=PL_Ej2RDzjQLGfEeizq4GISeY3FtVyFmGP)
+
+# ComfyUI-Impact-Pack
+
+**Custom nodes pack for ComfyUI**
+This custom node pack helps to conveniently enhance images through Detector, Detailer, Upscaler, Pipe, and more.
+
+
+## NOTICE
+* V7.0: Supports Switch based on Execution Model Inversion.
+* V6.0: Supports the FLUX.1 model in Impact KSampler, Detailers, and PreviewBridgeLatent.
+* V5.0: It is no longer compatible with versions of ComfyUI before 2024.04.08.
+* V4.87.4: Update to a version of ComfyUI after 2024.04.08 for proper functionality.
+* V4.85: Incompatible with outdated versions of **ComfyUI IPAdapter Plus**. (A version dated March 24th or later is required.)
+* V4.77: Compatibility patch applied. Requires a ComfyUI version from Oct. 8th or later.
+* V4.73.3: ControlNetApply (SEGS) supports AnimateDiff.
+* V4.20.1: Due to the feature update in `RegionalSampler`, the parameter order has changed, causing malfunctions in previously created `RegionalSamplers`. Please adjust the parameters accordingly.
+* V4.12: `MASKS` is changed to `MASK`.
+* V4.7.2 isn't compatible with old versions of `ControlNet Auxiliary Preprocessor`. If you want to use `MediaPipe FaceMesh to SEGS`, update to the latest version (Sep. 17th).
+* The selection weight syntax changed (`:` -> `::`) in V3.16; see the example after this list. ([tutorial](https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/ImpactWildcardProcessor.md))
+* Starting from V3.6, the latest version of ComfyUI (Aug 8, 9ccc965) is required.
+* **In versions below V3.3.1, there was an issue with the image quality generated after using the UltralyticsDetectorProvider. Please make sure to upgrade to a newer version.**
+* Starting from V3.0, nodes related to `mmdet` are optional nodes that are activated only based on the configuration settings.
+  - Through ComfyUI-Impact-Subpack, you can utilize UltralyticsDetectorProvider to access various detection models.
+* Between versions 2.21 and 2.22, there is a partial compatibility break regarding the Detailer workflow. If you continue to use the existing workflow, errors may occur during execution. An additional output called "enhanced_alpha_list" has been added to Detailer-related nodes.
+* The permission error related to cv2 that occurred during the installation of Impact Pack has been patched in version 2.21.4. However, please note that the latest versions of ComfyUI and ComfyUI-Manager are required.
+* The "PreviewBridge" feature may not function correctly on ComfyUI versions released before July 1, 2023.
+* Attempting to load the "ComfyUI-Impact-Pack" on ComfyUI versions released before June 27, 2023, will result in a failure.
+* With the addition of wildcard support in FaceDetailer, the structure of DETAILER_PIPE-related nodes and Detailer nodes has changed. There may be malfunctions when using the existing workflow.
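+
+For reference, the `::` selection weight syntax weights a wildcard option along the lines of `{2::red|1::blue}`, where `red` would be chosen roughly twice as often as `blue`. This snippet is illustrative only; see the ImpactWildcardProcessor tutorial linked above for the authoritative syntax.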
+
+
+## Custom Nodes
+### [Detector nodes](https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/detectors.md)
+ * `SAMLoader` - Loads the SAM model.
+ * `UltralyticsDetectorProvider` - Loads the Ultralytics model to provide SEGM_DETECTOR and BBOX_DETECTOR.
+   - Unlike `MMDetDetectorProvider`, for segm models, `BBOX_DETECTOR` is also provided.
+   - The various models available in UltralyticsDetectorProvider can be downloaded through **ComfyUI-Manager**.
+ * `ONNXDetectorProvider` - Loads the ONNX model to provide BBOX_DETECTOR.
+ * `CLIPSegDetectorProvider` - Wrapper for CLIPSeg to provide BBOX_DETECTOR.
+   * You need to install the ComfyUI-CLIPSeg node extension.
+ * `SEGM Detector (combined)` - Detects segmentation and returns a mask from the input image.
+ * `BBOX Detector (combined)` - Detects bounding boxes and returns a mask from the input image.
+ * `SAMDetector (combined)` - Utilizes the SAM technology to extract the segment at the location indicated by the input SEGS on the input image and outputs it as a unified mask.
+ * `SAMDetector (Segmented)` - It is similar to `SAMDetector (combined)`, but it separates and outputs the detected segments. Multiple segments can be found for the same detected area, and currently, a policy is in place to group them arbitrarily in sets of three. This aspect is expected to be improved in the future.
+   * As a result, it outputs the `combined_mask`, which is a unified mask, and `batch_masks`, which are multiple masks grouped together in batch form.
+   * While `batch_masks` may not be completely separated, it provides functionality to perform some level of segmentation.
+ * `Simple Detector (SEGS)` - Operating primarily with `BBOX_DETECTOR`, and with the additional provision of `SAM_MODEL` or `SEGM_DETECTOR`, this node internally generates improved SEGS through mask operations on both *bbox* and *silhouette*. It serves as a convenient tool to simplify a somewhat intricate workflow.
+
+### ControlNet, IPAdapter
+ * `ControlNetApply (SEGS)` - To apply ControlNet in SEGS, you need to use the Preprocessor Provider node from the Inspire Pack to utilize this node.
+   * `segs_preprocessor` and `control_image` can be selectively applied. If a `control_image` is given, `segs_preprocessor` will be ignored.
+   * If set to `control_image`, you can preview the cropped cnet image through `SEGSPreview (CNET Image)`. Images generated by `segs_preprocessor` should be verified through the `cnet_images` output of each Detailer.
+   * The `segs_preprocessor` operates by applying preprocessing on-the-fly based on the cropped image during the detailing process, while `control_image` will be cropped and used as input to `ControlNetApply (SEGS)`.
+ * `ControlNetClear (SEGS)` - Clears the ControlNet applied in SEGS.
+ * `IPAdapterApply (SEGS)` - To apply IPAdapter in SEGS, you need to use the Preprocessor Provider node from the Inspire Pack to utilize this node.
+
+### Mask operation
+ * `Pixelwise(SEGS & SEGS)` - Performs a 'pixelwise and' operation between two SEGS.
+ * `Pixelwise(SEGS - SEGS)` - Subtracts one SEGS from another.
+ * `Pixelwise(SEGS & MASK)` - Performs a pixelwise AND operation between SEGS and MASK.
+ * `Pixelwise(SEGS & MASKS ForEach)` - Performs a pixelwise AND operation between SEGS and MASKS.
+   * Please note that this operation is performed with batches of MASKS, not just a single MASK.
+ * `Pixelwise(MASK & MASK)` - Performs a 'pixelwise and' operation between two masks (see the conceptual sketch after this list).
+ * `Pixelwise(MASK - MASK)` - Subtracts one mask from another.
+ * `Pixelwise(MASK + MASK)` - Combines two masks.
+ * `SEGM Detector (SEGS)` - Detects segmentation and returns SEGS from the input image.
+ * `BBOX Detector (SEGS)` - Detects bounding boxes and returns SEGS from the input image.
+ * `Dilate Mask` - Dilates a mask.
+   * Supports erosion for negative values.
+ * `Gaussian Blur Mask` - Applies Gaussian blur to a mask. You can utilize this for mask feathering.
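+
+Conceptually, the pixelwise mask operations above are simple elementwise tensor operations on ComfyUI `MASK` tensors (float values in `[0, 1]`). A minimal sketch of the idea, assuming float masks of identical shape; this is illustrative and not the pack's actual implementation:
+
+```python
+import torch
+
+
+def mask_and(a: torch.Tensor, b: torch.Tensor) -> torch.Tensor:
+    # Keep only the coverage present in both masks.
+    return torch.minimum(a, b)
+
+
+def mask_sub(a: torch.Tensor, b: torch.Tensor) -> torch.Tensor:
+    # Remove b's coverage from a, clamped to the valid mask range.
+    return (a - b).clamp(0.0, 1.0)
+
+
+def mask_add(a: torch.Tensor, b: torch.Tensor) -> torch.Tensor:
+    # Combine the coverage of two masks, clamped to the valid mask range.
+    return (a + b).clamp(0.0, 1.0)
+```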
+
+### [Detailer nodes](https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/detailers.md)
+ * `Detailer (SEGS)` - Refines the image based on SEGS.
+ * `DetailerDebug (SEGS)` - Refines the image based on SEGS. Additionally, it provides the ability to monitor the cropped image and the refined image of the cropped image.
+   * When using 'external_seed', please disable the 'seed random generate' option in the 'Detailer...' node to prevent regeneration caused by a seed that does not change every time.
+ * `MASK to SEGS` - Generates SEGS based on the mask.
+ * `MASK to SEGS For AnimateDiff` - Generates SEGS based on the mask for AnimateDiff.
+   * When using a single mask, convert it to SEGS to apply it to the entire frame.
+   * When using a batch mask, the contour fill feature is disabled.
+ * `MediaPipe FaceMesh to SEGS` - Separates each landmark from the mediapipe facemesh image to create labeled SEGS.
+   * Usually, the size of images created through the MediaPipe facemesh preprocessor is downscaled. It resizes the MediaPipe facemesh image to the original size given as reference_image_opt for matching sizes during processing.
+ * `ToBinaryMask` - Separates the mask generated with alpha values between 0 and 255 into 0 and 255. The non-zero parts are always set to 255.
+ * `Masks to Mask List` - This node converts the MASKS in batch form to a list of individual masks.
+ * `Mask List to Masks` - This node converts the MASK list to MASK batch form.
+ * `EmptySEGS` - Provides an empty SEGS.
+ * `MaskPainter` - Provides a feature to draw masks.
+ * `FaceDetailer` - Easily detects faces and improves them.
+ * `FaceDetailer (pipe)` - Easily detects faces and improves them (for multipass).
+ * `MaskDetailer (pipe)` - This is a simple inpaint node that applies the Detailer to the mask area.
+
+ * `FromDetailer (SDXL/pipe)`, `BasicPipe -> DetailerPipe (SDXL)`, `Edit DetailerPipe (SDXL)` - These are pipe functions used in Detailer for utilizing the refiner model of SDXL.
+
+### SEGS Manipulation nodes
+ * `SEGSDetailer` - Performs detailed work on SEGS without pasting it back onto the original image.
+ * `SEGSPaste` - Pastes the results of SEGS onto the original image.
+   * If `ref_image_opt` is present, the images contained within SEGS are ignored. Instead, the image within `ref_image_opt` corresponding to the crop area of SEGS is taken and pasted. The size of the image in `ref_image_opt` should be the same as the original image size.
+   * This node can be used in conjunction with the processing results of AnimateDiff.
+ * `SEGSPreview` - Provides a preview of SEGS.
+   * This option is used to preview the improved image through `SEGSDetailer` before merging it into the original. Prior to going through `SEGSDetailer`, SEGS only contains mask information without image information. If fallback_image_opt is connected to the original image, SEGS without image information will generate a preview using the original image. However, if SEGS already contains image information, fallback_image_opt will be ignored.
+
+### SEGS Manipulation nodes
+ * `SEGSDetailer` - Performs detail work on SEGS without pasting the result back onto the original image.
+ * `SEGSPaste` - Pastes the results of SEGS onto the original image.
+   * If `ref_image_opt` is present, the images contained within SEGS are ignored. Instead, the part of the image within `ref_image_opt` corresponding to the crop area of SEGS is taken and pasted. The size of the image in `ref_image_opt` should be the same as the original image size.
+   * This node can be used in conjunction with the processing results of AnimateDiff.
+ * `SEGSPreview` - Provides a preview of SEGS.
+   * This is used to preview the improved image produced by `SEGSDetailer` before merging it into the original. Prior to going through `SEGSDetailer`, SEGS contains only mask information, without image information. If `fallback_image_opt` is connected to the original image, SEGS without image information will generate a preview using the original image. However, if SEGS already contains image information, `fallback_image_opt` is ignored.
+   * This node can be used in conjunction with the processing results of AnimateDiff.
+ * `SEGSPreview (CNET Image)` - Shows the images configured by `ControlNetApply (SEGS)` for debugging purposes.
+ * `SEGSToImageList` - Converts SEGS to an Image List.
+ * `SEGSToMaskList` - Converts SEGS to a Mask List.
+ * `SEGS Filter (label)` - Filters SEGS based on the labels of the detected areas.
+ * `SEGS Filter (ordered)` - Sorts SEGS based on size and position and retrieves the SEGs within a certain range.
+ * `SEGS Filter (range)` - Retrieves only the SEGs from SEGS whose size and position are within a certain range.
+ * `SEGS Assign (label)` - Assigns labels sequentially to SEGS. This node is useful when used with `[LAB]` of FaceDetailer.
+ * `SEGSConcat` - Concatenates segs1 and segs2. If the source shapes of segs1 and segs2 differ, segs2 is ignored.
+ * `Picker (SEGS)` - Lets you select a specific SEG from the input SEGS through a dialog. If no SEG is selected, it outputs an empty SEGS. Increasing the batch_size of SEGSDetailer is useful for picking from multiple candidates.
+ * `Set Default Image For SEGS` - Sets a default image for SEGS. SEGS with images set this way do not need a fallback image. When override is set to false, the original image is preserved.
+ * `Remove Image from SEGS` - Removes the image set for the SEGS by "Set Default Image for SEGS" or SEGSDetailer. When the image is removed, the Detailer node operates based on the currently processed image instead of the SEGS.
+ * `Make Tile SEGS` - [experimental] Creates SEGS in the form of tiles from an image to facilitate experiments with Tiled Upscale using the Detailer.
+   * `filter_in_segs_opt` and `filter_out_segs_opt` are optional inputs. If they are provided, each tile's mask is generated by intersecting with the mask of `filter_in_segs_opt` and excluding the overlap with the mask of `filter_out_segs_opt`. Tiles with an empty mask are not created as SEGS.
+ * `Dilate Mask (SEGS)` - Dilates/erodes the masks in SEGS.
+ * `Gaussian Blur Mask (SEGS)` - Applies Gaussian blur to the masks in SEGS.
+ * `SEGS_ELT Manipulation` - experimental nodes
+   * `DecomposeSEGS` - Decomposes SEGS to allow for detailed manipulation.
+   * `AssembleSEGS` - Reassembles the decomposed SEGS.
+   * `From SEG_ELT` - Extracts detailed information from a SEG_ELT.
+   * `Edit SEG_ELT` - Modifies some of the information in a SEG_ELT.
+   * `Dilate SEG_ELT` - Dilates the mask of a SEG_ELT.
+   * `From SEG_ELT bbox` - Extracts the coordinates from the bbox in a SEG_ELT.
+   * `From SEG_ELT crop_region` - Extracts the coordinates from the crop_region in a SEG_ELT.
+   * `Count Elt in SEGS` - Returns the number of Elts in SEGS.
+
+### Pipe nodes
+ * `ToDetailerPipe`, `FromDetailerPipe` - These nodes bundle the multiple inputs used in the detailer, such as models, vae, etc., into a single DETAILER_PIPE, or extract the elements bundled in the DETAILER_PIPE.
+ * `ToBasicPipe`, `FromBasicPipe` - These nodes bundle model, clip, vae, positive conditioning, and negative conditioning into a single BASIC_PIPE, or extract each element from the BASIC_PIPE.
+ * `EditBasicPipe`, `EditDetailerPipe` - These nodes replace some elements of a BASIC_PIPE or DETAILER_PIPE.
+ * `FromDetailerPipe_v2`, `FromBasicPipe_v2` - Same functionality as `FromDetailerPipe` and `FromBasicPipe`, but with an additional output that directly exports the input pipe. This is useful when editing with EditBasicPipe and EditDetailerPipe.
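+
+As a mental model, a pipe is just a bundle of the components listed above. A toy sketch with hypothetical names — the actual BASIC_PIPE layout is internal to the Impact Pack:
+
+```python
+from collections import namedtuple
+
+# Illustrative stand-in for what ToBasicPipe/FromBasicPipe bundle and unbundle.
+BasicPipe = namedtuple("BasicPipe", ["model", "clip", "vae", "positive", "negative"])
+
+def to_basic_pipe(model, clip, vae, positive, negative) -> BasicPipe:
+    return BasicPipe(model, clip, vae, positive, negative)
+
+def from_basic_pipe(pipe: BasicPipe):
+    # returns each element, mirroring FromBasicPipe's outputs
+    return pipe.model, pipe.clip, pipe.vae, pipe.positive, pipe.negative
+```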
+* `Latent Scale (on Pixel Space)` - This node converts latent to pixel space, upscales it, and then converts it back to latent.
+  * If `upscale_model_opt` is provided, it uses the model to upscale the pixels and then downscales them to the target resolution using the interpolation method provided in `scale_method`.
+* `PixelKSampleUpscalerProvider` - Provides an upscaler that converts latent to pixels using VAEDecode, performs upscaling, converts back to latent using VAEEncode, and then performs k-sampling. This upscaler can be attached to nodes such as `Iterative Upscale`.
+  * Similar to `Latent Scale (on Pixel Space)`, if `upscale_model_opt` is provided, it performs pixel upscaling using the model.
+* `PixelTiledKSampleUpscalerProvider` - Similar to `PixelKSampleUpscalerProvider`, but it uses `ComfyUI_TiledKSampler` and a tiled VAE decoder/encoder to avoid GPU VRAM issues at high resolutions.
+  * You need to install the [BlenderNeko/ComfyUI_TiledKSampler](https://github.com/BlenderNeko/ComfyUI_TiledKSampler) node extension.
+
+### PK_HOOK
+ * `DenoiseScheduleHookProvider` - A hook for IterativeUpscale that gradually changes the denoise toward `target_denoise` as the iterative steps progress (see the sketch after the hook sections below).
+ * `CfgScheduleHookProvider` - A hook for IterativeUpscale that gradually changes the cfg toward `target_cfg` as the iterative steps progress.
+ * `StepsScheduleHookProvider` - A hook for IterativeUpscale that gradually changes the sampling steps toward `target_steps` as the iterative steps progress.
+ * `NoiseInjectionHookProvider` - During each iteration of IterativeUpscale, injects noise into the latent space while varying the strength according to a schedule.
+   * You need to install the [BlenderNeko/ComfyUI_Noise](https://github.com/BlenderNeko/ComfyUI_Noise) node extension.
+   * The seed serves as the initial value for generating noise, and it increments by 1 with each iteration as the process unfolds.
+   * The source determines whether CPU noise or GPU noise is configured.
+   * Currently, only a simple schedule is available, where the strength of the noise varies from `start_strength` to `end_strength` as the iterations progress.
+ * `UnsamplerHookProvider` - Applies Unsampler during each iteration. To use this node, ComfyUI_Noise must be installed.
+ * `PixelKSampleHookCombine` - Connects two PK_HOOKs; hook1 is executed first, then hook2.
+   * If you want to change cfg and denoise simultaneously, you can combine `CfgScheduleHookProvider` and `DenoiseScheduleHookProvider` using `PixelKSampleHookCombine`.
+
+### DETAILER_HOOK
+ * `NoiseInjectionDetailerHookProvider` - The `detailer_hook` is a hook in the `Detailer` that injects noise during the processing of each SEGS.
+ * `UnsamplerDetailerHookProvider` - Applies Unsampler during each cycle. To use this node, ComfyUI_Noise must be installed.
+ * `DenoiseSchedulerDetailerHookProvider` - As the cycle progresses, alters the detailer's denoise up to the `target_denoise`.
+ * `CoreMLDetailerHookProvider` - CoreML supports sampling only at the sizes 512x512, 512x768, 768x512, and 768x768. CoreMLDetailerHookProvider fixes the upscale of the crop_region precisely to one of these sizes. When using this hook, one of these sizes is always selected, regardless of the guide_size. However, if the guide_size is too small, processing will be skipped.
+ * `DetailerHookCombine` - Connects two DETAILER_HOOKs; similar to PixelKSampleHookCombine.
+ * `SEGSOrderedFilterDetailerHook`, `SEGSRangeFilterDetailerHook`, `SEGSLabelFilterDetailerHook` - Wrapper nodes that provide the SEGSFilter nodes as a DETAILER_HOOK, so they can be applied in FaceDetailer or a Detector.
+ * `PreviewDetailerHook` - Connecting this hook node provides previews whenever a SEGS detailing task completes. When working with a large number of SEGS, such as with Make Tile SEGS, it allows monitoring progress as the improvements are applied incrementally.
+   * Since this hook is applied when pasting onto the original image, it has no effect on nodes like `SEGSDetailer`.
+ * `VariationNoiseDetailerHookProvider` - Applies a variation seed to the detailer. It can be applied in multiple stages through combine.
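+
+All of the schedule hooks above share one idea: interpolate a sampling parameter from its starting value toward a target as the iteration index advances. A minimal sketch of that idea, with hypothetical names — the actual schedule shapes are internal to the pack:
+
+```python
+def scheduled_value(start: float, target: float, step: int, total_steps: int) -> float:
+    """Linearly interpolate a parameter (denoise, cfg, steps, noise strength)
+    from `start` to `target` as the iterative step progresses."""
+    if total_steps <= 1:
+        return target
+    t = step / (total_steps - 1)   # 0.0 on the first step, 1.0 on the last
+    return start + (target - start) * t
+
+# e.g. a denoise schedule over 4 iterative-upscale steps:
+# [scheduled_value(0.6, 0.3, s, 4) for s in range(4)]  ->  approximately [0.6, 0.5, 0.4, 0.3]
+```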
+
+### Iterative Upscale nodes
+ * `Iterative Upscale (Latent/on Pixel Space)` - Takes an upscaler as input, splits the scale_factor into steps, and then performs the upscaling iteratively.
+   This node takes a latent as input and outputs a latent as the result.
+ * `Iterative Upscale (Image)` - Takes an upscaler as input, splits the scale_factor into steps, and then performs the upscaling iteratively. This node takes an image as input and outputs an image as the result.
+   * Internally, this node uses 'Iterative Upscale (Latent)'.
+
+### TwoSamplers nodes
+* `TwoSamplersForMask` - This node can apply two samplers depending on the mask area. The base_sampler is applied to the area where the mask is 0, while the mask_sampler is applied to the area where the mask is 1 (see the sketch after this section).
+  * Note: A latent encoded through VAEEncodeForInpaint cannot be used.
+* `KSamplerProvider` - A wrapper that enables KSampler to be used in TwoSamplersForMask and TwoSamplersForMaskUpscalerProvider.
+* `TiledKSamplerProvider` - A wrapper that provides a KSAMPLER based on ComfyUI_TiledKSampler.
+  * You need to install the [BlenderNeko/ComfyUI_TiledKSampler](https://github.com/BlenderNeko/ComfyUI_TiledKSampler) node extension.
+
+* `TwoAdvancedSamplersForMask` - Similar to TwoSamplersForMask, but the two differ in their operation: TwoSamplersForMask performs sampling in the mask area only after all the sampling in the base area is finished, whereas TwoAdvancedSamplersForMask performs sampling in both the base area and the mask area sequentially at each step.
+* `KSamplerAdvancedProvider` - A wrapper that enables KSampler to be used in TwoAdvancedSamplersForMask and RegionalSampler.
+  * sigma_factor: By multiplying the denoise schedule by the sigma_factor, you can adjust the amount of denoising relative to the configured denoise.
+
+* `TwoSamplersForMaskUpscalerProvider` - An upscaler that extends TwoSamplersForMask so it can be used in Iterative Upscale.
+  * `TwoSamplersForMaskUpscalerProviderPipe` - pipe version of TwoSamplersForMaskUpscalerProvider.
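+
+Conceptually, the mask decides which sampler's result survives where. A toy sketch of the idea behind `TwoSamplersForMask` — hypothetical callables, not the node's actual code (which runs the base sampler to completion before the mask sampler):
+
+```python
+import torch
+
+def two_samplers_for_mask(base_sampler, mask_sampler,
+                          latent: torch.Tensor, mask: torch.Tensor) -> torch.Tensor:
+    """base_sampler handles the area where mask == 0,
+    mask_sampler handles the area where mask == 1."""
+    base_result = base_sampler(latent)   # sample the whole latent first
+    mask_result = mask_sampler(latent)   # then sample for the masked region
+    return base_result * (1.0 - mask) + mask_result * mask
+```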
+
+### Image Utils
+ * `PreviewBridge (image)` - This custom node can be used as a bridge for images when using the MaskEditor feature of Clipspace.
+ * `PreviewBridge (latent)` - This custom node can be used as a bridge for latent images when using the MaskEditor feature of Clipspace.
+   * If a latent with a mask is provided as input, it displays the mask, and the mask output provides the mask set in the latent.
+   * If a latent without a mask is provided as input, it outputs the original latent as is, but the mask output provides a mask covering the entire region.
+   * When a mask is set through MaskEditor, it is applied to the latent, and the output includes the stored mask. The same mask is also provided through the mask output.
+   * When `vae_opt` is connected, it takes higher priority than the `preview_method`.
+ * `ImageSender`, `ImageReceiver` - The images generated in ImageSender are automatically sent to the ImageReceiver with the same link_id.
+ * `LatentSender`, `LatentReceiver` - The latent generated in LatentSender is automatically sent to the LatentReceiver with the same link_id.
+   * Furthermore, LatentSender is implemented with PreviewLatent, which stores the latent in payload form within the image thumbnail.
+   * Due to the current structure of ComfyUI, it is unable to distinguish between SDXL latents and SD1.5/SD2.1 latents. Therefore, it generates thumbnails by decoding them using the SD1.5 method.
+
+### Switch nodes
+ * `Switch (images, mask)`, `Switch (latent)`, `Switch (SEGS)` - Among multiple inputs, selects the input designated by the selector and outputs it. The first input must be provided, while the others are optional. However, if the input specified by the selector is not connected, an error may occur.
+ * `Switch (Any)` - A Switch node that takes an arbitrary number of inputs and produces a single output. Its type is determined when connected to any node, and connecting inputs increases the available slots for connections.
+ * `Inversed Switch (Any)` - In contrast to `Switch (Any)`, it takes a single input and outputs it to one of many outputs.
+ * NOTE: See this [tutorial](https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/switch.md)
+
+### [Wildcards](http://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/ImpactWildcard.md) nodes
+ * These nodes support syntax in the form of `__wildcard-name__` and dynamic prompt syntax like `{a|b|c}` (a toy sketch of the expansion follows this section).
+ * Wildcard files can be used by placing `.txt` or `.yaml` files under either the `ComfyUI-Impact-Pack/wildcards` or `ComfyUI-Impact-Pack/custom_wildcards` path.
+   * You can download and use [Wildcard YAML](https://civitai.com/models/138970/billions-of-wildcards-all-in-one) files in this format.
+ * After the first execution, you can change the custom wildcards path in the `custom_wildcards` entry of the generated `ComfyUI-Impact-Pack/impact-pack.ini` file.
+ * `ImpactWildcardProcessor` - Generates text by processing the wildcards in the Text. If the mode is set to "populate", a dynamic prompt is generated with each execution and fills the second textbox. If the mode is set to "fixed", the content of the second textbox remains unchanged.
+   * When an image is generated in "fixed" mode, the prompt used for that particular generation is stored in the metadata.
+ * `ImpactWildcardEncode` - Similar to ImpactWildcardProcessor, but it also provides LoRA loading functionality (e.g. ``). Populated prompts are encoded using the clip after all the LoRA loading is done.
+   * If the `Inspire Pack` is installed, you can use **Lora Block Weight** in the form of `LBW=lbw spec;`
+     * ``, ``, ``
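+
+A rough sketch of what dynamic-prompt expansion does — illustrative only; the pack's actual parser also handles nesting, `.yaml` wildcards, and more:
+
+```python
+import random
+import re
+
+def expand_dynamic_prompt(text: str, wildcards: dict) -> str:
+    """Toy expansion of `{a|b|c}` choices and `__name__` wildcard references."""
+    # resolve {a|b|c} by picking one option at random (innermost first)
+    while m := re.search(r"\{([^{}]+)\}", text):
+        choice = random.choice(m.group(1).split("|"))
+        text = text[:m.start()] + choice + text[m.end():]
+    # resolve __wildcard-name__ from loaded wildcard lists
+    def replace(m):
+        options = wildcards.get(m.group(1))
+        return random.choice(options) if options else m.group(0)
+    return re.sub(r"__([\w\-/]+)__", replace, text)
+
+# expand_dynamic_prompt("a {red|blue} __animal__", {"animal": ["cat", "dog"]})
+# -> e.g. "a blue cat"
+```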
+
+### Regional Sampling
+ * These nodes offer the capability to divide regions and perform partial sampling using a mask. Unlike TwoSamplersForMask, sampling for each region is applied during each step.
+ * `RegionalPrompt` - Combines a **mask** specifying a region with the **sampler** to apply to that region, creating `REGIONAL_PROMPTS`.
+ * `CombineRegionalPrompts` - Combines multiple `REGIONAL_PROMPTS` into a single `REGIONAL_PROMPTS`.
+ * `RegionalSampler` - Performs sampling using a base sampler and regional prompts. Sampling by the base sampler is executed at each step, while sampling for each region is performed through the sampler bound to that region.
+   * overlap_factor - Specifies the amount of overlap for each region so it blends well with the area outside the mask.
+   * restore_latent - When sampling each region, restores the areas outside the mask to the base latent, preventing additional noise from being introduced outside the mask during region sampling.
+ * `RegionalSamplerAdvanced` - The advanced version of the RegionalSampler. You can control it using `step` instead of `denoise`.
+ > NOTE: The `sde` and `uni_pc` samplers introduce additional noise during each step of the sampling process. To mitigate this, when sampling each region, the `uni_pc` sampler additionally applies `dpmpp_fast`, and the `sde` sampler additionally applies the `dpmpp_2m` sampler.
+
+
+### Impact KSampler
+ * These samplers support basic_pipe and the AYS scheduler.
+ * `KSampler (pipe)` - pipe version of KSampler
+ * `KSampler (advanced/pipe)` - pipe version of KSamplerAdvanced
+ * When converting the scheduler widget to an input, refer to the `Impact Scheduler Adapter` node to resolve compatibility issues.
+ * `GITSScheduler Func Provider` - Provides a scheduler function for GITSScheduler.
+
+
+### Batch/List Util
+ * `Image batch To Image List` - Converts an Image batch to an Image List.
+   - This lets you handle the images generated in a multi-image batch individually.
+ * `Make Image List` - Converts multiple images into a single image list.
+ * `Make Image Batch` - Converts multiple images into a single image batch.
+   - The number of image inputs can be expanded as needed.
+
+
+### Logics (experimental)
+ * These are experimental nodes designed to implement logic for loops and dynamic switching.
+ * `ImpactCompare`, `ImpactConditionalBranch`, `ImpactConditionalBranchSelMode`, `ImpactInt`, `ImpactValueSender`, `ImpactValueReceiver`, `ImpactImageInfo`, `ImpactMinMax`, `ImpactNeg`, `ImpactConditionalStopIteration`
+ * `ImpactIsNotEmptySEGS` - Returns `true` only if the input SEGS is not empty.
+ * `ImpactIfNone` - Returns `true` if any_input is None, and `false` if it is not None.
+ * `Queue Trigger` - When this node is executed, it adds a new entry to the queue to assist with repetitive tasks. It only executes if the signal's status changes.
+ * `Queue Trigger (Countdown)` - Like Queue Trigger, it adds to the queue, but only while its count is greater than 1, decrementing the count by one each time it runs.
+ * `Sleep` - Waits for the specified time (in seconds).
+ * `Set Widget Value` - Sets one of the optional inputs into the specified node's widget. An error may occur if the types do not match.
+ * `Set Mute State` - Changes the mute state of a specific node.
+ * `Control Bridge` - Modifies the state of the connected control nodes based on the `mode` and `behavior`. If there are nodes that require a change, the current execution is paused, the mute status is updated, and a new prompt queue is inserted.
+   * When the `mode` is `active`, it makes the connected control nodes active regardless of the behavior.
+   * When the `mode` is `Bypass/Mute`, it changes the state of the connected nodes based on whether the behavior is `Bypass` or `Mute`.
+   * **Limitation**: Due to these characteristics, it does not function correctly when the batch count exceeds 1.
+     Additionally, it does not guarantee proper operation when the seed is randomized, or when the state of nodes is altered by actions such as `Queue Trigger`, `Set Widget Value`, or `Set Mute State`, before the Control Bridge.
+   * When utilizing this node, structure the workflow in such a way that `Queue Trigger`, `Set Widget Value`, `Set Mute State`, and similar actions are executed at the end of the workflow.
+   * If you want to change the value of the seed at each iteration, make sure that Set Widget Value is executed at the end of the workflow instead of using randomization.
+   * It is not a problem if the seed changes due to randomization, as long as it occurs after the Control Bridge section.
+ * `Remote Boolean (on prompt)`, `Remote Int (on prompt)` - At the start of the prompt, these nodes forcibly set the `widget_value` of `node_id`. The value is disregarded if the target widget type is different.
+   * You can find the `node_id` by checking through [ComfyUI-Manager](https://github.com/ltdrdata/ComfyUI-Manager) using the format `Badge: #ID Nickname`.
+ * Experimental set of nodes for implementing loop functionality (tutorial to be prepared later / [example workflow](test/loop-test.json)).
+
+### HuggingFace nodes
+ * These nodes provide functionality based on HuggingFace repository models.
+ * The path where the HuggingFace model cache is stored can be changed through the `HF_HOME` environment variable.
+ * `HF Transformers Classifier Provider` - Provides a classifier based on HuggingFace's transformers models.
+   * The 'repo id' parameter should contain a HuggingFace repo id. When `preset_repo_id` is set to `Manual repo id`, the manually entered repo id in `manual_repo_id` is used.
+   * e.g. 'rizvandwiki/gender-classification-2' is a repository that provides a model for gender classification.
+ * `SEGS Classify` - Uses the `TRANSFORMERS_CLASSIFIER` loaded with 'HF Transformers Classifier Provider' to classify `SEGS`.
+   * The 'expr' allows forms like `label > number`; when `preset_expr` is `Manual expr`, the expression entered in `manual_expr` is used (see the sketch below).
+   * For example, with `male <= 0.4`, if the score of the `male` label in the classification result is less than or equal to 0.4, the SEG is categorized as `filtered_SEGS`; otherwise, it is categorized as `remained_SEGS`.
+   * For the supported labels, refer to the `config.json` of the respective HuggingFace repository.
+   * `#Female` and `#Male` are not single labels but convenience symbols that group multiple labels such as `Female, women, woman, ...`.
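+
+To make the `expr` behavior concrete, here is a minimal sketch of how a transformers image classifier could be queried and compared against a `male <= 0.4`-style expression — illustrative only, not the pack's implementation:
+
+```python
+from transformers import pipeline
+
+# uses the example repo mentioned above; any image-classification repo works
+classifier = pipeline("image-classification", model="rizvandwiki/gender-classification-2")
+
+def score_at_most(image, label: str = "male", threshold: float = 0.4) -> bool:
+    """Mimics an expression like `male <= 0.4`: look up the label's score
+    in the classification result and compare it with the threshold."""
+    scores = {r["label"]: r["score"] for r in classifier(image)}
+    return scores.get(label, 0.0) <= threshold
+```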
+
+### Etc nodes
+ * `Impact Scheduler Adapter` - With the addition of AYS to the schedulers of the Impact Pack and Inspire Pack, the existing scheduler widget becomes incompatible when converted to an input. The Impact Scheduler Adapter makes an indirect connection possible.
+ * `StringListToString` - Converts a String List to a String.
+ * `WildcardPromptFromString` - Creates a labeled wildcard for the detailer from a string.
+   * This node works well when used with MakeTileSEGS. [[Link](https://github.com/ltdrdata/ComfyUI-Impact-Pack/pull/536#discussion_r1586060779)]
+
+ * `String Selector` - Selects and returns a portion of the string. When `multiline` mode is disabled, it simply returns the line pointed to by the selector. When `multiline` mode is enabled, it divides the string based on lines that start with `#` and returns the selected block. If the `select` value is larger than the number of items, the count wraps around to the first line and continues from there.
+ * `Combine Conditionings` - Takes multiple conditionings as input and combines them into a single conditioning.
+ * `Concat Conditionings` - Takes multiple conditionings as input and concatenates them into a single conditioning.
+ * `Negative Cond Placeholder` - Models like FLUX.1 do not use negative conditioning; this is a placeholder node for them. You can use FLUX.1 by replacing the negative conditioning used in Impact KSampler, KSampler (Inspire), and Detailer with this node.
+ * `Execution Order Controller` - A helper node that can forcibly control the execution order of nodes.
+   * Connect the output of the node that should be executed first to the signal, and make the input of the node that should be executed later pass through this node.
+
+
+## MMDet nodes (DEPRECATED) - Don't use these nodes
+* `MMDetDetectorProvider` - Loads the MMDet model to provide BBOX_DETECTOR and SEGM_DETECTOR.
+* To use the existing MMDetDetectorProvider, you need to enable the MMDet usage configuration.
+
+
+## Feature
+* `Interactive SAM Detector (Clipspace)` - When you right-click on a node that has 'MASK' and 'IMAGE' outputs, a context menu will open. From this menu, you can either open a dialog to create a SAM mask using 'Open in SAM Detector', or copy the content (likely mask data) using 'Copy (Clipspace)', generate a mask using 'Impact SAM Detector' from the clipspace menu, and then paste it using 'Paste (Clipspace)'.
+* Detects errors that occur when models and clips from different checkpoints, such as `SDXL Base`, `SDXL Refiner`, `SD1.x`, and `SD2.x`, are mixed during sample execution, and reports an appropriate error.
+
+
+## Deprecated
+* The following nodes have been kept only for compatibility with existing workflows, and are no longer supported. Please replace them with the new nodes.
+  * ONNX Detector (SEGS) -> BBOX Detector (SEGS)
+  * MMDetLoader -> MMDetDetectorProvider
+  * SegsMaskCombine -> SEGS to MASK (combined)
+  * BboxDetectorForEach -> BBOX Detector (SEGS)
+  * SegmDetectorForEach -> SEGM Detector (SEGS)
+  * BboxDetectorCombined -> BBOX Detector (combined)
+  * SegmDetectorCombined -> SEGM Detector (combined)
+  * MaskPainter -> PreviewBridge
+* To use the deprecated legacy nodes, you need to enable the MMDet usage configuration.
+
+
+## Ultralytics models
+* huggingface.co/Bingsu/[adetailer](https://huggingface.co/Bingsu/adetailer) - You can download face and person detection models, as well as clothing detection models.
+* ultralytics/[assets](https://github.com/ultralytics/assets/releases/) - You can download various types of detection models other than faces or people.
+* civitai/[adetailer](https://civitai.com/search/models?sortBy=models_v5&query=adetailer) - You can download various types of detection models. Note that many of these models are associated with NSFW content.
+
+## How to activate 'MMDet usage' (DEPRECATED)
+* Upon the initial execution, an `impact-pack.ini` file will be generated in the custom_nodes/ComfyUI-Impact-Pack directory.
+```
+[default]
+dependency_version = 2
+mmdet_skip = True
+```
+* Change `mmdet_skip = True` to `mmdet_skip = False`
+```
+[default]
+dependency_version = 2
+mmdet_skip = False
+```
+* Restart ComfyUI
+
+
+## Installation
+
+1. `cd custom_nodes`
+2. `git clone https://github.com/ltdrdata/ComfyUI-Impact-Pack.git`
+3. `cd ComfyUI-Impact-Pack`
+4. (optional) `git clone https://github.com/ltdrdata/ComfyUI-Impact-Subpack impact_subpack`
+   * Impact Pack will automatically download the subpack during its initial launch.
+5. (optional) `python install.py`
+   * Impact Pack will automatically install its dependencies during its initial launch.
+   * For the portable version, execute the command `..\..\..\python_embeded\python.exe install.py` to run the installation script.
+
+
+6. Restart ComfyUI
+
+* NOTE1: If an error occurs during the installation process, please refer to the [Troubleshooting Page](troubleshooting/TROUBLESHOOTING.md) for assistance.
+* NOTE2: You can use this [colab notebook](https://colab.research.google.com/github/ltdrdata/ComfyUI-Impact-Pack/blob/Main/notebook/comfyui_colab_impact_pack.ipynb) to launch it. This notebook automatically downloads the Impact Pack to the custom_nodes directory, installs the tested dependencies, and runs it.
+* NOTE3: If you create an empty file named `skip_download_model` in the `ComfyUI/custom_nodes/` directory, the model download step will be skipped during the installation of the Impact Pack.
+
+## Package Dependencies (if you need to set up manually)
+
+* pip install
+  * openmim
+  * segment-anything
+  * ultralytics
+  * scikit-image
+  * piexif
+  * (optional) pycocotools
+  * (optional) onnxruntime
+
+* mim install (deprecated)
+  * mmcv==2.0.0, mmdet==3.0.0, mmengine==0.7.2
+
+* linux packages (ubuntu)
+  * libgl1-mesa-glx
+  * libglib2.0-0
+
+
+## Config example
+* Once you run the Impact Pack for the first time, an `impact-pack.ini` file will be automatically generated in the Impact Pack directory. You can modify this configuration file to customize the default behavior.
+  * `dependency_version` - don't touch this
+  * `mmdet_skip` - disables the MMDet-based nodes and legacy nodes if `True`
+  * `sam_editor_cpu` - use the CPU for the `SAM editor` instead of the GPU
+  * `sam_editor_model` - specifies the SAM model for the SAM editor
+    * You can download various SAM models using ComfyUI-Manager.
+    * Path to SAM models: `ComfyUI/models/sams`
+```
+[default]
+dependency_version = 9
+mmdet_skip = True
+sam_editor_cpu = False
+sam_editor_model = sam_vit_b_01ec64.pth
+```
+
+
+## Other Materials (auto-download on initial startup)
+
+* ComfyUI/models/mmdets/bbox <= https://huggingface.co/dustysys/ddetailer/resolve/main/mmdet/bbox/mmdet_anime-face_yolov3.pth
+* ComfyUI/models/mmdets/bbox <= https://raw.githubusercontent.com/Bing-su/dddetailer/master/config/mmdet_anime-face_yolov3.py
+* ComfyUI/models/sams <= https://dl.fbaipublicfiles.com/segment_anything/sam_vit_b_01ec64.pth
+
+## Troubleshooting page
+* [Troubleshooting Page](troubleshooting/TROUBLESHOOTING.md)
+
+
+## How to use (DDetailer feature)
+
+#### 1. Basic auto face detection and refine example
+![simple](https://github.com/ltdrdata/ComfyUI-extension-tutorials/raw/Main/ComfyUI-Impact-Pack/images/simple.png)
+* A face damaged by low resolution is regenerated at high resolution and composited back in, restoring its details.
+* The FaceDetailer node is a combination of a Detector node for face detection and a Detailer node for image enhancement. See the [Advanced Tutorial](https://github.com/ltdrdata/ComfyUI-extension-tutorials/raw/Main/ComfyUI-Impact-Pack/tutorial/advanced.md) for a more detailed explanation.
+* Pass the MMDetLoader's bbox model and the detection model loaded by SAMLoader to FaceDetailer. Since it performs the function of a KSampler for image enhancement, its options overlap with KSampler's. A conceptual sketch of the detect-refine-composite loop follows below.
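+
+This is a rough, hypothetical sketch of the loop FaceDetailer performs — detect, crop, regenerate, composite — not the node's actual code; `detect` and `refine` stand in for the detector and the KSampler-based refinement:
+
+```python
+from PIL import Image
+
+def face_detail(image: Image.Image, detect, refine, guide_size: int = 512) -> Image.Image:
+    for left, top, right, bottom in detect(image):            # 1. detect face regions
+        crop = image.crop((left, top, right, bottom))         # 2. crop each region
+        crop = crop.resize((guide_size, guide_size))          #    enlarge small crops for sampling
+        refined = refine(crop)                                # 3. KSampler-style regeneration
+        refined = refined.resize((right - left, bottom - top))
+        image.paste(refined, (left, top))                     # 4. composite back into place
+    return image
+```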
+* The MASK output of FaceDetailer provides a visualization of where the detected and enhanced areas are.
+
+![simple-orig](https://github.com/ltdrdata/ComfyUI-extension-tutorials/raw/Main/ComfyUI-Impact-Pack/images/simple-original.png) ![simple-refined](https://github.com/ltdrdata/ComfyUI-extension-tutorials/raw/Main/ComfyUI-Impact-Pack/images/simple-refined.png)
+* You can see that the face in the left image has been restored with increased detail, as shown in the image on the right.
+
+#### 2. 2Pass refine (restore a severely damaged face)
+![2pass-workflow-example](https://github.com/ltdrdata/ComfyUI-extension-tutorials/raw/Main/ComfyUI-Impact-Pack/images/2pass-simple.png)
+* Although two FaceDetailers can simply be attached together for a 2-pass configuration, the various common inputs used in KSampler can be passed through DETAILER_PIPE, so using FaceDetailerPipe makes the configuration easier.
+* The first pass only needs to recover a rough outline, so restore at a reasonable resolution with conservative options. However, if you increase the dilation at this stage, not only the face but also its surroundings are included in the recovery range, which is useful when you need to reshape areas beyond the face itself.
+
+![2pass-example-original](https://github.com/ltdrdata/ComfyUI-extension-tutorials/raw/Main/ComfyUI-Impact-Pack/images/2pass-original.png) ![2pass-example-middle](https://github.com/ltdrdata/ComfyUI-extension-tutorials/raw/Main/ComfyUI-Impact-Pack/images/2pass-1pass.png) ![2pass-example-result](https://github.com/ltdrdata/ComfyUI-extension-tutorials/raw/Main/ComfyUI-Impact-Pack/images/2pass-2pass.png)
+* In the first stage, the severely damaged face is restored to some extent, and in the second stage, the details are restored.
+
+#### 3. Face bbox (bounding box) + person silhouette segmentation (prevents distortion of the background)
+![combination-workflow-example](https://github.com/ltdrdata/ComfyUI-extension-tutorials/raw/Main/ComfyUI-Impact-Pack/images/combination.jpg)
+![combination-example-original](https://github.com/ltdrdata/ComfyUI-extension-tutorials/raw/Main/ComfyUI-Impact-Pack/images/combination-original.png) ![combination-example-refined](https://github.com/ltdrdata/ComfyUI-extension-tutorials/raw/Main/ComfyUI-Impact-Pack/images/combination-refined.png)
+
+* The detail-emphasizing facial synthesis is delicately aligned with the contours of the face, and you can observe that it does not affect the image outside the face.
+
+* The BBoxDetectorForEach node is used to detect faces, and the SAMDetectorCombined node is used to find the segment related to the detected face. By using the Segs & Mask node with the two masks obtained this way, an accurate mask intersected based on the SEGS can be generated. If this generated mask is input to the DetailerForEach node, only the target area is regenerated at high resolution from the image and then composited back.
+
+#### 4. Iterative Upscale
+![upscale-workflow-example](https://github.com/ltdrdata/ComfyUI-extension-tutorials/raw/Main/ComfyUI-Impact-Pack/images/upscale-workflow.png)
+
+* The IterativeUpscale node enlarges an image/latent by a scale_factor, carrying out the upscale progressively by dividing it into steps (see the sketch below).
+* IterativeUpscale takes an Upscaler as an input, similar to a plugin, and uses it during each iteration. PixelKSampleUpscalerProvider is an Upscaler that converts the latent representation to pixel space and applies k-sampling.
+  * The upscale_model_opt is an optional parameter that determines whether to use the model-based upscale function when available. Using it can significantly reduce the number of iterative steps required. If an x2 upscale model is used, the image/latent is first upscaled by a factor of 2 and then downscaled to the step's target scale before further processing.
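+
+One plausible way to split a total scale_factor into per-step targets is linear interpolation up to the goal — an illustrative sketch with hypothetical names, not necessarily the node's exact scheme:
+
+```python
+def split_scale_factor(scale_factor: float, steps: int) -> list:
+    """Split a total scale_factor into per-step target scales,
+    interpolating linearly from 1.0 up to the final factor."""
+    return [1.0 + (scale_factor - 1.0) * (i + 1) / steps for i in range(steps)]
+
+# split_scale_factor(3.0, 4) -> [1.5, 2.0, 2.5, 3.0]
+# each iteration upscales to the next target, then lets the provided Upscaler
+# (e.g. PixelKSampleUpscalerProvider) refine the result before continuing
+```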
+
+* The following images show an image of 304x512 pixels and the same image scaled up to three times its original size using IterativeUpscale.
+
+![combination-example-original](https://github.com/ltdrdata/ComfyUI-extension-tutorials/raw/Main/ComfyUI-Impact-Pack/images/upscale-original.png) ![combination-example-refined](https://github.com/ltdrdata/ComfyUI-extension-tutorials/raw/Main/ComfyUI-Impact-Pack/images/upscale-3x.png)
+
+
+#### 5. Interactive SAM Detector (Clipspace)
+
+* When you right-click on a node that outputs 'MASK' and 'IMAGE', a menu called "Open in SAM Detector" appears, as shown in the following picture. Clicking the menu opens a dialog with SAM functionality, allowing you to generate a segment mask.
+![samdetector-menu](https://github.com/ltdrdata/ComfyUI-extension-tutorials/raw/Main/ComfyUI-Impact-Pack/images/SAMDetector-menu.png)
+
+* Clicking the left mouse button on a coordinate enters a positive prompt in blue, indicating an area that should be included. Clicking the right mouse button on a coordinate enters a negative prompt in red, indicating an area that should be excluded.
+* You can remove points that were added by using the "undo" button. After selecting the points, pressing the "detect" button generates the mask. Additionally, you can adjust the fidelity slider to determine how much of the confidence region is included in the mask.
+
+![samdetector-dialog](https://github.com/ltdrdata/ComfyUI-extension-tutorials/raw/Main/ComfyUI-Impact-Pack/images/SAMDetector-dialog.jpg)
+
+* If you opened the dialog through "Open in SAM Detector" from the node, you can apply the changes directly by clicking the "Save to node" button. However, if you opened the dialog through the "clipspace" menu, you can save it to clipspace by clicking the "Save" button.
+
+![samdetector-result](https://github.com/ltdrdata/ComfyUI-extension-tutorials/raw/Main/ComfyUI-Impact-Pack/images/SAMDetector-result.jpg)
+
+* When you execute with the mask reflected in the node, you can observe that the image and mask are displayed separately.
+
+
+## Other Tutorials
+* [ComfyUI-extension-tutorials/ComfyUI-Impact-Pack](https://github.com/ltdrdata/ComfyUI-extension-tutorials/tree/Main/ComfyUI-Impact-Pack) - You can find various tutorials and workflows on this page.
+* [Advanced Tutorial](https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/advanced.md)
+* [SAM Application](https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/sam.md)
+* [PreviewBridge](https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/previewbridge.md)
+* [Mask Pointer](https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/maskpointer.md)
+* [ONNX Tutorial](https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/ONNX.md)
+* [CLIPSeg Tutorial](https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/clipseg.md)
+* [Extreme Highresolution Upscale](https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/extreme-upscale.md)
+* [TwoSamplersForMask](https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/TwoSamplers.md)
+* [TwoAdvancedSamplersForMask](https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/TwoAdvancedSamplers.md)
+* [Advanced Iterative Upscale: PK_HOOK](https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/pk_hook.md)
+* [Advanced Iterative Upscale: TwoSamplersForMask Upscale Provider](https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/TwoSamplersUpscale.md)
+* [Interactive SAM + PreviewBridge](https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/sam_with_preview_bridge.md)
+* [ImageSender/ImageReceiver/LatentSender/LatentReceiver](https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/sender_receiver.md)
+* [ImpactWildcardProcessor](https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/ImpactWildcardProcessor.md)
+
+
+## Credits
+
+ComfyUI/[ComfyUI](https://github.com/comfyanonymous/ComfyUI) - A powerful and modular stable diffusion GUI.
+
+dustysys/[ddetailer](https://github.com/dustysys/ddetailer) - DDetailer for Stable-diffusion-webUI extension.
+
+Bing-su/[dddetailer](https://github.com/Bing-su/dddetailer) - The anime-face-detector used in ddetailer has been updated to be compatible with mmdet 3.0.0, and we have also applied a patch to the pycocotools dependency for Windows environments in ddetailer.
+
+facebook/[segment-anything](https://github.com/facebookresearch/segment-anything) - Segmentation Anything!
+
+hysts/[anime-face-detector](https://github.com/hysts/anime-face-detector) - Creator of `anime-face_yolov3`, which has impressive performance on a variety of art styles.
+
+open-mmlab/[mmdetection](https://github.com/open-mmlab/mmdetection) - Object detection toolset. `dd-person_mask2former` was trained via transfer learning using their [R-50 Mask2Former instance segmentation model](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask2former#instance-segmentation) as a base.
+
+biegert/[ComfyUI-CLIPSeg](https://github.com/biegert/ComfyUI-CLIPSeg) - This is a custom node that enables the use of CLIPSeg technology, which can find segments through prompts, in ComfyUI.
+
+BlenderNeko/[ComfyUI-TiledKSampler](https://github.com/BlenderNeko/ComfyUI_TiledKSampler) - The tile sampler allows high-resolution sampling even on machines with low GPU VRAM.
+
+BlenderNeko/[ComfyUI_Noise](https://github.com/BlenderNeko/ComfyUI_Noise) - The noise injection feature relies on this extension's noise functions and its slerp code for noise variation.
+
+WASasquatch/[was-node-suite-comfyui](https://github.com/WASasquatch/was-node-suite-comfyui) - A powerful custom node extension for ComfyUI.
+
+Trung0246/[ComfyUI-0246](https://github.com/Trung0246/ComfyUI-0246) - Nice bypass hack!
diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/__init__.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..689b6c412b8d6e6f8a271357e4239101e1382a39
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/__init__.py
@@ -0,0 +1,499 @@
+"""
+@author: Dr.Lt.Data
+@title: Impact Pack
+@nickname: Impact Pack
+@description: This extension offers various detector and detailer nodes that allow you to configure a workflow that automatically enhances facial details, and also provides an iterative upscaler.
+"""
+
+import shutil
+import folder_paths
+import os
+import sys
+import traceback
+
+comfy_path = os.path.dirname(folder_paths.__file__)
+impact_path = os.path.join(os.path.dirname(__file__))
+subpack_path = os.path.join(os.path.dirname(__file__), "impact_subpack")
+modules_path = os.path.join(os.path.dirname(__file__), "modules")
+
+sys.path.append(modules_path)
+
+import impact.config
+import impact.sample_error_enhancer
+print(f"### Loading: ComfyUI-Impact-Pack ({impact.config.version})")
+
+
+def do_install():
+    import importlib
+    spec = importlib.util.spec_from_file_location('impact_install', os.path.join(os.path.dirname(__file__), 'install.py'))
+    impact_install = importlib.util.module_from_spec(spec)
+    spec.loader.exec_module(impact_install)
+
+
+# ensure dependency
+if not os.path.exists(os.path.join(subpack_path, ".git")) and os.path.exists(subpack_path):
+    print("### ComfyUI-Impact-Pack: corrupted subpack detected.")
+    shutil.rmtree(subpack_path)
+
+if impact.config.get_config()['dependency_version'] < impact.config.dependency_version or not os.path.exists(subpack_path):
+    print(f"### ComfyUI-Impact-Pack: Updating dependencies [{impact.config.get_config()['dependency_version']} -> {impact.config.dependency_version}]")
+    do_install()
+
+sys.path.append(subpack_path)
+
+# Core
+# recheck dependencies for colab
+try:
+    import impact.subpack_nodes  # This import must be done before cv2.
+ + import folder_paths + import torch + import cv2 + from cv2 import setNumThreads + import numpy as np + import comfy.samplers + import comfy.sd + import warnings + from PIL import Image, ImageFilter + from skimage.measure import label, regionprops + from collections import namedtuple + import piexif + + if not impact.config.get_config()['mmdet_skip']: + import mmcv + from mmdet.apis import (inference_detector, init_detector) + from mmdet.evaluation import get_classes +except: + import importlib + print("### ComfyUI-Impact-Pack: Reinstall dependencies (several dependencies are missing.)") + do_install() + + +import impact.impact_server # to load server api + +from .modules.impact.impact_pack import * +from .modules.impact.detectors import * +from .modules.impact.pipe import * +from .modules.impact.logics import * +from .modules.impact.util_nodes import * +from .modules.impact.segs_nodes import * +from .modules.impact.special_samplers import * +from .modules.impact.hf_nodes import * +from .modules.impact.bridge_nodes import * +from .modules.impact.hook_nodes import * +from .modules.impact.animatediff_nodes import * +from .modules.impact.segs_upscaler import * + +import threading + + +threading.Thread(target=impact.wildcards.wildcard_load).start() + + +NODE_CLASS_MAPPINGS = { + "SAMLoader": SAMLoader, + "CLIPSegDetectorProvider": CLIPSegDetectorProvider, + "ONNXDetectorProvider": ONNXDetectorProvider, + + "BitwiseAndMaskForEach": BitwiseAndMaskForEach, + "SubtractMaskForEach": SubtractMaskForEach, + + "DetailerForEach": DetailerForEach, + "DetailerForEachDebug": DetailerForEachTest, + "DetailerForEachPipe": DetailerForEachPipe, + "DetailerForEachDebugPipe": DetailerForEachTestPipe, + "DetailerForEachPipeForAnimateDiff": DetailerForEachPipeForAnimateDiff, + + "SAMDetectorCombined": SAMDetectorCombined, + "SAMDetectorSegmented": SAMDetectorSegmented, + + "FaceDetailer": FaceDetailer, + "FaceDetailerPipe": FaceDetailerPipe, + "MaskDetailerPipe": MaskDetailerPipe, + + "ToDetailerPipe": ToDetailerPipe, + "ToDetailerPipeSDXL": ToDetailerPipeSDXL, + "FromDetailerPipe": FromDetailerPipe, + "FromDetailerPipe_v2": FromDetailerPipe_v2, + "FromDetailerPipeSDXL": FromDetailerPipe_SDXL, + "ToBasicPipe": ToBasicPipe, + "FromBasicPipe": FromBasicPipe, + "FromBasicPipe_v2": FromBasicPipe_v2, + "BasicPipeToDetailerPipe": BasicPipeToDetailerPipe, + "BasicPipeToDetailerPipeSDXL": BasicPipeToDetailerPipeSDXL, + "DetailerPipeToBasicPipe": DetailerPipeToBasicPipe, + "EditBasicPipe": EditBasicPipe, + "EditDetailerPipe": EditDetailerPipe, + "EditDetailerPipeSDXL": EditDetailerPipeSDXL, + + "LatentPixelScale": LatentPixelScale, + "PixelKSampleUpscalerProvider": PixelKSampleUpscalerProvider, + "PixelKSampleUpscalerProviderPipe": PixelKSampleUpscalerProviderPipe, + "IterativeLatentUpscale": IterativeLatentUpscale, + "IterativeImageUpscale": IterativeImageUpscale, + "PixelTiledKSampleUpscalerProvider": PixelTiledKSampleUpscalerProvider, + "PixelTiledKSampleUpscalerProviderPipe": PixelTiledKSampleUpscalerProviderPipe, + "TwoSamplersForMaskUpscalerProvider": TwoSamplersForMaskUpscalerProvider, + "TwoSamplersForMaskUpscalerProviderPipe": TwoSamplersForMaskUpscalerProviderPipe, + + "PixelKSampleHookCombine": PixelKSampleHookCombine, + "DenoiseScheduleHookProvider": DenoiseScheduleHookProvider, + "StepsScheduleHookProvider": StepsScheduleHookProvider, + "CfgScheduleHookProvider": CfgScheduleHookProvider, + "NoiseInjectionHookProvider": NoiseInjectionHookProvider, + "UnsamplerHookProvider": UnsamplerHookProvider, + 
"CoreMLDetailerHookProvider": CoreMLDetailerHookProvider, + "PreviewDetailerHookProvider": PreviewDetailerHookProvider, + + "DetailerHookCombine": DetailerHookCombine, + "NoiseInjectionDetailerHookProvider": NoiseInjectionDetailerHookProvider, + "UnsamplerDetailerHookProvider": UnsamplerDetailerHookProvider, + "DenoiseSchedulerDetailerHookProvider": DenoiseSchedulerDetailerHookProvider, + "SEGSOrderedFilterDetailerHookProvider": SEGSOrderedFilterDetailerHookProvider, + "SEGSRangeFilterDetailerHookProvider": SEGSRangeFilterDetailerHookProvider, + "SEGSLabelFilterDetailerHookProvider": SEGSLabelFilterDetailerHookProvider, + "VariationNoiseDetailerHookProvider": VariationNoiseDetailerHookProvider, + # "CustomNoiseDetailerHookProvider": CustomNoiseDetailerHookProvider, + + "BitwiseAndMask": BitwiseAndMask, + "SubtractMask": SubtractMask, + "AddMask": AddMask, + "ImpactSegsAndMask": SegsBitwiseAndMask, + "ImpactSegsAndMaskForEach": SegsBitwiseAndMaskForEach, + "EmptySegs": EmptySEGS, + + "MediaPipeFaceMeshToSEGS": MediaPipeFaceMeshToSEGS, + "MaskToSEGS": MaskToSEGS, + "MaskToSEGS_for_AnimateDiff": MaskToSEGS_for_AnimateDiff, + "ToBinaryMask": ToBinaryMask, + "MasksToMaskList": MasksToMaskList, + "MaskListToMaskBatch": MaskListToMaskBatch, + "ImageListToImageBatch": ImageListToImageBatch, + "SetDefaultImageForSEGS": DefaultImageForSEGS, + "RemoveImageFromSEGS": RemoveImageFromSEGS, + + "BboxDetectorSEGS": BboxDetectorForEach, + "SegmDetectorSEGS": SegmDetectorForEach, + "ONNXDetectorSEGS": BboxDetectorForEach, + "ImpactSimpleDetectorSEGS_for_AD": SimpleDetectorForAnimateDiff, + "ImpactSimpleDetectorSEGS": SimpleDetectorForEach, + "ImpactSimpleDetectorSEGSPipe": SimpleDetectorForEachPipe, + "ImpactControlNetApplySEGS": ControlNetApplySEGS, + "ImpactControlNetApplyAdvancedSEGS": ControlNetApplyAdvancedSEGS, + "ImpactControlNetClearSEGS": ControlNetClearSEGS, + "ImpactIPAdapterApplySEGS": IPAdapterApplySEGS, + + "ImpactDecomposeSEGS": DecomposeSEGS, + "ImpactAssembleSEGS": AssembleSEGS, + "ImpactFrom_SEG_ELT": From_SEG_ELT, + "ImpactEdit_SEG_ELT": Edit_SEG_ELT, + "ImpactDilate_Mask_SEG_ELT": Dilate_SEG_ELT, + "ImpactDilateMask": DilateMask, + "ImpactGaussianBlurMask": GaussianBlurMask, + "ImpactDilateMaskInSEGS": DilateMaskInSEGS, + "ImpactGaussianBlurMaskInSEGS": GaussianBlurMaskInSEGS, + "ImpactScaleBy_BBOX_SEG_ELT": SEG_ELT_BBOX_ScaleBy, + "ImpactFrom_SEG_ELT_bbox": From_SEG_ELT_bbox, + "ImpactFrom_SEG_ELT_crop_region": From_SEG_ELT_crop_region, + "ImpactCount_Elts_in_SEGS": Count_Elts_in_SEGS, + + "BboxDetectorCombined_v2": BboxDetectorCombined, + "SegmDetectorCombined_v2": SegmDetectorCombined, + "SegsToCombinedMask": SegsToCombinedMask, + + "KSamplerProvider": KSamplerProvider, + "TwoSamplersForMask": TwoSamplersForMask, + "TiledKSamplerProvider": TiledKSamplerProvider, + + "KSamplerAdvancedProvider": KSamplerAdvancedProvider, + "TwoAdvancedSamplersForMask": TwoAdvancedSamplersForMask, + + "ImpactNegativeConditioningPlaceholder": NegativeConditioningPlaceholder, + + "PreviewBridge": PreviewBridge, + "PreviewBridgeLatent": PreviewBridgeLatent, + "ImageSender": ImageSender, + "ImageReceiver": ImageReceiver, + "LatentSender": LatentSender, + "LatentReceiver": LatentReceiver, + "ImageMaskSwitch": ImageMaskSwitch, + "LatentSwitch": GeneralSwitch, + "SEGSSwitch": GeneralSwitch, + "ImpactSwitch": GeneralSwitch, + "ImpactInversedSwitch": GeneralInversedSwitch, + + "ImpactWildcardProcessor": ImpactWildcardProcessor, + "ImpactWildcardEncode": ImpactWildcardEncode, + + "SEGSUpscaler": SEGSUpscaler, + 
"SEGSUpscalerPipe": SEGSUpscalerPipe, + "SEGSDetailer": SEGSDetailer, + "SEGSPaste": SEGSPaste, + "SEGSPreview": SEGSPreview, + "SEGSPreviewCNet": SEGSPreviewCNet, + "SEGSToImageList": SEGSToImageList, + "ImpactSEGSToMaskList": SEGSToMaskList, + "ImpactSEGSToMaskBatch": SEGSToMaskBatch, + "ImpactSEGSConcat": SEGSConcat, + "ImpactSEGSPicker": SEGSPicker, + "ImpactMakeTileSEGS": MakeTileSEGS, + + "SEGSDetailerForAnimateDiff": SEGSDetailerForAnimateDiff, + + "ImpactKSamplerBasicPipe": KSamplerBasicPipe, + "ImpactKSamplerAdvancedBasicPipe": KSamplerAdvancedBasicPipe, + + "ReencodeLatent": ReencodeLatent, + "ReencodeLatentPipe": ReencodeLatentPipe, + + "ImpactImageBatchToImageList": ImageBatchToImageList, + "ImpactMakeImageList": MakeImageList, + "ImpactMakeImageBatch": MakeImageBatch, + + "RegionalSampler": RegionalSampler, + "RegionalSamplerAdvanced": RegionalSamplerAdvanced, + "CombineRegionalPrompts": CombineRegionalPrompts, + "RegionalPrompt": RegionalPrompt, + + "ImpactCombineConditionings": CombineConditionings, + "ImpactConcatConditionings": ConcatConditionings, + + "ImpactSEGSLabelAssign": SEGSLabelAssign, + "ImpactSEGSLabelFilter": SEGSLabelFilter, + "ImpactSEGSRangeFilter": SEGSRangeFilter, + "ImpactSEGSOrderedFilter": SEGSOrderedFilter, + + "ImpactCompare": ImpactCompare, + "ImpactConditionalBranch": ImpactConditionalBranch, + "ImpactConditionalBranchSelMode": ImpactConditionalBranchSelMode, + "ImpactIfNone": ImpactIfNone, + "ImpactConvertDataType": ImpactConvertDataType, + "ImpactLogicalOperators": ImpactLogicalOperators, + "ImpactInt": ImpactInt, + "ImpactFloat": ImpactFloat, + "ImpactValueSender": ImpactValueSender, + "ImpactValueReceiver": ImpactValueReceiver, + "ImpactImageInfo": ImpactImageInfo, + "ImpactLatentInfo": ImpactLatentInfo, + "ImpactMinMax": ImpactMinMax, + "ImpactNeg": ImpactNeg, + "ImpactConditionalStopIteration": ImpactConditionalStopIteration, + "ImpactStringSelector": StringSelector, + "StringListToString": StringListToString, + "WildcardPromptFromString": WildcardPromptFromString, + "ImpactExecutionOrderController": ImpactExecutionOrderController, + + "RemoveNoiseMask": RemoveNoiseMask, + + "ImpactLogger": ImpactLogger, + "ImpactDummyInput": ImpactDummyInput, + + "ImpactQueueTrigger": ImpactQueueTrigger, + "ImpactQueueTriggerCountdown": ImpactQueueTriggerCountdown, + "ImpactSetWidgetValue": ImpactSetWidgetValue, + "ImpactNodeSetMuteState": ImpactNodeSetMuteState, + "ImpactControlBridge": ImpactControlBridge, + "ImpactIsNotEmptySEGS": ImpactNotEmptySEGS, + "ImpactSleep": ImpactSleep, + "ImpactRemoteBoolean": ImpactRemoteBoolean, + "ImpactRemoteInt": ImpactRemoteInt, + + "ImpactHFTransformersClassifierProvider": HF_TransformersClassifierProvider, + "ImpactSEGSClassify": SEGS_Classify, + + "ImpactSchedulerAdapter": ImpactSchedulerAdapter, + "GITSSchedulerFuncProvider": GITSSchedulerFuncProvider +} + + +NODE_DISPLAY_NAME_MAPPINGS = { + "SAMLoader": "SAMLoader (Impact)", + + "BboxDetectorSEGS": "BBOX Detector (SEGS)", + "SegmDetectorSEGS": "SEGM Detector (SEGS)", + "ONNXDetectorSEGS": "ONNX Detector (SEGS/legacy) - use BBOXDetector", + "ImpactSimpleDetectorSEGS_for_AD": "Simple Detector for AnimateDiff (SEGS)", + "ImpactSimpleDetectorSEGS": "Simple Detector (SEGS)", + "ImpactSimpleDetectorSEGSPipe": "Simple Detector (SEGS/pipe)", + "ImpactControlNetApplySEGS": "ControlNetApply (SEGS)", + "ImpactControlNetApplyAdvancedSEGS": "ControlNetApplyAdvanced (SEGS)", + "ImpactIPAdapterApplySEGS": "IPAdapterApply (SEGS)", + + "BboxDetectorCombined_v2": "BBOX Detector 
(combined)", + "SegmDetectorCombined_v2": "SEGM Detector (combined)", + "SegsToCombinedMask": "SEGS to MASK (combined)", + "MediaPipeFaceMeshToSEGS": "MediaPipe FaceMesh to SEGS", + "MaskToSEGS": "MASK to SEGS", + "MaskToSEGS_for_AnimateDiff": "MASK to SEGS for AnimateDiff", + "BitwiseAndMaskForEach": "Pixelwise(SEGS & SEGS)", + "SubtractMaskForEach": "Pixelwise(SEGS - SEGS)", + "ImpactSegsAndMask": "Pixelwise(SEGS & MASK)", + "ImpactSegsAndMaskForEach": "Pixelwise(SEGS & MASKS ForEach)", + "BitwiseAndMask": "Pixelwise(MASK & MASK)", + "SubtractMask": "Pixelwise(MASK - MASK)", + "AddMask": "Pixelwise(MASK + MASK)", + "DetailerForEach": "Detailer (SEGS)", + "DetailerForEachPipe": "Detailer (SEGS/pipe)", + "DetailerForEachDebug": "DetailerDebug (SEGS)", + "DetailerForEachDebugPipe": "DetailerDebug (SEGS/pipe)", + "SEGSDetailerForAnimateDiff": "SEGSDetailer For AnimateDiff (SEGS/pipe)", + "DetailerForEachPipeForAnimateDiff": "Detailer For AnimateDiff (SEGS/pipe)", + "SEGSUpscaler": "Upscaler (SEGS)", + "SEGSUpscalerPipe": "Upscaler (SEGS/pipe)", + + "SAMDetectorCombined": "SAMDetector (combined)", + "SAMDetectorSegmented": "SAMDetector (segmented)", + "FaceDetailerPipe": "FaceDetailer (pipe)", + "MaskDetailerPipe": "MaskDetailer (pipe)", + + "FromDetailerPipeSDXL": "FromDetailer (SDXL/pipe)", + "BasicPipeToDetailerPipeSDXL": "BasicPipe -> DetailerPipe (SDXL)", + "EditDetailerPipeSDXL": "Edit DetailerPipe (SDXL)", + + "BasicPipeToDetailerPipe": "BasicPipe -> DetailerPipe", + "DetailerPipeToBasicPipe": "DetailerPipe -> BasicPipe", + "EditBasicPipe": "Edit BasicPipe", + "EditDetailerPipe": "Edit DetailerPipe", + + "LatentPixelScale": "Latent Scale (on Pixel Space)", + "IterativeLatentUpscale": "Iterative Upscale (Latent/on Pixel Space)", + "IterativeImageUpscale": "Iterative Upscale (Image)", + + "TwoSamplersForMaskUpscalerProvider": "TwoSamplersForMask Upscaler Provider", + "TwoSamplersForMaskUpscalerProviderPipe": "TwoSamplersForMask Upscaler Provider (pipe)", + + "ReencodeLatent": "Reencode Latent", + "ReencodeLatentPipe": "Reencode Latent (pipe)", + + "ImpactKSamplerBasicPipe": "KSampler (pipe)", + "ImpactKSamplerAdvancedBasicPipe": "KSampler (Advanced/pipe)", + "ImpactSEGSLabelAssign": "SEGS Assign (label)", + "ImpactSEGSLabelFilter": "SEGS Filter (label)", + "ImpactSEGSRangeFilter": "SEGS Filter (range)", + "ImpactSEGSOrderedFilter": "SEGS Filter (ordered)", + "ImpactSEGSConcat": "SEGS Concat", + "ImpactSEGSToMaskList": "SEGS to Mask List", + "ImpactSEGSToMaskBatch": "SEGS to Mask Batch", + "ImpactSEGSPicker": "Picker (SEGS)", + "ImpactMakeTileSEGS": "Make Tile SEGS", + + "ImpactDecomposeSEGS": "Decompose (SEGS)", + "ImpactAssembleSEGS": "Assemble (SEGS)", + "ImpactFrom_SEG_ELT": "From SEG_ELT", + "ImpactEdit_SEG_ELT": "Edit SEG_ELT", + "ImpactFrom_SEG_ELT_bbox": "From SEG_ELT bbox", + "ImpactFrom_SEG_ELT_crop_region": "From SEG_ELT crop_region", + "ImpactDilate_Mask_SEG_ELT": "Dilate Mask (SEG_ELT)", + "ImpactScaleBy_BBOX_SEG_ELT": "ScaleBy BBOX (SEG_ELT)", + "ImpactCount_Elts_in_SEGS": "Count Elts in SEGS", + "ImpactDilateMask": "Dilate Mask", + "ImpactGaussianBlurMask": "Gaussian Blur Mask", + "ImpactDilateMaskInSEGS": "Dilate Mask (SEGS)", + "ImpactGaussianBlurMaskInSEGS": "Gaussian Blur Mask (SEGS)", + + "PreviewBridge": "Preview Bridge (Image)", + "PreviewBridgeLatent": "Preview Bridge (Latent)", + "ImageSender": "Image Sender", + "ImageReceiver": "Image Receiver", + "ImageMaskSwitch": "Switch (images, mask)", + "ImpactSwitch": "Switch (Any)", + "ImpactInversedSwitch": "Inversed 
Switch (Any)", + "ImpactExecutionOrderController": "Execution Order Controller", + + "MasksToMaskList": "Masks to Mask List", + "MaskListToMaskBatch": "Mask List to Masks", + "ImpactImageBatchToImageList": "Image batch to Image List", + "ImageListToImageBatch": "Image List to Image Batch", + "ImpactMakeImageList": "Make Image List", + "ImpactMakeImageBatch": "Make Image Batch", + "ImpactStringSelector": "String Selector", + "StringListToString": "String List to String", + "WildcardPromptFromString": "Wildcard Prompt from String", + "ImpactIsNotEmptySEGS": "SEGS isn't Empty", + "SetDefaultImageForSEGS": "Set Default Image for SEGS", + "RemoveImageFromSEGS": "Remove Image from SEGS", + + "RemoveNoiseMask": "Remove Noise Mask", + + "ImpactCombineConditionings": "Combine Conditionings", + "ImpactConcatConditionings": "Concat Conditionings", + + "ImpactQueueTrigger": "Queue Trigger", + "ImpactQueueTriggerCountdown": "Queue Trigger (Countdown)", + "ImpactSetWidgetValue": "Set Widget Value", + "ImpactNodeSetMuteState": "Set Mute State", + "ImpactControlBridge": "Control Bridge", + "ImpactSleep": "Sleep", + "ImpactRemoteBoolean": "Remote Boolean (on prompt)", + "ImpactRemoteInt": "Remote Int (on prompt)", + + "ImpactHFTransformersClassifierProvider": "HF Transformers Classifier Provider", + "ImpactSEGSClassify": "SEGS Classify", + + "LatentSwitch": "Switch (latent/legacy)", + "SEGSSwitch": "Switch (SEGS/legacy)", + + "SEGSPreviewCNet": "SEGSPreview (CNET Image)", + + "ImpactSchedulerAdapter": "Impact Scheduler Adapter", + "GITSSchedulerFuncProvider": "GITSScheduler Func Provider", + "ImpactNegativeConditioningPlaceholder": "Negative Cond Placeholder" +} + +if not impact.config.get_config()['mmdet_skip']: + from impact.mmdet_nodes import * + import impact.legacy_nodes + NODE_CLASS_MAPPINGS.update({ + "MMDetDetectorProvider": MMDetDetectorProvider, + "MMDetLoader": impact.legacy_nodes.MMDetLoader, + "MaskPainter": impact.legacy_nodes.MaskPainter, + "SegsMaskCombine": impact.legacy_nodes.SegsMaskCombine, + "BboxDetectorForEach": impact.legacy_nodes.BboxDetectorForEach, + "SegmDetectorForEach": impact.legacy_nodes.SegmDetectorForEach, + "BboxDetectorCombined": impact.legacy_nodes.BboxDetectorCombined, + "SegmDetectorCombined": impact.legacy_nodes.SegmDetectorCombined, + }) + + NODE_DISPLAY_NAME_MAPPINGS.update({ + "MaskPainter": "MaskPainter (Deprecated)", + "MMDetLoader": "MMDetLoader (Legacy)", + "SegsMaskCombine": "SegsMaskCombine (Legacy)", + "BboxDetectorForEach": "BboxDetectorForEach (Legacy)", + "SegmDetectorForEach": "SegmDetectorForEach (Legacy)", + "BboxDetectorCombined": "BboxDetectorCombined (Legacy)", + "SegmDetectorCombined": "SegmDetectorCombined (Legacy)", + }) + +try: + import impact.subpack_nodes + + NODE_CLASS_MAPPINGS.update(impact.subpack_nodes.NODE_CLASS_MAPPINGS) + NODE_DISPLAY_NAME_MAPPINGS.update(impact.subpack_nodes.NODE_DISPLAY_NAME_MAPPINGS) +except Exception as e: + print("### ComfyUI-Impact-Pack: (IMPORT FAILED) Subpack\n") + print(" The module at the `custom_nodes/ComfyUI-Impact-Pack/impact_subpack` path appears to be incomplete.") + print(" Recommended to delete the path and restart ComfyUI.") + print(" If the issue persists, please report it to https://github.com/ltdrdata/ComfyUI-Impact-Pack/issues.") + print("\n---------------------------------") + traceback.print_exc() + print("---------------------------------\n") + +# NOTE: Inject directly into EXTENSION_WEB_DIRS instead of WEB_DIRECTORY +# Provide the js path fixed as ComfyUI-Impact-Pack instead of the path name, 
+
+# WEB_DIRECTORY = "js"  -- deprecated method
+nodes.EXTENSION_WEB_DIRS["ComfyUI-Impact-Pack"] = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'js')
+
+
+__all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS']
+
+
+try:
+    import cm_global
+    cm_global.register_extension('ComfyUI-Impact-Pack',
+                                 {'version': config.version_code,
+                                  'name': 'Impact Pack',
+                                  'nodes': set(NODE_CLASS_MAPPINGS.keys()),
+                                  'description': 'This extension provides inpainting functionality based on the detector and detailer, along with convenient workflow features like wildcards and logic.', })
+except Exception:
+    pass
diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/__pycache__/__init__.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d5c96830f3cdc2fdbfbe0ee60ca4e4ed0e63f1df
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/__pycache__/__init__.cpython-310.pyc differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/__pycache__/install.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/__pycache__/install.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d6b816c33747171110273484ceb8bcd7de72b97e
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/__pycache__/install.cpython-310.pyc differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/custom_wildcards/put_wildcards_here b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/custom_wildcards/put_wildcards_here
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/disable.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/disable.py
new file mode 100644
index 0000000000000000000000000000000000000000..2d62417c14128faca59ced13bbd83d5cd8708da3
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/disable.py
@@ -0,0 +1,38 @@
+import os
+import sys
+import time
+import platform
+import shutil
+import subprocess
+
+comfy_path = '../..'
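+# NOTE: the relative '../..' above only points at the ComfyUI root when this
+# script is executed from inside custom_nodes/ComfyUI-Impact-Pack. A hypothetical,
+# more robust alternative would resolve against this file's own location:
+# comfy_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))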
+ +def rmtree(path): + retry_count = 3 + + while True: + try: + retry_count -= 1 + + if platform.system() == "Windows": + subprocess.check_call(['attrib', '-R', path + '\\*', '/S']) + + shutil.rmtree(path) + + return True + + except Exception as ex: + print(f"ex: {ex}") + time.sleep(3) + + if retry_count < 0: + raise ex + + print(f"Uninstall retry({retry_count})") + +js_dest_path = os.path.join(comfy_path, "web", "extensions", "impact-pack") + +if os.path.exists(js_dest_path): + rmtree(js_dest_path) + + diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact-pack.ini b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact-pack.ini new file mode 100644 index 0000000000000000000000000000000000000000..903f91c83ac773b5fdc8e65099b0c007a52f9073 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact-pack.ini @@ -0,0 +1,8 @@ +[default] +dependency_version = 22 +mmdet_skip = True +sam_editor_cpu = False +sam_editor_model = sam_vit_b_01ec64.pth +custom_wildcards = /data/comfy/ComfyUI/custom_nodes/ComfyUI-Impact-Pack/custom_wildcards +disable_gpu_opencv = True + diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/.gitignore b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..392e184851e95dded25b3623de11b524e9ae41b2 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/.gitignore @@ -0,0 +1,5 @@ +__pycache__ +*.ini +wildcards/** +.vscode/ +.idea/ \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/LICENSE b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..0ad25db4bd1d86c452db3f9602ccdbe172438f52 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/LICENSE @@ -0,0 +1,661 @@ + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. 
+ + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. 
+ + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 
+ + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU Affero General Public License as published
+    by the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU Affero General Public License for more details.
+
+    You should have received a copy of the GNU Affero General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source.  For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code.  There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+<https://www.gnu.org/licenses/>.
diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/README.md b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..4145c7229b2516da67a6d8ba5a41f5f162402c62
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/README.md
@@ -0,0 +1,18 @@
+# ComfyUI-Impact-Subpack
+This extension serves as a complement to the Impact Pack, offering features that are not deemed suitable for inclusion by default in the ComfyUI Impact Pack.
+
+The nodes in this repository cannot be used standalone and depend on [ComfyUI-Impact-Pack](https://github.com/ltdrdata/ComfyUI-Impact-Pack).
+
+## Nodes
+* UltralyticsDetectorProvider - This node provides an object detector based on Ultralytics.
+  * By using this Detector Provider, you can replace the existing mmdet-based detector.
+
+
+## Credits
+
+ComfyUI/[ComfyUI](https://github.com/comfyanonymous/ComfyUI) - A powerful and modular stable diffusion GUI.
+
+Bing-su/[adetailer](https://github.com/Bing-su/adetailer/) - This repository provides an object detection model and features based on Ultralytics.
+
+huggingface/Bingsu/[adetailer](https://huggingface.co/Bingsu/adetailer/tree/main) - This repository offers various models based on Ultralytics.
+* You can download other models supported by the UltralyticsDetectorProvider from here.
diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/impact/__pycache__/subcore.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/impact/__pycache__/subcore.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7c4811aa710dc8d1dc3c19073786177eb419f29a
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/impact/__pycache__/subcore.cpython-310.pyc differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/impact/__pycache__/subpack_nodes.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/impact/__pycache__/subpack_nodes.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e6713cc7aeb41d2955b0edac6000e943ed6b421a
Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/impact/__pycache__/subpack_nodes.cpython-310.pyc differ
diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/impact/subcore.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/impact/subcore.py
new file mode 100644
index 0000000000000000000000000000000000000000..23d33518c6a87c9f21df364c08a7b8b2dfc66eb6
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/impact/subcore.py
@@ -0,0 +1,239 @@
+from pathlib import Path
+from PIL import Image
+
+import impact.core as core
+import cv2
+import numpy as np
+from torchvision.transforms.functional import to_pil_image
+import torch
+
+orig_torch_load = torch.load
+
+try:
+    from ultralytics import YOLO
+except Exception as e:
+    print(e)
+    print(f"\n!!!!!\n\n[ComfyUI-Impact-Subpack] If this error occurs, please check the following link:\n\thttps://github.com/ltdrdata/ComfyUI-Impact-Pack/blob/Main/troubleshooting/TROUBLESHOOTING.md\n\n!!!!!\n")
+    raise e
+
+# HOTFIX: https://github.com/ltdrdata/ComfyUI-Impact-Pack/issues/754
+# Importing YOLO breaks the original torch.load, so restore it afterwards.
+torch.load = orig_torch_load
+
+def load_yolo(model_path: str):
+    try:
+        return YOLO(model_path)
+    except ModuleNotFoundError:
+        # 
https://github.com/ultralytics/ultralytics/issues/3856 + YOLO("yolov8n.pt") + return YOLO(model_path) + + +def inference_bbox( + model, + image: Image.Image, + confidence: float = 0.3, + device: str = "", +): + pred = model(image, conf=confidence, device=device) + + bboxes = pred[0].boxes.xyxy.cpu().numpy() + cv2_image = np.array(image) + if len(cv2_image.shape) == 3: + cv2_image = cv2_image[:, :, ::-1].copy() # Convert RGB to BGR for cv2 processing + else: + # Handle the grayscale image here + # For example, you might want to convert it to a 3-channel grayscale image for consistency: + cv2_image = cv2.cvtColor(cv2_image, cv2.COLOR_GRAY2BGR) + cv2_gray = cv2.cvtColor(cv2_image, cv2.COLOR_BGR2GRAY) + + segms = [] + for x0, y0, x1, y1 in bboxes: + cv2_mask = np.zeros(cv2_gray.shape, np.uint8) + cv2.rectangle(cv2_mask, (int(x0), int(y0)), (int(x1), int(y1)), 255, -1) + cv2_mask_bool = cv2_mask.astype(bool) + segms.append(cv2_mask_bool) + + n, m = bboxes.shape + if n == 0: + return [[], [], [], []] + + results = [[], [], [], []] + for i in range(len(bboxes)): + results[0].append(pred[0].names[int(pred[0].boxes[i].cls.item())]) + results[1].append(bboxes[i]) + results[2].append(segms[i]) + results[3].append(pred[0].boxes[i].conf.cpu().numpy()) + + return results + + +def inference_segm( + model, + image: Image.Image, + confidence: float = 0.3, + device: str = "", +): + pred = model(image, conf=confidence, device=device) + + bboxes = pred[0].boxes.xyxy.cpu().numpy() + n, m = bboxes.shape + if n == 0: + return [[], [], [], []] + + # NOTE: masks.data will be None when n == 0 + segms = pred[0].masks.data.cpu().numpy() + + h_segms = segms.shape[1] + w_segms = segms.shape[2] + h_orig = image.size[1] + w_orig = image.size[0] + ratio_segms = h_segms / w_segms + ratio_orig = h_orig / w_orig + + if ratio_segms == ratio_orig: + h_gap = 0 + w_gap = 0 + elif ratio_segms > ratio_orig: + h_gap = int((ratio_segms - ratio_orig) * h_segms) + w_gap = 0 + else: + h_gap = 0 + ratio_segms = w_segms / h_segms + ratio_orig = w_orig / h_orig + w_gap = int((ratio_segms - ratio_orig) * w_segms) + + results = [[], [], [], []] + for i in range(len(bboxes)): + results[0].append(pred[0].names[int(pred[0].boxes[i].cls.item())]) + results[1].append(bboxes[i]) + + mask = torch.from_numpy(segms[i]) + mask = mask[h_gap:mask.shape[0] - h_gap, w_gap:mask.shape[1] - w_gap] + + scaled_mask = torch.nn.functional.interpolate(mask.unsqueeze(0).unsqueeze(0), size=(image.size[1], image.size[0]), + mode='bilinear', align_corners=False) + scaled_mask = scaled_mask.squeeze().squeeze() + + results[2].append(scaled_mask.numpy()) + results[3].append(pred[0].boxes[i].conf.cpu().numpy()) + + return results + + +class UltraBBoxDetector: + bbox_model = None + + def __init__(self, bbox_model): + self.bbox_model = bbox_model + + def detect(self, image, threshold, dilation, crop_factor, drop_size=1, detailer_hook=None): + drop_size = max(drop_size, 1) + detected_results = inference_bbox(self.bbox_model, core.tensor2pil(image), threshold) + segmasks = core.create_segmasks(detected_results) + + if dilation > 0: + segmasks = core.dilate_masks(segmasks, dilation) + + items = [] + h = image.shape[1] + w = image.shape[2] + + for x, label in zip(segmasks, detected_results[0]): + item_bbox = x[0] + item_mask = x[1] + + y1, x1, y2, x2 = item_bbox + + if x2 - x1 > drop_size and y2 - y1 > drop_size: # minimum dimension must be (2,2) to avoid squeeze issue + crop_region = core.make_crop_region(w, h, item_bbox, crop_factor) + + if detailer_hook is not None: + 
crop_region = detailer_hook.post_crop_region(w, h, item_bbox, crop_region) + + cropped_image = core.crop_image(image, crop_region) + cropped_mask = core.crop_ndarray2(item_mask, crop_region) + confidence = x[2] + # bbox_size = (item_bbox[2]-item_bbox[0],item_bbox[3]-item_bbox[1]) # (w,h) + + item = core.SEG(cropped_image, cropped_mask, confidence, crop_region, item_bbox, label, None) + + items.append(item) + + shape = image.shape[1], image.shape[2] + segs = shape, items + + if detailer_hook is not None and hasattr(detailer_hook, "post_detection"): + segs = detailer_hook.post_detection(segs) + + return segs + + def detect_combined(self, image, threshold, dilation): + detected_results = inference_bbox(self.bbox_model, core.tensor2pil(image), threshold) + segmasks = core.create_segmasks(detected_results) + if dilation > 0: + segmasks = core.dilate_masks(segmasks, dilation) + + return core.combine_masks(segmasks) + + def setAux(self, x): + pass + + +class UltraSegmDetector: + bbox_model = None + + def __init__(self, bbox_model): + self.bbox_model = bbox_model + + def detect(self, image, threshold, dilation, crop_factor, drop_size=1, detailer_hook=None): + drop_size = max(drop_size, 1) + detected_results = inference_segm(self.bbox_model, core.tensor2pil(image), threshold) + segmasks = core.create_segmasks(detected_results) + + if dilation > 0: + segmasks = core.dilate_masks(segmasks, dilation) + + items = [] + h = image.shape[1] + w = image.shape[2] + + for x, label in zip(segmasks, detected_results[0]): + item_bbox = x[0] + item_mask = x[1] + + y1, x1, y2, x2 = item_bbox + + if x2 - x1 > drop_size and y2 - y1 > drop_size: # minimum dimension must be (2,2) to avoid squeeze issue + crop_region = core.make_crop_region(w, h, item_bbox, crop_factor) + + if detailer_hook is not None: + crop_region = detailer_hook.post_crop_region(w, h, item_bbox, crop_region) + + cropped_image = core.crop_image(image, crop_region) + cropped_mask = core.crop_ndarray2(item_mask, crop_region) + confidence = x[2] + # bbox_size = (item_bbox[2]-item_bbox[0],item_bbox[3]-item_bbox[1]) # (w,h) + + item = core.SEG(cropped_image, cropped_mask, confidence, crop_region, item_bbox, label, None) + + items.append(item) + + shape = image.shape[1], image.shape[2] + segs = shape, items + + if detailer_hook is not None and hasattr(detailer_hook, "post_detection"): + segs = detailer_hook.post_detection(segs) + + return segs + + def detect_combined(self, image, threshold, dilation): + detected_results = inference_segm(self.bbox_model, core.tensor2pil(image), threshold) + segmasks = core.create_segmasks(detected_results) + if dilation > 0: + segmasks = core.dilate_masks(segmasks, dilation) + + return core.combine_masks(segmasks) + + def setAux(self, x): + pass \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/impact/subpack_nodes.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/impact/subpack_nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..47ada442ae27dcf76030e7c3745336def3952d5f --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/impact/subpack_nodes.py @@ -0,0 +1,45 @@ +import os +import folder_paths +import impact.core as core +import impact.subcore as subcore +from impact.utils import add_folder_path_and_extensions + +version_code = 23 + +print(f"### Loading: ComfyUI-Impact-Pack (Subpack: V0.7)") + +model_path = folder_paths.models_dir +add_folder_path_and_extensions("ultralytics_bbox", 
[os.path.join(model_path, "ultralytics", "bbox")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("ultralytics_segm", [os.path.join(model_path, "ultralytics", "segm")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("ultralytics", [os.path.join(model_path, "ultralytics")], folder_paths.supported_pt_extensions) + + +class UltralyticsDetectorProvider: + @classmethod + def INPUT_TYPES(s): + bboxs = ["bbox/"+x for x in folder_paths.get_filename_list("ultralytics_bbox")] + segms = ["segm/"+x for x in folder_paths.get_filename_list("ultralytics_segm")] + return {"required": {"model_name": (bboxs + segms, )}} + RETURN_TYPES = ("BBOX_DETECTOR", "SEGM_DETECTOR") + FUNCTION = "doit" + + CATEGORY = "ImpactPack" + + def doit(self, model_name): + model_path = folder_paths.get_full_path("ultralytics", model_name) + model = subcore.load_yolo(model_path) + + if model_name.startswith("bbox"): + return subcore.UltraBBoxDetector(model), core.NO_SEGM_DETECTOR() + else: + return subcore.UltraBBoxDetector(model), subcore.UltraSegmDetector(model) + + +NODE_CLASS_MAPPINGS = { + "UltralyticsDetectorProvider": UltralyticsDetectorProvider +} + + +NODE_DISPLAY_NAME_MAPPINGS = { + +} diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/install.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/install.py new file mode 100644 index 0000000000000000000000000000000000000000..5a55af77c796cd21c6291e3737a83b51abf96a47 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/install.py @@ -0,0 +1,39 @@ +import os +import sys +from torchvision.datasets.utils import download_url + +subpack_path = os.path.join(os.path.dirname(__file__)) + +comfy_path = os.environ.get('COMFYUI_PATH') +if comfy_path is None: + print(f"\n[bold yellow]WARN: The `COMFYUI_PATH` environment variable is not set. Assuming `{os.path.dirname(__file__)}/../../../` as the ComfyUI path.[/bold yellow]", file=sys.stderr) + comfy_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..')) + +sys.path.append(comfy_path) + +model_path = os.environ.get('COMFYUI_MODEL_PATH') +if model_path is None: + print(f"\n[bold yellow]WARN: The `COMFYUI_MODEL_PATH` environment variable is not set. 
Assuming `{os.path.abspath(os.path.join(comfy_path, 'models'))}` as the model path.[/bold yellow]", file=sys.stderr)
+    model_path = os.path.abspath(os.path.join(comfy_path, 'models'))
+
+ultralytics_bbox_path = os.path.join(model_path, "ultralytics", "bbox")
+ultralytics_segm_path = os.path.join(model_path, "ultralytics", "segm")
+
+if not os.path.exists(os.path.join(subpack_path, '..', '..', 'skip_download_model')):
+    if not os.path.exists(ultralytics_bbox_path):
+        os.makedirs(ultralytics_bbox_path)
+
+    if not os.path.exists(ultralytics_segm_path):
+        os.makedirs(ultralytics_segm_path)
+
+    if not os.path.exists(os.path.join(ultralytics_bbox_path, "face_yolov8m.pt")):
+        download_url("https://huggingface.co/Bingsu/adetailer/resolve/main/face_yolov8m.pt",
+                     ultralytics_bbox_path)
+
+    if not os.path.exists(os.path.join(ultralytics_bbox_path, "hand_yolov8s.pt")):
+        download_url("https://huggingface.co/Bingsu/adetailer/resolve/main/hand_yolov8s.pt",
+                     ultralytics_bbox_path)
+
+    if not os.path.exists(os.path.join(ultralytics_segm_path, "person_yolov8m-seg.pt")):
+        download_url("https://huggingface.co/Bingsu/adetailer/resolve/main/person_yolov8m-seg.pt",
+                     ultralytics_segm_path)
diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/requirements.txt b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..c666410ca77847cd3eadc531398b53adb9d0066c
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/impact_subpack/requirements.txt
@@ -0,0 +1,2 @@
+matplotlib
+ultralytics!=8.2.87
diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/install.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/install.py
new file mode 100644
index 0000000000000000000000000000000000000000..f7f2a730b38422715b79e3ad98743bacef7ab6a4
--- /dev/null
+++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/install.py
@@ -0,0 +1,299 @@
+import os
+import shutil
+import sys
+import subprocess
+import threading
+import locale
+import traceback
+import re
+
+
+if sys.argv[0] == 'install.py':
+    sys.path.append('.')  # for portable version
+
+
+impact_path = os.path.join(os.path.dirname(__file__), "modules")
+old_subpack_path = os.path.join(os.path.dirname(__file__), "subpack")
+subpack_path = os.path.join(os.path.dirname(__file__), "impact_subpack")
+subpack_repo = "https://github.com/ltdrdata/ComfyUI-Impact-Subpack"
+
+
+comfy_path = os.environ.get('COMFYUI_PATH')
+if comfy_path is None:
+    print(f"\n[bold yellow]WARN: The `COMFYUI_PATH` environment variable is not set. Assuming `{os.path.dirname(__file__)}/../../` as the ComfyUI path.[/bold yellow]", file=sys.stderr)
+    comfy_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
+
+model_path = os.environ.get('COMFYUI_MODEL_PATH')
+if model_path is None:
+    try:
+        import folder_paths
+        model_path = folder_paths.models_dir
+    except Exception:
+        pass
+
+    if model_path is None:
+        model_path = os.path.abspath(os.path.join(comfy_path, 'models'))
+        print(f"\n[bold yellow]WARN: The `COMFYUI_MODEL_PATH` environment variable is not set. Assuming `{model_path}` as the model path.[/bold yellow]", file=sys.stderr)
+
+
+sys.path.append(impact_path)
+sys.path.append(comfy_path)
+
+
+# ---
+def handle_stream(stream, is_stdout):
+    stream.reconfigure(encoding=locale.getpreferredencoding(), errors='replace')
+
+    for msg in stream:
+        if is_stdout:
+            print(msg, end="", file=sys.stdout)
+        else:
+            print(msg, end="", file=sys.stderr)
+
+
+def process_wrap(cmd_str, cwd=None, handler=None, env=None):
+    print(f"[Impact Pack] EXECUTE: {cmd_str} in '{cwd}'")
+    process = subprocess.Popen(cmd_str, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, text=True, bufsize=1)
+
+    if handler is None:
+        handler = handle_stream
+
+    stdout_thread = threading.Thread(target=handler, args=(process.stdout, True))
+    stderr_thread = threading.Thread(target=handler, args=(process.stderr, False))
+
+    stdout_thread.start()
+    stderr_thread.start()
+
+    stdout_thread.join()
+    stderr_thread.join()
+
+    return process.wait()
+# ---
+
+
+pip_list = None
+
+
+def get_installed_packages():
+    global pip_list
+
+    if pip_list is None:
+        try:
+            result = subprocess.check_output([sys.executable, '-m', 'pip', 'list'], universal_newlines=True)
+            pip_list = set([line.split()[0].lower() for line in result.split('\n') if line.strip()])
+        except subprocess.CalledProcessError:
+            print("[ComfyUI-Impact-Pack] Failed to retrieve the information of installed pip packages.")
+            return set()
+
+    return pip_list
+
+
+def is_installed(name):
+    name = name.strip()
+    pattern = r'([^<>!=]+)([<>!=]=?)'
+    match = re.search(pattern, name)
+
+    if match:
+        name = match.group(1)
+
+    result = name.lower() in get_installed_packages()
+    return result
+
+
+def is_requirements_installed(file_path):
+    print(f"req_path: {file_path}")
+    if os.path.exists(file_path):
+        with open(file_path, 'r') as file:
+            lines = file.readlines()
+            for line in lines:
+                if not is_installed(line):
+                    return False
+
+    return True
+
+try:
+    import platform
+    from torchvision.datasets.utils import download_url
+    import impact.config
+
+
+    print("### ComfyUI-Impact-Pack: Check dependencies")
+
+    if "python_embeded" in sys.executable or "python_embedded" in sys.executable:
+        pip_install = [sys.executable, '-s', '-m', 'pip', 'install']
+        pip_upgrade = [sys.executable, '-s', '-m', 'pip', 'install', '-U']
+        mim_install = [sys.executable, '-s', '-m', 'mim', 'install']
+    else:
+        pip_install = [sys.executable, '-m', 'pip', 'install']
+        pip_upgrade = [sys.executable, '-m', 'pip', 'install', '-U']
+        mim_install = [sys.executable, '-m', 'mim', 'install']
+
+
+    def ensure_subpack():
+        import git
+        if os.path.exists(subpack_path):
+            try:
+                repo = git.Repo(subpack_path)
+                repo.remotes.origin.pull()
+            except:
+                traceback.print_exc()
+                if platform.system() == 'Windows':
+                    print(f"[ComfyUI-Impact-Pack] Please shut down ComfyUI, remove '{subpack_path}', and restart ComfyUI.")
+                else:
+                    shutil.rmtree(subpack_path)
+                    git.Repo.clone_from(subpack_repo, subpack_path)
+        else:
+            git.Repo.clone_from(subpack_repo, subpack_path)
+
+        if os.path.exists(old_subpack_path):
+            shutil.rmtree(old_subpack_path)
+
+
+    def ensure_pip_packages_first():
+        subpack_req = os.path.join(subpack_path, "requirements.txt")
+        if os.path.exists(subpack_req) and not is_requirements_installed(subpack_req):
+            process_wrap(pip_install + ['-r', 'requirements.txt'], cwd=subpack_path)
+
+        if not impact.config.get_config()['mmdet_skip']:
+            process_wrap(pip_install + ['openmim'])
+
+            try:
+                import pycocotools
+            except Exception:
+                if platform.system() not in ["Windows"] 
or platform.machine() not in ["AMD64", "x86_64"]: + print(f"Your system is {platform.system()}; !! You need to install 'libpython3-dev' for this step. !!") + + process_wrap(pip_install + ['pycocotools']) + else: + pycocotools = { + (3, 8): "https://github.com/Bing-su/dddetailer/releases/download/pycocotools/pycocotools-2.0.6-cp38-cp38-win_amd64.whl", + (3, 9): "https://github.com/Bing-su/dddetailer/releases/download/pycocotools/pycocotools-2.0.6-cp39-cp39-win_amd64.whl", + (3, 10): "https://github.com/Bing-su/dddetailer/releases/download/pycocotools/pycocotools-2.0.6-cp310-cp310-win_amd64.whl", + (3, 11): "https://github.com/Bing-su/dddetailer/releases/download/pycocotools/pycocotools-2.0.6-cp311-cp311-win_amd64.whl", + } + + version = sys.version_info[:2] + url = pycocotools[version] + process_wrap(pip_install + [url]) + + + def ensure_pip_packages_last(): + my_path = os.path.dirname(__file__) + requirements_path = os.path.join(my_path, "requirements.txt") + + if not is_requirements_installed(requirements_path): + process_wrap(pip_install + ['-r', requirements_path]) + + # fallback + try: + import segment_anything + from skimage.measure import label, regionprops + import piexif + except Exception: + process_wrap(pip_install + ['-r', requirements_path]) + + # !! cv2 importing test must be very last !! + try: + from cv2 import setNumThreads + except Exception: + try: + is_open_cv_installed = False + + # upgrade if opencv is installed already + if is_installed('opencv-python'): + process_wrap(pip_upgrade + ['opencv-python']) + is_open_cv_installed = True + + if is_installed('opencv-python-headless'): + process_wrap(pip_upgrade + ['opencv-python-headless']) + is_open_cv_installed = True + + if is_installed('opencv-contrib-python'): + process_wrap(pip_upgrade + ['opencv-contrib-python']) + is_open_cv_installed = True + + if is_installed('opencv-contrib-python-headless'): + process_wrap(pip_upgrade + ['opencv-contrib-python-headless']) + is_open_cv_installed = True + + # if opencv is not installed install `opencv-python-headless` + if not is_open_cv_installed: + process_wrap(pip_install + ['opencv-python-headless']) + except: + print(f"[ERROR] ComfyUI-Impact-Pack: failed to install 'opencv-python'. Please, install manually.") + + def ensure_mmdet_package(): + try: + import mmcv + import mmdet + from mmdet.evaluation import get_classes + except Exception: + process_wrap(pip_install + ['opendatalab==0.0.9']) + process_wrap(pip_install + ['-U', 'openmim']) + process_wrap(mim_install + ['mmcv>=2.0.0rc4, <2.1.0']) + process_wrap(mim_install + ['mmdet==3.0.0']) + process_wrap(mim_install + ['mmengine==0.7.4']) + + + def install(): + subpack_install_script = os.path.join(subpack_path, "install.py") + + print(f"### ComfyUI-Impact-Pack: Updating subpack") + try: + import git + except Exception: + if not is_installed('GitPython'): + process_wrap(pip_install + ['GitPython']) + + ensure_subpack() # The installation of the subpack must take place before ensure_pip. cv2 triggers a permission error. 
+ + new_env = os.environ.copy() + new_env["COMFYUI_PATH"] = comfy_path + new_env["COMFYUI_MODEL_PATH"] = model_path + + if os.path.exists(subpack_install_script): + process_wrap([sys.executable, 'install.py'], cwd=subpack_path, env=new_env) + if not is_requirements_installed(os.path.join(subpack_path, 'requirements.txt')): + process_wrap(pip_install + ['-r', 'requirements.txt'], cwd=subpack_path) + else: + print(f"### ComfyUI-Impact-Pack: (Install Failed) Subpack\nFile not found: `{subpack_install_script}`") + + ensure_pip_packages_first() + + if not impact.config.get_config()['mmdet_skip']: + ensure_mmdet_package() + + ensure_pip_packages_last() + + # Download model + print("### ComfyUI-Impact-Pack: Check basic models") + bbox_path = os.path.join(model_path, "mmdets", "bbox") + sam_path = os.path.join(model_path, "sams") + onnx_path = os.path.join(model_path, "onnx") + + if not os.path.exists(os.path.join(os.path.dirname(__file__), '..', 'skip_download_model')): + if not os.path.exists(bbox_path): + os.makedirs(bbox_path) + + if not impact.config.get_config()['mmdet_skip']: + if not os.path.exists(os.path.join(bbox_path, "mmdet_anime-face_yolov3.pth")): + download_url("https://huggingface.co/dustysys/ddetailer/resolve/main/mmdet/bbox/mmdet_anime-face_yolov3.pth", bbox_path) + + if not os.path.exists(os.path.join(bbox_path, "mmdet_anime-face_yolov3.py")): + download_url("https://raw.githubusercontent.com/Bing-su/dddetailer/master/config/mmdet_anime-face_yolov3.py", bbox_path) + + if not os.path.exists(os.path.join(sam_path, "sam_vit_b_01ec64.pth")): + download_url("https://dl.fbaipublicfiles.com/segment_anything/sam_vit_b_01ec64.pth", sam_path) + + if not os.path.exists(onnx_path): + print(f"### ComfyUI-Impact-Pack: onnx model directory created ({onnx_path})") + os.mkdir(onnx_path) + + impact.config.write_config() + + + install() + +except Exception as e: + print("[ERROR] ComfyUI-Impact-Pack: Dependency installation has failed. 
Please install manually.") + traceback.print_exc() diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/comboBoolMigration.js b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/comboBoolMigration.js new file mode 100644 index 0000000000000000000000000000000000000000..fa5521682b0e2454148b940ef77806c690cebc87 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/comboBoolMigration.js @@ -0,0 +1,35 @@ +import { ComfyApp, app } from "../../scripts/app.js"; + +let conflict_check = undefined; + +app.registerExtension({ + name: "Comfy.impact.comboBoolMigration", + + nodeCreated(node, app) { + for(let i in node.widgets) { + let widget = node.widgets[i]; + + if(conflict_check == undefined) { + conflict_check = !!app.extensions.find((ext) => ext.name === "Comfy.comboBoolMigration"); + } + + if(conflict_check) + return; + + if(widget.type == "toggle") { + let value = widget.value; + + var v = Object.getOwnPropertyDescriptor(widget, 'value'); + if(!v) { + Object.defineProperty(widget, "value", { + set: (value) => { + delete widget.value; + widget.value = value == true || value == widget.options.on; + }, + get: () => { return value; } + }); + } + } + } + } +}); diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/common.js b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/common.js new file mode 100644 index 0000000000000000000000000000000000000000..b60f6c3159dec577e481a2552b695cc5c2b35341 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/common.js @@ -0,0 +1,95 @@ +import { api } from "../../scripts/api.js"; +import { app } from "../../scripts/app.js"; + +let original_show = app.ui.dialog.show; + +function dialog_show_wrapper(html) { + if (typeof html === "string") { + if(html.includes("IMPACT-PACK-SIGNAL: STOP CONTROL BRIDGE")) { + return; + } + + this.textElement.innerHTML = html; + } else { + this.textElement.replaceChildren(html); + } + this.element.style.display = "flex"; +} + +app.ui.dialog.show = dialog_show_wrapper; + + +function nodeFeedbackHandler(event) { + let nodes = app.graph._nodes_by_id; + let node = nodes[event.detail.node_id]; + if(node) { + const w = node.widgets.find((w) => event.detail.widget_name === w.name); + if(w) { + w.value = event.detail.value; + } + } +} + +api.addEventListener("impact-node-feedback", nodeFeedbackHandler); + + +function setMuteState(event) { + let nodes = app.graph._nodes_by_id; + let node = nodes[event.detail.node_id]; + if(node) { + if(event.detail.is_active) + node.mode = 0; + else + node.mode = 2; + } +} + +api.addEventListener("impact-node-mute-state", setMuteState); + + +async function bridgeContinue(event) { + let nodes = app.graph._nodes_by_id; + let node = nodes[event.detail.node_id]; + if(node) { + const mutes = new Set(event.detail.mutes); + const actives = new Set(event.detail.actives); + const bypasses = new Set(event.detail.bypasses); + + for(let i in app.graph._nodes_by_id) { + let this_node = app.graph._nodes_by_id[i]; + if(mutes.has(i)) { + this_node.mode = 2; + } + else if(actives.has(i)) { + this_node.mode = 0; + } + else if(bypasses.has(i)) { + this_node.mode = 4; + } + } + + await app.queuePrompt(0, 1); + } +} + +api.addEventListener("impact-bridge-continue", bridgeContinue); + + +function addQueue(event) { + app.queuePrompt(0, 1); +} + +api.addEventListener("impact-add-queue", addQueue); + + +function refreshPreview(event) { + let node_id = event.detail.node_id; + let item = event.detail.item; + let img = new Image(); + img.src = 
`/view?filename=${item.filename}&subfolder=${item.subfolder}&type=${item.type}&no-cache=${Date.now()}`; + let node = app.graph._nodes_by_id[node_id]; + if(node) + node.imgs = [img]; +} + +api.addEventListener("impact-preview", refreshPreview); diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/impact-image-util.js b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/impact-image-util.js new file mode 100644 index 0000000000000000000000000000000000000000..4985a9e5ca0bc19be6159449c87eceeee8ef2192 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/impact-image-util.js @@ -0,0 +1,229 @@ +import { ComfyApp, app } from "../../scripts/app.js"; +import { api } from "../../scripts/api.js"; + +function load_image(str) { + let base64String = canvas.toDataURL('image/png'); + let img = new Image(); + img.src = base64String; +} + +function getFileItem(baseType, path) { + try { + let pathType = baseType; + + if (path.endsWith("[output]")) { + pathType = "output"; + path = path.slice(0, -9); + } else if (path.endsWith("[input]")) { + pathType = "input"; + path = path.slice(0, -8); + } else if (path.endsWith("[temp]")) { + pathType = "temp"; + path = path.slice(0, -7); + } + + const subfolder = path.substring(0, path.lastIndexOf('/')); + const filename = path.substring(path.lastIndexOf('/') + 1); + + return { + filename: filename, + subfolder: subfolder, + type: pathType + }; + } + catch(exception) { + return null; + } +} + +async function loadImageFromUrl(image, node_id, v, need_to_load) { + let item = getFileItem('temp', v); + + if(item) { + let params = `?node_id=${node_id}&filename=${item.filename}&type=${item.type}&subfolder=${item.subfolder}`; + + let res = await api.fetchApi('/impact/set/pb_id_image'+params, { cache: "no-store" }); + if(res.status == 200) { + let pb_id = await res.text(); + if(need_to_load) {; + image.src = api.apiURL(`/view?filename=${item.filename}&type=${item.type}&subfolder=${item.subfolder}`); + } + return pb_id; + } + else { + return `$${node_id}-0`; + } + } + else { + return `$${node_id}-0`; + } +} + +async function loadImageFromId(image, v) { + let res = await api.fetchApi('/impact/get/pb_id_image?id='+v, { cache: "no-store" }); + if(res.status == 200) { + let item = await res.json(); + image.src = api.apiURL(`/view?filename=${item.filename}&type=${item.type}&subfolder=${item.subfolder}`); + return true; + } + + return false; +} + +app.registerExtension({ + name: "Comfy.Impact.img", + + nodeCreated(node, app) { + if(node.comfyClass == "PreviewBridge" || node.comfyClass == "PreviewBridgeLatent") { + let w = node.widgets.find(obj => obj.name === 'image'); + node._imgs = [new Image()]; + node.imageIndex = 0; + + Object.defineProperty(w, 'value', { + async set(v) { + if(w._lock) + return; + + const stackTrace = new Error().stack; + if(stackTrace.includes('presetText.js')) + return; + + var image = new Image(); + if(v && v.constructor == String && v.startsWith('$')) { + // from node feedback + let need_to_load = node._imgs[0].src == ''; + if(await loadImageFromId(image, v, need_to_load)) { + w._value = v; + if(node._imgs[0].src == '') { + node._imgs = [image]; + } + } + else { + w._value = `$${node.id}-0`; + } + } + else { + // from clipspace + w._lock = true; + w._value = await loadImageFromUrl(image, node.id, v, false); + w._lock = false; + } + }, + get() { + if(w._value == undefined) { + w._value = `$${node.id}-0`; + } + return w._value; + } + }); + + Object.defineProperty(node, 'imgs', { + set(v) { + const stackTrace = new Error().stack; + if(v && 
v.length == 0) + return; + else if(stackTrace.includes('pasteFromClipspace')) { + let sp = new URLSearchParams(v[0].src.split("?")[1]); + let str = ""; + if(sp.get('subfolder')) { + str += sp.get('subfolder') + '/'; + } + str += `${sp.get("filename")} [${sp.get("type")}]`; + + w.value = str; + } + + node._imgs = v; + }, + get() { + return node._imgs; + } + }); + } + + if(node.comfyClass == "ImageReceiver") { + let path_widget = node.widgets.find(obj => obj.name === 'image'); + let w = node.widgets.find(obj => obj.name === 'image_data'); + let stw_widget = node.widgets.find(obj => obj.name === 'save_to_workflow'); + w._value = ""; + + Object.defineProperty(w, 'value', { + set(v) { + if(v != '[IMAGE DATA]') + w._value = v; + }, + get() { + const stackTrace = new Error().stack; + if(!stackTrace.includes('draw') && !stackTrace.includes('graphToPrompt') && stackTrace.includes('app.js')) { + return "[IMAGE DATA]"; + } + else { + if(stw_widget.value) + return w._value; + else + return ""; + } + } + }); + + let set_img_act = (v) => { + node._img = v; + var canvas = document.createElement('canvas'); + canvas.width = v[0].width; + canvas.height = v[0].height; + + var context = canvas.getContext('2d'); + context.drawImage(v[0], 0, 0, v[0].width, v[0].height); + + var base64Image = canvas.toDataURL('image/png'); + w.value = base64Image; + }; + + Object.defineProperty(node, 'imgs', { + set(v) { + if (v && !v[0].complete) { + let orig_onload = v[0].onload; + v[0].onload = function(v2) { + if(orig_onload) + orig_onload(); + set_img_act(v); + }; + } + else { + set_img_act(v); + } + }, + get() { + if(this._img == undefined && w.value != '') { + this._img = [new Image()]; + if(stw_widget.value && w.value != '[IMAGE DATA]') + this._img[0].src = w.value; + } + else if(this._img == undefined && path_widget.value) { + let image = new Image(); + image.src = path_widget.value; + + try { + let item = getFileItem('temp', path_widget.value); + let params = `?filename=${item.filename}&type=${item.type}&subfolder=${item.subfolder}`; + + let res = api.fetchApi('/view/validate'+params, { cache: "no-store" }).then(response => response); + if(res.status == 200) { + image.src = api.apiURL('/view'+params); + } + + this._img = [new Image()]; // placeholder + image.onload = function(v) { + set_img_act([image]); + }; + } + catch { + + } + } + return this._img; + } + }); + } + } +}) diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/impact-pack.js b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/impact-pack.js new file mode 100644 index 0000000000000000000000000000000000000000..5dbc547ed02e194ee6a1cf564b02eb4d898fa073 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/impact-pack.js @@ -0,0 +1,815 @@ +import { ComfyApp, app } from "../../scripts/app.js"; +import { ComfyDialog, $el } from "../../scripts/ui.js"; +import { api } from "../../scripts/api.js"; + +let wildcards_list = []; +async function load_wildcards() { + let res = await api.fetchApi('/impact/wildcards/list'); + let data = await res.json(); + wildcards_list = data.data; +} + +load_wildcards(); + +export function get_wildcards_list() { + return wildcards_list; +} + +// temporary implementation (copying from https://github.com/pythongosssss/ComfyUI-WD14-Tagger) +// I think this should be included into master!! 
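// Editor's note: before the badge class below, a minimal sketch of the event
// contract it consumes. `fakeStatus` is a hypothetical console helper, not part
// of the pack; it assumes only that `api` (imported above) is an EventTarget.
function fakeStatus(node_id, text, progress) {
    // The listener destructures {node, progress, text}; `node` may be omitted,
    // in which case it falls back to app.runningNodeId. The progress fill is
    // only kept while `text` is non-empty.
    api.dispatchEvent(new CustomEvent("impact/update_status", {
        detail: { node: node_id, text: text, progress: progress }
    }));
}
// e.g. fakeStatus(12, "upscale 2/4", 0.5) paints a half-filled status pill
// above node 12, and fakeStatus(12, null, null) clears it again.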
+class ImpactProgressBadge { + constructor() { + if (!window.__progress_badge__) { + window.__progress_badge__ = Symbol("__impact_progress_badge__"); + } + this.symbol = window.__progress_badge__; + } + + getState(node) { + return node[this.symbol] || {}; + } + + setState(node, state) { + node[this.symbol] = state; + app.canvas.setDirty(true); + } + + addStatusHandler(nodeType) { + if (nodeType[this.symbol]?.statusTagHandler) { + return; + } + if (!nodeType[this.symbol]) { + nodeType[this.symbol] = {}; + } + nodeType[this.symbol] = { + statusTagHandler: true, + }; + + api.addEventListener("impact/update_status", ({ detail }) => { + let { node, progress, text } = detail; + const n = app.graph.getNodeById(+(node || app.runningNodeId)); + if (!n) return; + const state = this.getState(n); + state.status = Object.assign(state.status || {}, { progress: text ? progress : null, text: text || null }); + this.setState(n, state); + }); + + const self = this; + const onDrawForeground = nodeType.prototype.onDrawForeground; + nodeType.prototype.onDrawForeground = function (ctx) { + const r = onDrawForeground?.apply?.(this, arguments); + const state = self.getState(this); + if (!state?.status?.text) { + return r; + } + + const { fgColor, bgColor, text, progress, progressColor } = { ...state.status }; + + ctx.save(); + ctx.font = "12px sans-serif"; + const sz = ctx.measureText(text); + ctx.fillStyle = bgColor || "dodgerblue"; + ctx.beginPath(); + ctx.roundRect(0, -LiteGraph.NODE_TITLE_HEIGHT - 20, sz.width + 12, 20, 5); + ctx.fill(); + + if (progress) { + ctx.fillStyle = progressColor || "green"; + ctx.beginPath(); + ctx.roundRect(0, -LiteGraph.NODE_TITLE_HEIGHT - 20, (sz.width + 12) * progress, 20, 5); + ctx.fill(); + } + + ctx.fillStyle = fgColor || "#fff"; + ctx.fillText(text, 6, -LiteGraph.NODE_TITLE_HEIGHT - 6); + ctx.restore(); + return r; + }; + } +} + +const input_tracking = {}; +const input_dirty = {}; +const output_tracking = {}; + +function progressExecuteHandler(event) { + if(event.detail.output.aux){ + const id = event.detail.node; + if(input_tracking.hasOwnProperty(id)) { + if(input_tracking.hasOwnProperty(id) && input_tracking[id][0] != event.detail.output.aux[0]) { + input_dirty[id] = true; + } + else{ + + } + } + + input_tracking[id] = event.detail.output.aux; + } +} + +function imgSendHandler(event) { + if(event.detail.images.length > 0){ + let data = event.detail.images[0]; + let filename = `${data.filename} [${data.type}]`; + + let nodes = app.graph._nodes; + for(let i in nodes) { + if(nodes[i].type == 'ImageReceiver') { + let is_linked = false; + + if(nodes[i].widgets[1].type == 'converted-widget') { + for(let j in nodes[i].inputs) { + let input = nodes[i].inputs[j]; + if(input.name === 'link_id') { + if(input.link) { + let src_node = app.graph._nodes_by_id[app.graph.links[input.link].origin_id]; + if(src_node.type == 'ImpactInt' || src_node.type == 'PrimitiveNode') { + is_linked = true; + } + } + break; + } + } + } + else if(nodes[i].widgets[1].value == event.detail.link_id) { + is_linked = true; + } + + if(is_linked) { + if(data.subfolder) + nodes[i].widgets[0].value = `${data.subfolder}/${data.filename} [${data.type}]`; + else + nodes[i].widgets[0].value = `${data.filename} [${data.type}]`; + + let img = new Image(); + img.onload = (event) => { + nodes[i].imgs = [img]; + nodes[i].size[1] = Math.max(200, nodes[i].size[1]); + app.canvas.setDirty(true); + }; + img.src = `/view?filename=${data.filename}&type=${data.type}&subfolder=${data.subfolder}`+app.getPreviewFormatParam(); + } + } 
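// Editor's note: the branch above reduces to one formatting rule for the
// receiver's path widget. A standalone restatement (hypothetical helper,
// shown here only as a sketch of the rule):
function receiverPathValue(data) {
    // "<subfolder>/<filename> [<type>]", dropping the subfolder segment when empty.
    return data.subfolder
        ? `${data.subfolder}/${data.filename} [${data.type}]`
        : `${data.filename} [${data.type}]`;
}
// e.g. receiverPathValue({filename: "it_00001_.png", subfolder: "", type: "output"})
// returns "it_00001_.png [output]".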
+ } + } +} + + +function latentSendHandler(event) { + if(event.detail.images.length > 0){ + let data = event.detail.images[0]; + let filename = `${data.filename} [${data.type}]`; + + let nodes = app.graph._nodes; + for(let i in nodes) { + if(nodes[i].type == 'LatentReceiver') { + if(nodes[i].widgets[1].value == event.detail.link_id) { + if(data.subfolder) + nodes[i].widgets[0].value = `${data.subfolder}/${data.filename} [${data.type}]`; + else + nodes[i].widgets[0].value = `${data.filename} [${data.type}]`; + + let img = new Image(); + img.src = `/view?filename=${data.filename}&type=${data.type}&subfolder=${data.subfolder}`+app.getPreviewFormatParam(); + nodes[i].imgs = [img]; + nodes[i].size[1] = Math.max(200, nodes[i].size[1]); + } + } + } + } +} + + +function valueSendHandler(event) { + let nodes = app.graph._nodes; + for(let i in nodes) { + if(nodes[i].type == 'ImpactValueReceiver') { + if(nodes[i].widgets[2].value == event.detail.link_id) { + nodes[i].widgets[1].value = event.detail.value; + + let typ = typeof event.detail.value; + if(typ == 'string') { + nodes[i].widgets[0].value = "STRING"; + } + else if(typ == "boolean") { + nodes[i].widgets[0].value = "BOOLEAN"; + } + else if(typ != "number") { + nodes[i].widgets[0].value = typeof event.detail.value; + } + else if(Number.isInteger(event.detail.value)) { + nodes[i].widgets[0].value = "INT"; + } + else { + nodes[i].widgets[0].value = "FLOAT"; + } + } + } + } +} + + +const impactProgressBadge = new ImpactProgressBadge(); + +api.addEventListener("stop-iteration", () => { + document.getElementById("autoQueueCheckbox").checked = false; +}); +api.addEventListener("value-send", valueSendHandler); +api.addEventListener("img-send", imgSendHandler); +api.addEventListener("latent-send", latentSendHandler); +api.addEventListener("executed", progressExecuteHandler); + +app.registerExtension({ + name: "Comfy.Impack", + loadedGraphNode(node, app) { + if (node.comfyClass == "MaskPainter") { + input_dirty[node.id + ""] = true; + } + }, + + async beforeRegisterNodeDef(nodeType, nodeData, app) { + if (nodeData.name == "IterativeLatentUpscale" || nodeData.name == "IterativeImageUpscale" + || nodeData.name == "RegionalSampler"|| nodeData.name == "RegionalSamplerAdvanced") { + impactProgressBadge.addStatusHandler(nodeType); + } + + if(nodeData.name == "ImpactControlBridge") { + const onConnectionsChange = nodeType.prototype.onConnectionsChange; + nodeType.prototype.onConnectionsChange = function (type, index, connected, link_info) { + if(!link_info || this.inputs[0].type != '*') + return; + + // assign type + let slot_type = '*'; + + if(type == 2) { + slot_type = link_info.type; + } + else { + const node = app.graph.getNodeById(link_info.origin_id); + slot_type = node.outputs[link_info.origin_slot].type; + } + + this.inputs[0].type = slot_type; + this.outputs[0].type = slot_type; + this.outputs[0].label = slot_type; + } + } + + if(nodeData.name == "ImpactConditionalBranch" || nodeData.name == "ImpactConditionalBranchSelMode") { + const onConnectionsChange = nodeType.prototype.onConnectionsChange; + nodeType.prototype.onConnectionsChange = function (type, index, connected, link_info) { + if(!link_info || this.inputs[0].type != '*') + return; + + if(index >= 2) + return; + + // assign type + let slot_type = '*'; + + if(type == 2) { + slot_type = link_info.type; + } + else { + const node = app.graph.getNodeById(link_info.origin_id); + slot_type = node.outputs[link_info.origin_slot].type; + } + + this.inputs[0].type = slot_type; + this.inputs[1].type = 
slot_type; + this.outputs[0].type = slot_type; + this.outputs[0].label = slot_type; + } + } + + if(nodeData.name == "ImpactCompare") { + const onConnectionsChange = nodeType.prototype.onConnectionsChange; + nodeType.prototype.onConnectionsChange = function (type, index, connected, link_info) { + if(!link_info || this.inputs[0].type != '*' || type == 2) + return; + + // assign type + const node = app.graph.getNodeById(link_info.origin_id); + let slot_type = node.outputs[link_info.origin_slot].type; + + this.inputs[0].type = slot_type; + this.inputs[1].type = slot_type; + } + } + + if(nodeData.name === 'ImpactInversedSwitch') { + nodeData.output = ['*']; + nodeData.output_is_list = [false]; + nodeData.output_name = ['output1']; + + const onConnectionsChange = nodeType.prototype.onConnectionsChange; + nodeType.prototype.onConnectionsChange = function (type, index, connected, link_info) { + if(!link_info) + return; + + if(type == 2) { + // connect output + if(connected){ + if(app.graph._nodes_by_id[link_info.target_id].type == 'Reroute') { + app.graph._nodes_by_id[link_info.target_id].disconnectInput(link_info.target_slot); + } + + if(this.outputs[0].type == '*'){ + if(link_info.type == '*') { + app.graph._nodes_by_id[link_info.target_id].disconnectInput(link_info.target_slot); + } + else { + // propagate type + this.outputs[0].type = link_info.type; + this.outputs[0].name = link_info.type; + + for(let i in this.inputs) { + if(this.inputs[i].name != 'select') + this.inputs[i].type = link_info.type; + } + } + } + } + } + else { + if(app.graph._nodes_by_id[link_info.origin_id].type == 'Reroute') + this.disconnectInput(link_info.target_slot); + + // connect input + if(this.inputs[0].type == '*'){ + const node = app.graph.getNodeById(link_info.origin_id); + let origin_type = node.outputs[link_info.origin_slot].type; + + if(origin_type == '*') { + this.disconnectInput(link_info.target_slot); + return; + } + + for(let i in this.inputs) { + if(this.inputs[i].name != 'select') + this.inputs[i].type = origin_type; + } + + this.outputs[0].type = origin_type; + this.outputs[0].name = origin_type; + } + + return; + } + + if (!connected && this.outputs.length > 1) { + const stackTrace = new Error().stack; + + if( + !stackTrace.includes('LGraphNode.prototype.connect') && // for touch device + !stackTrace.includes('LGraphNode.connect') && // for mouse device + !stackTrace.includes('loadGraphData')) { + if(this.outputs[link_info.origin_slot].links.length == 0) + this.removeOutput(link_info.origin_slot); + } + } + + let slot_i = 1; + for (let i = 0; i < this.outputs.length; i++) { + this.outputs[i].name = `output${slot_i}` + slot_i++; + } + + let last_slot = this.outputs[this.outputs.length - 1]; + if (last_slot.slot_index == link_info.origin_slot) { + this.addOutput(`output${slot_i}`, this.outputs[0].type); + } + + let select_slot = this.inputs.find(x => x.name == "select"); + if(this.widgets) { + this.widgets[0].options.max = select_slot?this.outputs.length-1:this.outputs.length; + this.widgets[0].value = Math.min(this.widgets[0].value, this.widgets[0].options.max); + if(this.widgets[0].options.max > 0 && this.widgets[0].value == 0) + this.widgets[0].value = 1; + } + } + } + + if (nodeData.name === 'ImpactMakeImageList' || nodeData.name === 'ImpactMakeImageBatch' || + nodeData.name === 'CombineRegionalPrompts' || + nodeData.name === 'ImpactCombineConditionings' || nodeData.name === 'ImpactConcatConditionings' || + nodeData.name === 'ImpactSEGSConcat' || + nodeData.name === 'ImpactSwitch' || nodeData.name === 
'LatentSwitch' || nodeData.name == 'SEGSSwitch') { + var input_name = "input"; + + switch(nodeData.name) { + case 'ImpactMakeImageList': + case 'ImpactMakeImageBatch': + input_name = "image"; + break; + + case 'ImpactSEGSConcat': + input_name = "segs"; + break; + + case 'CombineRegionalPrompts': + input_name = "regional_prompts"; + break; + + case 'ImpactCombineConditionings': + case 'ImpactConcatConditionings': + input_name = "conditioning"; + break; + + case 'LatentSwitch': + input_name = "input"; + break; + + case 'SEGSSwitch': + input_name = "input"; + break; + + case 'ImpactSwitch': + input_name = "input"; + } + + const onConnectionsChange = nodeType.prototype.onConnectionsChange; + nodeType.prototype.onConnectionsChange = function (type, index, connected, link_info) { + if(!link_info) + return; + + if(type == 2) { + // connect output + if(connected && index == 0){ + if(nodeData.name == 'ImpactSwitch' && app.graph._nodes_by_id[link_info.target_id]?.type == 'Reroute') { + app.graph._nodes_by_id[link_info.target_id].disconnectInput(link_info.target_slot); + } + + if(this.outputs[0].type == '*'){ + if(link_info.type == '*') { + app.graph._nodes_by_id[link_info.target_id].disconnectInput(link_info.target_slot); + } + else { + // propagate type + this.outputs[0].type = link_info.type; + this.outputs[0].label = link_info.type; + this.outputs[0].name = link_info.type; + + for(let i in this.inputs) { + let input_i = this.inputs[i]; + if(input_i.name != 'select' && input_i.name != 'sel_mode') + input_i.type = link_info.type; + } + } + } + } + + return; + } + else { + if(nodeData.name == 'ImpactSwitch' && app.graph._nodes_by_id[link_info.origin_id].type == 'Reroute') + this.disconnectInput(link_info.target_slot); + + // connect input + if(this.inputs[index].name == 'select' || this.inputs[index].name == 'sel_mode') + return; + + if(this.inputs[0].type == '*'){ + const node = app.graph.getNodeById(link_info.origin_id); + let origin_type = node.outputs[link_info.origin_slot].type; + + if(origin_type == '*') { + this.disconnectInput(link_info.target_slot); + return; + } + + for(let i in this.inputs) { + let input_i = this.inputs[i]; + if(input_i.name != 'select' && input_i.name != 'sel_mode') + input_i.type = origin_type; + } + + this.outputs[0].type = origin_type; + this.outputs[0].label = origin_type; + this.outputs[0].name = origin_type; + } + } + + let select_slot = this.inputs.find(x => x.name == "select"); + let mode_slot = this.inputs.find(x => x.name == "sel_mode"); + + let converted_count = 0; + converted_count += select_slot?1:0; + converted_count += mode_slot?1:0; + + if (!connected && (this.inputs.length > 1+converted_count)) { + const stackTrace = new Error().stack; + + if( + !stackTrace.includes('LGraphNode.prototype.connect') && // for touch device + !stackTrace.includes('LGraphNode.connect') && // for mouse device + !stackTrace.includes('loadGraphData') && + this.inputs[index].name != 'select') { + this.removeInput(index); + } + } + + let slot_i = 1; + for (let i = 0; i < this.inputs.length; i++) { + let input_i = this.inputs[i]; + if(input_i.name != 'select'&& input_i.name != 'sel_mode') { + input_i.name = `${input_name}${slot_i}` + slot_i++; + } + } + + let last_slot = this.inputs[this.inputs.length - 1]; + if ( + (last_slot.name == 'select' && last_slot.name != 'sel_mode' && this.inputs[this.inputs.length - 2].link != undefined) + || (last_slot.name != 'select' && last_slot.name != 'sel_mode' && last_slot.link != undefined)) { + this.addInput(`${input_name}${slot_i}`, 
this.outputs[0].type); + } + + if(this.widgets) { + this.widgets[0].options.max = select_slot?this.inputs.length-1:this.inputs.length; + this.widgets[0].value = Math.min(this.widgets[0].value, this.widgets[0].options.max); + if(this.widgets[0].options.max > 0 && this.widgets[0].value == 0) + this.widgets[0].value = 1; + } + } + } + }, + + nodeCreated(node, app) { + if(node.comfyClass == "MaskPainter") { + node.addWidget("button", "Edit mask", null, () => { + ComfyApp.copyToClipspace(node); + ComfyApp.clipspace_return_node = node; + ComfyApp.open_maskeditor(); + }); + } + + switch(node.comfyClass) { + case "ToDetailerPipe": + case "ToDetailerPipeSDXL": + case "BasicPipeToDetailerPipe": + case "BasicPipeToDetailerPipeSDXL": + case "EditDetailerPipe": + case "FaceDetailer": + case "DetailerForEach": + case "DetailerForEachDebug": + case "DetailerForEachPipe": + case "DetailerForEachDebugPipe": + { + for(let i in node.widgets) { + let widget = node.widgets[i]; + if(widget.type === "customtext") { + widget.dynamicPrompts = false; + widget.inputEl.placeholder = "wildcard spec: if kept empty, this option will be ignored"; + widget.serializeValue = () => { + return node.widgets[i].value; + }; + } + } + } + break; + } + + if(node.comfyClass == "ImpactSEGSLabelFilter" || node.comfyClass == "SEGSLabelFilterDetailerHookProvider") { + Object.defineProperty(node.widgets[0], "value", { + set: (value) => { + const stackTrace = new Error().stack; + if(stackTrace.includes('inner_value_change')) { + if(node.widgets[1].value.trim() != "" && !node.widgets[1].value.trim().endsWith(",")) + node.widgets[1].value += ", " + + node.widgets[1].value += value; + node.widgets_values[1] = node.widgets[1].value; + } + + node._value = value; + }, + get: () => { + return node._value; + } + }); + } + + if(node.comfyClass == "UltralyticsDetectorProvider") { + let model_name_widget = node.widgets.find((w) => w.name === "model_name"); + let orig_draw = node.onDrawForeground; + node.onDrawForeground = function (ctx) { + const r = orig_draw?.apply?.(this, arguments); + + let is_seg = model_name_widget.value?.startsWith('segm/') || model_name_widget.value?.includes('-seg'); + if(!is_seg) { + var slot_pos = new Float32Array(2); + var pos = node.getConnectionPos(false, 1, slot_pos); + + pos[0] -= node.pos[0] - 10; + pos[1] -= node.pos[1]; + + ctx.beginPath(); + ctx.strokeStyle = "red"; + ctx.lineWidth = 4; + ctx.moveTo(pos[0] - 5, pos[1] - 5); + ctx.lineTo(pos[0] + 5, pos[1] + 5); + ctx.moveTo(pos[0] + 5, pos[1] - 5); + ctx.lineTo(pos[0] - 5, pos[1] + 5); + ctx.stroke(); + } + } + } + + if( + node.comfyClass == "ImpactWildcardEncode" || node.comfyClass == "ImpactWildcardProcessor" + || node.comfyClass == "ToDetailerPipe" || node.comfyClass == "ToDetailerPipeSDXL" + || node.comfyClass == "EditDetailerPipe" || node.comfyClass == "EditDetailerPipeSDXL" + || node.comfyClass == "BasicPipeToDetailerPipe" || node.comfyClass == "BasicPipeToDetailerPipeSDXL") { + node._value = "Select the LoRA to add to the text"; + node._wvalue = "Select the Wildcard to add to the text"; + + var tbox_id = 0; + var combo_id = 3; + var has_lora = true; + + switch(node.comfyClass){ + case "ImpactWildcardEncode": + tbox_id = 0; + combo_id = 3; + break; + + case "ImpactWildcardProcessor": + tbox_id = 0; + combo_id = 4; + has_lora = false; + break; + + case "ToDetailerPipe": + case "ToDetailerPipeSDXL": + case "EditDetailerPipe": + case "EditDetailerPipeSDXL": + case "BasicPipeToDetailerPipe": + case "BasicPipeToDetailerPipeSDXL": + tbox_id = 0; + combo_id = 1; 
+ break; + } + + Object.defineProperty(node.widgets[combo_id+1], "value", { + set: (value) => { + const stackTrace = new Error().stack; + if(stackTrace.includes('inner_value_change')) { + if(value != "Select the Wildcard to add to the text") { + if(node.widgets[tbox_id].value != '') + node.widgets[tbox_id].value += ', ' + + node.widgets[tbox_id].value += value; + } + } + }, + get: () => { return "Select the Wildcard to add to the text"; } + }); + + Object.defineProperty(node.widgets[combo_id+1].options, "values", { + set: (x) => {}, + get: () => { + return wildcards_list; + } + }); + + if(has_lora) { + Object.defineProperty(node.widgets[combo_id], "value", { + set: (value) => { + const stackTrace = new Error().stack; + if(stackTrace.includes('inner_value_change')) { + if(value != "Select the LoRA to add to the text") { + let lora_name = value; + if (lora_name.endsWith('.safetensors')) { + lora_name = lora_name.slice(0, -12); + } + + node.widgets[tbox_id].value += `<lora:${lora_name}>`; + if(node.widgets_values) { + node.widgets_values[tbox_id] = node.widgets[tbox_id].value; + } + } + } + + node._value = value; + }, + + get: () => { return "Select the LoRA to add to the text"; } + }); + } + + // Preventing validation errors from occurring in any situation. + if(has_lora) { + node.widgets[combo_id].serializeValue = () => { return "Select the LoRA to add to the text"; } + } + node.widgets[combo_id+1].serializeValue = () => { return "Select the Wildcard to add to the text"; } + } + + if(node.comfyClass == "ImpactWildcardProcessor" || node.comfyClass == "ImpactWildcardEncode") { + node.widgets[0].inputEl.placeholder = "Wildcard Prompt (User input)"; + node.widgets[1].inputEl.placeholder = "Populated Prompt (Will be generated automatically)"; + node.widgets[1].inputEl.disabled = true; + + const populated_text_widget = node.widgets.find((w) => w.name == 'populated_text'); + const mode_widget = node.widgets.find((w) => w.name == 'mode'); + + // mode combo + Object.defineProperty(mode_widget, "value", { + set: (value) => { + node._mode_value = value == true || value == "Populate"; + populated_text_widget.inputEl.disabled = value == true || value == "Populate"; + }, + get: () => { + if(node._mode_value != undefined) + return node._mode_value; + else + return true; + } + }); + } + + if (node.comfyClass == "MaskPainter") { + node.widgets[0].value = '#placeholder'; + + Object.defineProperty(node, "images", { + set: function(value) { + node._images = value; + }, + get: function() { + const id = node.id+""; + if(node.widgets[0].value != '#placeholder') { + var need_invalidate = false; + + if(input_dirty.hasOwnProperty(id) && input_dirty[id]) { + node.widgets[0].value = {...input_tracking[id][1]}; + input_dirty[id] = false; + need_invalidate = true; + this._images = app.nodeOutputs[id].images; + } + + let filename = app.nodeOutputs[id]['aux'][1][0]['filename']; + let subfolder = app.nodeOutputs[id]['aux'][1][0]['subfolder']; + let type = app.nodeOutputs[id]['aux'][1][0]['type']; + + let item = + { + image_hash: app.nodeOutputs[id]['aux'][0], + forward_filename: app.nodeOutputs[id]['aux'][1][0]['filename'], + forward_subfolder: app.nodeOutputs[id]['aux'][1][0]['subfolder'], + forward_type: app.nodeOutputs[id]['aux'][1][0]['type'] + }; + + if(node._images) { + app.nodeOutputs[id].images = [{ + ...node._images[0], + ...item + }]; + + node.widgets[0].value = + { + ...node._images[0], + ...item + }; + } + else { + app.nodeOutputs[id].images = [{ + ...item + }]; + + node.widgets[0].value = + { + ...item + }; + } + + 
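// Editor's note: a compact restatement of the merge performed just above
// (hypothetical helper, illustrative only): fold the forwarded mask reference
// from the node's aux output into an image record, so that previews load the
// repainted mask rather than the stale image.
function withForwardRef(imageRec, aux) {
    return {
        ...imageRec,
        image_hash: aux[0],
        forward_filename: aux[1][0]['filename'],
        forward_subfolder: aux[1][0]['subfolder'],
        forward_type: aux[1][0]['type']
    };
}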
if(need_invalidate) { + Promise.all( + app.nodeOutputs[id].images.map((src) => { + return new Promise((r) => { + const img = new Image(); + img.onload = () => r(img); + img.onerror = () => r(null); + img.src = "/view?" + new URLSearchParams(src).toString(); + }); + }) + ).then((imgs) => { + this.imgs = imgs.filter(Boolean); + this.setSizeForImage?.(); + app.graph.setDirtyCanvas(true); + }); + + app.nodeOutputs[id].images[0] = { ...node.widgets[0].value }; + } + + return app.nodeOutputs[id].images; + } + else { + return node._images; + } + } + }); + } + } +}); diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/impact-sam-editor.js b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/impact-sam-editor.js new file mode 100644 index 0000000000000000000000000000000000000000..e7bf6f297fc67d41c3e25e6541cd0df98b51fdd1 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/impact-sam-editor.js @@ -0,0 +1,637 @@ +import { app } from "../../scripts/app.js"; +import { api } from "../../scripts/api.js"; +import { ComfyDialog, $el } from "../../scripts/ui.js"; +import { ComfyApp } from "../../scripts/app.js"; +import { ClipspaceDialog } from "../../extensions/core/clipspace.js"; + +function addMenuHandler(nodeType, cb) { + const getOpts = nodeType.prototype.getExtraMenuOptions; + nodeType.prototype.getExtraMenuOptions = function () { + const r = getOpts.apply(this, arguments); + cb.apply(this, arguments); + return r; + }; +} + +// Helper function to convert a data URL to a Blob object +function dataURLToBlob(dataURL) { + const parts = dataURL.split(';base64,'); + const contentType = parts[0].split(':')[1]; + const byteString = atob(parts[1]); + const arrayBuffer = new ArrayBuffer(byteString.length); + const uint8Array = new Uint8Array(arrayBuffer); + for (let i = 0; i < byteString.length; i++) { + uint8Array[i] = byteString.charCodeAt(i); + } + return new Blob([arrayBuffer], { type: contentType }); +} + +function loadedImageToBlob(image) { + const canvas = document.createElement('canvas'); + + canvas.width = image.width; + canvas.height = image.height; + + const ctx = canvas.getContext('2d'); + + ctx.drawImage(image, 0, 0); + + const dataURL = canvas.toDataURL('image/png', 1); + const blob = dataURLToBlob(dataURL); + + return blob; +} + +async function uploadMask(filepath, formData) { + await api.fetchApi('/upload/mask', { + method: 'POST', + body: formData + }).then(response => {}).catch(error => { + console.error('Error:', error); + }); + + ComfyApp.clipspace.imgs[ComfyApp.clipspace['selectedIndex']] = new Image(); + ComfyApp.clipspace.imgs[ComfyApp.clipspace['selectedIndex']].src = `view?filename=${filepath.filename}&type=${filepath.type}`; + + if(ComfyApp.clipspace.images) + ComfyApp.clipspace.images[ComfyApp.clipspace['selectedIndex']] = filepath; + + ClipspaceDialog.invalidatePreview(); +} + +class ImpactSamEditorDialog extends ComfyDialog { + static instance = null; + + static getInstance() { + if(!ImpactSamEditorDialog.instance) { + ImpactSamEditorDialog.instance = new ImpactSamEditorDialog(); + } + + return ImpactSamEditorDialog.instance; + } + + constructor() { + super(); + this.element = $el("div.comfy-modal", { parent: document.body }, + [ $el("div.comfy-modal-content", + [...this.createButtons()]), + ]); + } + + createButtons() { + return []; + } + + createButton(name, callback) { + var button = document.createElement("button"); + button.innerText = name; + button.addEventListener("click", callback); + return button; + } + + createLeftButton(name, callback) { + var 
button = this.createButton(name, callback); + button.style.cssFloat = "left"; + button.style.marginRight = "4px"; + return button; + } + + createRightButton(name, callback) { + var button = this.createButton(name, callback); + button.style.cssFloat = "right"; + button.style.marginLeft = "4px"; + return button; + } + + createLeftSlider(self, name, callback) { + const divElement = document.createElement('div'); + divElement.id = "sam-confidence-slider"; + divElement.style.cssFloat = "left"; + divElement.style.fontFamily = "sans-serif"; + divElement.style.marginRight = "4px"; + divElement.style.color = "var(--input-text)"; + divElement.style.backgroundColor = "var(--comfy-input-bg)"; + divElement.style.borderRadius = "8px"; + divElement.style.borderColor = "var(--border-color)"; + divElement.style.borderStyle = "solid"; + divElement.style.fontSize = "15px"; + divElement.style.height = "21px"; + divElement.style.padding = "1px 6px"; + divElement.style.display = "flex"; + divElement.style.position = "relative"; + divElement.style.top = "2px"; + self.confidence_slider_input = document.createElement('input'); + self.confidence_slider_input.setAttribute('type', 'range'); + self.confidence_slider_input.setAttribute('min', '0'); + self.confidence_slider_input.setAttribute('max', '100'); + self.confidence_slider_input.setAttribute('value', '70'); + const labelElement = document.createElement("label"); + labelElement.textContent = name; + + divElement.appendChild(labelElement); + divElement.appendChild(self.confidence_slider_input); + + self.confidence_slider_input.addEventListener("change", callback); + + return divElement; + } + + async detect_and_invalidate_mask_canvas(self) { + const mask_img = await self.detect(self); + + const canvas = self.maskCtx.canvas; + const ctx = self.maskCtx; + + ctx.clearRect(0, 0, canvas.width, canvas.height); + + await new Promise((resolve, reject) => { + self.mask_image = new Image(); + self.mask_image.onload = function() { + ctx.drawImage(self.mask_image, 0, 0, canvas.width, canvas.height); + resolve(); + }; + self.mask_image.onerror = reject; + self.mask_image.src = mask_img.src; + }); + } + + setlayout(imgCanvas, maskCanvas, pointsCanvas) { + const self = this; + + // If it is specified as relative, using it only as a hidden placeholder for padding is recommended + // to prevent anomalies where it exceeds a certain size and goes outside of the window. 
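// Editor's note: a minimal sketch of the pattern that comment describes
// (hypothetical helper, not the dialog's actual API): a relatively-positioned
// spacer reserves layout height, while the real panel is absolutely pinned to
// the dialog's bottom edge so it cannot be pushed off-screen when content grows.
function makeBottomBar(host, heightPx) {
    const spacer = document.createElement("div");
    spacer.style.position = "relative";   // participates in layout only
    spacer.style.height = heightPx + "px";

    const bar = document.createElement("div");
    bar.style.position = "absolute";      // pinned to the dialog, out of the flow
    bar.style.bottom = "0px";
    bar.style.left = "20px";
    bar.style.right = "20px";
    bar.style.height = heightPx + "px";

    host.appendChild(spacer);             // spacer must come first in the flow
    host.appendChild(bar);
    return bar;
}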
+ var placeholder = document.createElement("div"); + placeholder.style.position = "relative"; + placeholder.style.height = "50px"; + + var bottom_panel = document.createElement("div"); + bottom_panel.style.position = "absolute"; + bottom_panel.style.bottom = "0px"; + bottom_panel.style.left = "20px"; + bottom_panel.style.right = "20px"; + bottom_panel.style.height = "50px"; + + var brush = document.createElement("div"); + brush.id = "sam-brush"; + brush.style.backgroundColor = "blue"; + brush.style.outline = "2px solid pink"; + brush.style.borderRadius = "50%"; + brush.style.MozBorderRadius = "50%"; + brush.style.WebkitBorderRadius = "50%"; + brush.style.position = "absolute"; + brush.style.zIndex = 100; + brush.style.pointerEvents = "none"; + this.brush = brush; + this.element.appendChild(imgCanvas); + this.element.appendChild(maskCanvas); + this.element.appendChild(pointsCanvas); + this.element.appendChild(placeholder); // must below z-index than bottom_panel to avoid covering button + this.element.appendChild(bottom_panel); + document.body.appendChild(brush); + this.brush_size = 5; + + var confidence_slider = this.createLeftSlider(self, "Confidence", (event) => { + self.confidence = event.target.value; + }); + + var clearButton = this.createLeftButton("Clear", () => { + self.maskCtx.clearRect(0, 0, self.maskCanvas.width, self.maskCanvas.height); + self.pointsCtx.clearRect(0, 0, self.pointsCanvas.width, self.pointsCanvas.height); + + self.prompt_points = []; + + self.invalidatePointsCanvas(self); + }); + + var detectButton = this.createLeftButton("Detect", () => self.detect_and_invalidate_mask_canvas(self)); + + var cancelButton = this.createRightButton("Cancel", () => { + document.removeEventListener("mouseup", ImpactSamEditorDialog.handleMouseUp); + document.removeEventListener("keydown", ImpactSamEditorDialog.handleKeyDown); + self.close(); + }); + + self.saveButton = this.createRightButton("Save", () => { + document.removeEventListener("mouseup", ImpactSamEditorDialog.handleMouseUp); + document.removeEventListener("keydown", ImpactSamEditorDialog.handleKeyDown); + self.save(self); + }); + + var undoButton = this.createLeftButton("Undo", () => { + if(self.prompt_points.length > 0) { + self.prompt_points.pop(); + self.pointsCtx.clearRect(0, 0, self.pointsCanvas.width, self.pointsCanvas.height); + self.invalidatePointsCanvas(self); + } + }); + + bottom_panel.appendChild(clearButton); + bottom_panel.appendChild(detectButton); + bottom_panel.appendChild(self.saveButton); + bottom_panel.appendChild(cancelButton); + bottom_panel.appendChild(confidence_slider); + bottom_panel.appendChild(undoButton); + + imgCanvas.style.position = "relative"; + imgCanvas.style.top = "200"; + imgCanvas.style.left = "0"; + + maskCanvas.style.position = "absolute"; + maskCanvas.style.opacity = 0.5; + pointsCanvas.style.position = "absolute"; + } + + show() { + this.mask_image = null; + self.prompt_points = []; + + this.message_box = $el("p", ["Please wait a moment while the SAM model and the image are being loaded."]); + this.element.appendChild(this.message_box); + + if(self.imgCtx) { + self.imgCtx.clearRect(0, 0, self.imageCanvas.width, self.imageCanvas.height); + } + + const target_image_path = ComfyApp.clipspace.imgs[ComfyApp.clipspace['selectedIndex']].src; + this.load_sam(target_image_path); + + if(!this.is_layout_created) { + // layout + const imgCanvas = document.createElement('canvas'); + const maskCanvas = document.createElement('canvas'); + const pointsCanvas = document.createElement('canvas'); + + 
imgCanvas.id = "imageCanvas"; + maskCanvas.id = "maskCanvas"; + pointsCanvas.id = "pointsCanvas"; + + this.setlayout(imgCanvas, maskCanvas, pointsCanvas); + + // prepare content + this.imgCanvas = imgCanvas; + this.maskCanvas = maskCanvas; + this.pointsCanvas = pointsCanvas; + this.maskCtx = maskCanvas.getContext('2d'); + this.pointsCtx = pointsCanvas.getContext('2d'); + + this.is_layout_created = true; + + // replacement of onClose hook since close is not real close + const self = this; + const observer = new MutationObserver(function(mutations) { + mutations.forEach(function(mutation) { + if (mutation.type === 'attributes' && mutation.attributeName === 'style') { + if(self.last_display_style && self.last_display_style != 'none' && self.element.style.display == 'none') { + ComfyApp.onClipspaceEditorClosed(); + } + + self.last_display_style = self.element.style.display; + } + }); + }); + + const config = { attributes: true }; + observer.observe(this.element, config); + } + + this.setImages(target_image_path, this.imgCanvas, this.pointsCanvas); + + if(ComfyApp.clipspace_return_node) { + this.saveButton.innerText = "Save to node"; + } + else { + this.saveButton.innerText = "Save"; + } + this.saveButton.disabled = true; + + this.element.style.display = "block"; + this.element.style.zIndex = 8888; // NOTE: alert dialog must be high priority. + } + + updateBrushPreview(self, event) { + event.preventDefault(); + + const centerX = event.pageX; + const centerY = event.pageY; + + const brush = self.brush; + + brush.style.width = self.brush_size * 2 + "px"; + brush.style.height = self.brush_size * 2 + "px"; + brush.style.left = (centerX - self.brush_size) + "px"; + brush.style.top = (centerY - self.brush_size) + "px"; + } + + setImages(target_image_path, imgCanvas, pointsCanvas) { + const imgCtx = imgCanvas.getContext('2d'); + const maskCtx = this.maskCtx; + const maskCanvas = this.maskCanvas; + + const self = this; + + // image load + const orig_image = new Image(); + window.addEventListener("resize", () => { + // repositioning + imgCanvas.width = window.innerWidth - 250; + imgCanvas.height = window.innerHeight - 200; + + // redraw image + let drawWidth = orig_image.width; + let drawHeight = orig_image.height; + + if (orig_image.width > imgCanvas.width) { + drawWidth = imgCanvas.width; + drawHeight = (drawWidth / orig_image.width) * orig_image.height; + } + + if (drawHeight > imgCanvas.height) { + drawHeight = imgCanvas.height; + drawWidth = (drawHeight / orig_image.height) * orig_image.width; + } + + imgCtx.drawImage(orig_image, 0, 0, drawWidth, drawHeight); + + // update mask + pointsCanvas.width = drawWidth; + pointsCanvas.height = drawHeight; + pointsCanvas.style.top = imgCanvas.offsetTop + "px"; + pointsCanvas.style.left = imgCanvas.offsetLeft + "px"; + + maskCanvas.width = drawWidth; + maskCanvas.height = drawHeight; + maskCanvas.style.top = imgCanvas.offsetTop + "px"; + maskCanvas.style.left = imgCanvas.offsetLeft + "px"; + + self.invalidateMaskCanvas(self); + self.invalidatePointsCanvas(self); + }); + + // original image load + orig_image.onload = () => self.onLoaded(self); + const rgb_url = new URL(target_image_path); + rgb_url.searchParams.delete('channel'); + rgb_url.searchParams.set('channel', 'rgb'); + orig_image.src = rgb_url; + self.image = orig_image; + } + + onLoaded(self) { + if(self.message_box) { + self.element.removeChild(self.message_box); + self.message_box = null; + } + + window.dispatchEvent(new Event('resize')); + + self.setEventHandler(pointsCanvas); + 
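// Editor's note: the resize handler in setImages above fits the image with a
// downscale-only, aspect-preserving rule. A pure restatement for clarity
// (hypothetical helper, not called by the dialog):
function fitWithin(imgW, imgH, maxW, maxH) {
    let w = imgW, h = imgH;
    if (w > maxW) { w = maxW; h = (w / imgW) * imgH; }   // clamp width first
    if (h > maxH) { h = maxH; w = (h / imgH) * imgW; }   // then clamp height
    return [w, h];
}
// e.g. fitWithin(2048, 1024, 1000, 800) -> [1000, 500]; images smaller than
// the canvas are left at their natural size.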
self.saveButton.disabled = false; + } + + setEventHandler(targetCanvas) { + targetCanvas.addEventListener("contextmenu", (event) => { + event.preventDefault(); + }); + + const self = this; + targetCanvas.addEventListener('pointermove', (event) => this.updateBrushPreview(self,event)); + targetCanvas.addEventListener('pointerdown', (event) => this.handlePointerDown(self,event)); + targetCanvas.addEventListener('pointerover', (event) => { this.brush.style.display = "block"; }); + targetCanvas.addEventListener('pointerleave', (event) => { this.brush.style.display = "none"; }); + document.addEventListener('keydown', ImpactSamEditorDialog.handleKeyDown); + } + + static handleKeyDown(event) { + const self = ImpactSamEditorDialog.instance; + if (event.key === '=') { // positive + brush.style.backgroundColor = "blue"; + brush.style.outline = "2px solid pink"; + self.is_positive_mode = true; + } else if (event.key === '-') { // negative + brush.style.backgroundColor = "red"; + brush.style.outline = "2px solid skyblue"; + self.is_positive_mode = false; + } + } + + is_positive_mode = true; + prompt_points = []; + confidence = 70; + + invalidatePointsCanvas(self) { + const ctx = self.pointsCtx; + + for (const i in self.prompt_points) { + const [is_positive, x, y] = self.prompt_points[i]; + + const scaledX = x * ctx.canvas.width / self.image.width; + const scaledY = y * ctx.canvas.height / self.image.height; + + if(is_positive) + ctx.fillStyle = "blue"; + else + ctx.fillStyle = "red"; + ctx.beginPath(); + ctx.arc(scaledX, scaledY, 3, 0, 3 * Math.PI); + ctx.fill(); + } + } + + invalidateMaskCanvas(self) { + if(self.mask_image) { + self.maskCtx.clearRect(0, 0, self.maskCanvas.width, self.maskCanvas.height); + self.maskCtx.drawImage(self.mask_image, 0, 0, self.maskCanvas.width, self.maskCanvas.height); + } + } + + async load_sam(url) { + const parsedUrl = new URL(url); + const searchParams = new URLSearchParams(parsedUrl.search); + + const filename = searchParams.get("filename") || ""; + const fileType = searchParams.get("type") || ""; + const subfolder = searchParams.get("subfolder") || ""; + + const data = { + sam_model_name: "auto", + filename: filename, + type: fileType, + subfolder: subfolder + }; + + api.fetchApi('/sam/prepare', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(data) + }); + } + + async detect(self) { + const positive_points = []; + const negative_points = []; + + for(const i in self.prompt_points) { + const [is_positive, x, y] = self.prompt_points[i]; + const point = [x,y]; + if(is_positive) + positive_points.push(point); + else + negative_points.push(point); + } + + const data = { + positive_points: positive_points, + negative_points: negative_points, + threshold: self.confidence/100 + }; + + const response = await api.fetchApi('/sam/detect', { + method: 'POST', + headers: { 'Content-Type': 'image/png' }, + body: JSON.stringify(data) + }); + + const blob = await response.blob(); + const url = URL.createObjectURL(blob); + + return new Promise((resolve, reject) => { + const image = new Image(); + image.onload = () => resolve(image); + image.onerror = reject; + image.src = url; + }); + } + + handlePointerDown(self, event) { + if ([0, 2, 5].includes(event.button)) { + event.preventDefault(); + const x = event.offsetX || event.targetTouches[0].clientX - maskRect.left; + const y = event.offsetY || event.targetTouches[0].clientY - maskRect.top; + + const originalX = x * self.image.width / self.pointsCanvas.width; + const originalY = y * 
self.image.height / self.pointsCanvas.height; + + var point = null; + if (event.button == 0) { + // positive + point = [true, originalX, originalY]; + } else { + // negative + point = [false, originalX, originalY]; + } + + self.prompt_points.push(point); + + self.invalidatePointsCanvas(self); + } + } + + async save(self) { + if(!self.mask_image) { + this.close(); + return; + } + + const save_canvas = document.createElement('canvas'); + + const save_ctx = save_canvas.getContext('2d', {willReadFrequently:true}); + save_canvas.width = self.mask_image.width; + save_canvas.height = self.mask_image.height; + + save_ctx.drawImage(self.mask_image, 0, 0, save_canvas.width, save_canvas.height); + + const save_data = save_ctx.getImageData(0, 0, save_canvas.width, save_canvas.height); + + // refine mask image + for (let i = 0; i < save_data.data.length; i += 4) { + if(save_data.data[i]) { + save_data.data[i+3] = 0; + } + else { + save_data.data[i+3] = 255; + } + + save_data.data[i] = 0; + save_data.data[i+1] = 0; + save_data.data[i+2] = 0; + } + + save_ctx.globalCompositeOperation = 'source-over'; + save_ctx.putImageData(save_data, 0, 0); + + const formData = new FormData(); + const filename = "clipspace-mask-" + performance.now() + ".png"; + + const item = + { + "filename": filename, + "subfolder": "", + "type": "temp", + }; + + if(ComfyApp.clipspace.images) + ComfyApp.clipspace.images[0] = item; + + if(ComfyApp.clipspace.widgets) { + const index = ComfyApp.clipspace.widgets.findIndex(obj => obj.name === 'image'); + + if(index >= 0) + ComfyApp.clipspace.widgets[index].value = `${filename} [temp]`; + } + + const dataURL = save_canvas.toDataURL(); + const blob = dataURLToBlob(dataURL); + + let original_url = new URL(this.image.src); + + const original_ref = { filename: original_url.searchParams.get('filename') }; + + let original_subfolder = original_url.searchParams.get("subfolder"); + if(original_subfolder) + original_ref.subfolder = original_subfolder; + + let original_type = original_url.searchParams.get("type"); + if(original_type) + original_ref.type = original_type; + + formData.append('image', blob, filename); + formData.append('original_ref', JSON.stringify(original_ref)); + formData.append('type', "temp"); + + await uploadMask(item, formData); + ComfyApp.onClipspaceEditorSave(); + this.close(); + } +} + +app.registerExtension({ + name: "Comfy.Impact.SAMEditor", + init(app) { + const callback = + function () { + let dlg = ImpactSamEditorDialog.getInstance(); + dlg.show(); + }; + + const context_predicate = () => ComfyApp.clipspace && ComfyApp.clipspace.imgs && ComfyApp.clipspace.imgs.length > 0 + ClipspaceDialog.registerButton("Impact SAM Detector", context_predicate, callback); + }, + + async beforeRegisterNodeDef(nodeType, nodeData, app) { + if (Array.isArray(nodeData.output) && (nodeData.output.includes("MASK") || nodeData.output.includes("IMAGE"))) { + addMenuHandler(nodeType, function (_, options) { + options.unshift({ + content: "Open in SAM Detector", + callback: () => { + ComfyApp.copyToClipspace(this); + ComfyApp.clipspace_return_node = this; + + let dlg = ImpactSamEditorDialog.getInstance(); + dlg.show(); + }, + }); + }); + } + } +}); + diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/impact-segs-picker.js b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/impact-segs-picker.js new file mode 100644 index 0000000000000000000000000000000000000000..01319f072923294d9a531aa296435ffa78eafe2a --- /dev/null +++ 
b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/impact-segs-picker.js @@ -0,0 +1,182 @@ +import { ComfyApp, app } from "../../scripts/app.js"; +import { ComfyDialog, $el } from "../../scripts/ui.js"; +import { api } from "../../scripts/api.js"; + +async function open_picker(node) { + const resp = await api.fetchApi(`/impact/segs/picker/count?id=${node.id}`); + const body = await resp.text(); + + let cnt = parseInt(body); + + var existingPicker = document.getElementById('impact-picker'); + if (existingPicker) { + existingPicker.parentNode.removeChild(existingPicker); + } + + var gallery = document.createElement('div'); + gallery.id = 'impact-picker'; + + gallery.style.position = "absolute"; + gallery.style.height = "80%"; + gallery.style.width = "80%"; + gallery.style.top = "10%"; + gallery.style.left = "10%"; + gallery.style.display = 'flex'; + gallery.style.flexWrap = 'wrap'; + gallery.style.maxHeight = '600px'; + gallery.style.overflow = 'auto'; + gallery.style.backgroundColor = 'rgba(0,0,0,0.3)'; + gallery.style.padding = '20px'; + gallery.draggable = false; + gallery.style.zIndex = 5000; + + var doneButton = document.createElement('button'); + doneButton.textContent = 'Done'; + doneButton.style.padding = '10px 10px'; + doneButton.style.border = 'none'; + doneButton.style.borderRadius = '5px'; + doneButton.style.fontFamily = 'Arial, sans-serif'; + doneButton.style.fontSize = '16px'; + doneButton.style.fontWeight = 'bold'; + doneButton.style.color = '#fff'; + doneButton.style.background = 'linear-gradient(to bottom, #0070B8, #003D66)'; + doneButton.style.boxShadow = '0 2px 4px rgba(0, 0, 0, 0.4)'; + doneButton.style.margin = "20px"; + doneButton.style.height = "40px"; + + var cancelButton = document.createElement('button'); + cancelButton.textContent = 'Cancel'; + cancelButton.style.padding = '10px 10px'; + cancelButton.style.border = 'none'; + cancelButton.style.borderRadius = '5px'; + cancelButton.style.fontFamily = 'Arial, sans-serif'; + cancelButton.style.fontSize = '16px'; + cancelButton.style.fontWeight = 'bold'; + cancelButton.style.color = '#fff'; + cancelButton.style.background = 'linear-gradient(to bottom, #ff70B8, #ff3D66)'; + cancelButton.style.boxShadow = '0 2px 4px rgba(0, 0, 0, 0.4)'; + cancelButton.style.margin = "20px"; + cancelButton.style.height = "40px"; + + const w = node.widgets.find((w) => w.name == 'picks'); + let prev_selected = w.value.split(',').map(function(item) { + return parseInt(item, 10); + }); + + let images = []; + doneButton.onclick = () => { + var result = ''; + for(let i in images) { + if(images[i].isSelected) { + if(result != '') + result += ', '; + + result += (parseInt(i)+1); + } + } + + w.value = result; + + gallery.parentNode.removeChild(gallery); + } + + cancelButton.onclick = () => { + gallery.parentNode.removeChild(gallery); + } + + var panel = document.createElement('div'); + panel.style.clear = 'both'; + panel.style.width = '100%'; + panel.style.height = '40px'; + panel.style.justifyContent = 'center'; + panel.style.alignItems = 'center'; + panel.style.display = 'flex'; + panel.appendChild(doneButton); + panel.appendChild(cancelButton); + gallery.appendChild(panel); + + var hint = document.createElement('label'); + hint.style.position = 'absolute'; + hint.innerHTML = 'Click: Toggle Selection
<BR>Ctrl-click: Single Selection'; + gallery.appendChild(hint); + + let max_size = 300; + + for(let i=0; i<cnt; i++) { + let image = document.createElement('img'); + // NOTE: the original src assignment was lost in transcription (the sanitizer ate the span between '<' and '>'); the endpoint shape below is a guessed placeholder consistent with the /impact/segs/picker/count route above + image.src = `/impact/segs/picker/view?id=${node.id}&idx=${i}`; + image.style.margin = '10px'; + image.draggable = false; + images.push(image); + + if(prev_selected.includes(i+1)) { + image.style.border = '2px solid #006699'; + image.isSelected = true; + } + + image.onload = function() { + let ratio = 1; + if(image.naturalWidth > image.naturalHeight) { + ratio = max_size/image.naturalWidth; + } + else { + ratio = max_size/image.naturalHeight; + } + + let width = image.naturalWidth * ratio; + let height = image.naturalHeight * ratio; + + if(width < height) { + this.style.marginLeft = (200-width)/2+"px"; + } + else{ + this.style.marginTop = (200-height)/2+"px"; + } + + this.style.width = width+"px"; + this.style.height = height+"px"; + this.style.objectFit = 'cover'; + } + + image.addEventListener('click', function(event) { + if(event.ctrlKey) { + for(let i in images) { + if(images[i].isSelected) { + images[i].style.border = 'none'; + images[i].isSelected = false; + } + } + + image.style.border = '2px solid #006699'; + image.isSelected = true; + + return; + } + + if(image.isSelected) { + image.style.border = 'none'; + image.isSelected = false; + } + else { + image.style.border = '2px solid #006699'; + image.isSelected = true; + } + }); + + gallery.appendChild(image); + } + + document.body.appendChild(gallery); +} + + +app.registerExtension({ + name: "Comfy.Impack.Picker", + + nodeCreated(node, app) { + if(node.comfyClass == "ImpactSEGSPicker") { + node.addWidget("button", "pick", "image", () => { + open_picker(node); + }); + } + } +}); \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/impact-wildcard.js b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/impact-wildcard.js new file mode 100644 index 0000000000000000000000000000000000000000..18157e9c2205daf68237510ee3707a68fa3c354c --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/js/impact-wildcard.js @@ -0,0 +1,16 @@ +import { ComfyApp, app } from "../../scripts/app.js"; +import { api } from "../../scripts/api.js"; + +let refresh_btn = document.getElementById('comfy-refresh-button'); +let refresh_btn2 = document.querySelector('button[title="Refresh widgets in nodes to find new models or files"]'); + +let orig = refresh_btn.onclick; + +refresh_btn.onclick = function() { + orig(); + api.fetchApi('/impact/wildcards/refresh'); +}; + +refresh_btn2.addEventListener('click', function() { + api.fetchApi('/impact/wildcards/refresh'); +}); \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/latent.png b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/latent.png new file mode 100644 index 0000000000000000000000000000000000000000..19fed324a25a7e1a2252400e7752ce5586742429 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/latent.png differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/animatediff_nodes.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/animatediff_nodes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..47e7f0f9af14d391c18bbe2b5fdb27ecd2e35739 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/animatediff_nodes.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/bridge_nodes.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/bridge_nodes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7bbe31ffc9543e4d5d34362a43025affc9b7fd04 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/bridge_nodes.cpython-310.pyc differ diff
--git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/config.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/config.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..005736c8c5fda82c1eb3bb262df37d2d59ce2c2a Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/config.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/core.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/core.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eff11af666b903b3865ca0a7954ee059842f1a1f Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/core.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/defs.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/defs.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..70ae53cfc9d79d2ad1454942b1a3c527d1738fb0 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/defs.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/detectors.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/detectors.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0c53547eaccf1db311342d9ddeae73a6c15ca603 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/detectors.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/hf_nodes.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/hf_nodes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3e9ed0c28395ec08e8024ec3eea4c09318a5825a Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/hf_nodes.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/hook_nodes.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/hook_nodes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a9be500eda70b66570dc30925c85a6f9d915366a Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/hook_nodes.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/hooks.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/hooks.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..86b03a1009f1abb7d97d8d71070b5944fe014c1a Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/hooks.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/impact_pack.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/impact_pack.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..06a3b4a7d82db1e158c779e6bc65d25694e9c8a1 Binary files /dev/null and 
b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/impact_pack.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/impact_sampling.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/impact_sampling.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..de265702f148b5497fcef8f9d173dfc83f53fb6b Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/impact_sampling.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/impact_server.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/impact_server.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ab104e173e824fd70f8b74db5c06c1e5aaf9ac54 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/impact_server.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/logics.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/logics.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0ba307cc3428a872b68d0d00ab1295c5742c12e8 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/logics.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/pipe.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/pipe.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2f187b71c342e970906810f7bd7126bc2b5f81c6 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/pipe.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/sample_error_enhancer.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/sample_error_enhancer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ee3ada4b71d431482d5d0c21e1452be731c4f984 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/sample_error_enhancer.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/segs_nodes.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/segs_nodes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7600534d4c824af6daa4e28dc9c13c637ae3b5ce Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/segs_nodes.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/segs_upscaler.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/segs_upscaler.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..00a65f60dd85b6b04a5b5d32d75d27a9b48169f6 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/segs_upscaler.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/special_samplers.cpython-310.pyc 
b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/special_samplers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bcd5d9b8e88476da3c8f0228bafc534c43eae70d Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/special_samplers.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/util_nodes.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/util_nodes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6696ab9a226d0b01ef0d1c19f0e8be23d469c644 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/util_nodes.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/utils.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a89c40fa82804447e6e02740d8feafe85ffaad97 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/utils.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/wildcards.cpython-310.pyc b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/wildcards.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..af669f736b9c2521185488330d625be3fa54e158 Binary files /dev/null and b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/__pycache__/wildcards.cpython-310.pyc differ diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/additional_dependencies.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/additional_dependencies.py new file mode 100644 index 0000000000000000000000000000000000000000..799b0b141370a53ca25163f58c011c2db5e22cb6 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/additional_dependencies.py @@ -0,0 +1,12 @@ +import sys +import subprocess + + +def ensure_onnx_package(): + try: + import onnxruntime + except Exception: + if "python_embeded" in sys.executable or "python_embedded" in sys.executable: + subprocess.check_call([sys.executable, '-s', '-m', 'pip', 'install', 'onnxruntime'])  # '-s' skips user site-packages in the portable build + else: + subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'onnxruntime']) diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/animatediff_nodes.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/animatediff_nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..d5974cc0dd6623a3c9fba680df8c06dc953ba4ec --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/animatediff_nodes.py @@ -0,0 +1,178 @@ +from nodes import MAX_RESOLUTION +from impact.utils import * +import impact.core as core +from impact.core import SEG +from impact.segs_nodes import SEGSPaste + + +class SEGSDetailerForAnimateDiff: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "image_frames": ("IMAGE", ), + "segs": ("SEGS", ), + "guide_size": ("FLOAT", {"default": 512, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "guide_size_for": ("BOOLEAN", {"default": True, "label_on": "bbox", "label_off": "crop_region"}), + "max_size": ("FLOAT", {"default": 768, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "seed": ("INT", {"default": 0, "min": 0,
"max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": (core.SCHEDULERS,), + "denoise": ("FLOAT", {"default": 0.5, "min": 0.0001, "max": 1.0, "step": 0.01}), + "basic_pipe": ("BASIC_PIPE",), + "refiner_ratio": ("FLOAT", {"default": 0.2, "min": 0.0, "max": 1.0}), + }, + "optional": { + "refiner_basic_pipe_opt": ("BASIC_PIPE",), + "noise_mask_feather": ("INT", {"default": 20, "min": 0, "max": 100, "step": 1}), + "scheduler_func_opt": ("SCHEDULER_FUNC",), + } + } + + RETURN_TYPES = ("SEGS", "IMAGE") + RETURN_NAMES = ("segs", "cnet_images") + OUTPUT_IS_LIST = (False, True) + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detailer" + + @staticmethod + def do_detail(image_frames, segs, guide_size, guide_size_for, max_size, seed, steps, cfg, sampler_name, scheduler, + denoise, basic_pipe, refiner_ratio=None, refiner_basic_pipe_opt=None, noise_mask_feather=0, scheduler_func_opt=None): + + model, clip, vae, positive, negative = basic_pipe + if refiner_basic_pipe_opt is None: + refiner_model, refiner_clip, refiner_positive, refiner_negative = None, None, None, None + else: + refiner_model, refiner_clip, _, refiner_positive, refiner_negative = refiner_basic_pipe_opt + + segs = core.segs_scale_match(segs, image_frames.shape) + + new_segs = [] + cnet_image_list = [] + + for seg in segs[1]: + cropped_image_frames = None + + for image in image_frames: + image = image.unsqueeze(0) + cropped_image = seg.cropped_image if seg.cropped_image is not None else crop_tensor4(image, seg.crop_region) + cropped_image = to_tensor(cropped_image) + if cropped_image_frames is None: + cropped_image_frames = cropped_image + else: + cropped_image_frames = torch.concat((cropped_image_frames, cropped_image), dim=0) + + cropped_image_frames = cropped_image_frames.cpu().numpy() + + # It is assumed that AnimateDiff does not support conditioning masks based on test results, but it will be added for future consideration. 
+ cropped_positive = [ + [condition, { + k: core.crop_condition_mask(v, cropped_image_frames, seg.crop_region) if k == "mask" else v + for k, v in details.items() + }] + for condition, details in positive + ] + + cropped_negative = [ + [condition, { + k: core.crop_condition_mask(v, cropped_image_frames, seg.crop_region) if k == "mask" else v + for k, v in details.items() + }] + for condition, details in negative + ] + + enhanced_image_tensor, cnet_images = core.enhance_detail_for_animatediff(cropped_image_frames, model, clip, vae, guide_size, guide_size_for, max_size, + seg.bbox, seed, steps, cfg, sampler_name, scheduler, + cropped_positive, cropped_negative, denoise, seg.cropped_mask, + refiner_ratio=refiner_ratio, refiner_model=refiner_model, + refiner_clip=refiner_clip, refiner_positive=refiner_positive, + refiner_negative=refiner_negative, control_net_wrapper=seg.control_net_wrapper, + noise_mask_feather=noise_mask_feather, scheduler_func=scheduler_func_opt) + if cnet_images is not None: + cnet_image_list.extend(cnet_images) + + if enhanced_image_tensor is None: + new_cropped_image = cropped_image_frames + else: + new_cropped_image = enhanced_image_tensor.cpu().numpy() + + new_seg = SEG(new_cropped_image, seg.cropped_mask, seg.confidence, seg.crop_region, seg.bbox, seg.label, None) + new_segs.append(new_seg) + + return (segs[0], new_segs), cnet_image_list + + def doit(self, image_frames, segs, guide_size, guide_size_for, max_size, seed, steps, cfg, sampler_name, scheduler, + denoise, basic_pipe, refiner_ratio=None, refiner_basic_pipe_opt=None, inpaint_model=False, noise_mask_feather=0, scheduler_func_opt=None): + + segs, cnet_images = SEGSDetailerForAnimateDiff.do_detail(image_frames, segs, guide_size, guide_size_for, max_size, seed, steps, cfg, sampler_name, + scheduler, denoise, basic_pipe, refiner_ratio, refiner_basic_pipe_opt, + noise_mask_feather=noise_mask_feather, scheduler_func_opt=scheduler_func_opt) + + if len(cnet_images) == 0: + cnet_images = [empty_pil_tensor()] + + return (segs, cnet_images) + + +class DetailerForEachPipeForAnimateDiff: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "image_frames": ("IMAGE", ), + "segs": ("SEGS", ), + "guide_size": ("FLOAT", {"default": 512, "min": 64, "max": nodes.MAX_RESOLUTION, "step": 8}), + "guide_size_for": ("BOOLEAN", {"default": True, "label_on": "bbox", "label_off": "crop_region"}), + "max_size": ("FLOAT", {"default": 1024, "min": 64, "max": nodes.MAX_RESOLUTION, "step": 8}), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": (core.SCHEDULERS,), + "denoise": ("FLOAT", {"default": 0.5, "min": 0.0001, "max": 1.0, "step": 0.01}), + "feather": ("INT", {"default": 5, "min": 0, "max": 100, "step": 1}), + "basic_pipe": ("BASIC_PIPE", ), + "refiner_ratio": ("FLOAT", {"default": 0.2, "min": 0.0, "max": 1.0}), + }, + "optional": { + "detailer_hook": ("DETAILER_HOOK",), + "refiner_basic_pipe_opt": ("BASIC_PIPE",), + "noise_mask_feather": ("INT", {"default": 20, "min": 0, "max": 100, "step": 1}), + "scheduler_func_opt": ("SCHEDULER_FUNC",), + } + } + + RETURN_TYPES = ("IMAGE", "SEGS", "BASIC_PIPE", "IMAGE") + RETURN_NAMES = ("image", "segs", "basic_pipe", "cnet_images") + OUTPUT_IS_LIST = (False, False, False, True) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detailer" + + @staticmethod + def 
doit(image_frames, segs, guide_size, guide_size_for, max_size, seed, steps, cfg, sampler_name, scheduler, + denoise, feather, basic_pipe, refiner_ratio=None, detailer_hook=None, refiner_basic_pipe_opt=None, + noise_mask_feather=0, scheduler_func_opt=None): + + enhanced_segs = [] + cnet_image_list = [] + + for sub_seg in segs[1]: + single_seg = segs[0], [sub_seg] + enhanced_seg, cnet_images = SEGSDetailerForAnimateDiff().do_detail(image_frames, single_seg, guide_size, guide_size_for, max_size, seed, steps, cfg, sampler_name, scheduler, + denoise, basic_pipe, refiner_ratio, refiner_basic_pipe_opt, noise_mask_feather, scheduler_func_opt=scheduler_func_opt) + + image_frames = SEGSPaste.doit(image_frames, enhanced_seg, feather, alpha=255)[0] + + if cnet_images is not None: + cnet_image_list.extend(cnet_images) + + if detailer_hook is not None: + image_frames = detailer_hook.post_paste(image_frames) + + enhanced_segs += enhanced_seg[1] + + new_segs = segs[0], enhanced_segs + return image_frames, new_segs, basic_pipe, cnet_image_list diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/bridge_nodes.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/bridge_nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..b4bc1d7577c0d60e44322a2cee78923409639bfb --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/bridge_nodes.py @@ -0,0 +1,304 @@ +import os +from PIL import ImageOps +from impact.utils import * +import latent_preview + + +# NOTE: this should not be `from . import core`. +# I don't know why but... 'from .' and 'from impact' refer to different core modules. +# This separates global variables of the core module and breaks the preview bridge. +from impact import core +# <-- +import random + + +class PreviewBridge: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "images": ("IMAGE",), + "image": ("STRING", {"default": ""}), + }, + "hidden": {"unique_id": "UNIQUE_ID", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + RETURN_TYPES = ("IMAGE", "MASK", ) + + FUNCTION = "doit" + + OUTPUT_NODE = True + + CATEGORY = "ImpactPack/Util" + + def __init__(self): + super().__init__() + self.output_dir = folder_paths.get_temp_directory() + self.type = "temp" + self.prev_hash = None + + @staticmethod + def load_image(pb_id): + is_fail = False + if pb_id not in core.preview_bridge_image_id_map: + is_fail = True + + image_path, ui_item = core.preview_bridge_image_id_map[pb_id] + + if not os.path.isfile(image_path): + is_fail = True + + if not is_fail: + i = Image.open(image_path) + i = ImageOps.exif_transpose(i) + image = i.convert("RGB") + image = np.array(image).astype(np.float32) / 255.0 + image = torch.from_numpy(image)[None,] + + if 'A' in i.getbands(): + mask = np.array(i.getchannel('A')).astype(np.float32) / 255.0 + mask = 1. 
- torch.from_numpy(mask) + else: + mask = torch.zeros((64, 64), dtype=torch.float32, device="cpu") + else: + image = empty_pil_tensor() + mask = torch.zeros((64, 64), dtype=torch.float32, device="cpu") + ui_item = { + "filename": 'empty.png', + "subfolder": '', + "type": 'temp' + } + + return image, mask.unsqueeze(0), ui_item + + def doit(self, images, image, unique_id, prompt=None, extra_pnginfo=None): + need_refresh = False + + if unique_id not in core.preview_bridge_cache: + need_refresh = True + + elif core.preview_bridge_cache[unique_id][0] is not images: + need_refresh = True + + if not need_refresh: + pixels, mask, path_item = PreviewBridge.load_image(image) + image = [path_item] + else: + res = nodes.PreviewImage().save_images(images, filename_prefix="PreviewBridge/PB-", prompt=prompt, extra_pnginfo=extra_pnginfo) + image2 = res['ui']['images'] + pixels = images + mask = torch.zeros((64, 64), dtype=torch.float32, device="cpu") + + path = os.path.join(folder_paths.get_temp_directory(), 'PreviewBridge', image2[0]['filename']) + core.set_previewbridge_image(unique_id, path, image2[0]) + core.preview_bridge_image_id_map[image] = (path, image2[0]) + core.preview_bridge_image_name_map[unique_id, path] = (image, image2[0]) + core.preview_bridge_cache[unique_id] = (images, image2) + + image = image2 + + return { + "ui": {"images": image}, + "result": (pixels, mask, ), + } + + +def decode_latent(latent, preview_method, vae_opt=None): + if vae_opt is not None: + image = nodes.VAEDecode().decode(vae_opt, latent)[0] + return image + + from comfy.cli_args import LatentPreviewMethod + import comfy.latent_formats as latent_formats + + if preview_method.startswith("TAE"): + decoder_name = None + + if preview_method == "TAESD15": + decoder_name = "taesd" + elif preview_method == 'TAESDXL': + decoder_name = "taesdxl" + elif preview_method == 'TAESD3': + decoder_name = "taesd3" + + if decoder_name: + vae = nodes.VAELoader().load_vae(decoder_name)[0] + image = nodes.VAEDecode().decode(vae, latent)[0] + return image + + if preview_method == "Latent2RGB-SD15": + latent_format = latent_formats.SD15() + method = LatentPreviewMethod.Latent2RGB + elif preview_method == "Latent2RGB-SDXL": + latent_format = latent_formats.SDXL() + method = LatentPreviewMethod.Latent2RGB + elif preview_method == "Latent2RGB-SD3": + latent_format = latent_formats.SD3() + method = LatentPreviewMethod.Latent2RGB + elif preview_method == "Latent2RGB-SD-X4": + latent_format = latent_formats.SD_X4() + method = LatentPreviewMethod.Latent2RGB + elif preview_method == "Latent2RGB-Playground-2.5": + latent_format = latent_formats.SDXL_Playground_2_5() + method = LatentPreviewMethod.Latent2RGB + elif preview_method == "Latent2RGB-SC-Prior": + latent_format = latent_formats.SC_Prior() + method = LatentPreviewMethod.Latent2RGB + elif preview_method == "Latent2RGB-SC-B": + latent_format = latent_formats.SC_B() + method = LatentPreviewMethod.Latent2RGB + elif preview_method == "Latent2RGB-FLUX.1": + latent_format = latent_formats.Flux() + method = LatentPreviewMethod.Latent2RGB + else: + print(f"[Impact Pack] PreviewBridgeLatent: '{preview_method}' is unsupported preview method.") + latent_format = latent_formats.SD15() + method = LatentPreviewMethod.Latent2RGB + + previewer = core.get_previewer("cpu", latent_format=latent_format, force=True, method=method) + samples = latent_format.process_in(latent['samples']) + + pil_image = previewer.decode_latent_to_preview(samples) + pixels_size = pil_image.size[0]*8, pil_image.size[1]*8 + 
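# decode_latent_to_preview returns a PIL image at latent resolution; SD-family latents are 8x downsampled, so multiplying by 8 recovers the approximate pixel dimensions. +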
resized_image = pil_image.resize(pixels_size, resample=LANCZOS) + + return to_tensor(resized_image).unsqueeze(0) + + +class PreviewBridgeLatent: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "latent": ("LATENT",), + "image": ("STRING", {"default": ""}), + "preview_method": (["Latent2RGB-SD3", "Latent2RGB-SDXL", "Latent2RGB-SD15", + "Latent2RGB-SD-X4", "Latent2RGB-Playground-2.5", + "Latent2RGB-SC-Prior", "Latent2RGB-SC-B", + "Latent2RGB-FLUX.1", + "TAESD3", "TAESDXL", "TAESD15"],), + }, + "optional": { + "vae_opt": ("VAE", ) + }, + "hidden": {"unique_id": "UNIQUE_ID", "prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + RETURN_TYPES = ("LATENT", "MASK", ) + + FUNCTION = "doit" + + OUTPUT_NODE = True + + CATEGORY = "ImpactPack/Util" + + def __init__(self): + super().__init__() + self.output_dir = folder_paths.get_temp_directory() + self.type = "temp" + self.prev_hash = None + self.prefix_append = "_temp_" + ''.join(random.choice("abcdefghijklmnopqrstupvxyz") for x in range(5)) + + @staticmethod + def load_image(pb_id): + is_fail = False + if pb_id not in core.preview_bridge_image_id_map: + is_fail = True + + image_path, ui_item = core.preview_bridge_image_id_map[pb_id] + + if not os.path.isfile(image_path): + is_fail = True + + if not is_fail: + i = Image.open(image_path) + i = ImageOps.exif_transpose(i) + image = i.convert("RGB") + image = np.array(image).astype(np.float32) / 255.0 + image = torch.from_numpy(image)[None,] + + if 'A' in i.getbands(): + mask = np.array(i.getchannel('A')).astype(np.float32) / 255.0 + mask = 1. - torch.from_numpy(mask) + else: + mask = None + else: + image = empty_pil_tensor() + mask = None + ui_item = { + "filename": 'empty.png', + "subfolder": '', + "type": 'temp' + } + + return image, mask, ui_item + + def doit(self, latent, image, preview_method, vae_opt=None, unique_id=None, prompt=None, extra_pnginfo=None): + latent_channels = latent['samples'].shape[1] + preview_method_channels = 16 if 'SD3' in preview_method or 'SC-Prior' in preview_method or 'FLUX.1' in preview_method else 4 + + if vae_opt is None and latent_channels != preview_method_channels: + print(f"[PreviewBridgeLatent] The version of latent is not compatible with preview_method.\nSD3, SD1/SD2, SDXL, SC-Prior, SC-B and FLUX.1 are not compatible with each other.") + raise Exception("The version of latent is not compatible with preview_method.
SD3, SD1/SD2, SDXL, SC-Prior, SC-B and FLUX.1 are not compatible with each other.") + + need_refresh = False + + if unique_id not in core.preview_bridge_cache: + need_refresh = True + + elif (core.preview_bridge_cache[unique_id][0] is not latent + or (vae_opt is None and core.preview_bridge_cache[unique_id][2] is not None) + or (vae_opt is None and core.preview_bridge_cache[unique_id][1] != preview_method) + or (vae_opt is not None and core.preview_bridge_cache[unique_id][2] is not vae_opt)): + need_refresh = True + + if not need_refresh: + pixels, mask, path_item = PreviewBridge.load_image(image) + + if mask is None: + mask = torch.ones(latent['samples'].shape[2:], dtype=torch.float32, device="cpu").unsqueeze(0) + if 'noise_mask' in latent: + res_latent = latent.copy() + del res_latent['noise_mask'] + else: + res_latent = latent + else: + res_latent = latent.copy() + res_latent['noise_mask'] = mask + + res_image = [path_item] + else: + decoded_image = decode_latent(latent, preview_method, vae_opt) + + if 'noise_mask' in latent: + mask = latent['noise_mask'].squeeze(0) # 4D mask -> 3D mask + + decoded_pil = to_pil(decoded_image) + + inverted_mask = 1 - mask # invert + resized_mask = resize_mask(inverted_mask, (decoded_image.shape[1], decoded_image.shape[2])) + result_pil = apply_mask_alpha_to_pil(decoded_pil, resized_mask) + + full_output_folder, filename, counter, _, _ = folder_paths.get_save_image_path("PreviewBridge/PBL-"+self.prefix_append, folder_paths.get_temp_directory(), result_pil.size[0], result_pil.size[1]) + file = f"{filename}_{counter}.png" + result_pil.save(os.path.join(full_output_folder, file), compress_level=4) + res_image = [{ + 'filename': file, + 'subfolder': 'PreviewBridge', + 'type': 'temp', + }] + else: + mask = torch.ones(latent['samples'].shape[2:], dtype=torch.float32, device="cpu").unsqueeze(0) + res = nodes.PreviewImage().save_images(decoded_image, filename_prefix="PreviewBridge/PBL-", prompt=prompt, extra_pnginfo=extra_pnginfo) + res_image = res['ui']['images'] + + path = os.path.join(folder_paths.get_temp_directory(), 'PreviewBridge', res_image[0]['filename']) + core.set_previewbridge_image(unique_id, path, res_image[0]) + core.preview_bridge_image_id_map[image] = (path, res_image[0]) + core.preview_bridge_image_name_map[unique_id, path] = (image, res_image[0]) + core.preview_bridge_cache[unique_id] = (latent, preview_method, vae_opt, res_image) + + res_latent = latent + + return { + "ui": {"images": res_image}, + "result": (res_latent, mask, ), + } diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/config.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/config.py new file mode 100644 index 0000000000000000000000000000000000000000..9a5939ff4db256ca375314e01a41a78bd24afe56 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/config.py @@ -0,0 +1,68 @@ +import configparser +import os + +version_code = [7, 0] +version = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '') + +dependency_version = 22 + +my_path = os.path.dirname(__file__) +old_config_path = os.path.join(my_path, "impact-pack.ini") +config_path = os.path.join(my_path, "..", "..", "impact-pack.ini") +latent_letter_path = os.path.join(my_path, "..", "..", "latent.png") + + +def write_config(): + config = configparser.ConfigParser() + config['default'] = { + 'dependency_version': str(dependency_version), + 'mmdet_skip': str(get_config()['mmdet_skip']), + 'sam_editor_cpu': 
str(get_config()['sam_editor_cpu']), + 'sam_editor_model': get_config()['sam_editor_model'], + 'custom_wildcards': get_config()['custom_wildcards'], + 'disable_gpu_opencv': get_config()['disable_gpu_opencv'], + } + with open(config_path, 'w') as configfile: + config.write(configfile) + + +def read_config(): + try: + config = configparser.ConfigParser() + config.read(config_path) + default_conf = config['default'] + + if not os.path.exists(default_conf['custom_wildcards']): + print(f"[WARN] ComfyUI-Impact-Pack: custom_wildcards path not found: {default_conf['custom_wildcards']}. Using default path.") + default_conf['custom_wildcards'] = os.path.join(my_path, "..", "..", "custom_wildcards") + + return { + 'dependency_version': int(default_conf['dependency_version']), + 'mmdet_skip': default_conf['mmdet_skip'].lower() == 'true' if 'mmdet_skip' in default_conf else True, + 'sam_editor_cpu': default_conf['sam_editor_cpu'].lower() == 'true' if 'sam_editor_cpu' in default_conf else False, + 'sam_editor_model': default_conf['sam_editor_model'].lower() if 'sam_editor_model' in default_conf else 'sam_vit_b_01ec64.pth', + 'custom_wildcards': default_conf['custom_wildcards'] if 'custom_wildcards' in default_conf else os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "custom_wildcards")), + 'disable_gpu_opencv': default_conf['disable_gpu_opencv'].lower() == 'true' if 'disable_gpu_opencv' in default_conf else True + } + + except Exception: + return { + 'dependency_version': 0, + 'mmdet_skip': True, + 'sam_editor_cpu': False, + 'sam_editor_model': 'sam_vit_b_01ec64.pth', + 'custom_wildcards': os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "custom_wildcards")), + 'disable_gpu_opencv': True + } + + +cached_config = None + + +def get_config(): + global cached_config + + if cached_config is None: + cached_config = read_config() + + return cached_config diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/core.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/core.py new file mode 100644 index 0000000000000000000000000000000000000000..cfd3ab73e4b8b8a3782416041cd3a00adb3df517 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/core.py @@ -0,0 +1,2186 @@ +import copy +import os +import warnings + +import numpy +import torch +from segment_anything import SamPredictor + +from comfy_extras.nodes_custom_sampler import Noise_RandomNoise +from impact.utils import * +from collections import namedtuple +import numpy as np +from skimage.measure import label + +import nodes +import comfy_extras.nodes_upscale_model as model_upscale +from server import PromptServer +import comfy +import impact.wildcards as wildcards +import math +import cv2 +import time +from comfy import model_management +from impact import utils +from impact import impact_sampling +from concurrent.futures import ThreadPoolExecutor + +try: + from comfy_extras import nodes_differential_diffusion +except Exception: + print(f"\n#############################################\n[Impact Pack] ComfyUI is an outdated version.\n#############################################\n") + raise Exception("[Impact Pack] ComfyUI is an outdated version.") + + +SEG = namedtuple("SEG", + ['cropped_image', 'cropped_mask', 'confidence', 'crop_region', 'bbox', 'label', 'control_net_wrapper'], + defaults=[None]) + +pb_id_cnt = time.time() +preview_bridge_image_id_map = {} +preview_bridge_image_name_map = {} +preview_bridge_cache = {} +current_prompt = None + +SCHEDULERS =
comfy.samplers.KSampler.SCHEDULERS + ['AYS SDXL', 'AYS SD1', 'AYS SVD', 'GITS[coeff=1.2]'] + + +def is_execution_model_version_supported(): + try: + import comfy_execution + return True + except: + return False + + +def set_previewbridge_image(node_id, file, item): + global pb_id_cnt + + if (node_id, file) in preview_bridge_image_name_map: + pb_id, _ = preview_bridge_image_name_map[node_id, file] + if pb_id.startswith(f"${node_id}"): + return pb_id + + pb_id = f"${node_id}-{pb_id_cnt}" + preview_bridge_image_id_map[pb_id] = (file, item) + preview_bridge_image_name_map[node_id, file] = (pb_id, item) + pb_id_cnt += 1 + + return pb_id + + +def erosion_mask(mask, grow_mask_by): + mask = make_2d_mask(mask) + + w = mask.shape[1] + h = mask.shape[0] + + device = comfy.model_management.get_torch_device() + mask = mask.clone().to(device) + mask2 = torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(h, w), mode="bilinear").to(device)  # F.interpolate expects size=(H, W) + if grow_mask_by == 0: + mask_erosion = mask2 + else: + kernel_tensor = torch.ones((1, 1, grow_mask_by, grow_mask_by)).to(device) + padding = math.ceil((grow_mask_by - 1) / 2) + + mask_erosion = torch.clamp(torch.nn.functional.conv2d(mask2.round(), kernel_tensor, padding=padding), 0, 1) + + return mask_erosion[:, :, :h, :w].round().cpu() + + +# CREDIT: https://github.com/BlenderNeko/ComfyUI_Noise/blob/afb14757216257b12268c91845eac248727a55e2/nodes.py#L68 +# https://discuss.pytorch.org/t/help-regarding-slerp-function-for-generative-model-sampling/32475/3 +def slerp(val, low, high): + dims = low.shape + + low = low.reshape(dims[0], -1) + high = high.reshape(dims[0], -1) + + low_norm = low/torch.norm(low, dim=1, keepdim=True) + high_norm = high/torch.norm(high, dim=1, keepdim=True) + + low_norm[low_norm != low_norm] = 0.0 + high_norm[high_norm != high_norm] = 0.0 + + omega = torch.acos((low_norm*high_norm).sum(1)) + so = torch.sin(omega) + res = (torch.sin((1.0-val)*omega)/so).unsqueeze(1)*low + (torch.sin(val*omega)/so).unsqueeze(1) * high + + return res.reshape(dims) + + +def mix_noise(from_noise, to_noise, strength, variation_method): + if variation_method == 'slerp': + mixed_noise = slerp(strength, from_noise, to_noise) + else: + # linear + mixed_noise = (1 - strength) * from_noise + strength * to_noise + + # NOTE: Since the variance of the Gaussian noise in mixed_noise has changed, it must be corrected through scaling.
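+ # For the linear mix of two independent unit-variance noises, Var[(1-s)*X + s*Y] = (1-s)^2 + s^2,
+ # so dividing by sqrt((1-s)^2 + s^2) restores unit variance
+ # (e.g. strength=0.5 gives variance 0.5, hence a correction factor of sqrt(0.5) ~= 0.707).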
+ scale_factor = math.sqrt((1 - strength) ** 2 + strength ** 2) + mixed_noise /= scale_factor + + return mixed_noise + + +class REGIONAL_PROMPT: + def __init__(self, mask, sampler, variation_seed=0, variation_strength=0.0, variation_method='linear'): + mask = make_2d_mask(mask) + + self.mask = mask + self.sampler = sampler + self.mask_erosion = None + self.erosion_factor = None + self.variation_seed = variation_seed + self.variation_strength = variation_strength + self.variation_method = variation_method + + def clone_with_sampler(self, sampler): + rp = REGIONAL_PROMPT(self.mask, sampler) + rp.mask_erosion = self.mask_erosion + rp.erosion_factor = self.erosion_factor + rp.variation_seed = self.variation_seed + rp.variation_strength = self.variation_strength + rp.variation_method = self.variation_method + return rp + + def get_mask_erosion(self, factor): + if self.mask_erosion is None or self.erosion_factor != factor: + self.mask_erosion = erosion_mask(self.mask, factor) + self.erosion_factor = factor + + return self.mask_erosion + + def touch_noise(self, noise): + if self.variation_strength > 0.0: + mask = utils.make_3d_mask(self.mask) + mask = utils.resize_mask(mask, (noise.shape[2], noise.shape[3])).unsqueeze(0) + + regional_noise = Noise_RandomNoise(self.variation_seed).generate_noise({'samples': noise}) + mixed_noise = mix_noise(noise, regional_noise, self.variation_strength, variation_method=self.variation_method) + + return (mask == 1).float() * mixed_noise + (mask == 0).float() * noise + + return noise + + +class NO_BBOX_DETECTOR: + pass + + +class NO_SEGM_DETECTOR: + pass + + +def create_segmasks(results): + bboxs = results[1] + segms = results[2] + confidence = results[3] + + results = [] + for i in range(len(segms)): + item = (bboxs[i], segms[i].astype(np.float32), confidence[i]) + results.append(item) + return results + + +def gen_detection_hints_from_mask_area(x, y, mask, threshold, use_negative): + mask = make_2d_mask(mask) + + points = [] + plabs = [] + + # minimum sampling step >= 3 + y_step = max(3, int(mask.shape[0] / 20)) + x_step = max(3, int(mask.shape[1] / 20)) + + for i in range(0, len(mask), y_step): + for j in range(0, len(mask[i]), x_step): + if mask[i][j] > threshold: + points.append((x + j, y + i)) + plabs.append(1) + elif use_negative and mask[i][j] == 0: + points.append((x + j, y + i)) + plabs.append(0) + + return points, plabs + + +def gen_negative_hints(w, h, x1, y1, x2, y2): + npoints = [] + nplabs = [] + + # minimum sampling step >= 3 + y_step = max(3, int(w / 20)) + x_step = max(3, int(h / 20)) + + for i in range(10, h - 10, y_step): + for j in range(10, w - 10, x_step): + if not (x1 - 10 <= j and j <= x2 + 10 and y1 - 10 <= i and i <= y2 + 10): + npoints.append((j, i)) + nplabs.append(0) + + return npoints, nplabs + + +def enhance_detail(image, model, clip, vae, guide_size, guide_size_for_bbox, max_size, bbox, seed, steps, cfg, + sampler_name, + scheduler, positive, negative, denoise, noise_mask, force_inpaint, + wildcard_opt=None, wildcard_opt_concat_mode=None, + detailer_hook=None, + refiner_ratio=None, refiner_model=None, refiner_clip=None, refiner_positive=None, + refiner_negative=None, control_net_wrapper=None, cycle=1, + inpaint_model=False, noise_mask_feather=0, scheduler_func=None): + + if noise_mask is not None: + noise_mask = utils.tensor_gaussian_blur_mask(noise_mask, noise_mask_feather) + noise_mask = noise_mask.squeeze(3) + + if noise_mask_feather > 0: + model = nodes_differential_diffusion.DifferentialDiffusion().apply(model)[0] + + if 
wildcard_opt is not None and wildcard_opt != "": + model, _, wildcard_positive = wildcards.process_with_loras(wildcard_opt, model, clip) + + if wildcard_opt_concat_mode == "concat": + positive = nodes.ConditioningConcat().concat(positive, wildcard_positive)[0] + else: + positive = wildcard_positive + positive = [positive[0].copy()] + if 'pooled_output' in wildcard_positive[0][1]: + positive[0][1]['pooled_output'] = wildcard_positive[0][1]['pooled_output'] + elif 'pooled_output' in positive[0][1]: + del positive[0][1]['pooled_output'] + + h = image.shape[1] + w = image.shape[2] + + bbox_h = bbox[3] - bbox[1] + bbox_w = bbox[2] - bbox[0] + + # Skip processing if the detected bbox is already larger than the guide_size + if not force_inpaint and bbox_h >= guide_size and bbox_w >= guide_size: + print(f"Detailer: segment skip (enough big)") + return None, None + + if guide_size_for_bbox: # == "bbox" + # Scale up based on the smaller dimension between width and height. + upscale = guide_size / min(bbox_w, bbox_h) + else: + # for cropped_size + upscale = guide_size / min(w, h) + + new_w = int(w * upscale) + new_h = int(h * upscale) + + # safeguard + if 'aitemplate_keep_loaded' in model.model_options: + max_size = min(4096, max_size) + + if new_w > max_size or new_h > max_size: + upscale *= max_size / max(new_w, new_h) + new_w = int(w * upscale) + new_h = int(h * upscale) + + if not force_inpaint: + if upscale <= 1.0: + print(f"Detailer: segment skip [determined upscale factor={upscale}]") + return None, None + + if new_w == 0 or new_h == 0: + print(f"Detailer: segment skip [zero size={new_w, new_h}]") + return None, None + else: + if upscale <= 1.0 or new_w == 0 or new_h == 0: + print(f"Detailer: force inpaint") + upscale = 1.0 + new_w = w + new_h = h + + if detailer_hook is not None: + new_w, new_h = detailer_hook.touch_scaled_size(new_w, new_h) + + print(f"Detailer: segment upscale for ({bbox_w, bbox_h}) | crop region {w, h} x {upscale} -> {new_w, new_h}") + + # upscale + upscaled_image = tensor_resize(image, new_w, new_h) + + cnet_pils = None + if control_net_wrapper is not None: + positive, negative, cnet_pils = control_net_wrapper.apply(positive, negative, upscaled_image, noise_mask) + model, cnet_pils2 = control_net_wrapper.doit_ipadapter(model) + cnet_pils.extend(cnet_pils2) + + # prepare mask + if noise_mask is not None and inpaint_model: + positive, negative, latent_image = nodes.InpaintModelConditioning().encode(positive, negative, upscaled_image, vae, noise_mask) + else: + latent_image = to_latent_image(upscaled_image, vae) + if noise_mask is not None: + latent_image['noise_mask'] = noise_mask + + if detailer_hook is not None: + latent_image = detailer_hook.post_encode(latent_image) + + refined_latent = latent_image + + # ksampler + for i in range(0, cycle): + if detailer_hook is not None: + if detailer_hook is not None: + detailer_hook.set_steps((i, cycle)) + + refined_latent = detailer_hook.cycle_latent(refined_latent) + + model2, seed2, steps2, cfg2, sampler_name2, scheduler2, positive2, negative2, upscaled_latent2, denoise2 = \ + detailer_hook.pre_ksample(model, seed+i, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise) + noise, is_touched = detailer_hook.get_custom_noise(seed+i, torch.zeros(latent_image['samples'].size()), is_touched=False) + if not is_touched: + noise = None + else: + model2, seed2, steps2, cfg2, sampler_name2, scheduler2, positive2, negative2, upscaled_latent2, denoise2 = \ + model, seed + i, steps, cfg, sampler_name, scheduler, 
positive, negative, latent_image, denoise + noise = None + + refined_latent = impact_sampling.ksampler_wrapper(model2, seed2, steps2, cfg2, sampler_name2, scheduler2, positive2, negative2, + refined_latent, denoise2, refiner_ratio, refiner_model, refiner_clip, refiner_positive, refiner_negative, + noise=noise, scheduler_func=scheduler_func) + + if detailer_hook is not None: + refined_latent = detailer_hook.pre_decode(refined_latent) + + # non-latent downscale - latent downscale cause bad quality + try: + # try to decode image normally + refined_image = vae.decode(refined_latent['samples']) + except Exception as e: + #usually an out-of-memory exception from the decode, so try a tiled approach + refined_image = vae.decode_tiled(refined_latent["samples"], tile_x=64, tile_y=64, ) + + if detailer_hook is not None: + refined_image = detailer_hook.post_decode(refined_image) + + # downscale + refined_image = tensor_resize(refined_image, w, h) + + # prevent mixing of device + refined_image = refined_image.cpu() + + # don't convert to latent - latent break image + # preserving pil is much better + return refined_image, cnet_pils + + +def enhance_detail_for_animatediff(image_frames, model, clip, vae, guide_size, guide_size_for_bbox, max_size, bbox, seed, steps, cfg, + sampler_name, + scheduler, positive, negative, denoise, noise_mask, + wildcard_opt=None, wildcard_opt_concat_mode=None, + detailer_hook=None, + refiner_ratio=None, refiner_model=None, refiner_clip=None, refiner_positive=None, + refiner_negative=None, control_net_wrapper=None, noise_mask_feather=0, scheduler_func=None): + if noise_mask is not None: + noise_mask = utils.tensor_gaussian_blur_mask(noise_mask, noise_mask_feather) + noise_mask = noise_mask.squeeze(3) + + if noise_mask_feather > 0: + model = nodes_differential_diffusion.DifferentialDiffusion().apply(model)[0] + + if wildcard_opt is not None and wildcard_opt != "": + model, _, wildcard_positive = wildcards.process_with_loras(wildcard_opt, model, clip) + + if wildcard_opt_concat_mode == "concat": + positive = nodes.ConditioningConcat().concat(positive, wildcard_positive)[0] + else: + positive = wildcard_positive + + h = image_frames.shape[1] + w = image_frames.shape[2] + + bbox_h = bbox[3] - bbox[1] + bbox_w = bbox[2] - bbox[0] + + # Skip processing if the detected bbox is already larger than the guide_size + if guide_size_for_bbox: # == "bbox" + # Scale up based on the smaller dimension between width and height. 
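+ # e.g. guide_size=512 with a 200x300 bbox yields upscale = 512/200 = 2.56,
+ # bringing the short side of the detected region up to guide_size.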
+ upscale = guide_size / min(bbox_w, bbox_h) + else: + # for cropped_size + upscale = guide_size / min(w, h) + + new_w = int(w * upscale) + new_h = int(h * upscale) + + # safeguard + if 'aitemplate_keep_loaded' in model.model_options: + max_size = min(4096, max_size) + + if new_w > max_size or new_h > max_size: + upscale *= max_size / max(new_w, new_h) + new_w = int(w * upscale) + new_h = int(h * upscale) + + if upscale <= 1.0 or new_w == 0 or new_h == 0: + print(f"Detailer: force inpaint") + upscale = 1.0 + new_w = w + new_h = h + + if detailer_hook is not None: + new_w, new_h = detailer_hook.touch_scaled_size(new_w, new_h) + + print(f"Detailer: segment upscale for ({bbox_w, bbox_h}) | crop region {w, h} x {upscale} -> {new_w, new_h}") + + # upscale the mask tensor by a factor of 2 using bilinear interpolation + if isinstance(noise_mask, np.ndarray): + noise_mask = torch.from_numpy(noise_mask) + + if len(noise_mask.shape) == 2: + noise_mask = noise_mask.unsqueeze(0) + else: # == 3 + noise_mask = noise_mask + + upscaled_mask = None + + for single_mask in noise_mask: + single_mask = single_mask.unsqueeze(0).unsqueeze(0) + upscaled_single_mask = torch.nn.functional.interpolate(single_mask, size=(new_h, new_w), mode='bilinear', align_corners=False) + upscaled_single_mask = upscaled_single_mask.squeeze(0) + + if upscaled_mask is None: + upscaled_mask = upscaled_single_mask + else: + upscaled_mask = torch.cat((upscaled_mask, upscaled_single_mask), dim=0) + + latent_frames = None + for image in image_frames: + image = torch.from_numpy(image).unsqueeze(0) + + # upscale + upscaled_image = tensor_resize(image, new_w, new_h) + + # ksampler + samples = to_latent_image(upscaled_image, vae)['samples'] + + if latent_frames is None: + latent_frames = samples + else: + latent_frames = torch.concat((latent_frames, samples), dim=0) + + cnet_images = None + if control_net_wrapper is not None: + positive, negative, cnet_images = control_net_wrapper.apply(positive, negative, torch.from_numpy(image_frames), noise_mask, use_acn=True) + + if len(upscaled_mask) != len(image_frames) and len(upscaled_mask) > 1: + print(f"[Impact Pack] WARN: DetailerForAnimateDiff - The number of the mask frames({len(upscaled_mask)}) and the image frames({len(image_frames)}) are different. 
Combine the mask frames and apply.") + combined_mask = upscaled_mask[0].to(torch.uint8) + + for frame_mask in upscaled_mask[1:]: + combined_mask |= (frame_mask * 255).to(torch.uint8) + + combined_mask = (combined_mask/255.0).to(torch.float32) + + upscaled_mask = combined_mask.expand(len(image_frames), -1, -1) + upscaled_mask = utils.to_binary_mask(upscaled_mask, 0.1) + + latent = { + 'noise_mask': upscaled_mask, + 'samples': latent_frames + } + + if detailer_hook is not None: + latent = detailer_hook.post_encode(latent) + + refined_latent = impact_sampling.ksampler_wrapper(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, + latent, denoise, refiner_ratio, refiner_model, refiner_clip, refiner_positive, refiner_negative, scheduler_func=scheduler_func) + + if detailer_hook is not None: + refined_latent = detailer_hook.pre_decode(refined_latent) + + refined_image_frames = None + for refined_sample in refined_latent['samples']: + refined_sample = refined_sample.unsqueeze(0) + + # non-latent downscale - latent downscale cause bad quality + refined_image = vae.decode(refined_sample) + + if refined_image_frames is None: + refined_image_frames = refined_image + else: + refined_image_frames = torch.concat((refined_image_frames, refined_image), dim=0) + + if detailer_hook is not None: + refined_image_frames = detailer_hook.post_decode(refined_image_frames) + + refined_image_frames = nodes.ImageScale().upscale(image=refined_image_frames, upscale_method='lanczos', width=w, height=h, crop='disabled')[0] + + return refined_image_frames, cnet_images + + +def composite_to(dest_latent, crop_region, src_latent): + x1 = crop_region[0] + y1 = crop_region[1] + + # composite to original latent + lc = nodes.LatentComposite() + orig_image = lc.composite(dest_latent, src_latent, x1, y1) + + return orig_image[0] + + +def sam_predict(predictor, points, plabs, bbox, threshold): + point_coords = None if not points else np.array(points) + point_labels = None if not plabs else np.array(plabs) + + box = np.array([bbox]) if bbox is not None else None + + cur_masks, scores, _ = predictor.predict(point_coords=point_coords, point_labels=point_labels, box=box) + + total_masks = [] + + selected = False + max_score = 0 + max_mask = None + for idx in range(len(scores)): + if scores[idx] > max_score: + max_score = scores[idx] + max_mask = cur_masks[idx] + + if scores[idx] >= threshold: + selected = True + total_masks.append(cur_masks[idx]) + else: + pass + + if not selected and max_mask is not None: + total_masks.append(max_mask) + + return total_masks + + +class SAMWrapper: + def __init__(self, model, is_auto_mode, safe_to_gpu=None): + self.model = model + self.safe_to_gpu = safe_to_gpu if safe_to_gpu is not None else SafeToGPU_stub() + self.is_auto_mode = is_auto_mode + + def prepare_device(self): + if self.is_auto_mode: + device = comfy.model_management.get_torch_device() + self.safe_to_gpu.to_device(self.model, device=device) + + def release_device(self): + if self.is_auto_mode: + self.model.to(device="cpu") + + def predict(self, image, points, plabs, bbox, threshold): + predictor = SamPredictor(self.model) + predictor.set_image(image, "RGB") + + return sam_predict(predictor, points, plabs, bbox, threshold) + + +class ESAMWrapper: + def __init__(self, model, device): + self.model = model + self.func_inference = nodes.NODE_CLASS_MAPPINGS['Yoloworld_ESAM_Zho'] + self.device = device + + def prepare_device(self): + pass + + def release_device(self): + pass + + def predict(self, image, points, plabs, bbox, 
threshold): + if self.device == 'CPU': + self.device = 'cpu' + else: + self.device = 'cuda' + + detected_masks = self.func_inference.inference_sam_with_boxes(image=image, xyxy=[bbox], model=self.model, device=self.device) + return [detected_masks.squeeze(0)] + + +def make_sam_mask(sam, segs, image, detection_hint, dilation, + threshold, bbox_expansion, mask_hint_threshold, mask_hint_use_negative): + + if not hasattr(sam, 'sam_wrapper'): + raise Exception("[Impact Pack] Invalid SAMLoader is connected. Make sure 'SAMLoader (Impact)'.\nKnown issue: The ComfyUI-YOLO node overrides the SAMLoader (Impact), making it unusable. You need to uninstall ComfyUI-YOLO.\n\n\n") + + sam_obj = sam.sam_wrapper + sam_obj.prepare_device() + + try: + image = np.clip(255. * image.cpu().numpy().squeeze(), 0, 255).astype(np.uint8) + + total_masks = [] + + use_small_negative = mask_hint_use_negative == "Small" + + # seg_shape = segs[0] + segs = segs[1] + if detection_hint == "mask-points": + points = [] + plabs = [] + + for i in range(len(segs)): + bbox = segs[i].bbox + center = center_of_bbox(segs[i].bbox) + points.append(center) + + # small point is background, big point is foreground + if use_small_negative and bbox[2] - bbox[0] < 10: + plabs.append(0) + else: + plabs.append(1) + + detected_masks = sam_obj.predict(image, points, plabs, None, threshold) + total_masks += detected_masks + + else: + for i in range(len(segs)): + bbox = segs[i].bbox + center = center_of_bbox(bbox) + + x1 = max(bbox[0] - bbox_expansion, 0) + y1 = max(bbox[1] - bbox_expansion, 0) + x2 = min(bbox[2] + bbox_expansion, image.shape[1]) + y2 = min(bbox[3] + bbox_expansion, image.shape[0]) + + dilated_bbox = [x1, y1, x2, y2] + + points = [] + plabs = [] + if detection_hint == "center-1": + points.append(center) + plabs = [1] # 1 = foreground point, 0 = background point + + elif detection_hint == "horizontal-2": + gap = (x2 - x1) / 3 + points.append((x1 + gap, center[1])) + points.append((x1 + gap * 2, center[1])) + plabs = [1, 1] + + elif detection_hint == "vertical-2": + gap = (y2 - y1) / 3 + points.append((center[0], y1 + gap)) + points.append((center[0], y1 + gap * 2)) + plabs = [1, 1] + + elif detection_hint == "rect-4": + x_gap = (x2 - x1) / 3 + y_gap = (y2 - y1) / 3 + points.append((x1 + x_gap, center[1])) + points.append((x1 + x_gap * 2, center[1])) + points.append((center[0], y1 + y_gap)) + points.append((center[0], y1 + y_gap * 2)) + plabs = [1, 1, 1, 1] + + elif detection_hint == "diamond-4": + x_gap = (x2 - x1) / 3 + y_gap = (y2 - y1) / 3 + points.append((x1 + x_gap, y1 + y_gap)) + points.append((x1 + x_gap * 2, y1 + y_gap)) + points.append((x1 + x_gap, y1 + y_gap * 2)) + points.append((x1 + x_gap * 2, y1 + y_gap * 2)) + plabs = [1, 1, 1, 1] + + elif detection_hint == "mask-point-bbox": + center = center_of_bbox(segs[i].bbox) + points.append(center) + plabs = [1] + + elif detection_hint == "mask-area": + points, plabs = gen_detection_hints_from_mask_area(segs[i].crop_region[0], segs[i].crop_region[1], + segs[i].cropped_mask, + mask_hint_threshold, use_small_negative) + + if mask_hint_use_negative == "Outter": + npoints, nplabs = gen_negative_hints(image.shape[0], image.shape[1], + segs[i].crop_region[0], segs[i].crop_region[1], + segs[i].crop_region[2], segs[i].crop_region[3]) + + points += npoints + plabs += nplabs + + detected_masks = sam_obj.predict(image, points, plabs, dilated_bbox, threshold) + total_masks += detected_masks + + # merge every collected masks + mask = combine_masks2(total_masks) + + finally: + 
sam_obj.release_device() + + if mask is not None: + mask = mask.float() + mask = dilate_mask(mask.cpu().numpy(), dilation) + mask = torch.from_numpy(mask) + else: + size = image.shape[0], image.shape[1] + mask = torch.zeros(size, dtype=torch.float32, device="cpu") # empty mask + + mask = utils.make_3d_mask(mask) + return mask + + +def generate_detection_hints(image, seg, center, detection_hint, dilated_bbox, mask_hint_threshold, use_small_negative, + mask_hint_use_negative): + [x1, y1, x2, y2] = dilated_bbox + + points = [] + plabs = [] + if detection_hint == "center-1": + points.append(center) + plabs = [1] # 1 = foreground point, 0 = background point + + elif detection_hint == "horizontal-2": + gap = (x2 - x1) / 3 + points.append((x1 + gap, center[1])) + points.append((x1 + gap * 2, center[1])) + plabs = [1, 1] + + elif detection_hint == "vertical-2": + gap = (y2 - y1) / 3 + points.append((center[0], y1 + gap)) + points.append((center[0], y1 + gap * 2)) + plabs = [1, 1] + + elif detection_hint == "rect-4": + x_gap = (x2 - x1) / 3 + y_gap = (y2 - y1) / 3 + points.append((x1 + x_gap, center[1])) + points.append((x1 + x_gap * 2, center[1])) + points.append((center[0], y1 + y_gap)) + points.append((center[0], y1 + y_gap * 2)) + plabs = [1, 1, 1, 1] + + elif detection_hint == "diamond-4": + x_gap = (x2 - x1) / 3 + y_gap = (y2 - y1) / 3 + points.append((x1 + x_gap, y1 + y_gap)) + points.append((x1 + x_gap * 2, y1 + y_gap)) + points.append((x1 + x_gap, y1 + y_gap * 2)) + points.append((x1 + x_gap * 2, y1 + y_gap * 2)) + plabs = [1, 1, 1, 1] + + elif detection_hint == "mask-point-bbox": + center = center_of_bbox(seg.bbox) + points.append(center) + plabs = [1] + + elif detection_hint == "mask-area": + points, plabs = gen_detection_hints_from_mask_area(seg.crop_region[0], seg.crop_region[1], + seg.cropped_mask, + mask_hint_threshold, use_small_negative) + + if mask_hint_use_negative == "Outter": + npoints, nplabs = gen_negative_hints(image.shape[0], image.shape[1], + seg.crop_region[0], seg.crop_region[1], + seg.crop_region[2], seg.crop_region[3]) + + points += npoints + plabs += nplabs + + return points, plabs + + +def convert_and_stack_masks(masks): + if len(masks) == 0: + return None + + mask_tensors = [] + for mask in masks: + mask_array = np.array(mask, dtype=np.uint8) + mask_tensor = torch.from_numpy(mask_array) + mask_tensors.append(mask_tensor) + + stacked_masks = torch.stack(mask_tensors, dim=0) + stacked_masks = stacked_masks.unsqueeze(1) + + return stacked_masks + + +def merge_and_stack_masks(stacked_masks, group_size): + if stacked_masks is None: + return None + + num_masks = stacked_masks.size(0) + merged_masks = [] + + for i in range(0, num_masks, group_size): + subset_masks = stacked_masks[i:i + group_size] + merged_mask = torch.any(subset_masks, dim=0) + merged_masks.append(merged_mask) + + if len(merged_masks) > 0: + merged_masks = torch.stack(merged_masks, dim=0) + + return merged_masks + + +def segs_scale_match(segs, target_shape): + h = segs[0][0] + w = segs[0][1] + + th = target_shape[1] + tw = target_shape[2] + + if (h == th and w == tw) or h == 0 or w == 0: + return segs + + rh = th / h + rw = tw / w + + new_segs = [] + for seg in segs[1]: + cropped_image = seg.cropped_image + cropped_mask = seg.cropped_mask + x1, y1, x2, y2 = seg.crop_region + bx1, by1, bx2, by2 = seg.bbox + + # x coordinates scale by the width ratio (rw), y coordinates by the height ratio (rh) + crop_region = int(x1*rw), int(y1*rh), int(x2*rw), int(y2*rh) + bbox = int(bx1*rw), int(by1*rh), int(bx2*rw), int(by2*rh) + new_w = crop_region[2] - crop_region[0] + new_h = crop_region[3] - 
crop_region[1] + + if isinstance(cropped_mask, np.ndarray): + cropped_mask = torch.from_numpy(cropped_mask) + + if isinstance(cropped_mask, torch.Tensor) and len(cropped_mask.shape) == 3: + cropped_mask = torch.nn.functional.interpolate(cropped_mask.unsqueeze(0), size=(new_h, new_w), mode='bilinear', align_corners=False) + cropped_mask = cropped_mask.squeeze(0) + else: + cropped_mask = torch.nn.functional.interpolate(cropped_mask.unsqueeze(0).unsqueeze(0), size=(new_h, new_w), mode='bilinear', align_corners=False) + cropped_mask = cropped_mask.squeeze(0).squeeze(0).numpy() + + if cropped_image is not None: + cropped_image = tensor_resize(cropped_image if isinstance(cropped_image, torch.Tensor) else torch.from_numpy(cropped_image), new_w, new_h) + cropped_image = cropped_image.numpy() + + new_seg = SEG(cropped_image, cropped_mask, seg.confidence, crop_region, bbox, seg.label, seg.control_net_wrapper) + new_segs.append(new_seg) + + return (th, tw), new_segs + + +# Uses Python's slicing: stacked_masks[2::3] starts at index 2 and selects every third tensor. +# This quickly yields the last tensor of each group of three in stacked_masks. +def every_three_pick_last(stacked_masks): + selected_masks = stacked_masks[2::3] + return selected_masks + + +def make_sam_mask_segmented(sam, segs, image, detection_hint, dilation, + threshold, bbox_expansion, mask_hint_threshold, mask_hint_use_negative): + + if not hasattr(sam, 'sam_wrapper'): + raise Exception("[Impact Pack] Invalid SAMLoader is connected. Make sure to use 'SAMLoader (Impact)'.") + + sam_obj = sam.sam_wrapper + sam_obj.prepare_device() + + try: + image = np.clip(255. * image.cpu().numpy().squeeze(), 0, 255).astype(np.uint8) + + total_masks = [] + + use_small_negative = mask_hint_use_negative == "Small" + + # seg_shape = segs[0] + segs = segs[1] + if detection_hint == "mask-points": + points = [] + plabs = [] + + for i in range(len(segs)): + bbox = segs[i].bbox + center = center_of_bbox(bbox) + points.append(center) + + # a point on a small bbox is a background hint; on a large bbox, a foreground hint + if use_small_negative and bbox[2] - bbox[0] < 10: + plabs.append(0) + else: + plabs.append(1) + + detected_masks = sam_obj.predict(image, points, plabs, None, threshold) + total_masks += detected_masks + + else: + for i in range(len(segs)): + bbox = segs[i].bbox + center = center_of_bbox(bbox) + x1 = max(bbox[0] - bbox_expansion, 0) + y1 = max(bbox[1] - bbox_expansion, 0) + x2 = min(bbox[2] + bbox_expansion, image.shape[1]) + y2 = min(bbox[3] + bbox_expansion, image.shape[0]) + + dilated_bbox = [x1, y1, x2, y2] + + points, plabs = generate_detection_hints(image, segs[i], center, detection_hint, dilated_bbox, + mask_hint_threshold, use_small_negative, + mask_hint_use_negative) + + detected_masks = sam_obj.predict(image, points, plabs, dilated_bbox, threshold) + + total_masks += detected_masks + + # merge all collected masks + mask = combine_masks2(total_masks) + + finally: + sam_obj.release_device() + + mask_working_device = torch.device("cpu") + + if mask is not None: + mask = mask.float() + mask = dilate_mask(mask.cpu().numpy(), dilation) + mask = torch.from_numpy(mask) + mask = mask.to(device=mask_working_device) + else: + # Extract height and width (image is an HWC array here) + height, width, _ = image.shape + mask = torch.zeros( + (height, width), dtype=torch.float32, device=mask_working_device + ) # empty mask + + stacked_masks = convert_and_stack_masks(total_masks) + + return (mask, merge_and_stack_masks(stacked_masks, group_size=3)) + # 
return every_three_pick_last(stacked_masks) + + +def segs_bitwise_and_mask(segs, mask): + mask = make_2d_mask(mask) + + if mask is None: + print("[SegsBitwiseAndMask] Cannot operate: MASK is empty.") + return ([],) + + items = [] + + mask = (mask.cpu().numpy() * 255).astype(np.uint8) + + for seg in segs[1]: + cropped_mask = (seg.cropped_mask * 255).astype(np.uint8) + crop_region = seg.crop_region + + cropped_mask2 = mask[crop_region[1]:crop_region[3], crop_region[0]:crop_region[2]] + + new_mask = np.bitwise_and(cropped_mask.astype(np.uint8), cropped_mask2) + new_mask = new_mask.astype(np.float32) / 255.0 + + item = SEG(seg.cropped_image, new_mask, seg.confidence, seg.crop_region, seg.bbox, seg.label, None) + items.append(item) + + return segs[0], items + + +def segs_bitwise_subtract_mask(segs, mask): + mask = make_2d_mask(mask) + + if mask is None: + print("[SegsBitwiseSubtractMask] Cannot operate: MASK is empty.") + return ([],) + + items = [] + + mask = (mask.cpu().numpy() * 255).astype(np.uint8) + + for seg in segs[1]: + cropped_mask = (seg.cropped_mask * 255).astype(np.uint8) + crop_region = seg.crop_region + + cropped_mask2 = mask[crop_region[1]:crop_region[3], crop_region[0]:crop_region[2]] + + new_mask = cv2.subtract(cropped_mask.astype(np.uint8), cropped_mask2) + new_mask = new_mask.astype(np.float32) / 255.0 + + item = SEG(seg.cropped_image, new_mask, seg.confidence, seg.crop_region, seg.bbox, seg.label, None) + items.append(item) + + return segs[0], items + + +def apply_mask_to_each_seg(segs, masks): + if masks is None: + print("[apply_mask_to_each_seg] Cannot operate: MASK is empty.") + return (segs[0], [],) + + items = [] + + masks = masks.squeeze(1) + + for seg, mask in zip(segs[1], masks): + cropped_mask = (seg.cropped_mask * 255).astype(np.uint8) + crop_region = seg.crop_region + + cropped_mask2 = (mask.cpu().numpy() * 255).astype(np.uint8) + cropped_mask2 = cropped_mask2[crop_region[1]:crop_region[3], crop_region[0]:crop_region[2]] + + new_mask = np.bitwise_and(cropped_mask.astype(np.uint8), cropped_mask2) + new_mask = new_mask.astype(np.float32) / 255.0 + + item = SEG(seg.cropped_image, new_mask, seg.confidence, seg.crop_region, seg.bbox, seg.label, None) + items.append(item) + + return segs[0], items + + +def dilate_segs(segs, factor): + if factor == 0: + return segs + + new_segs = [] + for seg in segs[1]: + new_mask = dilate_mask(seg.cropped_mask, factor) + new_seg = SEG(seg.cropped_image, new_mask, seg.confidence, seg.crop_region, seg.bbox, seg.label, seg.control_net_wrapper) + new_segs.append(new_seg) + + return (segs[0], new_segs) + + +class ONNXDetector: + onnx_model = None + + def __init__(self, onnx_model): + self.onnx_model = onnx_model + + def detect(self, image, threshold, dilation, crop_factor, drop_size=1, detailer_hook=None): + drop_size = max(drop_size, 1) + try: + import impact.onnx as onnx + + h = image.shape[1] + w = image.shape[2] + + labels, scores, boxes = onnx.onnx_inference(image, self.onnx_model) + + # collect feasible items + result = [] + + for i in range(len(labels)): + if scores[i] > threshold: + item_bbox = boxes[i] + x1, y1, x2, y2 = item_bbox + + if x2 - x1 > drop_size and y2 - y1 > drop_size: # minimum dimension must be (2,2) to avoid squeeze issue + crop_region = make_crop_region(w, h, item_bbox, crop_factor) + + if detailer_hook is not None: + crop_region = detailer_hook.post_crop_region(w, h, item_bbox, crop_region) + + crop_x1, crop_y1, crop_x2, crop_y2 = crop_region + + # prepare cropped mask + cropped_mask = np.zeros((crop_y2 - crop_y1, 
crop_x2 - crop_x1)) + cropped_mask[y1 - crop_y1:y2 - crop_y1, x1 - crop_x1:x2 - crop_x1] = 1 + cropped_mask = dilate_mask(cropped_mask, dilation) + + # make items. just convert the integer label to a string + item = SEG(None, cropped_mask, scores[i], crop_region, item_bbox, str(labels[i]), None) + result.append(item) + + shape = h, w + segs = shape, result + + if detailer_hook is not None and hasattr(detailer_hook, "post_detection"): + segs = detailer_hook.post_detection(segs) + + return segs + except Exception as e: + print(f"ONNXDetector: unable to execute.\n{e}") + pass + + def detect_combined(self, image, threshold, dilation): + return segs_to_combined_mask(self.detect(image, threshold, dilation, 1)) + + def setAux(self, x): + pass + + +def batch_mask_to_segs(mask, combined, crop_factor, bbox_fill, drop_size=1, label='A', crop_min_size=None, detailer_hook=None): + combined_mask = mask.max(dim=0).values + + segs = mask_to_segs(combined_mask, combined, crop_factor, bbox_fill, drop_size, label, crop_min_size, detailer_hook) + + new_segs = [] + for seg in segs[1]: + x1, y1, x2, y2 = seg.crop_region + cropped_mask = mask[:, y1:y2, x1:x2] + item = SEG(None, cropped_mask, 1.0, seg.crop_region, seg.bbox, label, None) + new_segs.append(item) + + return segs[0], new_segs + + +def mask_to_segs(mask, combined, crop_factor, bbox_fill, drop_size=1, label='A', crop_min_size=None, detailer_hook=None, is_contour=True): + drop_size = max(drop_size, 1) + if mask is None: + print("[mask_to_segs] Cannot operate: MASK is empty.") + return ([],) + + if isinstance(mask, np.ndarray): + pass # `mask` is already a NumPy array + else: + try: + mask = mask.numpy() + except AttributeError: + print("[mask_to_segs] Cannot operate: MASK is not a NumPy array or Tensor.") + return ([],) + + if mask is None: + print("[mask_to_segs] Cannot operate: MASK is empty.") + return ([],) + + result = [] + + if len(mask.shape) == 2: + mask = np.expand_dims(mask, axis=0) + + for i in range(mask.shape[0]): + mask_i = mask[i] + + if combined: + indices = np.nonzero(mask_i) + if len(indices[0]) > 0 and len(indices[1]) > 0: + bbox = ( + np.min(indices[1]), + np.min(indices[0]), + np.max(indices[1]), + np.max(indices[0]), + ) + crop_region = make_crop_region( + mask_i.shape[1], mask_i.shape[0], bbox, crop_factor + ) + x1, y1, x2, y2 = crop_region + + if detailer_hook is not None: + crop_region = detailer_hook.post_crop_region(mask_i.shape[1], mask_i.shape[0], bbox, crop_region) + + if x2 - x1 > 0 and y2 - y1 > 0: + cropped_mask = mask_i[y1:y2, x1:x2] + + if bbox_fill: + bx1, by1, bx2, by2 = bbox + cropped_mask = cropped_mask.copy() + cropped_mask[by1:by2, bx1:bx2] = 1.0 + + if cropped_mask is not None: + item = SEG(None, cropped_mask, 1.0, crop_region, bbox, label, None) + result.append(item) + + else: + mask_i_uint8 = (mask_i * 255.0).astype(np.uint8) + contours, ctree = cv2.findContours(mask_i_uint8, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) + for j, contour in enumerate(contours): + hierarchy = ctree[0][j] + if hierarchy[3] != -1: + continue + + separated_mask = np.zeros_like(mask_i_uint8) + cv2.drawContours(separated_mask, [contour], 0, 255, -1) + separated_mask = np.array(separated_mask / 255.0).astype(np.float32) + + x, y, w, h = cv2.boundingRect(contour) + bbox = x, y, x + w, y + h + crop_region = make_crop_region( + mask_i.shape[1], mask_i.shape[0], bbox, crop_factor, crop_min_size + ) + + if detailer_hook is not None: + crop_region = detailer_hook.post_crop_region(mask_i.shape[1], mask_i.shape[0], bbox, crop_region) + + if w > 
drop_size and h > drop_size: + if is_contour: + mask_src = separated_mask + else: + mask_src = mask_i * separated_mask + + cropped_mask = np.array( + mask_src[ + crop_region[1]: crop_region[3], + crop_region[0]: crop_region[2], + ] + ) + + if bbox_fill: + cx1, cy1, _, _ = crop_region + bx1 = x - cx1 + bx2 = x+w - cx1 + by1 = y - cy1 + by2 = y+h - cy1 + cropped_mask[by1:by2, bx1:bx2] = 1.0 + + if cropped_mask is not None: + cropped_mask = torch.clip(torch.from_numpy(cropped_mask), 0, 1.0) + item = SEG(None, cropped_mask.numpy(), 1.0, crop_region, bbox, label, None) + result.append(item) + + if not result: + print(f"[mask_to_segs] Empty mask.") + + print(f"# of Detected SEGS: {len(result)}") + # for r in result: + # print(f"\tbbox={r.bbox}, crop={r.crop_region}, label={r.label}") + + # shape: (b,h,w) -> (h,w) + return (mask.shape[1], mask.shape[2]), result + + +def mediapipe_facemesh_to_segs(image, crop_factor, bbox_fill, crop_min_size, drop_size, dilation, face, mouth, left_eyebrow, left_eye, left_pupil, right_eyebrow, right_eye, right_pupil): + parts = { + "face": np.array([0x0A, 0xC8, 0x0A]), + "mouth": np.array([0x0A, 0xB4, 0x0A]), + "left_eyebrow": np.array([0xB4, 0xDC, 0x0A]), + "left_eye": np.array([0xB4, 0xC8, 0x0A]), + "left_pupil": np.array([0xFA, 0xC8, 0x0A]), + "right_eyebrow": np.array([0x0A, 0xDC, 0xB4]), + "right_eye": np.array([0x0A, 0xC8, 0xB4]), + "right_pupil": np.array([0x0A, 0xC8, 0xFA]), + } + + def create_segments(image, color): + image = (image * 255).to(torch.uint8) + image = image.squeeze(0).numpy() + mask = cv2.inRange(image, color, color) + + contours, ctree = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) + mask_list = [] + for i, contour in enumerate(contours): + hierarchy = ctree[0][i] + if hierarchy[3] == -1: + convex_hull = cv2.convexHull(contour) + convex_segment = np.zeros_like(image) + cv2.fillPoly(convex_segment, [convex_hull], (255, 255, 255)) + + convex_segment = np.expand_dims(convex_segment, axis=0).astype(np.float32) / 255.0 + tensor = torch.from_numpy(convex_segment) + mask_tensor = torch.any(tensor != 0, dim=-1).float() + mask_tensor = mask_tensor.squeeze(0) + mask_tensor = torch.from_numpy(dilate_mask(mask_tensor.numpy(), dilation)) + mask_list.append(mask_tensor.unsqueeze(0)) + + return mask_list + + segs = [] + + def create_seg(label): + mask_list = create_segments(image, parts[label]) + for mask in mask_list: + seg = mask_to_segs(mask, False, crop_factor, bbox_fill, drop_size=drop_size, label=label, crop_min_size=crop_min_size) + if len(seg[1]) > 0: + segs.extend(seg[1]) + + if face: + create_seg('face') + + if mouth: + create_seg('mouth') + + if left_eyebrow: + create_seg('left_eyebrow') + + if left_eye: + create_seg('left_eye') + + if left_pupil: + create_seg('left_pupil') + + if right_eyebrow: + create_seg('right_eyebrow') + + if right_eye: + create_seg('right_eye') + + if right_pupil: + create_seg('right_pupil') + + return (image.shape[1], image.shape[2]), segs + + +def segs_to_combined_mask(segs): + shape = segs[0] + h = shape[0] + w = shape[1] + + mask = np.zeros((h, w), dtype=np.uint8) + + for seg in segs[1]: + cropped_mask = seg.cropped_mask + crop_region = seg.crop_region + mask[crop_region[1]:crop_region[3], crop_region[0]:crop_region[2]] |= (cropped_mask * 255).astype(np.uint8) + + return torch.from_numpy(mask.astype(np.float32) / 255.0) + + +def segs_to_masklist(segs): + shape = segs[0] + h = shape[0] + w = shape[1] + + masks = [] + for seg in segs[1]: + if isinstance(seg.cropped_mask, np.ndarray): + cropped_mask = 
torch.from_numpy(seg.cropped_mask) + else: + cropped_mask = seg.cropped_mask + + if cropped_mask.ndim == 2: + cropped_mask = cropped_mask.unsqueeze(0) + + n = len(cropped_mask) + + mask = torch.zeros((n, h, w), dtype=torch.uint8) + crop_region = seg.crop_region + mask[:, crop_region[1]:crop_region[3], crop_region[0]:crop_region[2]] |= (cropped_mask * 255).to(torch.uint8) + mask = (mask / 255.0).to(torch.float32) + + for x in mask: + masks.append(x) + + if len(masks) == 0: + empty_mask = torch.zeros((h, w), dtype=torch.float32, device="cpu") + masks = [empty_mask] + + return masks + + +def vae_decode(vae, samples, use_tile, hook, tile_size=512): + if use_tile: + pixels = nodes.VAEDecodeTiled().decode(vae, samples, tile_size)[0] + else: + pixels = nodes.VAEDecode().decode(vae, samples)[0] + + if hook is not None: + pixels = hook.post_decode(pixels) + + return pixels + + +def vae_encode(vae, pixels, use_tile, hook, tile_size=512): + if use_tile: + samples = nodes.VAEEncodeTiled().encode(vae, pixels, tile_size)[0] + else: + samples = nodes.VAEEncode().encode(vae, pixels)[0] + + if hook is not None: + samples = hook.post_encode(samples) + + return samples + + +def latent_upscale_on_pixel_space_shape(samples, scale_method, w, h, vae, use_tile=False, tile_size=512, save_temp_prefix=None, hook=None): + return latent_upscale_on_pixel_space_shape2(samples, scale_method, w, h, vae, use_tile, tile_size, save_temp_prefix, hook)[0] + + +def latent_upscale_on_pixel_space_shape2(samples, scale_method, w, h, vae, use_tile=False, tile_size=512, save_temp_prefix=None, hook=None): + pixels = vae_decode(vae, samples, use_tile, hook, tile_size=tile_size) + + if save_temp_prefix is not None: + nodes.PreviewImage().save_images(pixels, filename_prefix=save_temp_prefix) + + pixels = nodes.ImageScale().upscale(pixels, scale_method, int(w), int(h), False)[0] + + old_pixels = pixels + if hook is not None: + pixels = hook.post_upscale(pixels) + + return (vae_encode(vae, pixels, use_tile, hook, tile_size=tile_size), old_pixels) + + +def latent_upscale_on_pixel_space(samples, scale_method, scale_factor, vae, use_tile=False, tile_size=512, save_temp_prefix=None, hook=None): + return latent_upscale_on_pixel_space2(samples, scale_method, scale_factor, vae, use_tile, tile_size, save_temp_prefix, hook)[0] + + +def latent_upscale_on_pixel_space2(samples, scale_method, scale_factor, vae, use_tile=False, tile_size=512, save_temp_prefix=None, hook=None): + pixels = vae_decode(vae, samples, use_tile, hook, tile_size=tile_size) + + if save_temp_prefix is not None: + nodes.PreviewImage().save_images(pixels, filename_prefix=save_temp_prefix) + + w = pixels.shape[2] * scale_factor + h = pixels.shape[1] * scale_factor + pixels = nodes.ImageScale().upscale(pixels, scale_method, int(w), int(h), False)[0] + + old_pixels = pixels + if hook is not None: + pixels = hook.post_upscale(pixels) + + return (vae_encode(vae, pixels, use_tile, hook, tile_size=tile_size), old_pixels) + + +def latent_upscale_on_pixel_space_with_model_shape(samples, scale_method, upscale_model, new_w, new_h, vae, use_tile=False, tile_size=512, save_temp_prefix=None, hook=None): + return latent_upscale_on_pixel_space_with_model_shape2(samples, scale_method, upscale_model, new_w, new_h, vae, use_tile, tile_size, save_temp_prefix, hook)[0] + + +def latent_upscale_on_pixel_space_with_model_shape2(samples, scale_method, upscale_model, new_w, new_h, vae, use_tile=False, tile_size=512, save_temp_prefix=None, hook=None): + pixels = vae_decode(vae, samples, use_tile, hook, 
tile_size=tile_size) + + if save_temp_prefix is not None: + nodes.PreviewImage().save_images(pixels, filename_prefix=save_temp_prefix) + + w = pixels.shape[2] + + # upscale by model upscaler + current_w = w + while current_w < new_w: + pixels = model_upscale.ImageUpscaleWithModel().upscale(upscale_model, pixels)[0] + current_w = pixels.shape[2] + if current_w == w: + print(f"[latent_upscale_on_pixel_space_with_model] x1 upscale model selected") + break + + # downscale to target scale + pixels = nodes.ImageScale().upscale(pixels, scale_method, int(new_w), int(new_h), False)[0] + + old_pixels = pixels + if hook is not None: + pixels = hook.post_upscale(pixels) + + return (vae_encode(vae, pixels, use_tile, hook, tile_size=tile_size), old_pixels) + + +def latent_upscale_on_pixel_space_with_model(samples, scale_method, upscale_model, scale_factor, vae, use_tile=False, + tile_size=512, save_temp_prefix=None, hook=None): + return latent_upscale_on_pixel_space_with_model2(samples, scale_method, upscale_model, scale_factor, vae, use_tile, tile_size, save_temp_prefix, hook)[0] + +def latent_upscale_on_pixel_space_with_model2(samples, scale_method, upscale_model, scale_factor, vae, use_tile=False, + tile_size=512, save_temp_prefix=None, hook=None): + pixels = vae_decode(vae, samples, use_tile, hook, tile_size=tile_size) + + if save_temp_prefix is not None: + nodes.PreviewImage().save_images(pixels, filename_prefix=save_temp_prefix) + + w = pixels.shape[2] + h = pixels.shape[1] + + new_w = w * scale_factor + new_h = h * scale_factor + + # upscale by model upscaler + current_w = w + while current_w < new_w: + pixels = model_upscale.ImageUpscaleWithModel().upscale(upscale_model, pixels)[0] + current_w = pixels.shape[2] + if current_w == w: + print(f"[latent_upscale_on_pixel_space_with_model] x1 upscale model selected") + break + + # downscale to target scale + pixels = nodes.ImageScale().upscale(pixels, scale_method, int(new_w), int(new_h), False)[0] + + old_pixels = pixels + if hook is not None: + pixels = hook.post_upscale(pixels) + + return (vae_encode(vae, pixels, use_tile, hook, tile_size=tile_size), old_pixels) + + +class TwoSamplersForMaskUpscaler: + def __init__(self, scale_method, sample_schedule, use_tiled_vae, base_sampler, mask_sampler, mask, vae, + full_sampler_opt=None, upscale_model_opt=None, hook_base_opt=None, hook_mask_opt=None, + hook_full_opt=None, + tile_size=512): + + mask = make_2d_mask(mask) + + mask = mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])) + + self.params = scale_method, sample_schedule, use_tiled_vae, base_sampler, mask_sampler, mask, vae + self.upscale_model = upscale_model_opt + self.full_sampler = full_sampler_opt + self.hook_base = hook_base_opt + self.hook_mask = hook_mask_opt + self.hook_full = hook_full_opt + self.use_tiled_vae = use_tiled_vae + self.tile_size = tile_size + self.is_tiled = False + self.vae = vae + + def upscale(self, step_info, samples, upscale_factor, save_temp_prefix=None): + scale_method, sample_schedule, use_tiled_vae, base_sampler, mask_sampler, mask, vae = self.params + + mask = make_2d_mask(mask) + + self.prepare_hook(step_info) + + # upscale latent + if self.upscale_model is None: + upscaled_latent = latent_upscale_on_pixel_space(samples, scale_method, upscale_factor, vae, + use_tile=self.use_tiled_vae, + save_temp_prefix=save_temp_prefix, + hook=self.hook_base, tile_size=self.tile_size) + else: + upscaled_latent = latent_upscale_on_pixel_space_with_model(samples, scale_method, self.upscale_model, + upscale_factor, vae, + 
use_tile=self.use_tiled_vae, + save_temp_prefix=save_temp_prefix, + hook=self.hook_mask, tile_size=self.tile_size) + + return self.do_samples(step_info, base_sampler, mask_sampler, sample_schedule, mask, upscaled_latent) + + def prepare_hook(self, step_info): + if self.hook_base is not None: + self.hook_base.set_steps(step_info) + if self.hook_mask is not None: + self.hook_mask.set_steps(step_info) + if self.hook_full is not None: + self.hook_full.set_steps(step_info) + + def upscale_shape(self, step_info, samples, w, h, save_temp_prefix=None): + scale_method, sample_schedule, use_tiled_vae, base_sampler, mask_sampler, mask, vae = self.params + + mask = make_2d_mask(mask) + + self.prepare_hook(step_info) + + # upscale latent + if self.upscale_model is None: + upscaled_latent = latent_upscale_on_pixel_space_shape(samples, scale_method, w, h, vae, + use_tile=self.use_tiled_vae, + save_temp_prefix=save_temp_prefix, + hook=self.hook_base, + tile_size=self.tile_size) + else: + upscaled_latent = latent_upscale_on_pixel_space_with_model_shape(samples, scale_method, self.upscale_model, + w, h, vae, + use_tile=self.use_tiled_vae, + save_temp_prefix=save_temp_prefix, + hook=self.hook_mask, + tile_size=self.tile_size) + + return self.do_samples(step_info, base_sampler, mask_sampler, sample_schedule, mask, upscaled_latent) + + def is_full_sample_time(self, step_info, sample_schedule): + cur_step, total_step = step_info + + # make start from 1 instead of zero + cur_step += 1 + total_step += 1 + + if sample_schedule == "none": + return False + + elif sample_schedule == "interleave1": + return cur_step % 2 == 0 + + elif sample_schedule == "interleave2": + return cur_step % 3 == 0 + + elif sample_schedule == "interleave3": + return cur_step % 4 == 0 + + elif sample_schedule == "last1": + return cur_step == total_step + + elif sample_schedule == "last2": + return cur_step >= total_step - 1 + + elif sample_schedule == "interleave1+last1": + return cur_step % 2 == 0 or cur_step >= total_step - 1 + + elif sample_schedule == "interleave2+last1": + return cur_step % 3 == 0 or cur_step >= total_step - 1 + + elif sample_schedule == "interleave3+last1": + return cur_step % 4 == 0 or cur_step >= total_step - 1 + + def do_samples(self, step_info, base_sampler, mask_sampler, sample_schedule, mask, upscaled_latent): + mask = make_2d_mask(mask) + + if self.is_full_sample_time(step_info, sample_schedule): + print(f"step_info={step_info} / full time") + + upscaled_latent = base_sampler.sample(upscaled_latent, self.hook_base) + sampler = self.full_sampler if self.full_sampler is not None else base_sampler + return sampler.sample(upscaled_latent, self.hook_full) + + else: + print(f"step_info={step_info} / non-full time") + # upscale mask + if mask.ndim == 2: + mask = mask[None, None, :, :] # (1, 1, H, W) layout expected by F.interpolate + upscaled_mask = F.interpolate(mask, size=(upscaled_latent['samples'].shape[2], upscaled_latent['samples'].shape[3]), mode='bilinear', align_corners=True) + upscaled_mask = upscaled_mask[:, :, :upscaled_latent['samples'].shape[2], :upscaled_latent['samples'].shape[3]] + + # base sampler + upscaled_inv_mask = torch.where(upscaled_mask != 1.0, torch.tensor(1.0), torch.tensor(0.0)) + upscaled_latent['noise_mask'] = upscaled_inv_mask + upscaled_latent = base_sampler.sample(upscaled_latent, self.hook_base) + + # mask sampler + upscaled_latent['noise_mask'] = upscaled_mask + upscaled_latent = mask_sampler.sample(upscaled_latent, self.hook_mask) + + # remove mask + del upscaled_latent['noise_mask'] + return upscaled_latent + + +class 
PixelKSampleUpscaler: + def __init__(self, scale_method, model, vae, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise, + use_tiled_vae, upscale_model_opt=None, hook_opt=None, tile_size=512, scheduler_func=None, + tile_cnet_opt=None, tile_cnet_strength=1.0): + self.params = scale_method, model, vae, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise + self.upscale_model = upscale_model_opt + self.hook = hook_opt + self.use_tiled_vae = use_tiled_vae + self.tile_size = tile_size + self.is_tiled = False + self.vae = vae + self.scheduler_func = scheduler_func + self.tile_cnet = tile_cnet_opt + self.tile_cnet_strength = tile_cnet_strength + + def sample(self, model, seed, steps, cfg, sampler_name, scheduler, positive, negative, upscaled_latent, denoise, images): + if self.tile_cnet is not None: + image_batch, image_h, image_w, _ = images.shape # IMAGE tensors are (B, H, W, C) + if image_batch > 1: + warnings.warn('Multiple latents in batch, Tile ControlNet being ignored') + else: + if 'TilePreprocessor' not in nodes.NODE_CLASS_MAPPINGS: + raise RuntimeError("'TilePreprocessor' node (from comfyui_controlnet_aux) isn't installed.") + preprocessor = nodes.NODE_CLASS_MAPPINGS['TilePreprocessor']() + # might add capacity to set pyrUp_iters later, not needed for now though + preprocessed = preprocessor.execute(images, pyrUp_iters=3, resolution=min(image_w, image_h))[0] + apply_cnet = getattr(nodes.ControlNetApply(), nodes.ControlNetApply.FUNCTION) + positive = apply_cnet(positive, self.tile_cnet, preprocessed, strength=self.tile_cnet_strength)[0] + + refined_latent = impact_sampling.impact_sample(model, seed, steps, cfg, sampler_name, scheduler, + positive, negative, upscaled_latent, denoise, scheduler_func=self.scheduler_func) + + return refined_latent + + def upscale(self, step_info, samples, upscale_factor, save_temp_prefix=None): + scale_method, model, vae, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise = self.params + + if self.hook is not None: + self.hook.set_steps(step_info) + + if self.upscale_model is None: + upscaled_latent, upscaled_images = \ + latent_upscale_on_pixel_space2(samples, scale_method, upscale_factor, vae, + use_tile=self.use_tiled_vae, + save_temp_prefix=save_temp_prefix, hook=self.hook, tile_size=self.tile_size) + else: + upscaled_latent, upscaled_images = \ + latent_upscale_on_pixel_space_with_model2(samples, scale_method, self.upscale_model, + upscale_factor, vae, + use_tile=self.use_tiled_vae, + save_temp_prefix=save_temp_prefix, + hook=self.hook, + tile_size=self.tile_size) + + if self.hook is not None: + model, seed, steps, cfg, sampler_name, scheduler, positive, negative, upscaled_latent, denoise = \ + self.hook.pre_ksample(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, + upscaled_latent, denoise) + + refined_latent = self.sample(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, upscaled_latent, denoise, upscaled_images) + return refined_latent + + def upscale_shape(self, step_info, samples, w, h, save_temp_prefix=None): + scale_method, model, vae, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise = self.params + + if self.hook is not None: + self.hook.set_steps(step_info) + + if self.upscale_model is None: + upscaled_latent, upscaled_images = \ + latent_upscale_on_pixel_space_shape2(samples, scale_method, w, h, vae, + use_tile=self.use_tiled_vae, + save_temp_prefix=save_temp_prefix, hook=self.hook, + tile_size=self.tile_size) + else: + upscaled_latent, upscaled_images = \ + 
latent_upscale_on_pixel_space_with_model_shape2(samples, scale_method, self.upscale_model, + w, h, vae, + use_tile=self.use_tiled_vae, + save_temp_prefix=save_temp_prefix, + hook=self.hook, + tile_size=self.tile_size) + + if self.hook is not None: + model, seed, steps, cfg, sampler_name, scheduler, positive, negative, upscaled_latent, denoise = \ + self.hook.pre_ksample(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, + upscaled_latent, denoise) + + refined_latent = self.sample(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, upscaled_latent, denoise, upscaled_images) + return refined_latent + + +class IPAdapterWrapper: + def __init__(self, ipadapter_pipe, weight, noise, weight_type, start_at, end_at, unfold_batch, weight_v2, reference_image, neg_image=None, prev_control_net=None, combine_embeds='concat'): + self.reference_image = reference_image + self.ipadapter_pipe = ipadapter_pipe + self.weight = weight + self.weight_type = weight_type + self.noise = noise + self.start_at = start_at + self.end_at = end_at + self.unfold_batch = unfold_batch + self.prev_control_net = prev_control_net + self.weight_v2 = weight_v2 + self.image = reference_image + self.neg_image = neg_image + self.combine_embeds = combine_embeds + + # name 'apply_ipadapter' isn't allowed + def doit_ipadapter(self, model): + cnet_image_list = [self.image] + prev_cnet_images = [] + + if 'IPAdapterAdvanced' not in nodes.NODE_CLASS_MAPPINGS: + if 'IPAdapterApply' in nodes.NODE_CLASS_MAPPINGS: + raise Exception(f"[ERROR] 'ComfyUI IPAdapter Plus' is outdated.") + + utils.try_install_custom_node('https://github.com/cubiq/ComfyUI_IPAdapter_plus', + "To use 'IPAdapterApplySEGS' node, 'ComfyUI IPAdapter Plus' extension is required.") + raise Exception(f"[ERROR] To use IPAdapterApplySEGS, you need to install 'ComfyUI IPAdapter Plus'") + + obj = nodes.NODE_CLASS_MAPPINGS['IPAdapterAdvanced'] + + ipadapter, _, clip_vision, insightface, lora_loader = self.ipadapter_pipe + model = lora_loader(model) + + if self.prev_control_net is not None: + model, prev_cnet_images = self.prev_control_net.doit_ipadapter(model) + + model = obj().apply_ipadapter(model=model, ipadapter=ipadapter, weight=self.weight, weight_type=self.weight_type, + start_at=self.start_at, end_at=self.end_at, combine_embeds=self.combine_embeds, + clip_vision=clip_vision, image=self.image, image_negative=self.neg_image, attn_mask=None, + insightface=insightface, weight_faceidv2=self.weight_v2)[0] + + cnet_image_list.extend(prev_cnet_images) + + return model, cnet_image_list + + def apply(self, positive, negative, image, mask=None, use_acn=False): + if self.prev_control_net is not None: + return self.prev_control_net.apply(positive, negative, image, mask, use_acn=use_acn) + else: + return positive, negative, [] + + +class ControlNetWrapper: + def __init__(self, control_net, strength, preprocessor, prev_control_net=None, original_size=None, crop_region=None, control_image=None): + self.control_net = control_net + self.strength = strength + self.preprocessor = preprocessor + self.prev_control_net = prev_control_net + + if original_size is not None and crop_region is not None and control_image is not None: + self.control_image = utils.tensor_resize(control_image, original_size[1], original_size[0]) + self.control_image = torch.tensor(utils.tensor_crop(self.control_image, crop_region)) + else: + self.control_image = None + + def apply(self, positive, negative, image, mask=None, use_acn=False): + cnet_image_list = [] + prev_cnet_images = [] 
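+ # These wrapper classes form a chain through prev_control_net: apply() first + # delegates to the previous wrapper, then appends its own control image, so the + # conditioning and the returned cnet_image_list accumulate from the oldest + # wrapper to the newest.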
+ + if self.prev_control_net is not None: + positive, negative, prev_cnet_images = self.prev_control_net.apply(positive, negative, image, mask, use_acn=use_acn) + + if self.control_image is not None: + cnet_image = self.control_image + elif self.preprocessor is not None: + cnet_image = self.preprocessor.apply(image, mask) + else: + cnet_image = image + + cnet_image_list.extend(prev_cnet_images) + cnet_image_list.append(cnet_image) + + if use_acn: + if "ACN_AdvancedControlNetApply" in nodes.NODE_CLASS_MAPPINGS: + acn = nodes.NODE_CLASS_MAPPINGS['ACN_AdvancedControlNetApply']() + positive, negative, _ = acn.apply_controlnet(positive=positive, negative=negative, control_net=self.control_net, image=cnet_image, + strength=self.strength, start_percent=0.0, end_percent=1.0) + else: + utils.try_install_custom_node('https://github.com/Kosinkadink/ComfyUI-Advanced-ControlNet', + "To use 'ControlNetWrapper' for AnimateDiff, 'ComfyUI-Advanced-ControlNet' extension is required.") + raise Exception("'ACN_AdvancedControlNetApply' node isn't installed.") + else: + positive = nodes.ControlNetApply().apply_controlnet(positive, self.control_net, cnet_image, self.strength)[0] + + return positive, negative, cnet_image_list + + def doit_ipadapter(self, model): + if self.prev_control_net is not None: + return self.prev_control_net.doit_ipadapter(model) + else: + return model, [] + + +class ControlNetAdvancedWrapper: + def __init__(self, control_net, strength, start_percent, end_percent, preprocessor, prev_control_net=None, + original_size=None, crop_region=None, control_image=None): + self.control_net = control_net + self.strength = strength + self.preprocessor = preprocessor + self.prev_control_net = prev_control_net + self.start_percent = start_percent + self.end_percent = end_percent + + if original_size is not None and crop_region is not None and control_image is not None: + self.control_image = utils.tensor_resize(control_image, original_size[1], original_size[0]) + self.control_image = torch.tensor(utils.tensor_crop(self.control_image, crop_region)) + else: + self.control_image = None + + def doit_ipadapter(self, model): + if self.prev_control_net is not None: + return self.prev_control_net.doit_ipadapter(model) + else: + return model, [] + + def apply(self, positive, negative, image, mask=None, use_acn=False): + cnet_image_list = [] + prev_cnet_images = [] + + if self.prev_control_net is not None: + positive, negative, prev_cnet_images = self.prev_control_net.apply(positive, negative, image, mask, use_acn=use_acn) + + if self.control_image is not None: + cnet_image = self.control_image + elif self.preprocessor is not None: + cnet_image = self.preprocessor.apply(image, mask) + else: + cnet_image = image + + cnet_image_list.extend(prev_cnet_images) + cnet_image_list.append(cnet_image) + + if use_acn: + if "ACN_AdvancedControlNetApply" in nodes.NODE_CLASS_MAPPINGS: + acn = nodes.NODE_CLASS_MAPPINGS['ACN_AdvancedControlNetApply']() + positive, negative, _ = acn.apply_controlnet(positive=positive, negative=negative, control_net=self.control_net, image=cnet_image, + strength=self.strength, start_percent=self.start_percent, end_percent=self.end_percent) + else: + utils.try_install_custom_node('https://github.com/Kosinkadink/ComfyUI-Advanced-ControlNet', + "To use 'ControlNetAdvancedWrapper' for AnimateDiff, 'ComfyUI-Advanced-ControlNet' extension is required.") + raise Exception("'ACN_AdvancedControlNetApply' node isn't installed.") + else: + positive, negative = nodes.ControlNetApplyAdvanced().apply_controlnet(positive, negative, 
self.control_net, cnet_image, self.strength, self.start_percent, self.end_percent) + + return positive, negative, cnet_image_list + + +# REQUIREMENTS: BlenderNeko/ComfyUI_TiledKSampler +class TiledKSamplerWrapper: + params = None + + def __init__(self, model, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise, + tile_width, tile_height, tiling_strategy): + self.params = model, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise, tile_width, tile_height, tiling_strategy + + def sample(self, latent_image, hook=None): + if "BNK_TiledKSampler" in nodes.NODE_CLASS_MAPPINGS: + TiledKSampler = nodes.NODE_CLASS_MAPPINGS['BNK_TiledKSampler'] + else: + utils.try_install_custom_node('https://github.com/BlenderNeko/ComfyUI_TiledKSampler', + "To use 'TiledKSamplerProvider', 'Tiled sampling for ComfyUI' extension is required.") + raise Exception("'BNK_TiledKSampler' node isn't installed.") + + model, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise, tile_width, tile_height, tiling_strategy = self.params + + if hook is not None: + model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise = \ + hook.pre_ksample(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, + denoise) + + return TiledKSampler().sample(model, seed, tile_width, tile_height, tiling_strategy, steps, cfg, sampler_name, + scheduler, positive, negative, latent_image, denoise)[0] + + +class PixelTiledKSampleUpscaler: + def __init__(self, scale_method, model, vae, seed, steps, cfg, sampler_name, scheduler, positive, negative, + denoise, + tile_width, tile_height, tiling_strategy, + upscale_model_opt=None, hook_opt=None, tile_cnet_opt=None, tile_size=512, tile_cnet_strength=1.0): + self.params = scale_method, model, vae, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise + self.vae = vae + self.tile_params = tile_width, tile_height, tiling_strategy + self.upscale_model = upscale_model_opt + self.hook = hook_opt + self.tile_cnet = tile_cnet_opt + self.tile_size = tile_size + self.is_tiled = True + self.tile_cnet_strength = tile_cnet_strength + + def tiled_ksample(self, latent, images): + if "BNK_TiledKSampler" in nodes.NODE_CLASS_MAPPINGS: + TiledKSampler = nodes.NODE_CLASS_MAPPINGS['BNK_TiledKSampler'] + else: + utils.try_install_custom_node('https://github.com/BlenderNeko/ComfyUI_TiledKSampler', + "To use 'PixelTiledKSampleUpscalerProvider', 'Tiled sampling for ComfyUI' extension is required.") + raise RuntimeError("'BNK_TiledKSampler' node isn't installed.") + + scale_method, model, vae, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise = self.params + tile_width, tile_height, tiling_strategy = self.tile_params + + if self.tile_cnet is not None: + image_batch, image_h, image_w, _ = images.shape # IMAGE tensors are (B, H, W, C) + if image_batch > 1: + warnings.warn('Multiple latents in batch, Tile ControlNet being ignored') + else: + if 'TilePreprocessor' not in nodes.NODE_CLASS_MAPPINGS: + raise RuntimeError("'TilePreprocessor' node (from comfyui_controlnet_aux) isn't installed.") + preprocessor = nodes.NODE_CLASS_MAPPINGS['TilePreprocessor']() + # might add capacity to set pyrUp_iters later, not needed for now though + preprocessed = preprocessor.execute(images, pyrUp_iters=3, resolution=min(image_w, image_h))[0] + apply_cnet = getattr(nodes.ControlNetApply(), nodes.ControlNetApply.FUNCTION) + positive = apply_cnet(positive, self.tile_cnet, preprocessed, strength=self.tile_cnet_strength)[0] + + return 
TiledKSampler().sample(model, seed, tile_width, tile_height, tiling_strategy, steps, cfg, sampler_name, + scheduler, positive, negative, latent, denoise)[0] + + def upscale(self, step_info, samples, upscale_factor, save_temp_prefix=None): + scale_method, model, vae, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise = self.params + + if self.hook is not None: + self.hook.set_steps(step_info) + + if self.upscale_model is None: + upscaled_latent, upscaled_images = \ + latent_upscale_on_pixel_space2(samples, scale_method, upscale_factor, vae, + use_tile=True, save_temp_prefix=save_temp_prefix, + hook=self.hook, tile_size=self.tile_size) + else: + upscaled_latent, upscaled_images = \ + latent_upscale_on_pixel_space_with_model2(samples, scale_method, self.upscale_model, + upscale_factor, vae, use_tile=True, + save_temp_prefix=save_temp_prefix, + hook=self.hook, tile_size=self.tile_size) + + refined_latent = self.tiled_ksample(upscaled_latent, upscaled_images) + + return refined_latent + + def upscale_shape(self, step_info, samples, w, h, save_temp_prefix=None): + scale_method, model, vae, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise = self.params + + if self.hook is not None: + self.hook.set_steps(step_info) + + if self.upscale_model is None: + upscaled_latent, upscaled_images = \ + latent_upscale_on_pixel_space_shape2(samples, scale_method, w, h, vae, + use_tile=True, save_temp_prefix=save_temp_prefix, + hook=self.hook, tile_size=self.tile_size) + else: + upscaled_latent, upscaled_images = \ + latent_upscale_on_pixel_space_with_model_shape2(samples, scale_method, + self.upscale_model, w, h, vae, + use_tile=True, + save_temp_prefix=save_temp_prefix, + hook=self.hook, + tile_size=self.tile_size) + + refined_latent = self.tiled_ksample(upscaled_latent, upscaled_images) + + return refined_latent + + +# REQUIREMENTS: biegert/ComfyUI-CLIPSeg +class BBoxDetectorBasedOnCLIPSeg: + prompt = None + blur = None + threshold = None + dilation_factor = None + aux = None + + def __init__(self, prompt, blur, threshold, dilation_factor): + self.prompt = prompt + self.blur = blur + self.threshold = threshold + self.dilation_factor = dilation_factor + + def detect(self, image, bbox_threshold, bbox_dilation, bbox_crop_factor, drop_size=1, detailer_hook=None): + mask = self.detect_combined(image, bbox_threshold, bbox_dilation) + + mask = make_2d_mask(mask) + + segs = mask_to_segs(mask, False, bbox_crop_factor, True, drop_size, detailer_hook=detailer_hook) + + if detailer_hook is not None and hasattr(detailer_hook, "post_detection"): + segs = detailer_hook.post_detection(segs) + + return segs + + def detect_combined(self, image, bbox_threshold, bbox_dilation): + if "CLIPSeg" in nodes.NODE_CLASS_MAPPINGS: + CLIPSeg = nodes.NODE_CLASS_MAPPINGS['CLIPSeg'] + else: + utils.try_install_custom_node('https://github.com/biegert/ComfyUI-CLIPSeg/raw/main/custom_nodes/clipseg.py', + "To use 'CLIPSegDetectorProvider', 'CLIPSeg' extension is required.") + raise Exception("'CLIPSeg' node isn't installed.") + + if self.threshold is None: + threshold = bbox_threshold + else: + threshold = self.threshold + + if self.dilation_factor is None: + dilation_factor = bbox_dilation + else: + dilation_factor = self.dilation_factor + + prompt = self.aux if self.prompt == '' and self.aux is not None else self.prompt + + mask, _, _ = CLIPSeg().segment_image(image, prompt, self.blur, threshold, dilation_factor) + mask = to_binary_mask(mask) + return mask + + def setAux(self, x): + self.aux = x + + 
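+ # Usage sketch for the CLIPSeg-based detector above (illustrative values only): + # the constructor's threshold/dilation_factor take precedence; passing None + # defers to the per-call bbox_threshold/bbox_dilation instead. + # detector = BBoxDetectorBasedOnCLIPSeg(prompt='face', blur=7, threshold=None, dilation_factor=None) + # segs = detector.detect(image, bbox_threshold=0.5, bbox_dilation=4, bbox_crop_factor=3.0, drop_size=10) + # mask = detector.detect_combined(image, bbox_threshold=0.5, bbox_dilation=4)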
+def update_node_status(node, text, progress=None): + if PromptServer.instance.client_id is None: + return + + PromptServer.instance.send_sync("impact/update_status", { + "node": node, + "progress": progress, + "text": text + }, PromptServer.instance.client_id) + + +def random_mask_raw(mask, bbox, factor): + x1, y1, x2, y2 = bbox + w = x2 - x1 + h = y2 - y1 + + factor = max(6, int(min(w, h) * factor / 4)) + + def draw_random_circle(center, radius): + i, j = center + for x in range(int(i - radius), int(i + radius)): + for y in range(int(j - radius), int(j + radius)): + if np.linalg.norm(np.array([x, y]) - np.array([i, j])) <= radius: + mask[x, y] = 1 + + def draw_irregular_line(start, end, pivot, is_vertical): + i = start + while i < end: + base_radius = np.random.randint(5, factor) + radius = int(base_radius) + + if is_vertical: + draw_random_circle((i, pivot), radius) + else: + draw_random_circle((pivot, i), radius) + + i += radius + + def draw_irregular_line_parallel(start, end, pivot, is_vertical): + with ThreadPoolExecutor(max_workers=16) as executor: + futures = [] + step = max((end - start) // 16, 1) # keep the stride positive for small spans + for i in range(start, end, step): + future = executor.submit(draw_irregular_line, i, min(i + step, end), pivot, is_vertical) + futures.append(future) + + for future in futures: + future.result() + + draw_irregular_line_parallel(y1 + factor, y2 - factor, x1 + factor, True) + draw_irregular_line_parallel(y1 + factor, y2 - factor, x2 - factor, True) + draw_irregular_line_parallel(x1 + factor, x2 - factor, y1 + factor, False) + draw_irregular_line_parallel(x1 + factor, x2 - factor, y2 - factor, False) + + mask[y1 + factor:y2 - factor, x1 + factor:x2 - factor] = 1.0 + + +def random_mask(mask, bbox, factor, size=128): + small_mask = np.zeros((size, size)).astype(np.float32) + random_mask_raw(small_mask, (0, 0, size, size), factor) + + x1, y1, x2, y2 = bbox + small_mask = torch.tensor(small_mask).unsqueeze(0).unsqueeze(0) + bbox_mask = torch.nn.functional.interpolate(small_mask, size=(y2 - y1, x2 - x1), mode='bilinear', align_corners=False) + bbox_mask = bbox_mask.squeeze(0).squeeze(0) + mask[y1:y2, x1:x2] = bbox_mask + + +def adaptive_mask_paste(dest_mask, src_mask, bbox): + x1, y1, x2, y2 = bbox + small_mask = torch.tensor(src_mask).unsqueeze(0).unsqueeze(0) + bbox_mask = torch.nn.functional.interpolate(small_mask, size=(y2 - y1, x2 - x1), mode='bilinear', align_corners=False) + bbox_mask = bbox_mask.squeeze(0).squeeze(0) + dest_mask[y1:y2, x1:x2] = bbox_mask + + +def crop_condition_mask(mask, image, crop_region): + cond_scale = (mask.shape[1] / image.shape[1], mask.shape[2] / image.shape[2]) + mask_region = [round(v * cond_scale[i % 2]) for i, v in enumerate(crop_region)] + return crop_ndarray3(mask, mask_region) + + +class SafeToGPU: + def __init__(self, size): + self.size = size + + def to_device(self, obj, device): + if utils.is_same_device(device, 'cpu'): + obj.to(device) + else: + if utils.is_same_device(obj.device, 'cpu'): # cpu to gpu + model_management.free_memory(self.size * 1.3, device) + if model_management.get_free_memory(device) > self.size * 1.3: + try: + obj.to(device) + except Exception: + print(f"WARN: The model is not moved to the '{device}' due to insufficient memory. [1]") + else: + print(f"WARN: The model is not moved to the '{device}' due to insufficient memory. 
[2]") + + +from comfy.cli_args import args, LatentPreviewMethod +import folder_paths +from latent_preview import TAESD, TAESDPreviewerImpl, Latent2RGBPreviewer + +try: + import comfy.latent_formats as latent_formats + + + def get_previewer(device, latent_format=latent_formats.SD15(), force=False, method=None): + previewer = None + + if method is None: + method = args.preview_method + + if method != LatentPreviewMethod.NoPreviews or force: + # TODO previewer methods + taesd_decoder_path = None + + if hasattr(latent_format, "taesd_decoder_path"): + taesd_decoder_path = folder_paths.get_full_path("vae_approx", latent_format.taesd_decoder_name) + + if method == LatentPreviewMethod.Auto: + method = LatentPreviewMethod.Latent2RGB + if taesd_decoder_path: + method = LatentPreviewMethod.TAESD + + if method == LatentPreviewMethod.TAESD: + if taesd_decoder_path: + taesd = TAESD(None, taesd_decoder_path, latent_channels=latent_format.latent_channels).to(device) + previewer = TAESDPreviewerImpl(taesd) + else: + print("Warning: TAESD previews enabled, but could not find models/vae_approx/{}".format( + latent_format.taesd_decoder_name)) + + if previewer is None: + previewer = Latent2RGBPreviewer(latent_format.latent_rgb_factors) + return previewer + +except: + print(f"#########################################################################") + print(f"[ERROR] ComfyUI-Impact-Pack: Please update ComfyUI to the latest version.") + print(f"#########################################################################") diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/defs.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/defs.py new file mode 100644 index 0000000000000000000000000000000000000000..c898f8c7cb5eb0fe244325f7e5cb4c5600c33a5f --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/defs.py @@ -0,0 +1,17 @@ +detection_labels = [ + 'hand', 'face', 'mouth', 'eyes', 'eyebrows', 'pupils', + 'left_eyebrow', 'left_eye', 'left_pupil', 'right_eyebrow', 'right_eye', 'right_pupil', + 'short_sleeved_shirt', 'long_sleeved_shirt', 'short_sleeved_outwear', 'long_sleeved_outwear', + 'vest', 'sling', 'shorts', 'trousers', 'skirt', 'short_sleeved_dress', 'long_sleeved_dress', 'vest_dress', 'sling_dress', + "person", "bicycle", "car", "motorcycle", "airplane", "bus", "train", "truck", "boat", + "traffic light", "fire hydrant", "stop sign", "parking meter", "bench", + "bird", "cat", "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra", "giraffe", + "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", "skis", "snowboard", + "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard", + "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", + "banana", "apple", "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", + "donut", "cake", "chair", "couch", "potted plant", "bed", "dining table", "toilet", + "tv", "laptop", "mouse", "remote", "keyboard", "cell phone", "microwave", "oven", + "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", + "hair drier", "toothbrush" + ] \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/detectors.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/detectors.py new file mode 100644 index 0000000000000000000000000000000000000000..ea2ae3f556e6d8bd54928d34833606fd1d6e9540 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/detectors.py 
@@ -0,0 +1,455 @@ +import impact.core as core +from nodes import MAX_RESOLUTION +import impact.segs_nodes as segs_nodes +import impact.utils as utils +import torch +from impact.core import SEG + + +class SAMDetectorCombined: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "sam_model": ("SAM_MODEL", ), + "segs": ("SEGS", ), + "image": ("IMAGE", ), + "detection_hint": (["center-1", "horizontal-2", "vertical-2", "rect-4", "diamond-4", "mask-area", + "mask-points", "mask-point-bbox", "none"],), + "dilation": ("INT", {"default": 0, "min": -512, "max": 512, "step": 1}), + "threshold": ("FLOAT", {"default": 0.93, "min": 0.0, "max": 1.0, "step": 0.01}), + "bbox_expansion": ("INT", {"default": 0, "min": 0, "max": 1000, "step": 1}), + "mask_hint_threshold": ("FLOAT", {"default": 0.7, "min": 0.0, "max": 1.0, "step": 0.01}), + "mask_hint_use_negative": (["False", "Small", "Outter"], ) + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detector" + + def doit(self, sam_model, segs, image, detection_hint, dilation, + threshold, bbox_expansion, mask_hint_threshold, mask_hint_use_negative): + return (core.make_sam_mask(sam_model, segs, image, detection_hint, dilation, + threshold, bbox_expansion, mask_hint_threshold, mask_hint_use_negative), ) + + +class SAMDetectorSegmented: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "sam_model": ("SAM_MODEL", ), + "segs": ("SEGS", ), + "image": ("IMAGE", ), + "detection_hint": (["center-1", "horizontal-2", "vertical-2", "rect-4", "diamond-4", "mask-area", + "mask-points", "mask-point-bbox", "none"],), + "dilation": ("INT", {"default": 0, "min": -512, "max": 512, "step": 1}), + "threshold": ("FLOAT", {"default": 0.93, "min": 0.0, "max": 1.0, "step": 0.01}), + "bbox_expansion": ("INT", {"default": 0, "min": 0, "max": 1000, "step": 1}), + "mask_hint_threshold": ("FLOAT", {"default": 0.7, "min": 0.0, "max": 1.0, "step": 0.01}), + "mask_hint_use_negative": (["False", "Small", "Outter"], ) + } + } + + RETURN_TYPES = ("MASK", "MASK") + RETURN_NAMES = ("combined_mask", "batch_masks") + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detector" + + def doit(self, sam_model, segs, image, detection_hint, dilation, + threshold, bbox_expansion, mask_hint_threshold, mask_hint_use_negative): + combined_mask, batch_masks = core.make_sam_mask_segmented(sam_model, segs, image, detection_hint, dilation, + threshold, bbox_expansion, mask_hint_threshold, + mask_hint_use_negative) + return (combined_mask, batch_masks, ) + + +class BboxDetectorForEach: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "bbox_detector": ("BBOX_DETECTOR", ), + "image": ("IMAGE", ), + "threshold": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "dilation": ("INT", {"default": 10, "min": -512, "max": 512, "step": 1}), + "crop_factor": ("FLOAT", {"default": 3.0, "min": 1.0, "max": 100, "step": 0.1}), + "drop_size": ("INT", {"min": 1, "max": MAX_RESOLUTION, "step": 1, "default": 10}), + "labels": ("STRING", {"multiline": True, "default": "all", "placeholder": "List the types of segments to be allowed, separated by commas"}), + }, + "optional": {"detailer_hook": ("DETAILER_HOOK",), } + } + + RETURN_TYPES = ("SEGS", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detector" + + def doit(self, bbox_detector, image, threshold, dilation, crop_factor, drop_size, labels=None, detailer_hook=None): + if len(image) > 1: + raise Exception('[Impact Pack] ERROR: BboxDetectorForEach does not allow image batches.\nPlease refer to 
https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/batching-detailer.md for more information.') + + segs = bbox_detector.detect(image, threshold, dilation, crop_factor, drop_size, detailer_hook) + + if labels is not None and labels != '': + labels = labels.split(',') + if len(labels) > 0: + segs, _ = segs_nodes.SEGSLabelFilter.filter(segs, labels) + + return (segs, ) + + +class SegmDetectorForEach: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segm_detector": ("SEGM_DETECTOR", ), + "image": ("IMAGE", ), + "threshold": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "dilation": ("INT", {"default": 10, "min": -512, "max": 512, "step": 1}), + "crop_factor": ("FLOAT", {"default": 3.0, "min": 1.0, "max": 100, "step": 0.1}), + "drop_size": ("INT", {"min": 1, "max": MAX_RESOLUTION, "step": 1, "default": 10}), + "labels": ("STRING", {"multiline": True, "default": "all", "placeholder": "List the types of segments to be allowed, separated by commas"}), + }, + "optional": {"detailer_hook": ("DETAILER_HOOK",), } + } + + RETURN_TYPES = ("SEGS", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detector" + + def doit(self, segm_detector, image, threshold, dilation, crop_factor, drop_size, labels=None, detailer_hook=None): + if len(image) > 1: + raise Exception('[Impact Pack] ERROR: SegmDetectorForEach does not allow image batches.\nPlease refer to https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/batching-detailer.md for more information.') + + segs = segm_detector.detect(image, threshold, dilation, crop_factor, drop_size, detailer_hook) + + if labels is not None and labels != '': + labels = labels.split(',') + if len(labels) > 0: + segs, _ = segs_nodes.SEGSLabelFilter.filter(segs, labels) + + return (segs, ) + + +class SegmDetectorCombined: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segm_detector": ("SEGM_DETECTOR", ), + "image": ("IMAGE", ), + "threshold": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "dilation": ("INT", {"default": 0, "min": -512, "max": 512, "step": 1}), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detector" + + def doit(self, segm_detector, image, threshold, dilation): + mask = segm_detector.detect_combined(image, threshold, dilation) + + if mask is None: + mask = torch.zeros((image.shape[2], image.shape[1]), dtype=torch.float32, device="cpu") + + return (mask.unsqueeze(0),) + + +class BboxDetectorCombined(SegmDetectorCombined): + @classmethod + def INPUT_TYPES(s): + return {"required": { + "bbox_detector": ("BBOX_DETECTOR", ), + "image": ("IMAGE", ), + "threshold": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "dilation": ("INT", {"default": 4, "min": -512, "max": 512, "step": 1}), + } + } + + def doit(self, bbox_detector, image, threshold, dilation): + mask = bbox_detector.detect_combined(image, threshold, dilation) + + if mask is None: + mask = torch.zeros((image.shape[2], image.shape[1]), dtype=torch.float32, device="cpu") + + return (mask.unsqueeze(0),) + + +class SimpleDetectorForEach: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "bbox_detector": ("BBOX_DETECTOR", ), + "image": ("IMAGE", ), + + "bbox_threshold": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "bbox_dilation": ("INT", {"default": 0, "min": -512, "max": 512, "step": 1}), + + "crop_factor": ("FLOAT", {"default": 3.0, "min": 1.0, "max": 100, 
"step": 0.1}), + "drop_size": ("INT", {"min": 1, "max": MAX_RESOLUTION, "step": 1, "default": 10}), + + "sub_threshold": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "sub_dilation": ("INT", {"default": 0, "min": -512, "max": 512, "step": 1}), + "sub_bbox_expansion": ("INT", {"default": 0, "min": 0, "max": 1000, "step": 1}), + + "sam_mask_hint_threshold": ("FLOAT", {"default": 0.7, "min": 0.0, "max": 1.0, "step": 0.01}), + }, + "optional": { + "post_dilation": ("INT", {"default": 0, "min": -512, "max": 512, "step": 1}), + "sam_model_opt": ("SAM_MODEL", ), + "segm_detector_opt": ("SEGM_DETECTOR", ), + } + } + + RETURN_TYPES = ("SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detector" + + @staticmethod + def detect(bbox_detector, image, bbox_threshold, bbox_dilation, crop_factor, drop_size, + sub_threshold, sub_dilation, sub_bbox_expansion, + sam_mask_hint_threshold, post_dilation=0, sam_model_opt=None, segm_detector_opt=None, + detailer_hook=None): + if len(image) > 1: + raise Exception('[Impact Pack] ERROR: SimpleDetectorForEach does not allow image batches.\nPlease refer to https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/batching-detailer.md for more information.') + + if segm_detector_opt is not None and hasattr(segm_detector_opt, 'bbox_detector') and segm_detector_opt.bbox_detector == bbox_detector: + # Better segm support for YOLO-World detector + segs = segm_detector_opt.detect(image, sub_threshold, sub_dilation, crop_factor, drop_size, detailer_hook=detailer_hook) + else: + segs = bbox_detector.detect(image, bbox_threshold, bbox_dilation, crop_factor, drop_size, detailer_hook=detailer_hook) + + if sam_model_opt is not None: + mask = core.make_sam_mask(sam_model_opt, segs, image, "center-1", sub_dilation, + sub_threshold, sub_bbox_expansion, sam_mask_hint_threshold, False) + segs = core.segs_bitwise_and_mask(segs, mask) + elif segm_detector_opt is not None: + segm_segs = segm_detector_opt.detect(image, sub_threshold, sub_dilation, crop_factor, drop_size, detailer_hook=detailer_hook) + mask = core.segs_to_combined_mask(segm_segs) + segs = core.segs_bitwise_and_mask(segs, mask) + + segs = core.dilate_segs(segs, post_dilation) + + return (segs,) + + def doit(self, bbox_detector, image, bbox_threshold, bbox_dilation, crop_factor, drop_size, + sub_threshold, sub_dilation, sub_bbox_expansion, + sam_mask_hint_threshold, post_dilation=0, sam_model_opt=None, segm_detector_opt=None): + + return SimpleDetectorForEach.detect(bbox_detector, image, bbox_threshold, bbox_dilation, crop_factor, drop_size, + sub_threshold, sub_dilation, sub_bbox_expansion, + sam_mask_hint_threshold, post_dilation=post_dilation, + sam_model_opt=sam_model_opt, segm_detector_opt=segm_detector_opt) + + +class SimpleDetectorForEachPipe: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "detailer_pipe": ("DETAILER_PIPE", ), + "image": ("IMAGE", ), + + "bbox_threshold": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "bbox_dilation": ("INT", {"default": 0, "min": -512, "max": 512, "step": 1}), + + "crop_factor": ("FLOAT", {"default": 3.0, "min": 1.0, "max": 100, "step": 0.1}), + "drop_size": ("INT", {"min": 1, "max": MAX_RESOLUTION, "step": 1, "default": 10}), + + "sub_threshold": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "sub_dilation": ("INT", {"default": 0, "min": -512, "max": 512, "step": 1}), + "sub_bbox_expansion": ("INT", {"default": 0, "min": 0, "max": 1000, "step": 1}), + + 
"sam_mask_hint_threshold": ("FLOAT", {"default": 0.7, "min": 0.0, "max": 1.0, "step": 0.01}), + }, + "optional": { + "post_dilation": ("INT", {"default": 0, "min": -512, "max": 512, "step": 1}), + } + } + + RETURN_TYPES = ("SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detector" + + def doit(self, detailer_pipe, image, bbox_threshold, bbox_dilation, crop_factor, drop_size, + sub_threshold, sub_dilation, sub_bbox_expansion, sam_mask_hint_threshold, post_dilation=0): + + if len(image) > 1: + raise Exception('[Impact Pack] ERROR: SimpleDetectorForEach does not allow image batches.\nPlease refer to https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/batching-detailer.md for more information.') + + model, clip, vae, positive, negative, wildcard, bbox_detector, segm_detector_opt, sam_model_opt, detailer_hook, refiner_model, refiner_clip, refiner_positive, refiner_negative = detailer_pipe + + return SimpleDetectorForEach.detect(bbox_detector, image, bbox_threshold, bbox_dilation, crop_factor, drop_size, + sub_threshold, sub_dilation, sub_bbox_expansion, + sam_mask_hint_threshold, post_dilation=post_dilation, sam_model_opt=sam_model_opt, segm_detector_opt=segm_detector_opt, + detailer_hook=detailer_hook) + + +class SimpleDetectorForAnimateDiff: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "bbox_detector": ("BBOX_DETECTOR", ), + "image_frames": ("IMAGE", ), + + "bbox_threshold": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "bbox_dilation": ("INT", {"default": 0, "min": -255, "max": 255, "step": 1}), + + "crop_factor": ("FLOAT", {"default": 3.0, "min": 1.0, "max": 100, "step": 0.1}), + "drop_size": ("INT", {"min": 1, "max": MAX_RESOLUTION, "step": 1, "default": 10}), + + "sub_threshold": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "sub_dilation": ("INT", {"default": 0, "min": -255, "max": 255, "step": 1}), + "sub_bbox_expansion": ("INT", {"default": 0, "min": 0, "max": 1000, "step": 1}), + + "sam_mask_hint_threshold": ("FLOAT", {"default": 0.7, "min": 0.0, "max": 1.0, "step": 0.01}), + }, + "optional": { + "masking_mode": (["Pivot SEGS", "Combine neighboring frames", "Don't combine"],), + "segs_pivot": (["Combined mask", "1st frame mask"],), + "sam_model_opt": ("SAM_MODEL", ), + "segm_detector_opt": ("SEGM_DETECTOR", ), + } + } + + RETURN_TYPES = ("SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detector" + + @staticmethod + def detect(bbox_detector, image_frames, bbox_threshold, bbox_dilation, crop_factor, drop_size, + sub_threshold, sub_dilation, sub_bbox_expansion, sam_mask_hint_threshold, + masking_mode="Pivot SEGS", segs_pivot="Combined mask", sam_model_opt=None, segm_detector_opt=None): + + h = image_frames.shape[1] + w = image_frames.shape[2] + + # gather segs for all frames + segs_by_frames = [] + for image in image_frames: + image = image.unsqueeze(0) + segs = bbox_detector.detect(image, bbox_threshold, bbox_dilation, crop_factor, drop_size) + + if sam_model_opt is not None: + mask = core.make_sam_mask(sam_model_opt, segs, image, "center-1", sub_dilation, + sub_threshold, sub_bbox_expansion, sam_mask_hint_threshold, False) + segs = core.segs_bitwise_and_mask(segs, mask) + elif segm_detector_opt is not None: + segm_segs = segm_detector_opt.detect(image, sub_threshold, sub_dilation, crop_factor, drop_size) + mask = core.segs_to_combined_mask(segm_segs) + segs = core.segs_bitwise_and_mask(segs, mask) + + segs_by_frames.append(segs) + + def get_masked_frames(): + 
masks_by_frame = [] + for i, segs in enumerate(segs_by_frames): + masks_in_frame = segs_nodes.SEGSToMaskList().doit(segs)[0] + current_frame_mask = (masks_in_frame[0] * 255).to(torch.uint8) + + for mask in masks_in_frame[1:]: + current_frame_mask |= (mask * 255).to(torch.uint8) + + current_frame_mask = (current_frame_mask/255.0).to(torch.float32) + current_frame_mask = utils.to_binary_mask(current_frame_mask, 0.1)[0] + + masks_by_frame.append(current_frame_mask) + + return masks_by_frame + + def get_empty_mask(): + return torch.zeros((h, w), dtype=torch.float32, device="cpu") + + def get_neighboring_mask_at(i, masks_by_frame): + prv = masks_by_frame[i-1] if i > 0 else get_empty_mask() + cur = masks_by_frame[i] + nxt = masks_by_frame[i+1] if i < len(masks_by_frame)-1 else get_empty_mask() + + prv = prv if prv is not None else get_empty_mask() + cur = cur.clone() if cur is not None else get_empty_mask() + nxt = nxt if nxt is not None else get_empty_mask() + + return prv, cur, nxt + + def get_merged_neighboring_mask(masks_by_frame): + if len(masks_by_frame) <= 1: + return masks_by_frame + + result = [] + for i in range(0, len(masks_by_frame)): + prv, cur, nxt = get_neighboring_mask_at(i, masks_by_frame) + cur = (cur * 255).to(torch.uint8) + cur |= (prv * 255).to(torch.uint8) + cur |= (nxt * 255).to(torch.uint8) + cur = (cur / 255.0).to(torch.float32) + cur = utils.to_binary_mask(cur, 0.1)[0] + result.append(cur) + + return result + + def get_whole_merged_mask(): + all_masks = [] + for segs in segs_by_frames: + all_masks += segs_nodes.SEGSToMaskList().doit(segs)[0] + + merged_mask = (all_masks[0] * 255).to(torch.uint8) + for mask in all_masks[1:]: + merged_mask |= (mask * 255).to(torch.uint8) + + merged_mask = (merged_mask / 255.0).to(torch.float32) + merged_mask = utils.to_binary_mask(merged_mask, 0.1)[0] + return merged_mask + + def get_pivot_segs(): + if segs_pivot == "1st frame mask": + return segs_by_frames[0] + else: + merged_mask = get_whole_merged_mask() + return segs_nodes.MaskToSEGS.doit(merged_mask, False, crop_factor, False, drop_size, contour_fill=True)[0] + + def get_segs(merged_neighboring=False): + pivot_segs = get_pivot_segs() + + masks_by_frame = get_masked_frames() + if merged_neighboring: + masks_by_frame = get_merged_neighboring_mask(masks_by_frame) + + new_segs = [] + for seg in pivot_segs[1]: + cropped_mask = torch.zeros(seg.cropped_mask.shape, dtype=torch.float32, device="cpu").unsqueeze(0) + pivot_mask = torch.from_numpy(seg.cropped_mask) + x1, y1, x2, y2 = seg.crop_region + for mask in masks_by_frame: + cropped_mask_at_frame = (mask[y1:y2, x1:x2] * pivot_mask).unsqueeze(0) + cropped_mask = torch.cat((cropped_mask, cropped_mask_at_frame), dim=0) + + if len(cropped_mask) > 1: + cropped_mask = cropped_mask[1:] + + new_seg = SEG(seg.cropped_image, cropped_mask, seg.confidence, seg.crop_region, seg.bbox, seg.label, seg.control_net_wrapper) + new_segs.append(new_seg) + + return pivot_segs[0], new_segs + + # create result mask + if masking_mode == "Pivot SEGS": + return (get_pivot_segs(), ) + + elif masking_mode == "Combine neighboring frames": + return (get_segs(merged_neighboring=True), ) + + else: # elif masking_mode == "Don't combine": + return (get_segs(merged_neighboring=False), ) + + def doit(self, bbox_detector, image_frames, bbox_threshold, bbox_dilation, crop_factor, drop_size, + sub_threshold, sub_dilation, sub_bbox_expansion, sam_mask_hint_threshold, + masking_mode="Pivot SEGS", segs_pivot="Combined mask", sam_model_opt=None, segm_detector_opt=None): + + return 
SimpleDetectorForAnimateDiff.detect(bbox_detector, image_frames, bbox_threshold, bbox_dilation, crop_factor, drop_size, + sub_threshold, sub_dilation, sub_bbox_expansion, sam_mask_hint_threshold, + masking_mode, segs_pivot, sam_model_opt, segm_detector_opt) diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/hf_nodes.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/hf_nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..cae1d447fc26596e17222b3973bd8c6dd084d20d --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/hf_nodes.py @@ -0,0 +1,188 @@ +import comfy +import re +from impact.utils import * + +hf_transformer_model_urls = [ + "rizvandwiki/gender-classification-2", + "NTQAI/pedestrian_gender_recognition", + "Leilab/gender_class", + "ProjectPersonal/GenderClassifier", + "crangana/trained-gender", + "cledoux42/GenderNew_v002", + "ivensamdh/genderage2" +] + + +class HF_TransformersClassifierProvider: + @classmethod + def INPUT_TYPES(s): + global hf_transformer_model_urls + return {"required": { + "preset_repo_id": (hf_transformer_model_urls + ['Manual repo id'],), + "manual_repo_id": ("STRING", {"multiline": False}), + "device_mode": (["AUTO", "Prefer GPU", "CPU"],), + }, + } + + RETURN_TYPES = ("TRANSFORMERS_CLASSIFIER",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/HuggingFace" + + def doit(self, preset_repo_id, manual_repo_id, device_mode): + from transformers import pipeline + + if preset_repo_id == 'Manual repo id': + url = manual_repo_id + else: + url = preset_repo_id + + if device_mode != 'CPU': + device = comfy.model_management.get_torch_device() + else: + device = "cpu" + + classifier = pipeline('image-classification', model=url, device=device) + + return (classifier,) + + +preset_classify_expr = [ + '#Female > #Male', + '#Female < #Male', + 'female > 0.5', + 'male > 0.5', + 'Age16to25 > 0.1', + 'Age50to69 > 0.1', +] + +symbolic_label_map = { + '#Female': {'female', 'Female', 'Human Female', 'woman', 'women', 'girl'}, + '#Male': {'male', 'Male', 'Human Male', 'man', 'men', 'boy'} +} + +def is_numeric_string(input_str): + return re.match(r'^-?\d+(\.\d+)?$', input_str) is not None + + +classify_expr_pattern = r'([^><= ]+)\s*(>|<|>=|<=|=)\s*([^><= ]+)' + + +class SEGS_Classify: + @classmethod + def INPUT_TYPES(s): + global preset_classify_expr + return {"required": { + "classifier": ("TRANSFORMERS_CLASSIFIER",), + "segs": ("SEGS",), + "preset_expr": (preset_classify_expr + ['Manual expr'],), + "manual_expr": ("STRING", {"multiline": False}), + }, + "optional": { + "ref_image_opt": ("IMAGE", ), + } + } + + RETURN_TYPES = ("SEGS", "SEGS", "STRING") + RETURN_NAMES = ("filtered_SEGS", "remained_SEGS", "detected_labels") + OUTPUT_IS_LIST = (False, False, True) + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/HuggingFace" + + @staticmethod + def lookup_classified_label_score(score_infos, label): + global symbolic_label_map + + if label.startswith('#'): + if label not in symbolic_label_map: + return None + else: + label = symbolic_label_map[label] + else: + label = {label} + + for x in score_infos: + if x['label'] in label: + return x['score'] + + return None + + def doit(self, classifier, segs, preset_expr, manual_expr, ref_image_opt=None): + if preset_expr == 'Manual expr': + expr_str = manual_expr + else: + expr_str = preset_expr + + match = re.match(classify_expr_pattern, expr_str) + + if match is None: + return (segs[0], []), segs, [] + + a = match.group(1) + op = match.group(2) + b = 
match.group(3) + + a_is_lab = not is_numeric_string(a) + b_is_lab = not is_numeric_string(b) + + classified = [] + remained_SEGS = [] + provided_labels = set() + + for seg in segs[1]: + cropped_image = None + + if seg.cropped_image is not None: + cropped_image = seg.cropped_image + elif ref_image_opt is not None: + # take from original image + cropped_image = crop_image(ref_image_opt, seg.crop_region) + + if cropped_image is not None: + cropped_image = to_pil(cropped_image) + res = classifier(cropped_image) + classified.append((seg, res)) + + for x in res: + provided_labels.add(x['label']) + else: + remained_SEGS.append(seg) + + filtered_SEGS = [] + for seg, res in classified: + if a_is_lab: + avalue = SEGS_Classify.lookup_classified_label_score(res, a) + else: + avalue = a + + if b_is_lab: + bvalue = SEGS_Classify.lookup_classified_label_score(res, b) + else: + bvalue = b + + if avalue is None or bvalue is None: + remained_SEGS.append(seg) + continue + + avalue = float(avalue) + bvalue = float(bvalue) + + if op == '>': + cond = avalue > bvalue + elif op == '<': + cond = avalue < bvalue + elif op == '>=': + cond = avalue >= bvalue + elif op == '<=': + cond = avalue <= bvalue + else: + cond = avalue == bvalue + + if cond: + filtered_SEGS.append(seg) + else: + remained_SEGS.append(seg) + + return (segs[0], filtered_SEGS), (segs[0], remained_SEGS), list(provided_labels) diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/hook_nodes.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/hook_nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..4e4707fcedb184ef3d3c048b5da8fba10de49c44 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/hook_nodes.py @@ -0,0 +1,83 @@ +import sys +from . import hooks +from . 
import defs + + +class SEGSOrderedFilterDetailerHookProvider: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "target": (["area(=w*h)", "width", "height", "x1", "y1", "x2", "y2"],), + "order": ("BOOLEAN", {"default": True, "label_on": "descending", "label_off": "ascending"}), + "take_start": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "take_count": ("INT", {"default": 1, "min": 0, "max": sys.maxsize, "step": 1}), + }, + } + + RETURN_TYPES = ("DETAILER_HOOK", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, target, order, take_start, take_count): + hook = hooks.SEGSOrderedFilterDetailerHook(target, order, take_start, take_count) + return (hook, ) + + +class SEGSRangeFilterDetailerHookProvider: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "target": (["area(=w*h)", "width", "height", "x1", "y1", "x2", "y2", "length_percent"],), + "mode": ("BOOLEAN", {"default": True, "label_on": "inside", "label_off": "outside"}), + "min_value": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "max_value": ("INT", {"default": 67108864, "min": 0, "max": sys.maxsize, "step": 1}), + }, + } + + RETURN_TYPES = ("DETAILER_HOOK", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, target, mode, min_value, max_value): + hook = hooks.SEGSRangeFilterDetailerHook(target, mode, min_value, max_value) + return (hook, ) + + +class SEGSLabelFilterDetailerHookProvider: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS", ), + "preset": (['all'] + defs.detection_labels,), + "labels": ("STRING", {"multiline": True, "placeholder": "List the types of segments to be allowed, separated by commas"}), + }, + } + + RETURN_TYPES = ("DETAILER_HOOK", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, segs, preset, labels): # segs and preset are accepted to match INPUT_TYPES; only labels is used + hook = hooks.SEGSLabelFilterDetailerHook(labels) + return (hook, ) + + +class PreviewDetailerHookProvider: + @classmethod + def INPUT_TYPES(s): + return { + "required": {"quality": ("INT", {"default": 95, "min": 20, "max": 100})}, + "hidden": {"unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ("DETAILER_HOOK", "UPSCALER_HOOK") + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, quality, unique_id): + hook = hooks.PreviewDetailerHook(unique_id, quality) + return (hook, hook) diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/hooks.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/hooks.py new file mode 100644 index 0000000000000000000000000000000000000000..ea35ee1644b982f164ac91d90be39311069cfcf9 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/hooks.py @@ -0,0 +1,518 @@ +import copy +import torch +import nodes +from impact import utils +from . 
import segs_nodes +from thirdparty import noise_nodes +from server import PromptServer +import asyncio +import folder_paths +import os +from comfy_extras import nodes_custom_sampler +import math + + +class PixelKSampleHook: + cur_step = 0 + total_step = 0 + + def __init__(self): + pass + + def set_steps(self, info): + self.cur_step, self.total_step = info + + def post_decode(self, pixels): + return pixels + + def post_upscale(self, pixels): + return pixels + + def post_encode(self, samples): + return samples + + def pre_decode(self, samples): + return samples + + def pre_ksample(self, model, seed, steps, cfg, sampler_name, scheduler, positive, negative, upscaled_latent, + denoise): + return model, seed, steps, cfg, sampler_name, scheduler, positive, negative, upscaled_latent, denoise + + def post_crop_region(self, w, h, item_bbox, crop_region): + return crop_region + + def touch_scaled_size(self, w, h): + return w, h + + +class PixelKSampleHookCombine(PixelKSampleHook): + hook1 = None + hook2 = None + + def __init__(self, hook1, hook2): + super().__init__() + self.hook1 = hook1 + self.hook2 = hook2 + + def set_steps(self, info): + self.hook1.set_steps(info) + self.hook2.set_steps(info) + + def pre_decode(self, samples): + return self.hook2.pre_decode(self.hook1.pre_decode(samples)) + + def post_decode(self, pixels): + return self.hook2.post_decode(self.hook1.post_decode(pixels)) + + def post_upscale(self, pixels): + return self.hook2.post_upscale(self.hook1.post_upscale(pixels)) + + def post_encode(self, samples): + return self.hook2.post_encode(self.hook1.post_encode(samples)) + + def post_crop_region(self, w, h, item_bbox, crop_region): + crop_region = self.hook1.post_crop_region(w, h, item_bbox, crop_region) + return self.hook2.post_crop_region(w, h, item_bbox, crop_region) + + def touch_scaled_size(self, w, h): + w, h = self.hook1.touch_scaled_size(w, h) + return self.hook2.touch_scaled_size(w, h) + + def pre_ksample(self, model, seed, steps, cfg, sampler_name, scheduler, positive, negative, upscaled_latent, + denoise): + model, seed, steps, cfg, sampler_name, scheduler, positive, negative, upscaled_latent, denoise = \ + self.hook1.pre_ksample(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, + upscaled_latent, denoise) + + return self.hook2.pre_ksample(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, + upscaled_latent, denoise) + + +class DetailerHookCombine(PixelKSampleHookCombine): + def cycle_latent(self, latent): + latent = self.hook1.cycle_latent(latent) + latent = self.hook2.cycle_latent(latent) + return latent + + def post_detection(self, segs): + segs = self.hook1.post_detection(segs) + segs = self.hook2.post_detection(segs) + return segs + + def post_paste(self, image): + image = self.hook1.post_paste(image) + image = self.hook2.post_paste(image) + return image + + def get_custom_noise(self, seed, noise, is_touched): + noise, is_touched = self.hook1.get_custom_noise(seed, noise, is_touched) + noise, is_touched = self.hook2.get_custom_noise(seed, noise, is_touched) + return noise, is_touched + + +class SimpleCfgScheduleHook(PixelKSampleHook): + target_cfg = 0 + + def __init__(self, target_cfg): + super().__init__() + self.target_cfg = target_cfg + + def pre_ksample(self, model, seed, steps, cfg, sampler_name, scheduler, positive, negative, upscaled_latent, denoise): + if self.total_step > 1: + progress = self.cur_step / (self.total_step - 1) + gap = self.target_cfg - cfg + current_cfg = int(cfg + gap * progress) + else: + 
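# Single-cycle run (total_step <= 1): apply the target cfg directly. + 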
current_cfg = self.target_cfg + + return model, seed, steps, current_cfg, sampler_name, scheduler, positive, negative, upscaled_latent, denoise + + +class SimpleDenoiseScheduleHook(PixelKSampleHook): + def __init__(self, target_denoise): + super().__init__() + self.target_denoise = target_denoise + + def pre_ksample(self, model, seed, steps, cfg, sampler_name, scheduler, positive, negative, upscaled_latent, denoise): + if self.total_step > 1: + progress = self.cur_step / (self.total_step - 1) + gap = self.target_denoise - denoise + current_denoise = denoise + gap * progress + else: + current_denoise = self.target_denoise + + return model, seed, steps, cfg, sampler_name, scheduler, positive, negative, upscaled_latent, current_denoise + + +class SimpleStepsScheduleHook(PixelKSampleHook): + def __init__(self, target_steps): + super().__init__() + self.target_steps = target_steps + + def pre_ksample(self, model, seed, steps, cfg, sampler_name, scheduler, positive, negative, upscaled_latent, denoise): + if self.total_step > 1: + progress = self.cur_step / (self.total_step - 1) + gap = self.target_steps - steps + current_steps = int(steps + gap * progress) + else: + current_steps = self.target_steps + + return model, seed, current_steps, cfg, sampler_name, scheduler, positive, negative, upscaled_latent, denoise + + +class DetailerHook(PixelKSampleHook): + def cycle_latent(self, latent): + return latent + + def post_detection(self, segs): + return segs + + def post_paste(self, image): + return image + + def get_custom_noise(self, seed, noise, is_touched): + return noise, is_touched + + +# class CustomNoiseDetailerHookProvider(DetailerHook): +# def __init__(self, noise): +# super().__init__() +# self.noise = noise +# +# def get_custom_noise(self, seed, noise, is_start): +# return self.noise + + +class VariationNoiseDetailerHookProvider(DetailerHook): + def __init__(self, variation_seed, variation_strength): + super().__init__() + self.variation_seed = variation_seed + self.variation_strength = variation_strength + + def get_custom_noise(self, seed, noise, is_touched): + empty_noise = {'samples': torch.zeros(noise.size())} + if not is_touched: + noise = nodes_custom_sampler.Noise_RandomNoise(seed).generate_noise(empty_noise) + noise_2nd = nodes_custom_sampler.Noise_RandomNoise(self.variation_seed).generate_noise(empty_noise) + + mixed_noise = ((1 - self.variation_strength) * noise + self.variation_strength * noise_2nd) + + # NOTE: Since the variance of the Gaussian noise in mixed_noise has changed, it must be corrected through scaling. 
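+ # For independent unit-variance noises n1 and n2, Var((1-s)*n1 + s*n2) = (1-s)**2 + s**2, + # so dividing the mix by sqrt((1-s)**2 + s**2) restores unit variance (s = variation_strength).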
+ scale_factor = math.sqrt((1 - self.variation_strength) ** 2 + self.variation_strength ** 2) + corrected_noise = mixed_noise / scale_factor # Scale the noise to maintain variance of 1 + + return corrected_noise, True + + +class SimpleDetailerDenoiseSchedulerHook(DetailerHook): + def __init__(self, target_denoise): + super().__init__() + self.target_denoise = target_denoise + + def pre_ksample(self, model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent, denoise): + if self.total_step > 1: + progress = self.cur_step / (self.total_step - 1) + gap = self.target_denoise - denoise + current_denoise = denoise + gap * progress + else: + # ignore hook if total cycle <= 1 + current_denoise = denoise + + return model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent, current_denoise + + +class CoreMLHook(DetailerHook): + def __init__(self, mode): + super().__init__() + resolution = mode.split('x') + + self.w = int(resolution[0]) + self.h = int(resolution[1]) + + self.override_bbox_by_segm = False + + def pre_decode(self, samples): + new_samples = copy.deepcopy(samples) + new_samples['samples'] = samples['samples'][0].unsqueeze(0) + return new_samples + + def post_encode(self, samples): + new_samples = copy.deepcopy(samples) + new_samples['samples'] = samples['samples'].repeat(2, 1, 1, 1) + return new_samples + + def post_crop_region(self, w, h, item_bbox, crop_region): + x1, y1, x2, y2 = crop_region + bx1, by1, bx2, by2 = item_bbox + crop_w = x2-x1 + crop_h = y2-y1 + + crop_ratio = crop_w/crop_h + target_ratio = self.w/self.h + if crop_ratio < target_ratio: + # shrink height + top_gap = by1 - y1 + bottom_gap = y2 - by2 + + gap_ratio = top_gap / bottom_gap + + target_height = 1/target_ratio*crop_w + delta_height = crop_h - target_height + + new_y1 = int(y1 + delta_height*gap_ratio) + new_y2 = int(new_y1 + target_height) + crop_region = x1, new_y1, x2, new_y2 + + elif crop_ratio > target_ratio: + # shrink width + left_gap = bx1 - x1 + right_gap = x2 - bx2 + + gap_ratio = left_gap / right_gap + + target_width = target_ratio*crop_h + delta_width = crop_w - target_width + + new_x1 = int(x1 + delta_width*gap_ratio) + new_x2 = int(new_x1 + target_width) + crop_region = new_x1, y1, new_x2, y2 + + return crop_region + + def touch_scaled_size(self, w, h): + return self.w, self.h + + +# REQUIREMENTS: BlenderNeko/ComfyUI Noise +class InjectNoiseHook(PixelKSampleHook): + def __init__(self, source, seed, start_strength, end_strength): + super().__init__() + self.source = source + self.seed = seed + self.start_strength = start_strength + self.end_strength = end_strength + + def post_encode(self, samples): + cur_step = self.cur_step + + size = samples['samples'].shape + seed = cur_step + self.seed + cur_step + + if "BNK_NoisyLatentImage" in nodes.NODE_CLASS_MAPPINGS and "BNK_InjectNoise" in nodes.NODE_CLASS_MAPPINGS: + NoisyLatentImage = nodes.NODE_CLASS_MAPPINGS["BNK_NoisyLatentImage"] + InjectNoise = nodes.NODE_CLASS_MAPPINGS["BNK_InjectNoise"] + else: + utils.try_install_custom_node('https://github.com/BlenderNeko/ComfyUI_Noise', + "To use 'NoiseInjectionHookProvider', 'ComfyUI Noise' extension is required.") + raise Exception("'BNK_NoisyLatentImage', 'BNK_InjectNoise' nodes are not installed.") + + noise = NoisyLatentImage().create_noisy_latents(self.source, seed, size[3] * 8, size[2] * 8, size[0])[0] + + # inj noise + mask = None + if 'noise_mask' in samples: + mask = samples['noise_mask'] + + strength = self.start_strength + (self.end_strength - self.start_strength) 
* cur_step / self.total_step + samples = InjectNoise().inject_noise(samples, strength, noise, mask)[0] + print(f"[Impact Pack] InjectNoiseHook: strength = {strength}") + + if mask is not None: + samples['noise_mask'] = mask + + return samples + + +class UnsamplerHook(PixelKSampleHook): + def __init__(self, model, steps, start_end_at_step, end_end_at_step, cfg, sampler_name, + scheduler, normalize, positive, negative): + super().__init__() + self.model = model + self.cfg = cfg + self.sampler_name = sampler_name + self.steps = steps + self.start_end_at_step = start_end_at_step + self.end_end_at_step = end_end_at_step + self.scheduler = scheduler + self.normalize = normalize + self.positive = positive + self.negative = negative + + def post_encode(self, samples): + cur_step = self.cur_step + + Unsampler = noise_nodes.Unsampler + + end_at_step = self.start_end_at_step + (self.end_end_at_step - self.start_end_at_step) * cur_step / self.total_step + end_at_step = int(end_at_step) + + print(f"[Impact Pack] UnsamplerHook: end_at_step = {end_at_step}") + + # inj noise + mask = None + if 'noise_mask' in samples: + mask = samples['noise_mask'] + + samples = Unsampler().unsampler(self.model, self.cfg, self.sampler_name, self.steps, end_at_step, + self.scheduler, self.normalize, self.positive, self.negative, samples)[0] + + if mask is not None: + samples['noise_mask'] = mask + + return samples + + +class InjectNoiseHookForDetailer(DetailerHook): + def __init__(self, source, seed, start_strength, end_strength, from_start=False): + super().__init__() + self.source = source + self.seed = seed + self.start_strength = start_strength + self.end_strength = end_strength + self.from_start = from_start + + def inject_noise(self, samples): + cur_step = self.cur_step if self.from_start else self.cur_step - 1 + total_step = self.total_step if self.from_start else self.total_step - 1 + + size = samples['samples'].shape + seed = cur_step + self.seed + cur_step + + if "BNK_NoisyLatentImage" in nodes.NODE_CLASS_MAPPINGS and "BNK_InjectNoise" in nodes.NODE_CLASS_MAPPINGS: + NoisyLatentImage = nodes.NODE_CLASS_MAPPINGS["BNK_NoisyLatentImage"] + InjectNoise = nodes.NODE_CLASS_MAPPINGS["BNK_InjectNoise"] + else: + utils.try_install_custom_node('https://github.com/BlenderNeko/ComfyUI_Noise', + "To use 'NoiseInjectionDetailerHookProvider', 'ComfyUI Noise' extension is required.") + raise Exception("'BNK_NoisyLatentImage', 'BNK_InjectNoise' nodes are not installed.") + + noise = NoisyLatentImage().create_noisy_latents(self.source, seed, size[3] * 8, size[2] * 8, size[0])[0] + + # inj noise + mask = None + if 'noise_mask' in samples: + mask = samples['noise_mask'] + + strength = self.start_strength + (self.end_strength - self.start_strength) * cur_step / total_step + samples = InjectNoise().inject_noise(samples, strength, noise, mask)[0] + + if mask is not None: + samples['noise_mask'] = mask + + return samples + + def cycle_latent(self, latent): + if self.cur_step == 0 and not self.from_start: + return latent + else: + return self.inject_noise(latent) + + +class UnsamplerDetailerHook(DetailerHook): + def __init__(self, model, steps, start_end_at_step, end_end_at_step, cfg, sampler_name, + scheduler, normalize, positive, negative, from_start=False): + super().__init__() + self.model = model + self.cfg = cfg + self.sampler_name = sampler_name + self.steps = steps + self.start_end_at_step = start_end_at_step + self.end_end_at_step = end_end_at_step + self.scheduler = scheduler + self.normalize = normalize + self.positive = 
positive + self.negative = negative + self.from_start = from_start + + def unsample(self, samples): + cur_step = self.cur_step if self.from_start else self.cur_step - 1 + total_step = self.total_step if self.from_start else self.total_step - 1 + + Unsampler = noise_nodes.Unsampler + + end_at_step = self.start_end_at_step + (self.end_end_at_step - self.start_end_at_step) * cur_step / total_step + end_at_step = int(end_at_step) + + # inj noise + mask = None + if 'noise_mask' in samples: + mask = samples['noise_mask'] + + samples = Unsampler().unsampler(self.model, self.cfg, self.sampler_name, self.steps, end_at_step, + self.scheduler, self.normalize, self.positive, self.negative, samples)[0] + + if mask is not None: + samples['noise_mask'] = mask + + return samples + + def cycle_latent(self, latent): + if self.cur_step == 0 and not self.from_start: + return latent + else: + return self.unsample(latent) + + +class SEGSOrderedFilterDetailerHook(DetailerHook): + def __init__(self, target, order, take_start, take_count): + super().__init__() + self.target = target + self.order = order + self.take_start = take_start + self.take_count = take_count + + def post_detection(self, segs): + return segs_nodes.SEGSOrderedFilter().doit(segs, self.target, self.order, self.take_start, self.take_count)[0] + + +class SEGSRangeFilterDetailerHook(DetailerHook): + def __init__(self, target, mode, min_value, max_value): + super().__init__() + self.target = target + self.mode = mode + self.min_value = min_value + self.max_value = max_value + + def post_detection(self, segs): + return segs_nodes.SEGSRangeFilter().doit(segs, self.target, self.mode, self.min_value, self.max_value)[0] + + +class SEGSLabelFilterDetailerHook(DetailerHook): + def __init__(self, labels): + super().__init__() + self.labels = labels + + def post_detection(self, segs): + return segs_nodes.SEGSLabelFilter().doit(segs, "", self.labels)[0] + + +class PreviewDetailerHook(DetailerHook): + def __init__(self, node_id, quality): + super().__init__() + self.node_id = node_id + self.quality = quality + + async def send(self, image): + if len(image) > 0: + image = image[0].unsqueeze(0) + img = utils.tensor2pil(image) + + temp_path = os.path.join(folder_paths.get_temp_directory(), 'pvhook') + + if not os.path.exists(temp_path): + os.makedirs(temp_path) + + fullpath = os.path.join(temp_path, f"{self.node_id}.webp") + img.save(fullpath, quality=self.quality) + + item = { + "filename": f"{self.node_id}.webp", + "subfolder": 'pvhook', + "type": 'temp' + } + + PromptServer.instance.send_sync("impact-preview", {'node_id': self.node_id, 'item': item}) + + def post_paste(self, image): + asyncio.run(self.send(image)) + return image diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/impact_pack.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/impact_pack.py new file mode 100644 index 0000000000000000000000000000000000000000..eb2b956ec238c170c58504194e81f3c7e5e5d726 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/impact_pack.py @@ -0,0 +1,2176 @@ +import os +import sys + +import comfy.samplers +import comfy.sd +import warnings +from segment_anything import sam_model_registry +from io import BytesIO +import piexif +import zipfile +import re + +import impact.wildcards + +from impact.utils import * +import impact.core as core +from impact.core import SEG +from impact.config import latent_letter_path +from nodes import MAX_RESOLUTION +from PIL import Image, ImageOps +import numpy as np +import 
hashlib +import json +import safetensors.torch +from PIL.PngImagePlugin import PngInfo +import comfy.model_management +import base64 +import impact.wildcards as wildcards +from . import hooks +from . import utils + +warnings.filterwarnings('ignore', category=UserWarning, message='TypedStorage is deprecated') + +model_path = folder_paths.models_dir + + +# folder_paths.supported_pt_extensions +add_folder_path_and_extensions("mmdets_bbox", [os.path.join(model_path, "mmdets", "bbox")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("mmdets_segm", [os.path.join(model_path, "mmdets", "segm")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("mmdets", [os.path.join(model_path, "mmdets")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("sams", [os.path.join(model_path, "sams")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("onnx", [os.path.join(model_path, "onnx")], {'.onnx'}) + + +# Nodes +class ONNXDetectorProvider: + @classmethod + def INPUT_TYPES(s): + return {"required": {"model_name": (folder_paths.get_filename_list("onnx"), )}} + + RETURN_TYPES = ("BBOX_DETECTOR", ) + FUNCTION = "load_onnx" + + CATEGORY = "ImpactPack" + + def load_onnx(self, model_name): + model = folder_paths.get_full_path("onnx", model_name) + return (core.ONNXDetector(model), ) + + +class CLIPSegDetectorProvider: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "text": ("STRING", {"multiline": False}), + "blur": ("FLOAT", {"min": 0, "max": 15, "step": 0.1, "default": 7}), + "threshold": ("FLOAT", {"min": 0, "max": 1, "step": 0.05, "default": 0.4}), + "dilation_factor": ("INT", {"min": 0, "max": 10, "step": 1, "default": 4}), + } + } + + RETURN_TYPES = ("BBOX_DETECTOR", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, text, blur, threshold, dilation_factor): + if "CLIPSeg" in nodes.NODE_CLASS_MAPPINGS: + return (core.BBoxDetectorBasedOnCLIPSeg(text, blur, threshold, dilation_factor), ) + else: + print("[ERROR] CLIPSegToBboxDetector: CLIPSeg custom node isn't installed. 
You must install biegert/ComfyUI-CLIPSeg extension to use this node.") + + +class SAMLoader: + @classmethod + def INPUT_TYPES(cls): + models = [x for x in folder_paths.get_filename_list("sams") if 'hq' not in x] + return { + "required": { + "model_name": (models + ['ESAM'], ), + "device_mode": (["AUTO", "Prefer GPU", "CPU"],), + } + } + + RETURN_TYPES = ("SAM_MODEL", ) + FUNCTION = "load_model" + + CATEGORY = "ImpactPack" + + def load_model(self, model_name, device_mode="auto"): + if model_name == 'ESAM': + if 'ESAM_ModelLoader_Zho' not in nodes.NODE_CLASS_MAPPINGS: + try_install_custom_node('https://github.com/ZHO-ZHO-ZHO/ComfyUI-YoloWorld-EfficientSAM', + "To use 'ESAM' model, 'ComfyUI-YoloWorld-EfficientSAM' extension is required.") + raise Exception("'ComfyUI-YoloWorld-EfficientSAM' node isn't installed.") + + esam_loader = nodes.NODE_CLASS_MAPPINGS['ESAM_ModelLoader_Zho']() + + if device_mode == 'CPU': + esam = esam_loader.load_esam_model('CPU')[0] + else: + device_mode = 'CUDA' + esam = esam_loader.load_esam_model('CUDA')[0] + + sam_obj = core.ESAMWrapper(esam, device_mode) + esam.sam_wrapper = sam_obj + + print(f"Loads EfficientSAM model: (device:{device_mode})") + return (esam, ) + + modelname = folder_paths.get_full_path("sams", model_name) + + if 'vit_h' in model_name: + model_kind = 'vit_h' + elif 'vit_l' in model_name: + model_kind = 'vit_l' + else: + model_kind = 'vit_b' + + sam = sam_model_registry[model_kind](checkpoint=modelname) + size = os.path.getsize(modelname) + safe_to = core.SafeToGPU(size) + + # Unless user explicitly wants to use CPU, we use GPU + device = comfy.model_management.get_torch_device() if device_mode == "Prefer GPU" else "CPU" + + if device_mode == "Prefer GPU": + safe_to.to_device(sam, device) + + is_auto_mode = device_mode == "AUTO" + + sam_obj = core.SAMWrapper(sam, is_auto_mode=is_auto_mode, safe_to_gpu=safe_to) + sam.sam_wrapper = sam_obj + + print(f"Loads SAM model: {modelname} (device:{device_mode})") + return (sam, ) + + +class ONNXDetectorForEach: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "onnx_detector": ("ONNX_DETECTOR",), + "image": ("IMAGE",), + "threshold": ("FLOAT", {"default": 0.8, "min": 0.0, "max": 1.0, "step": 0.01}), + "dilation": ("INT", {"default": 10, "min": -512, "max": 512, "step": 1}), + "crop_factor": ("FLOAT", {"default": 1.0, "min": 0.5, "max": 100, "step": 0.1}), + "drop_size": ("INT", {"min": 1, "max": MAX_RESOLUTION, "step": 1, "default": 10}), + } + } + + RETURN_TYPES = ("SEGS", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detector" + + OUTPUT_NODE = True + + def doit(self, onnx_detector, image, threshold, dilation, crop_factor, drop_size): + segs = onnx_detector.detect(image, threshold, dilation, crop_factor, drop_size) + return (segs, ) + + +class DetailerForEach: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image": ("IMAGE", ), + "segs": ("SEGS", ), + "model": ("MODEL",), + "clip": ("CLIP",), + "vae": ("VAE",), + "guide_size": ("FLOAT", {"default": 512, "min": 64, "max": nodes.MAX_RESOLUTION, "step": 8}), + "guide_size_for": ("BOOLEAN", {"default": True, "label_on": "bbox", "label_off": "crop_region"}), + "max_size": ("FLOAT", {"default": 1024, "min": 64, "max": nodes.MAX_RESOLUTION, "step": 8}), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": 
(core.SCHEDULERS,), + "positive": ("CONDITIONING",), + "negative": ("CONDITIONING",), + "denoise": ("FLOAT", {"default": 0.5, "min": 0.0001, "max": 1.0, "step": 0.01}), + "feather": ("INT", {"default": 5, "min": 0, "max": 100, "step": 1}), + "noise_mask": ("BOOLEAN", {"default": True, "label_on": "enabled", "label_off": "disabled"}), + "force_inpaint": ("BOOLEAN", {"default": True, "label_on": "enabled", "label_off": "disabled"}), + "wildcard": ("STRING", {"multiline": True, "dynamicPrompts": False}), + + "cycle": ("INT", {"default": 1, "min": 1, "max": 10, "step": 1}), + }, + "optional": { + "detailer_hook": ("DETAILER_HOOK",), + "inpaint_model": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + "noise_mask_feather": ("INT", {"default": 20, "min": 0, "max": 100, "step": 1}), + "scheduler_func_opt": ("SCHEDULER_FUNC",), + } + } + + RETURN_TYPES = ("IMAGE", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detailer" + + @staticmethod + def do_detail(image, segs, model, clip, vae, guide_size, guide_size_for_bbox, max_size, seed, steps, cfg, sampler_name, scheduler, + positive, negative, denoise, feather, noise_mask, force_inpaint, wildcard_opt=None, detailer_hook=None, + refiner_ratio=None, refiner_model=None, refiner_clip=None, refiner_positive=None, refiner_negative=None, + cycle=1, inpaint_model=False, noise_mask_feather=0, scheduler_func_opt=None): + + if len(image) > 1: + raise Exception('[Impact Pack] ERROR: DetailerForEach does not allow image batches.\nPlease refer to https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/batching-detailer.md for more information.') + + image = image.clone() + enhanced_alpha_list = [] + enhanced_list = [] + cropped_list = [] + cnet_pil_list = [] + + segs = core.segs_scale_match(segs, image.shape) + new_segs = [] + + wildcard_concat_mode = None + if wildcard_opt is not None: + if wildcard_opt.startswith('[CONCAT]'): + wildcard_concat_mode = 'concat' + wildcard_opt = wildcard_opt[8:] + wmode, wildcard_chooser = wildcards.process_wildcard_for_segs(wildcard_opt) + else: + wmode, wildcard_chooser = None, None + + if wmode in ['ASC', 'DSC']: + if wmode == 'ASC': + ordered_segs = sorted(segs[1], key=lambda x: (x.bbox[0], x.bbox[1])) + else: + ordered_segs = sorted(segs[1], key=lambda x: (x.bbox[0], x.bbox[1]), reverse=True) + else: + ordered_segs = segs[1] + + for i, seg in enumerate(ordered_segs): + cropped_image = crop_ndarray4(image.cpu().numpy(), seg.crop_region) # Never use seg.cropped_image to handle overlapping area + cropped_image = to_tensor(cropped_image) + mask = to_tensor(seg.cropped_mask) + mask = tensor_gaussian_blur_mask(mask, feather) + + is_mask_all_zeros = (seg.cropped_mask == 0).all().item() + if is_mask_all_zeros: + print(f"Detailer: segment skip [empty mask]") + continue + + if noise_mask: + cropped_mask = seg.cropped_mask + else: + cropped_mask = None + + if wildcard_chooser is not None and wmode != "LAB": + seg_seed, wildcard_item = wildcard_chooser.get(seg) + elif wildcard_chooser is not None and wmode == "LAB": + seg_seed, wildcard_item = None, wildcard_chooser.get(seg) + else: + seg_seed, wildcard_item = None, None + + seg_seed = seed + i if seg_seed is None else seg_seed + + cropped_positive = [ + [condition, { + k: core.crop_condition_mask(v, image, seg.crop_region) if k == "mask" else v + for k, v in details.items() + }] + for condition, details in positive + ] + + if not isinstance(negative, str): + cropped_negative = [ + [condition, { + k: 
core.crop_condition_mask(v, image, seg.crop_region) if k == "mask" else v + for k, v in details.items() + }] + for condition, details in negative + ] + else: + # Negative Conditioning is placeholder such as FLUX.1 + cropped_negative = negative + + enhanced_image, cnet_pils = core.enhance_detail(cropped_image, model, clip, vae, guide_size, guide_size_for_bbox, max_size, + seg.bbox, seg_seed, steps, cfg, sampler_name, scheduler, + cropped_positive, cropped_negative, denoise, cropped_mask, force_inpaint, + wildcard_opt=wildcard_item, wildcard_opt_concat_mode=wildcard_concat_mode, + detailer_hook=detailer_hook, + refiner_ratio=refiner_ratio, refiner_model=refiner_model, + refiner_clip=refiner_clip, refiner_positive=refiner_positive, + refiner_negative=refiner_negative, control_net_wrapper=seg.control_net_wrapper, + cycle=cycle, inpaint_model=inpaint_model, noise_mask_feather=noise_mask_feather, + scheduler_func=scheduler_func_opt) + + if cnet_pils is not None: + cnet_pil_list.extend(cnet_pils) + + if not (enhanced_image is None): + # don't latent composite-> converting to latent caused poor quality + # use image paste + image = image.cpu() + enhanced_image = enhanced_image.cpu() + tensor_paste(image, enhanced_image, (seg.crop_region[0], seg.crop_region[1]), mask) + enhanced_list.append(enhanced_image) + + if detailer_hook is not None: + image = detailer_hook.post_paste(image) + + if not (enhanced_image is None): + # Convert enhanced_pil_alpha to RGBA mode + enhanced_image_alpha = tensor_convert_rgba(enhanced_image) + new_seg_image = enhanced_image.numpy() # alpha should not be applied to seg_image + + # Apply the mask + mask = tensor_resize(mask, *tensor_get_size(enhanced_image)) + tensor_putalpha(enhanced_image_alpha, mask) + enhanced_alpha_list.append(enhanced_image_alpha) + else: + new_seg_image = None + + cropped_list.append(cropped_image) + + new_seg = SEG(new_seg_image, seg.cropped_mask, seg.confidence, seg.crop_region, seg.bbox, seg.label, seg.control_net_wrapper) + new_segs.append(new_seg) + + image_tensor = tensor_convert_rgb(image) + + cropped_list.sort(key=lambda x: x.shape, reverse=True) + enhanced_list.sort(key=lambda x: x.shape, reverse=True) + enhanced_alpha_list.sort(key=lambda x: x.shape, reverse=True) + + return image_tensor, cropped_list, enhanced_list, enhanced_alpha_list, cnet_pil_list, (segs[0], new_segs) + + def doit(self, image, segs, model, clip, vae, guide_size, guide_size_for, max_size, seed, steps, cfg, sampler_name, + scheduler, positive, negative, denoise, feather, noise_mask, force_inpaint, wildcard, cycle=1, + detailer_hook=None, inpaint_model=False, noise_mask_feather=0, scheduler_func_opt=None): + + enhanced_img, *_ = \ + DetailerForEach.do_detail(image, segs, model, clip, vae, guide_size, guide_size_for, max_size, seed, steps, + cfg, sampler_name, scheduler, positive, negative, denoise, feather, noise_mask, + force_inpaint, wildcard, detailer_hook, + cycle=cycle, inpaint_model=inpaint_model, noise_mask_feather=noise_mask_feather, scheduler_func_opt=scheduler_func_opt) + + return (enhanced_img, ) + + +class DetailerForEachPipe: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image": ("IMAGE", ), + "segs": ("SEGS", ), + "guide_size": ("FLOAT", {"default": 512, "min": 64, "max": nodes.MAX_RESOLUTION, "step": 8}), + "guide_size_for": ("BOOLEAN", {"default": True, "label_on": "bbox", "label_off": "crop_region"}), + "max_size": ("FLOAT", {"default": 1024, "min": 64, "max": nodes.MAX_RESOLUTION, "step": 8}), + "seed": ("INT", {"default": 0, "min": 0, 
"max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": (core.SCHEDULERS,), + "denoise": ("FLOAT", {"default": 0.5, "min": 0.0001, "max": 1.0, "step": 0.01}), + "feather": ("INT", {"default": 5, "min": 0, "max": 100, "step": 1}), + "noise_mask": ("BOOLEAN", {"default": True, "label_on": "enabled", "label_off": "disabled"}), + "force_inpaint": ("BOOLEAN", {"default": True, "label_on": "enabled", "label_off": "disabled"}), + "basic_pipe": ("BASIC_PIPE", ), + "wildcard": ("STRING", {"multiline": True, "dynamicPrompts": False}), + "refiner_ratio": ("FLOAT", {"default": 0.2, "min": 0.0, "max": 1.0}), + + "cycle": ("INT", {"default": 1, "min": 1, "max": 10, "step": 1}), + }, + "optional": { + "detailer_hook": ("DETAILER_HOOK",), + "refiner_basic_pipe_opt": ("BASIC_PIPE",), + "inpaint_model": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + "noise_mask_feather": ("INT", {"default": 20, "min": 0, "max": 100, "step": 1}), + "scheduler_func_opt": ("SCHEDULER_FUNC",), + } + } + + RETURN_TYPES = ("IMAGE", "SEGS", "BASIC_PIPE", "IMAGE") + RETURN_NAMES = ("image", "segs", "basic_pipe", "cnet_images") + OUTPUT_IS_LIST = (False, False, False, True) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detailer" + + def doit(self, image, segs, guide_size, guide_size_for, max_size, seed, steps, cfg, sampler_name, scheduler, + denoise, feather, noise_mask, force_inpaint, basic_pipe, wildcard, + refiner_ratio=None, detailer_hook=None, refiner_basic_pipe_opt=None, + cycle=1, inpaint_model=False, noise_mask_feather=0, scheduler_func_opt=None): + + if len(image) > 1: + raise Exception('[Impact Pack] ERROR: DetailerForEach does not allow image batches.\nPlease refer to https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/batching-detailer.md for more information.') + + model, clip, vae, positive, negative = basic_pipe + + if refiner_basic_pipe_opt is None: + refiner_model, refiner_clip, refiner_positive, refiner_negative = None, None, None, None + else: + refiner_model, refiner_clip, _, refiner_positive, refiner_negative = refiner_basic_pipe_opt + + enhanced_img, cropped, cropped_enhanced, cropped_enhanced_alpha, cnet_pil_list, new_segs = \ + DetailerForEach.do_detail(image, segs, model, clip, vae, guide_size, guide_size_for, max_size, seed, steps, cfg, + sampler_name, scheduler, positive, negative, denoise, feather, noise_mask, + force_inpaint, wildcard, detailer_hook, + refiner_ratio=refiner_ratio, refiner_model=refiner_model, + refiner_clip=refiner_clip, refiner_positive=refiner_positive, refiner_negative=refiner_negative, + cycle=cycle, inpaint_model=inpaint_model, noise_mask_feather=noise_mask_feather, scheduler_func_opt=scheduler_func_opt) + + # set fallback image + if len(cnet_pil_list) == 0: + cnet_pil_list = [empty_pil_tensor()] + + return enhanced_img, new_segs, basic_pipe, cnet_pil_list + + +class FaceDetailer: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image": ("IMAGE", ), + "model": ("MODEL",), + "clip": ("CLIP",), + "vae": ("VAE",), + "guide_size": ("FLOAT", {"default": 512, "min": 64, "max": nodes.MAX_RESOLUTION, "step": 8}), + "guide_size_for": ("BOOLEAN", {"default": True, "label_on": "bbox", "label_off": "crop_region"}), + "max_size": ("FLOAT", {"default": 1024, "min": 64, "max": nodes.MAX_RESOLUTION, "step": 8}), + "seed": ("INT", 
{"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": (core.SCHEDULERS,), + "positive": ("CONDITIONING",), + "negative": ("CONDITIONING",), + "denoise": ("FLOAT", {"default": 0.5, "min": 0.0001, "max": 1.0, "step": 0.01}), + "feather": ("INT", {"default": 5, "min": 0, "max": 100, "step": 1}), + "noise_mask": ("BOOLEAN", {"default": True, "label_on": "enabled", "label_off": "disabled"}), + "force_inpaint": ("BOOLEAN", {"default": True, "label_on": "enabled", "label_off": "disabled"}), + + "bbox_threshold": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "bbox_dilation": ("INT", {"default": 10, "min": -512, "max": 512, "step": 1}), + "bbox_crop_factor": ("FLOAT", {"default": 3.0, "min": 1.0, "max": 10, "step": 0.1}), + + "sam_detection_hint": (["center-1", "horizontal-2", "vertical-2", "rect-4", "diamond-4", "mask-area", "mask-points", "mask-point-bbox", "none"],), + "sam_dilation": ("INT", {"default": 0, "min": -512, "max": 512, "step": 1}), + "sam_threshold": ("FLOAT", {"default": 0.93, "min": 0.0, "max": 1.0, "step": 0.01}), + "sam_bbox_expansion": ("INT", {"default": 0, "min": 0, "max": 1000, "step": 1}), + "sam_mask_hint_threshold": ("FLOAT", {"default": 0.7, "min": 0.0, "max": 1.0, "step": 0.01}), + "sam_mask_hint_use_negative": (["False", "Small", "Outter"],), + + "drop_size": ("INT", {"min": 1, "max": MAX_RESOLUTION, "step": 1, "default": 10}), + + "bbox_detector": ("BBOX_DETECTOR", ), + "wildcard": ("STRING", {"multiline": True, "dynamicPrompts": False}), + + "cycle": ("INT", {"default": 1, "min": 1, "max": 10, "step": 1}), + }, + "optional": { + "sam_model_opt": ("SAM_MODEL", ), + "segm_detector_opt": ("SEGM_DETECTOR", ), + "detailer_hook": ("DETAILER_HOOK",), + "inpaint_model": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + "noise_mask_feather": ("INT", {"default": 20, "min": 0, "max": 100, "step": 1}), + "scheduler_func_opt": ("SCHEDULER_FUNC",), + }} + + RETURN_TYPES = ("IMAGE", "IMAGE", "IMAGE", "MASK", "DETAILER_PIPE", "IMAGE") + RETURN_NAMES = ("image", "cropped_refined", "cropped_enhanced_alpha", "mask", "detailer_pipe", "cnet_images") + OUTPUT_IS_LIST = (False, True, True, False, False, True) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Simple" + + @staticmethod + def enhance_face(image, model, clip, vae, guide_size, guide_size_for_bbox, max_size, seed, steps, cfg, sampler_name, scheduler, + positive, negative, denoise, feather, noise_mask, force_inpaint, + bbox_threshold, bbox_dilation, bbox_crop_factor, + sam_detection_hint, sam_dilation, sam_threshold, sam_bbox_expansion, sam_mask_hint_threshold, + sam_mask_hint_use_negative, drop_size, + bbox_detector, segm_detector=None, sam_model_opt=None, wildcard_opt=None, detailer_hook=None, + refiner_ratio=None, refiner_model=None, refiner_clip=None, refiner_positive=None, refiner_negative=None, cycle=1, + inpaint_model=False, noise_mask_feather=0, scheduler_func_opt=None): + + # make default prompt as 'face' if empty prompt for CLIPSeg + bbox_detector.setAux('face') + segs = bbox_detector.detect(image, bbox_threshold, bbox_dilation, bbox_crop_factor, drop_size, detailer_hook=detailer_hook) + bbox_detector.setAux(None) + + # bbox + sam combination + if sam_model_opt is not None: + sam_mask = core.make_sam_mask(sam_model_opt, segs, image, sam_detection_hint, sam_dilation, + 
sam_threshold, sam_bbox_expansion, sam_mask_hint_threshold, + sam_mask_hint_use_negative, ) + segs = core.segs_bitwise_and_mask(segs, sam_mask) + + elif segm_detector is not None: + segm_segs = segm_detector.detect(image, bbox_threshold, bbox_dilation, bbox_crop_factor, drop_size) + + if (hasattr(segm_detector, 'override_bbox_by_segm') and segm_detector.override_bbox_by_segm and + not (detailer_hook is not None and not hasattr(detailer_hook, 'override_bbox_by_segm'))): + segs = segm_segs + else: + segm_mask = core.segs_to_combined_mask(segm_segs) + segs = core.segs_bitwise_and_mask(segs, segm_mask) + + if len(segs[1]) > 0: + enhanced_img, _, cropped_enhanced, cropped_enhanced_alpha, cnet_pil_list, new_segs = \ + DetailerForEach.do_detail(image, segs, model, clip, vae, guide_size, guide_size_for_bbox, max_size, seed, steps, cfg, + sampler_name, scheduler, positive, negative, denoise, feather, noise_mask, + force_inpaint, wildcard_opt, detailer_hook, + refiner_ratio=refiner_ratio, refiner_model=refiner_model, + refiner_clip=refiner_clip, refiner_positive=refiner_positive, + refiner_negative=refiner_negative, + cycle=cycle, inpaint_model=inpaint_model, noise_mask_feather=noise_mask_feather, scheduler_func_opt=scheduler_func_opt) + else: + enhanced_img = image + cropped_enhanced = [] + cropped_enhanced_alpha = [] + cnet_pil_list = [] + + # Mask Generator + mask = core.segs_to_combined_mask(segs) + + if len(cropped_enhanced) == 0: + cropped_enhanced = [empty_pil_tensor()] + + if len(cropped_enhanced_alpha) == 0: + cropped_enhanced_alpha = [empty_pil_tensor()] + + if len(cnet_pil_list) == 0: + cnet_pil_list = [empty_pil_tensor()] + + return enhanced_img, cropped_enhanced, cropped_enhanced_alpha, mask, cnet_pil_list + + def doit(self, image, model, clip, vae, guide_size, guide_size_for, max_size, seed, steps, cfg, sampler_name, scheduler, + positive, negative, denoise, feather, noise_mask, force_inpaint, + bbox_threshold, bbox_dilation, bbox_crop_factor, + sam_detection_hint, sam_dilation, sam_threshold, sam_bbox_expansion, sam_mask_hint_threshold, + sam_mask_hint_use_negative, drop_size, bbox_detector, wildcard, cycle=1, + sam_model_opt=None, segm_detector_opt=None, detailer_hook=None, inpaint_model=False, noise_mask_feather=0, scheduler_func_opt=None): + + result_img = None + result_mask = None + result_cropped_enhanced = [] + result_cropped_enhanced_alpha = [] + result_cnet_images = [] + + if len(image) > 1: + print(f"[Impact Pack] WARN: FaceDetailer is not a node designed for video detailing. 
If you intend to perform video detailing, please use Detailer For AnimateDiff.") + + for i, single_image in enumerate(image): + enhanced_img, cropped_enhanced, cropped_enhanced_alpha, mask, cnet_pil_list = FaceDetailer.enhance_face( + single_image.unsqueeze(0), model, clip, vae, guide_size, guide_size_for, max_size, seed + i, steps, cfg, sampler_name, scheduler, + positive, negative, denoise, feather, noise_mask, force_inpaint, + bbox_threshold, bbox_dilation, bbox_crop_factor, + sam_detection_hint, sam_dilation, sam_threshold, sam_bbox_expansion, sam_mask_hint_threshold, + sam_mask_hint_use_negative, drop_size, bbox_detector, segm_detector_opt, sam_model_opt, wildcard, detailer_hook, + cycle=cycle, inpaint_model=inpaint_model, noise_mask_feather=noise_mask_feather, scheduler_func_opt=scheduler_func_opt) + + result_img = torch.cat((result_img, enhanced_img), dim=0) if result_img is not None else enhanced_img + result_mask = torch.cat((result_mask, mask), dim=0) if result_mask is not None else mask + result_cropped_enhanced.extend(cropped_enhanced) + result_cropped_enhanced_alpha.extend(cropped_enhanced_alpha) + result_cnet_images.extend(cnet_pil_list) + + pipe = (model, clip, vae, positive, negative, wildcard, bbox_detector, segm_detector_opt, sam_model_opt, detailer_hook, None, None, None, None) + return result_img, result_cropped_enhanced, result_cropped_enhanced_alpha, result_mask, pipe, result_cnet_images + + +class LatentPixelScale: + upscale_methods = ["nearest-exact", "bilinear", "lanczos", "area"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "samples": ("LATENT", ), + "scale_method": (s.upscale_methods,), + "scale_factor": ("FLOAT", {"default": 1.5, "min": 0.1, "max": 10000, "step": 0.1}), + "vae": ("VAE", ), + "use_tiled_vae": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + }, + "optional": { + "upscale_model_opt": ("UPSCALE_MODEL", ), + } + } + + RETURN_TYPES = ("LATENT", "IMAGE") + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Upscale" + + def doit(self, samples, scale_method, scale_factor, vae, use_tiled_vae, upscale_model_opt=None): + if upscale_model_opt is None: + latimg = core.latent_upscale_on_pixel_space2(samples, scale_method, scale_factor, vae, use_tile=use_tiled_vae) + else: + latimg = core.latent_upscale_on_pixel_space_with_model2(samples, scale_method, upscale_model_opt, scale_factor, vae, use_tile=use_tiled_vae) + return latimg + + +class NoiseInjectionDetailerHookProvider: + schedules = ["skip_start", "from_start"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "schedule_for_cycle": (s.schedules,), + "source": (["CPU", "GPU"],), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "start_strength": ("FLOAT", {"default": 2.0, "min": 0.0, "max": 200.0, "step": 0.01}), + "end_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 200.0, "step": 0.01}), + }, + } + + RETURN_TYPES = ("DETAILER_HOOK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detailer" + + def doit(self, schedule_for_cycle, source, seed, start_strength, end_strength): + try: + hook = hooks.InjectNoiseHookForDetailer(source, seed, start_strength, end_strength, + from_start=('from_start' in schedule_for_cycle)) + return (hook, ) + except Exception as e: + print("[ERROR] NoiseInjectionDetailerHookProvider: 'ComfyUI Noise' custom node isn't installed. 
You must install 'BlenderNeko/ComfyUI Noise' extension to use this node.") + print(f"\t{e}") + pass + + +# class CustomNoiseDetailerHookProvider: +# @classmethod +# def INPUT_TYPES(s): +# return {"required": { +# "noise": ("NOISE",)}, +# } +# +# RETURN_TYPES = ("DETAILER_HOOK",) +# FUNCTION = "doit" +# +# CATEGORY = "ImpactPack/Detailer" +# +# def doit(self, noise): +# hook = hooks.CustomNoiseDetailerHookProvider(noise) +# return (hook, ) + + +class VariationNoiseDetailerHookProvider: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "strength": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.01})} + } + + RETURN_TYPES = ("DETAILER_HOOK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detailer" + + def doit(self, seed, strength): + hook = hooks.VariationNoiseDetailerHookProvider(seed, strength) + return (hook, ) + + +class UnsamplerDetailerHookProvider: + schedules = ["skip_start", "from_start"] + + @classmethod + def INPUT_TYPES(s): + return {"required": + {"model": ("MODEL",), + "steps": ("INT", {"default": 25, "min": 1, "max": 10000}), + "start_end_at_step": ("INT", {"default": 21, "min": 0, "max": 10000}), + "end_end_at_step": ("INT", {"default": 24, "min": 0, "max": 10000}), + "cfg": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, ), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS, ), + "normalize": (["disable", "enable"], ), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "schedule_for_cycle": (s.schedules,), + }} + + RETURN_TYPES = ("DETAILER_HOOK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detailer" + + def doit(self, model, steps, start_end_at_step, end_end_at_step, cfg, sampler_name, + scheduler, normalize, positive, negative, schedule_for_cycle): + try: + hook = hooks.UnsamplerDetailerHook(model, steps, start_end_at_step, end_end_at_step, cfg, sampler_name, + scheduler, normalize, positive, negative, + from_start=('from_start' in schedule_for_cycle)) + + return (hook, ) + except Exception as e: + print("[ERROR] UnsamplerDetailerHookProvider: 'ComfyUI Noise' custom node isn't installed. 
You must install 'BlenderNeko/ComfyUI Noise' extension to use this node.") + print(f"\t{e}") + pass + + +class DenoiseSchedulerDetailerHookProvider: + schedules = ["simple"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "schedule_for_cycle": (s.schedules,), + "target_denoise": ("FLOAT", {"default": 0.3, "min": 0.0, "max": 1.0, "step": 0.01}), + }, + } + + RETURN_TYPES = ("DETAILER_HOOK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detailer" + + def doit(self, schedule_for_cycle, target_denoise): + hook = hooks.SimpleDetailerDenoiseSchedulerHook(target_denoise) + return (hook, ) + + +class CoreMLDetailerHookProvider: + @classmethod + def INPUT_TYPES(s): + return {"required": {"mode": (["512x512", "768x768", "512x768", "768x512"], )}, } + + RETURN_TYPES = ("DETAILER_HOOK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detailer" + + def doit(self, mode): + hook = hooks.CoreMLHook(mode) + return (hook, ) + + +class CfgScheduleHookProvider: + schedules = ["simple"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "schedule_for_iteration": (s.schedules,), + "target_cfg": ("FLOAT", {"default": 3.0, "min": 0.0, "max": 100.0}), + }, + } + + RETURN_TYPES = ("PK_HOOK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Upscale" + + def doit(self, schedule_for_iteration, target_cfg): + hook = None + if schedule_for_iteration == "simple": + hook = hooks.SimpleCfgScheduleHook(target_cfg) + + return (hook, ) + + +class UnsamplerHookProvider: + schedules = ["simple"] + + @classmethod + def INPUT_TYPES(s): + return {"required": + {"model": ("MODEL",), + "steps": ("INT", {"default": 25, "min": 1, "max": 10000}), + "start_end_at_step": ("INT", {"default": 21, "min": 0, "max": 10000}), + "end_end_at_step": ("INT", {"default": 24, "min": 0, "max": 10000}), + "cfg": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, ), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS, ), + "normalize": (["disable", "enable"], ), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "schedule_for_iteration": (s.schedules,), + }} + + RETURN_TYPES = ("PK_HOOK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Upscale" + + def doit(self, model, steps, start_end_at_step, end_end_at_step, cfg, sampler_name, + scheduler, normalize, positive, negative, schedule_for_iteration): + try: + hook = None + if schedule_for_iteration == "simple": + hook = hooks.UnsamplerHook(model, steps, start_end_at_step, end_end_at_step, cfg, sampler_name, + scheduler, normalize, positive, negative) + + return (hook, ) + except Exception as e: + print("[ERROR] UnsamplerHookProvider: 'ComfyUI Noise' custom node isn't installed. 
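Every *DetailerHookProvider node follows the same shape: doit builds a hook object from impact.hooks, and the detailer calls back into that hook on each refinement cycle. A schematic of the contract these providers assume (method names here are illustrative, not the exact hooks.py API):

class SketchDetailerHook:
    # Hypothetical skeleton: the detailer hands the hook its latent each
    # cycle so providers can inject noise, reschedule denoise, etc.
    def __init__(self, strength: float):
        self.strength = strength

    def cycle_latent(self, latent):
        return latent  # a real hook would perturb the latent here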
You must install 'BlenderNeko/ComfyUI Noise' extension to use this node.") + print(f"\t{e}") + pass + + +class NoiseInjectionHookProvider: + schedules = ["simple"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "schedule_for_iteration": (s.schedules,), + "source": (["CPU", "GPU"],), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "start_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 200.0, "step": 0.01}), + "end_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 200.0, "step": 0.01}), + }, + } + + RETURN_TYPES = ("PK_HOOK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Upscale" + + def doit(self, schedule_for_iteration, source, seed, start_strength, end_strength): + try: + hook = None + if schedule_for_iteration == "simple": + hook = hooks.InjectNoiseHook(source, seed, start_strength, end_strength) + + return (hook, ) + except Exception as e: + print("[ERROR] NoiseInjectionHookProvider: 'ComfyUI Noise' custom node isn't installed. You must install 'BlenderNeko/ComfyUI Noise' extension to use this node.") + print(f"\t{e}") + pass + + +class DenoiseScheduleHookProvider: + schedules = ["simple"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "schedule_for_iteration": (s.schedules,), + "target_denoise": ("FLOAT", {"default": 0.2, "min": 0.0, "max": 1.0, "step": 0.01}), + }, + } + + RETURN_TYPES = ("PK_HOOK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Upscale" + + def doit(self, schedule_for_iteration, target_denoise): + hook = None + if schedule_for_iteration == "simple": + hook = hooks.SimpleDenoiseScheduleHook(target_denoise) + + return (hook, ) + + +class StepsScheduleHookProvider: + schedules = ["simple"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "schedule_for_iteration": (s.schedules,), + "target_steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + }, + } + + RETURN_TYPES = ("PK_HOOK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Upscale" + + def doit(self, schedule_for_iteration, target_steps): + hook = None + if schedule_for_iteration == "simple": + hook = hooks.SimpleStepsScheduleHook(target_steps) + + return (hook, ) + + +class DetailerHookCombine: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "hook1": ("DETAILER_HOOK",), + "hook2": ("DETAILER_HOOK",), + }, + } + + RETURN_TYPES = ("DETAILER_HOOK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Upscale" + + def doit(self, hook1, hook2): + hook = hooks.DetailerHookCombine(hook1, hook2) + return (hook, ) + + +class PixelKSampleHookCombine: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "hook1": ("PK_HOOK",), + "hook2": ("PK_HOOK",), + }, + } + + RETURN_TYPES = ("PK_HOOK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Upscale" + + def doit(self, hook1, hook2): + hook = hooks.PixelKSampleHookCombine(hook1, hook2) + return (hook, ) + + +class PixelTiledKSampleUpscalerProvider: + upscale_methods = ["nearest-exact", "bilinear", "lanczos", "area"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "scale_method": (s.upscale_methods,), + "model": ("MODEL",), + "vae": ("VAE",), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, ), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS, ), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "denoise": ("FLOAT", {"default": 
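The "simple" schedule hooks (CFG, denoise, steps) interpolate from the value wired into the workflow toward their target_* widget as iterative upscaling proceeds, so later passes can run gentler settings. A linear version of that idea (the exact curve in hooks.py may differ):

def simple_schedule(base: float, target: float, step: int, total_steps: int) -> float:
    # Move from `base` at the first step to `target` at the last one.
    if total_steps <= 1:
        return target
    return base + (target - base) * (step / (total_steps - 1))

print(simple_schedule(8.0, 3.0, 0, 4))  # 8.0
print(simple_schedule(8.0, 3.0, 3, 4))  # 3.0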
1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "tile_width": ("INT", {"default": 512, "min": 320, "max": MAX_RESOLUTION, "step": 64}), + "tile_height": ("INT", {"default": 512, "min": 320, "max": MAX_RESOLUTION, "step": 64}), + "tiling_strategy": (["random", "padded", 'simple'], ), + }, + "optional": { + "upscale_model_opt": ("UPSCALE_MODEL", ), + "pk_hook_opt": ("PK_HOOK", ), + "tile_cnet_opt": ("CONTROL_NET", ), + "tile_cnet_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + } + } + + RETURN_TYPES = ("UPSCALER",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Upscale" + + def doit(self, scale_method, model, vae, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise, tile_width, tile_height, tiling_strategy, upscale_model_opt=None, + pk_hook_opt=None, tile_cnet_opt=None, tile_cnet_strength=1.0): + if "BNK_TiledKSampler" in nodes.NODE_CLASS_MAPPINGS: + upscaler = core.PixelTiledKSampleUpscaler(scale_method, model, vae, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise, + tile_width, tile_height, tiling_strategy, upscale_model_opt, pk_hook_opt, tile_cnet_opt, + tile_size=max(tile_width, tile_height), tile_cnet_strength=tile_cnet_strength) + return (upscaler, ) + else: + utils.try_install_custom_node('https://github.com/BlenderNeko/ComfyUI_TiledKSampler', + "To use 'PixelTiledKSampleUpscalerProvider' node, 'BlenderNeko/ComfyUI_TiledKSampler' extension is required.") + + raise Exception("[ERROR] PixelTiledKSampleUpscalerProvider: ComfyUI_TiledKSampler custom node isn't installed. You must install BlenderNeko/ComfyUI_TiledKSampler extension to use this node.") + + +class PixelTiledKSampleUpscalerProviderPipe: + upscale_methods = ["nearest-exact", "bilinear", "lanczos", "area"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "scale_method": (s.upscale_methods,), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, ), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS, ), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "tile_width": ("INT", {"default": 512, "min": 320, "max": MAX_RESOLUTION, "step": 64}), + "tile_height": ("INT", {"default": 512, "min": 320, "max": MAX_RESOLUTION, "step": 64}), + "tiling_strategy": (["random", "padded", 'simple'], ), + "basic_pipe": ("BASIC_PIPE",) + }, + "optional": { + "upscale_model_opt": ("UPSCALE_MODEL", ), + "pk_hook_opt": ("PK_HOOK", ), + "tile_cnet_opt": ("CONTROL_NET", ), + "tile_cnet_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + } + } + + RETURN_TYPES = ("UPSCALER",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Upscale" + + def doit(self, scale_method, seed, steps, cfg, sampler_name, scheduler, denoise, tile_width, tile_height, tiling_strategy, basic_pipe, upscale_model_opt=None, pk_hook_opt=None, + tile_cnet_opt=None, tile_cnet_strength=1.0): + if "BNK_TiledKSampler" in nodes.NODE_CLASS_MAPPINGS: + model, _, vae, positive, negative = basic_pipe + upscaler = core.PixelTiledKSampleUpscaler(scale_method, model, vae, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise, + tile_width, tile_height, tiling_strategy, upscale_model_opt, pk_hook_opt, tile_cnet_opt, + tile_size=max(tile_width, tile_height), tile_cnet_strength=tile_cnet_strength) + return (upscaler, ) + else: + print("[ERROR] 
PixelTiledKSampleUpscalerProviderPipe: ComfyUI_TiledKSampler custom node isn't installed. You must install BlenderNeko/ComfyUI_TiledKSampler extension to use this node.") + + +class PixelKSampleUpscalerProvider: + upscale_methods = ["nearest-exact", "bilinear", "lanczos", "area"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "scale_method": (s.upscale_methods,), + "model": ("MODEL",), + "vae": ("VAE",), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, ), + "scheduler": (core.SCHEDULERS, ), + "positive": ("CONDITIONING", ), + "negative": ("CONDITIONING", ), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "use_tiled_vae": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + "tile_size": ("INT", {"default": 512, "min": 320, "max": 4096, "step": 64}), + }, + "optional": { + "upscale_model_opt": ("UPSCALE_MODEL", ), + "pk_hook_opt": ("PK_HOOK", ), + "scheduler_func_opt": ("SCHEDULER_FUNC",), + } + } + + RETURN_TYPES = ("UPSCALER",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Upscale" + + def doit(self, scale_method, model, vae, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise, + use_tiled_vae, upscale_model_opt=None, pk_hook_opt=None, tile_size=512, scheduler_func_opt=None): + upscaler = core.PixelKSampleUpscaler(scale_method, model, vae, seed, steps, cfg, sampler_name, scheduler, + positive, negative, denoise, use_tiled_vae, upscale_model_opt, pk_hook_opt, + tile_size=tile_size, scheduler_func=scheduler_func_opt) + return (upscaler, ) + + +class PixelKSampleUpscalerProviderPipe(PixelKSampleUpscalerProvider): + upscale_methods = ["nearest-exact", "bilinear", "lanczos", "area"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "scale_method": (s.upscale_methods,), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, ), + "scheduler": (core.SCHEDULERS, ), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "use_tiled_vae": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + "basic_pipe": ("BASIC_PIPE",), + "tile_size": ("INT", {"default": 512, "min": 320, "max": 4096, "step": 64}), + }, + "optional": { + "upscale_model_opt": ("UPSCALE_MODEL", ), + "pk_hook_opt": ("PK_HOOK", ), + "scheduler_func_opt": ("SCHEDULER_FUNC",), + "tile_cnet_opt": ("CONTROL_NET", ), + "tile_cnet_strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + } + } + + RETURN_TYPES = ("UPSCALER",) + FUNCTION = "doit_pipe" + + CATEGORY = "ImpactPack/Upscale" + + def doit_pipe(self, scale_method, seed, steps, cfg, sampler_name, scheduler, denoise, + use_tiled_vae, basic_pipe, upscale_model_opt=None, pk_hook_opt=None, + tile_size=512, scheduler_func_opt=None, tile_cnet_opt=None, tile_cnet_strength=1.0): + model, _, vae, positive, negative = basic_pipe + upscaler = core.PixelKSampleUpscaler(scale_method, model, vae, seed, steps, cfg, sampler_name, scheduler, + positive, negative, denoise, use_tiled_vae, upscale_model_opt, pk_hook_opt, + tile_size=tile_size, scheduler_func=scheduler_func_opt, + tile_cnet_opt=tile_cnet_opt, 
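Both tiled providers gate themselves on BlenderNeko's ComfyUI_TiledKSampler by probing nodes.NODE_CLASS_MAPPINGS, and utils.try_install_custom_node lets ComfyUI-Manager offer the install. The underlying pattern, reduced to a sketch (helper name is hypothetical):

import nodes

def require_extension(node_class: str, repo: str) -> None:
    # Optional-dependency gate: fail with an actionable message up front
    # instead of crashing later inside the missing sampler.
    if node_class not in nodes.NODE_CLASS_MAPPINGS:
        raise RuntimeError(f"'{node_class}' not found. Install {repo} and restart ComfyUI.")

# require_extension("BNK_TiledKSampler", "https://github.com/BlenderNeko/ComfyUI_TiledKSampler")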
tile_cnet_strength=tile_cnet_strength) + return (upscaler, ) + + +class TwoSamplersForMaskUpscalerProvider: + upscale_methods = ["nearest-exact", "bilinear", "lanczos", "area"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "scale_method": (s.upscale_methods,), + "full_sample_schedule": ( + ["none", "interleave1", "interleave2", "interleave3", + "last1", "last2", + "interleave1+last1", "interleave2+last1", "interleave3+last1", + ],), + "use_tiled_vae": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + "base_sampler": ("KSAMPLER", ), + "mask_sampler": ("KSAMPLER", ), + "mask": ("MASK", ), + "vae": ("VAE",), + "tile_size": ("INT", {"default": 512, "min": 320, "max": 4096, "step": 64}), + }, + "optional": { + "full_sampler_opt": ("KSAMPLER",), + "upscale_model_opt": ("UPSCALE_MODEL", ), + "pk_hook_base_opt": ("PK_HOOK", ), + "pk_hook_mask_opt": ("PK_HOOK", ), + "pk_hook_full_opt": ("PK_HOOK", ), + } + } + + RETURN_TYPES = ("UPSCALER", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Upscale" + + def doit(self, scale_method, full_sample_schedule, use_tiled_vae, base_sampler, mask_sampler, mask, vae, + full_sampler_opt=None, upscale_model_opt=None, + pk_hook_base_opt=None, pk_hook_mask_opt=None, pk_hook_full_opt=None, tile_size=512): + upscaler = core.TwoSamplersForMaskUpscaler(scale_method, full_sample_schedule, use_tiled_vae, + base_sampler, mask_sampler, mask, vae, full_sampler_opt, upscale_model_opt, + pk_hook_base_opt, pk_hook_mask_opt, pk_hook_full_opt, tile_size=tile_size) + return (upscaler, ) + + +class TwoSamplersForMaskUpscalerProviderPipe: + upscale_methods = ["nearest-exact", "bilinear", "lanczos", "area"] + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "scale_method": (s.upscale_methods,), + "full_sample_schedule": ( + ["none", "interleave1", "interleave2", "interleave3", + "last1", "last2", + "interleave1+last1", "interleave2+last1", "interleave3+last1", + ],), + "use_tiled_vae": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + "base_sampler": ("KSAMPLER", ), + "mask_sampler": ("KSAMPLER", ), + "mask": ("MASK", ), + "basic_pipe": ("BASIC_PIPE",), + "tile_size": ("INT", {"default": 512, "min": 320, "max": 4096, "step": 64}), + }, + "optional": { + "full_sampler_opt": ("KSAMPLER",), + "upscale_model_opt": ("UPSCALE_MODEL", ), + "pk_hook_base_opt": ("PK_HOOK", ), + "pk_hook_mask_opt": ("PK_HOOK", ), + "pk_hook_full_opt": ("PK_HOOK", ), + } + } + + RETURN_TYPES = ("UPSCALER", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Upscale" + + def doit(self, scale_method, full_sample_schedule, use_tiled_vae, base_sampler, mask_sampler, mask, basic_pipe, + full_sampler_opt=None, upscale_model_opt=None, + pk_hook_base_opt=None, pk_hook_mask_opt=None, pk_hook_full_opt=None, tile_size=512): + + mask = make_2d_mask(mask) + + _, _, vae, _, _ = basic_pipe + upscaler = core.TwoSamplersForMaskUpscaler(scale_method, full_sample_schedule, use_tiled_vae, + base_sampler, mask_sampler, mask, vae, full_sampler_opt, upscale_model_opt, + pk_hook_base_opt, pk_hook_mask_opt, pk_hook_full_opt, tile_size=tile_size) + return (upscaler, ) + + +class IterativeLatentUpscale: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "samples": ("LATENT", ), + "upscale_factor": ("FLOAT", {"default": 1.5, "min": 1, "max": 10000, "step": 0.1}), + "steps": ("INT", {"default": 3, "min": 1, "max": 10000, "step": 1}), + "temp_prefix": ("STRING", {"default": ""}), + "upscaler": ("UPSCALER",), + "step_mode": (["simple", 
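The TwoSamplersForMask providers run a base sampler over the whole latent and a second sampler only where the mask is hot, then composite the results (full_sample_schedule additionally interleaves full-frame passes). A toy composite under that reading, with latents as plain tensors:

import torch

def composite_by_mask(base: torch.Tensor, masked: torch.Tensor, mask: torch.Tensor) -> torch.Tensor:
    # mask broadcasts over the latent; inside the mask take the mask
    # sampler's output, outside keep the base sampler's output.
    return base * (1.0 - mask) + masked * mask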
"geometric"], {"default": "simple"}) + }, + "hidden": {"unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ("LATENT", "VAE") + RETURN_NAMES = ("latent", "vae") + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Upscale" + + def doit(self, samples, upscale_factor, steps, temp_prefix, upscaler, step_mode="simple", unique_id=None): + w = samples['samples'].shape[3]*8 # image width + h = samples['samples'].shape[2]*8 # image height + + if temp_prefix == "": + temp_prefix = None + + if step_mode == "geometric": + upscale_factor_unit = pow(upscale_factor, 1.0/steps) + else: # simple + upscale_factor_unit = max(0, (upscale_factor - 1.0) / steps) + + current_latent = samples + scale = 1 + + for i in range(steps-1): + if step_mode == "geometric": + scale *= upscale_factor_unit + else: # simple + scale += upscale_factor_unit + + new_w = w*scale + new_h = h*scale + core.update_node_status(unique_id, f"{i+1}/{steps} steps | x{scale:.2f}", (i+1)/steps) + print(f"IterativeLatentUpscale[{i+1}/{steps}]: {new_w:.1f}x{new_h:.1f} (scale:{scale:.2f}) ") + step_info = i, steps + current_latent = upscaler.upscale_shape(step_info, current_latent, new_w, new_h, temp_prefix) + + if scale < upscale_factor: + new_w = w*upscale_factor + new_h = h*upscale_factor + core.update_node_status(unique_id, f"Final step | x{upscale_factor:.2f}", 1.0) + print(f"IterativeLatentUpscale[Final]: {new_w:.1f}x{new_h:.1f} (scale:{upscale_factor:.2f}) ") + step_info = steps-1, steps + current_latent = upscaler.upscale_shape(step_info, current_latent, new_w, new_h, temp_prefix) + + core.update_node_status(unique_id, "", None) + + return (current_latent, upscaler.vae) + + +class IterativeImageUpscale: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "pixels": ("IMAGE", ), + "upscale_factor": ("FLOAT", {"default": 1.5, "min": 1, "max": 10000, "step": 0.1}), + "steps": ("INT", {"default": 3, "min": 1, "max": 10000, "step": 1}), + "temp_prefix": ("STRING", {"default": ""}), + "upscaler": ("UPSCALER",), + "vae": ("VAE",), + "step_mode": (["simple", "geometric"], {"default": "simple"}) + }, + "hidden": {"unique_id": "UNIQUE_ID"} + } + + RETURN_TYPES = ("IMAGE",) + RETURN_NAMES = ("image",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Upscale" + + def doit(self, pixels, upscale_factor, steps, temp_prefix, upscaler, vae, step_mode="simple", unique_id=None): + if temp_prefix == "": + temp_prefix = None + + core.update_node_status(unique_id, "VAEEncode (first)", 0) + if upscaler.is_tiled: + latent = nodes.VAEEncodeTiled().encode(vae, pixels, upscaler.tile_size)[0] + else: + latent = nodes.VAEEncode().encode(vae, pixels)[0] + + refined_latent = IterativeLatentUpscale().doit(latent, upscale_factor, steps, temp_prefix, upscaler, step_mode, unique_id) + + core.update_node_status(unique_id, "VAEDecode (final)", 1.0) + if upscaler.is_tiled: + pixels = nodes.VAEDecodeTiled().decode(vae, refined_latent[0], upscaler.tile_size)[0] + else: + pixels = nodes.VAEDecode().decode(vae, refined_latent[0])[0] + + core.update_node_status(unique_id, "", None) + + return (pixels, ) + + +class FaceDetailerPipe: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image": ("IMAGE", ), + "detailer_pipe": ("DETAILER_PIPE",), + "guide_size": ("FLOAT", {"default": 512, "min": 64, "max": nodes.MAX_RESOLUTION, "step": 8}), + "guide_size_for": ("BOOLEAN", {"default": True, "label_on": "bbox", "label_off": "crop_region"}), + "max_size": ("FLOAT", {"default": 1024, "min": 64, "max": nodes.MAX_RESOLUTION, "step": 8}), + "seed": ("INT", {"default": 0, "min": 
0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": (core.SCHEDULERS,), + "denoise": ("FLOAT", {"default": 0.5, "min": 0.0001, "max": 1.0, "step": 0.01}), + "feather": ("INT", {"default": 5, "min": 0, "max": 100, "step": 1}), + "noise_mask": ("BOOLEAN", {"default": True, "label_on": "enabled", "label_off": "disabled"}), + "force_inpaint": ("BOOLEAN", {"default": True, "label_on": "enabled", "label_off": "disabled"}), + + "bbox_threshold": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "bbox_dilation": ("INT", {"default": 10, "min": -512, "max": 512, "step": 1}), + "bbox_crop_factor": ("FLOAT", {"default": 3.0, "min": 1.0, "max": 10, "step": 0.1}), + + "sam_detection_hint": (["center-1", "horizontal-2", "vertical-2", "rect-4", "diamond-4", "mask-area", "mask-points", "mask-point-bbox", "none"],), + "sam_dilation": ("INT", {"default": 0, "min": -512, "max": 512, "step": 1}), + "sam_threshold": ("FLOAT", {"default": 0.93, "min": 0.0, "max": 1.0, "step": 0.01}), + "sam_bbox_expansion": ("INT", {"default": 0, "min": 0, "max": 1000, "step": 1}), + "sam_mask_hint_threshold": ("FLOAT", {"default": 0.7, "min": 0.0, "max": 1.0, "step": 0.01}), + "sam_mask_hint_use_negative": (["False", "Small", "Outter"],), + + "drop_size": ("INT", {"min": 1, "max": MAX_RESOLUTION, "step": 1, "default": 10}), + "refiner_ratio": ("FLOAT", {"default": 0.2, "min": 0.0, "max": 1.0}), + + "cycle": ("INT", {"default": 1, "min": 1, "max": 10, "step": 1}), + }, + "optional": { + "inpaint_model": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + "noise_mask_feather": ("INT", {"default": 20, "min": 0, "max": 100, "step": 1}), + "scheduler_func_opt": ("SCHEDULER_FUNC",), + } + } + + RETURN_TYPES = ("IMAGE", "IMAGE", "IMAGE", "MASK", "DETAILER_PIPE", "IMAGE") + RETURN_NAMES = ("image", "cropped_refined", "cropped_enhanced_alpha", "mask", "detailer_pipe", "cnet_images") + OUTPUT_IS_LIST = (False, True, True, False, False, True) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Simple" + + def doit(self, image, detailer_pipe, guide_size, guide_size_for, max_size, seed, steps, cfg, sampler_name, scheduler, + denoise, feather, noise_mask, force_inpaint, bbox_threshold, bbox_dilation, bbox_crop_factor, + sam_detection_hint, sam_dilation, sam_threshold, sam_bbox_expansion, + sam_mask_hint_threshold, sam_mask_hint_use_negative, drop_size, refiner_ratio=None, + cycle=1, inpaint_model=False, noise_mask_feather=0, scheduler_func_opt=None): + + result_img = None + result_mask = None + result_cropped_enhanced = [] + result_cropped_enhanced_alpha = [] + result_cnet_images = [] + + if len(image) > 1: + print(f"[Impact Pack] WARN: FaceDetailer is not a node designed for video detailing. 
If you intend to perform video detailing, please use Detailer For AnimateDiff.") + + model, clip, vae, positive, negative, wildcard, bbox_detector, segm_detector, sam_model_opt, detailer_hook, \ + refiner_model, refiner_clip, refiner_positive, refiner_negative = detailer_pipe + + for i, single_image in enumerate(image): + enhanced_img, cropped_enhanced, cropped_enhanced_alpha, mask, cnet_pil_list = FaceDetailer.enhance_face( + single_image.unsqueeze(0), model, clip, vae, guide_size, guide_size_for, max_size, seed + i, steps, cfg, sampler_name, scheduler, + positive, negative, denoise, feather, noise_mask, force_inpaint, + bbox_threshold, bbox_dilation, bbox_crop_factor, + sam_detection_hint, sam_dilation, sam_threshold, sam_bbox_expansion, sam_mask_hint_threshold, + sam_mask_hint_use_negative, drop_size, bbox_detector, segm_detector, sam_model_opt, wildcard, detailer_hook, + refiner_ratio=refiner_ratio, refiner_model=refiner_model, + refiner_clip=refiner_clip, refiner_positive=refiner_positive, refiner_negative=refiner_negative, + cycle=cycle, inpaint_model=inpaint_model, noise_mask_feather=noise_mask_feather, scheduler_func_opt=scheduler_func_opt) + + result_img = torch.cat((result_img, enhanced_img), dim=0) if result_img is not None else enhanced_img + result_mask = torch.cat((result_mask, mask), dim=0) if result_mask is not None else mask + result_cropped_enhanced.extend(cropped_enhanced) + result_cropped_enhanced_alpha.extend(cropped_enhanced_alpha) + result_cnet_images.extend(cnet_pil_list) + + if len(result_cropped_enhanced) == 0: + result_cropped_enhanced = [empty_pil_tensor()] + + if len(result_cropped_enhanced_alpha) == 0: + result_cropped_enhanced_alpha = [empty_pil_tensor()] + + if len(result_cnet_images) == 0: + result_cnet_images = [empty_pil_tensor()] + + return result_img, result_cropped_enhanced, result_cropped_enhanced_alpha, result_mask, detailer_pipe, result_cnet_images + + +class MaskDetailerPipe: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image": ("IMAGE", ), + "mask": ("MASK", ), + "basic_pipe": ("BASIC_PIPE",), + + "guide_size": ("FLOAT", {"default": 512, "min": 64, "max": nodes.MAX_RESOLUTION, "step": 8}), + "guide_size_for": ("BOOLEAN", {"default": True, "label_on": "mask bbox", "label_off": "crop region"}), + "max_size": ("FLOAT", {"default": 1024, "min": 64, "max": nodes.MAX_RESOLUTION, "step": 8}), + "mask_mode": ("BOOLEAN", {"default": True, "label_on": "masked only", "label_off": "whole"}), + + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": (core.SCHEDULERS,), + "denoise": ("FLOAT", {"default": 0.5, "min": 0.0001, "max": 1.0, "step": 0.01}), + + "feather": ("INT", {"default": 5, "min": 0, "max": 100, "step": 1}), + "crop_factor": ("FLOAT", {"default": 3.0, "min": 1.0, "max": 10, "step": 0.1}), + "drop_size": ("INT", {"min": 1, "max": MAX_RESOLUTION, "step": 1, "default": 10}), + "refiner_ratio": ("FLOAT", {"default": 0.2, "min": 0.0, "max": 1.0}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 100}), + + "cycle": ("INT", {"default": 1, "min": 1, "max": 10, "step": 1}), + }, + "optional": { + "refiner_basic_pipe_opt": ("BASIC_PIPE", ), + "detailer_hook": ("DETAILER_HOOK",), + "inpaint_model": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + "noise_mask_feather": ("INT", {"default": 
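DETAILER_PIPE is a positional 14-tuple: FaceDetailer packs it with the four refiner slots set to None, and FaceDetailerPipe unpacks all fourteen, so slot order is the entire contract. A helper that documents that order (names taken from the pack/unpack sites above; the helper itself is illustrative):

def split_detailer_pipe(pipe):
    (model, clip, vae, positive, negative, wildcard,
     bbox_detector, segm_detector, sam_model_opt, detailer_hook,
     refiner_model, refiner_clip, refiner_positive, refiner_negative) = pipe
    return {"model": model, "refiner_model": refiner_model}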
20, "min": 0, "max": 100, "step": 1}), + "bbox_fill": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + "contour_fill": ("BOOLEAN", {"default": True, "label_on": "enabled", "label_off": "disabled"}), + "scheduler_func_opt": ("SCHEDULER_FUNC",), + } + } + + RETURN_TYPES = ("IMAGE", "IMAGE", "IMAGE", "BASIC_PIPE", "BASIC_PIPE") + RETURN_NAMES = ("image", "cropped_refined", "cropped_enhanced_alpha", "basic_pipe", "refiner_basic_pipe_opt") + OUTPUT_IS_LIST = (False, True, True, False, False) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detailer" + + def doit(self, image, mask, basic_pipe, guide_size, guide_size_for, max_size, mask_mode, + seed, steps, cfg, sampler_name, scheduler, denoise, + feather, crop_factor, drop_size, refiner_ratio, batch_size, cycle=1, + refiner_basic_pipe_opt=None, detailer_hook=None, inpaint_model=False, noise_mask_feather=0, + bbox_fill=False, contour_fill=True, scheduler_func_opt=None): + + if len(image) > 1: + raise Exception('[Impact Pack] ERROR: MaskDetailer does not allow image batches.\nPlease refer to https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/batching-detailer.md for more information.') + + model, clip, vae, positive, negative = basic_pipe + + if refiner_basic_pipe_opt is None: + refiner_model, refiner_clip, refiner_positive, refiner_negative = None, None, None, None + else: + refiner_model, refiner_clip, _, refiner_positive, refiner_negative = refiner_basic_pipe_opt + + # create segs + if mask is not None: + mask = make_2d_mask(mask) + segs = core.mask_to_segs(mask, False, crop_factor, bbox_fill, drop_size, is_contour=contour_fill) + else: + segs = ((image.shape[1], image.shape[2]), []) + + enhanced_img_batch = None + cropped_enhanced_list = [] + cropped_enhanced_alpha_list = [] + + for i in range(batch_size): + if mask is not None: + enhanced_img, _, cropped_enhanced, cropped_enhanced_alpha, _, _ = \ + DetailerForEach.do_detail(image, segs, model, clip, vae, guide_size, guide_size_for, max_size, seed+i, steps, + cfg, sampler_name, scheduler, positive, negative, denoise, feather, mask_mode, + force_inpaint=True, wildcard_opt=None, detailer_hook=detailer_hook, + refiner_ratio=refiner_ratio, refiner_model=refiner_model, refiner_clip=refiner_clip, + refiner_positive=refiner_positive, refiner_negative=refiner_negative, + cycle=cycle, inpaint_model=inpaint_model, noise_mask_feather=noise_mask_feather, scheduler_func_opt=scheduler_func_opt) + else: + enhanced_img, cropped_enhanced, cropped_enhanced_alpha = image, [], [] + + if enhanced_img_batch is None: + enhanced_img_batch = enhanced_img + else: + enhanced_img_batch = torch.cat((enhanced_img_batch, enhanced_img), dim=0) + + cropped_enhanced_list += cropped_enhanced + cropped_enhanced_alpha_list += cropped_enhanced_alpha + + # set fallback image + if len(cropped_enhanced_list) == 0: + cropped_enhanced_list = [empty_pil_tensor()] + + if len(cropped_enhanced_alpha_list) == 0: + cropped_enhanced_alpha_list = [empty_pil_tensor()] + + return enhanced_img_batch, cropped_enhanced_list, cropped_enhanced_alpha_list, basic_pipe, refiner_basic_pipe_opt + + +class DetailerForEachTest(DetailerForEach): + RETURN_TYPES = ("IMAGE", "IMAGE", "IMAGE", "IMAGE", "IMAGE") + RETURN_NAMES = ("image", "cropped", "cropped_refined", "cropped_refined_alpha", "cnet_images") + OUTPUT_IS_LIST = (False, True, True, True, True) + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detailer" + + def doit(self, image, segs, model, clip, vae, guide_size, 
guide_size_for, max_size, seed, steps, cfg, sampler_name, + scheduler, positive, negative, denoise, feather, noise_mask, force_inpaint, wildcard, detailer_hook=None, + cycle=1, inpaint_model=False, noise_mask_feather=0, scheduler_func_opt=None): + + if len(image) > 1: + raise Exception('[Impact Pack] ERROR: DetailerForEach does not allow image batches.\nPlease refer to https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/batching-detailer.md for more information.') + + enhanced_img, cropped, cropped_enhanced, cropped_enhanced_alpha, cnet_pil_list, new_segs = \ + DetailerForEach.do_detail(image, segs, model, clip, vae, guide_size, guide_size_for, max_size, seed, steps, + cfg, sampler_name, scheduler, positive, negative, denoise, feather, noise_mask, + force_inpaint, wildcard, detailer_hook, + cycle=cycle, inpaint_model=inpaint_model, noise_mask_feather=noise_mask_feather, scheduler_func_opt=scheduler_func_opt) + + # set fallback image + if len(cropped) == 0: + cropped = [empty_pil_tensor()] + + if len(cropped_enhanced) == 0: + cropped_enhanced = [empty_pil_tensor()] + + if len(cropped_enhanced_alpha) == 0: + cropped_enhanced_alpha = [empty_pil_tensor()] + + if len(cnet_pil_list) == 0: + cnet_pil_list = [empty_pil_tensor()] + + return enhanced_img, cropped, cropped_enhanced, cropped_enhanced_alpha, cnet_pil_list + + +class DetailerForEachTestPipe(DetailerForEachPipe): + RETURN_TYPES = ("IMAGE", "SEGS", "BASIC_PIPE", "IMAGE", "IMAGE", "IMAGE", "IMAGE", ) + RETURN_NAMES = ("image", "segs", "basic_pipe", "cropped", "cropped_refined", "cropped_refined_alpha", 'cnet_images') + OUTPUT_IS_LIST = (False, False, False, True, True, True, True) + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detailer" + + def doit(self, image, segs, guide_size, guide_size_for, max_size, seed, steps, cfg, sampler_name, scheduler, + denoise, feather, noise_mask, force_inpaint, basic_pipe, wildcard, cycle=1, + refiner_ratio=None, detailer_hook=None, refiner_basic_pipe_opt=None, inpaint_model=False, noise_mask_feather=0, scheduler_func_opt=None): + + if len(image) > 1: + raise Exception('[Impact Pack] ERROR: DetailerForEach does not allow image batches.\nPlease refer to https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/batching-detailer.md for more information.') + + model, clip, vae, positive, negative = basic_pipe + + if refiner_basic_pipe_opt is None: + refiner_model, refiner_clip, refiner_positive, refiner_negative = None, None, None, None + else: + refiner_model, refiner_clip, _, refiner_positive, refiner_negative = refiner_basic_pipe_opt + + enhanced_img, cropped, cropped_enhanced, cropped_enhanced_alpha, cnet_pil_list, new_segs = \ + DetailerForEach.do_detail(image, segs, model, clip, vae, guide_size, guide_size_for, max_size, seed, steps, cfg, + sampler_name, scheduler, positive, negative, denoise, feather, noise_mask, + force_inpaint, wildcard, detailer_hook, + refiner_ratio=refiner_ratio, refiner_model=refiner_model, + refiner_clip=refiner_clip, refiner_positive=refiner_positive, + refiner_negative=refiner_negative, + cycle=cycle, inpaint_model=inpaint_model, noise_mask_feather=noise_mask_feather, scheduler_func_opt=scheduler_func_opt) + + # set fallback image + if len(cropped) == 0: + cropped = [empty_pil_tensor()] + + if len(cropped_enhanced) == 0: + cropped_enhanced = [empty_pil_tensor()] + + if len(cropped_enhanced_alpha) == 0: + cropped_enhanced_alpha = [empty_pil_tensor()] + + if len(cnet_pil_list) == 0: + cnet_pil_list 
= [empty_pil_tensor()] + + return enhanced_img, new_segs, basic_pipe, cropped, cropped_enhanced, cropped_enhanced_alpha, cnet_pil_list + + +class SegsBitwiseAndMask: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS",), + "mask": ("MASK",), + } + } + + RETURN_TYPES = ("SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Operation" + + def doit(self, segs, mask): + return (core.segs_bitwise_and_mask(segs, mask), ) + + +class SegsBitwiseAndMaskForEach: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS",), + "masks": ("MASK",), + } + } + + RETURN_TYPES = ("SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Operation" + + def doit(self, segs, masks): + return (core.apply_mask_to_each_seg(segs, masks), ) + + +class BitwiseAndMaskForEach: + @classmethod + def INPUT_TYPES(s): + return {"required": + { + "base_segs": ("SEGS",), + "mask_segs": ("SEGS",), + } + } + + RETURN_TYPES = ("SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Operation" + + def doit(self, base_segs, mask_segs): + mask = core.segs_to_combined_mask(mask_segs) + mask = make_3d_mask(mask) + + return SegsBitwiseAndMask().doit(base_segs, mask) + + +class SubtractMaskForEach: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "base_segs": ("SEGS",), + "mask_segs": ("SEGS",), + } + } + + RETURN_TYPES = ("SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Operation" + + def doit(self, base_segs, mask_segs): + mask = core.segs_to_combined_mask(mask_segs) + mask = make_3d_mask(mask) + return (core.segs_bitwise_subtract_mask(base_segs, mask), ) + + +class ToBinaryMask: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "mask": ("MASK",), + "threshold": ("INT", {"default": 20, "min": 1, "max": 255}), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Operation" + + def doit(self, mask, threshold): + mask = to_binary_mask(mask, threshold/255.0) + return (mask,) + + +class BitwiseAndMask: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "mask1": ("MASK",), + "mask2": ("MASK",), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Operation" + + def doit(self, mask1, mask2): + mask = bitwise_and_masks(mask1, mask2) + return (mask,) + + +class SubtractMask: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "mask1": ("MASK", ), + "mask2": ("MASK", ), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Operation" + + def doit(self, mask1, mask2): + mask = subtract_masks(mask1, mask2) + return (mask,) + + +class AddMask: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "mask1": ("MASK",), + "mask2": ("MASK",), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Operation" + + def doit(self, mask1, mask2): + mask = add_masks(mask1, mask2) + return (mask,) + + +import nodes + + +def get_image_hash(arr): + split_index1 = arr.shape[0] // 2 + split_index2 = arr.shape[1] // 2 + part1 = arr[:split_index1, :split_index2] + part2 = arr[:split_index1, split_index2:] + part3 = arr[split_index1:, :split_index2] + part4 = arr[split_index1:, split_index2:] + + # sum each quadrant + sum1 = np.sum(part1) + sum2 = np.sum(part2) + sum3 = np.sum(part3) + sum4 = np.sum(part4) + + return hash((sum1, sum2, sum3, sum4)) + + +def get_file_item(base_type, path): + # strip ComfyUI's " [output]" / " [input]" / " [temp]" annotation suffix + path_type = base_type + + if path.endswith("[output]"): + path_type = "output" + path = path[:-9] + elif path.endswith("[input]"): + path_type = "input" + path = path[:-8]
+ elif path == "[temp]": + path_type = "temp" + path = path[:-7] + + subfolder = os.path.dirname(path) + filename = os.path.basename(path) + + return { + "filename": filename, + "subfolder": subfolder, + "type": path_type + } + + +class ImageReceiver: + @classmethod + def INPUT_TYPES(s): + input_dir = folder_paths.get_input_directory() + files = [f for f in os.listdir(input_dir) if os.path.isfile(os.path.join(input_dir, f))] + return {"required": { + "image": (sorted(files), ), + "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "save_to_workflow": ("BOOLEAN", {"default": False}), + "image_data": ("STRING", {"multiline": False}), + "trigger_always": ("BOOLEAN", {"default": False, "label_on": "enable", "label_off": "disable"}), + }, + } + + FUNCTION = "doit" + + RETURN_TYPES = ("IMAGE", "MASK") + + CATEGORY = "ImpactPack/Util" + + def doit(self, image, link_id, save_to_workflow, image_data, trigger_always): + if save_to_workflow: + try: + image_data = base64.b64decode(image_data.split(",")[1]) + i = Image.open(BytesIO(image_data)) + i = ImageOps.exif_transpose(i) + image = i.convert("RGB") + image = np.array(image).astype(np.float32) / 255.0 + image = torch.from_numpy(image)[None,] + if 'A' in i.getbands(): + mask = np.array(i.getchannel('A')).astype(np.float32) / 255.0 + mask = 1. - torch.from_numpy(mask) + else: + mask = torch.zeros((64, 64), dtype=torch.float32, device="cpu") + return (image, mask.unsqueeze(0)) + except Exception as e: + print(f"[WARN] ComfyUI-Impact-Pack: ImageReceiver - invalid 'image_data'") + mask = torch.zeros((64, 64), dtype=torch.float32, device="cpu") + return (empty_pil_tensor(64, 64), mask, ) + else: + return nodes.LoadImage().load_image(image) + + @classmethod + def VALIDATE_INPUTS(s, image, link_id, save_to_workflow, image_data, trigger_always): + if image != '#DATA' and not folder_paths.exists_annotated_filepath(image) or image.startswith("/") or ".." 
in image: + return "Invalid image file: {}".format(image) + + return True + + @classmethod + def IS_CHANGED(s, image, link_id, save_to_workflow, image_data, trigger_always): + if trigger_always: + return float("NaN") + else: + if save_to_workflow: + return hash(image_data) + else: + return hash(image) + + +from server import PromptServer + +class ImageSender(nodes.PreviewImage): + @classmethod + def INPUT_TYPES(s): + return {"required": { + "images": ("IMAGE", ), + "filename_prefix": ("STRING", {"default": "ImgSender"}), + "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + OUTPUT_NODE = True + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, images, filename_prefix="ImgSender", link_id=0, prompt=None, extra_pnginfo=None): + result = nodes.PreviewImage().save_images(images, filename_prefix, prompt, extra_pnginfo) + PromptServer.instance.send_sync("img-send", {"link_id": link_id, "images": result['ui']['images']}) + return result + + +class LatentReceiver: + def __init__(self): + self.input_dir = folder_paths.get_input_directory() + self.type = "input" + + @classmethod + def INPUT_TYPES(s): + def check_file_extension(x): + return x.endswith(".latent") or x.endswith(".latent.png") + + input_dir = folder_paths.get_input_directory() + files = [f for f in os.listdir(input_dir) if os.path.isfile(os.path.join(input_dir, f)) and check_file_extension(f)] + return {"required": { + "latent": (sorted(files), ), + "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "trigger_always": ("BOOLEAN", {"default": False, "label_on": "enable", "label_off": "disable"}), + }, + } + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + RETURN_TYPES = ("LATENT",) + + @staticmethod + def load_preview_latent(image_path): + if not os.path.exists(image_path): + return None + + image = Image.open(image_path) + exif_data = piexif.load(image.info["exif"]) + + if piexif.ExifIFD.UserComment in exif_data["Exif"]: + compressed_data = exif_data["Exif"][piexif.ExifIFD.UserComment] + compressed_data_io = BytesIO(compressed_data) + with zipfile.ZipFile(compressed_data_io, mode='r') as archive: + tensor_bytes = archive.read("latent") + tensor = safetensors.torch.load(tensor_bytes) + return {"samples": tensor['latent_tensor']} + return None + + def parse_filename(self, filename): + pattern = r"^(.*)/(.*?)\[(.*)\]\s*$" + match = re.match(pattern, filename) + if match: + subfolder = match.group(1) + filename = match.group(2).rstrip() + file_type = match.group(3) + else: + subfolder = '' + file_type = self.type + + return {'filename': filename, 'subfolder': subfolder, 'type': file_type} + + def doit(self, **kwargs): + if 'latent' not in kwargs: + return (torch.zeros([1, 4, 8, 8]), ) + + latent = kwargs['latent'] + + latent_name = latent + latent_path = folder_paths.get_annotated_filepath(latent_name) + + if latent.endswith(".latent"): + latent = safetensors.torch.load_file(latent_path, device="cpu") + multiplier = 1.0 + if "latent_format_version_0" not in latent: + multiplier = 1.0 / 0.18215 + samples = {"samples": latent["latent_tensor"].float() * multiplier} + else: + samples = LatentReceiver.load_preview_latent(latent_path) + + if samples is None: + samples = {'samples': torch.zeros([1, 4, 8, 8])} + + preview = self.parse_filename(latent_name) + + return { + 'ui': {"images": [preview]}, + 'result': (samples, ) + } + + @classmethod + def IS_CHANGED(s, latent, link_id, 
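Returning float("NaN") from IS_CHANGED is the idiom ImageReceiver and LatentReceiver use to force re-execution: NaN never compares equal, even to itself, so ComfyUI's cache always sees a changed value. The one-liner that makes this work:

prev, curr = float("nan"), float("nan")
print(prev == curr)  # False, so the cached result is always invalidated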
trigger_always): + if trigger_always: + return float("NaN") + else: + image_path = folder_paths.get_annotated_filepath(latent) + m = hashlib.sha256() + with open(image_path, 'rb') as f: + m.update(f.read()) + return m.digest().hex() + + @classmethod + def VALIDATE_INPUTS(s, latent, link_id, trigger_always): + if not folder_paths.exists_annotated_filepath(latent) or latent.startswith("/") or ".." in latent: + return "Invalid latent file: {}".format(latent) + return True + + +class LatentSender(nodes.SaveLatent): + def __init__(self): + super().__init__() + self.output_dir = folder_paths.get_temp_directory() + self.type = "temp" + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "samples": ("LATENT", ), + "filename_prefix": ("STRING", {"default": "latents/LatentSender"}), + "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "preview_method": (["Latent2RGB-SDXL", "Latent2RGB-SD15", "TAESDXL", "TAESD15"],) + }, + "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"}, + } + + OUTPUT_NODE = True + + RETURN_TYPES = () + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + @staticmethod + def save_to_file(tensor_bytes, prompt, extra_pnginfo, image, image_path): + compressed_data = BytesIO() + with zipfile.ZipFile(compressed_data, mode='w') as archive: + archive.writestr("latent", tensor_bytes) + image = image.copy() + exif_data = {"Exif": {piexif.ExifIFD.UserComment: compressed_data.getvalue()}} + + metadata = PngInfo() + if prompt is not None: + metadata.add_text("prompt", json.dumps(prompt)) + if extra_pnginfo is not None: + for x in extra_pnginfo: + metadata.add_text(x, json.dumps(extra_pnginfo[x])) + + exif_bytes = piexif.dump(exif_data) + image.save(image_path, format='png', exif=exif_bytes, pnginfo=metadata, optimize=True) + + @staticmethod + def prepare_preview(latent_tensor, preview_method): + from comfy.cli_args import LatentPreviewMethod + import comfy.latent_formats as latent_formats + + lower_bound = 128 + upper_bound = 256 + + if preview_method == "Latent2RGB-SD15": + latent_format = latent_formats.SD15() + method = LatentPreviewMethod.Latent2RGB + elif preview_method == "TAESD15": + latent_format = latent_formats.SD15() + method = LatentPreviewMethod.TAESD + elif preview_method == "TAESDXL": + latent_format = latent_formats.SDXL() + method = LatentPreviewMethod.TAESD + else: # preview_method == "Latent2RGB-SDXL" + latent_format = latent_formats.SDXL() + method = LatentPreviewMethod.Latent2RGB + + previewer = core.get_previewer("cpu", latent_format=latent_format, force=True, method=method) + + image = previewer.decode_latent_to_preview(latent_tensor) + min_size = min(image.size[0], image.size[1]) + max_size = max(image.size[0], image.size[1]) + + scale_factor = 1 + if max_size > upper_bound: + scale_factor = upper_bound/max_size + + # prevent too small preview + if min_size*scale_factor < lower_bound: + scale_factor = lower_bound/min_size + + w = int(image.size[0] * scale_factor) + h = int(image.size[1] * scale_factor) + + image = image.resize((w, h), resample=Image.NEAREST) + + return LatentSender.attach_format_text(image) + + @staticmethod + def attach_format_text(image): + width_a, height_a = image.size + + letter_image = Image.open(latent_letter_path) + width_b, height_b = letter_image.size + + new_width = max(width_a, width_b) + new_height = height_a + height_b + + new_image = Image.new('RGB', (new_width, new_height), (0, 0, 0)) + + offset_x = (new_width - width_b) // 2 + offset_y = (height_a + (new_height - height_a - 
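prepare_preview clamps the preview so the long side stays within 256 px while the short side is kept at or above 128 px; for extreme aspect ratios the second rule overrides the first. The sizing rule as a standalone function:

def preview_size(w: int, h: int, lower: int = 128, upper: int = 256):
    scale = 1.0
    if max(w, h) > upper:
        scale = upper / max(w, h)
    if min(w, h) * scale < lower:  # prevent too-small previews
        scale = lower / min(w, h)
    return int(w * scale), int(h * scale)

print(preview_size(1024, 1024))  # (256, 256)
print(preview_size(2048, 256))   # (1024, 128); the short side wins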
height_b) // 2) + new_image.paste(letter_image, (offset_x, offset_y)) + + new_image.paste(image, (0, 0)) + + return new_image + + def doit(self, samples, filename_prefix="latents/LatentSender", link_id=0, preview_method="Latent2RGB-SDXL", prompt=None, extra_pnginfo=None): + full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, self.output_dir) + + # load preview + preview = LatentSender.prepare_preview(samples['samples'], preview_method) + + # support save metadata for latent sharing + file = f"{filename}_{counter:05}_.latent.png" + fullpath = os.path.join(full_output_folder, file) + + output = {"latent_tensor": samples["samples"]} + + tensor_bytes = safetensors.torch.save(output) + LatentSender.save_to_file(tensor_bytes, prompt, extra_pnginfo, preview, fullpath) + + latent_path = { + 'filename': file, + 'subfolder': subfolder, + 'type': self.type + } + + PromptServer.instance.send_sync("latent-send", {"link_id": link_id, "images": [latent_path]}) + + return {'ui': {'images': [latent_path]}} + + +class ImpactWildcardProcessor: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "wildcard_text": ("STRING", {"multiline": True, "dynamicPrompts": False}), + "populated_text": ("STRING", {"multiline": True, "dynamicPrompts": False}), + "mode": ("BOOLEAN", {"default": True, "label_on": "Populate", "label_off": "Fixed"}), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "Select to add Wildcard": (["Select the Wildcard to add to the text"],), + }, + } + + CATEGORY = "ImpactPack/Prompt" + + RETURN_TYPES = ("STRING", ) + FUNCTION = "doit" + + @staticmethod + def process(**kwargs): + return impact.wildcards.process(**kwargs) + + def doit(self, *args, **kwargs): + populated_text = ImpactWildcardProcessor.process(text=kwargs['populated_text'], seed=kwargs['seed']) + return (populated_text, ) + + +class ImpactWildcardEncode: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model": ("MODEL",), + "clip": ("CLIP",), + "wildcard_text": ("STRING", {"multiline": True, "dynamicPrompts": False}), + "populated_text": ("STRING", {"multiline": True, "dynamicPrompts": False}), + "mode": ("BOOLEAN", {"default": True, "label_on": "Populate", "label_off": "Fixed"}), + "Select to add LoRA": (["Select the LoRA to add to the text"] + folder_paths.get_filename_list("loras"), ), + "Select to add Wildcard": (["Select the Wildcard to add to the text"], ), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + }, + } + + CATEGORY = "ImpactPack/Prompt" + + RETURN_TYPES = ("MODEL", "CLIP", "CONDITIONING", "STRING") + RETURN_NAMES = ("model", "clip", "conditioning", "populated_text") + FUNCTION = "doit" + + @staticmethod + def process_with_loras(**kwargs): + return impact.wildcards.process_with_loras(**kwargs) + + @staticmethod + def get_wildcard_list(): + return impact.wildcards.get_wildcard_list() + + def doit(self, *args, **kwargs): + populated = kwargs['populated_text'] + processed = [] + model, clip, conditioning = impact.wildcards.process_with_loras(wildcard_opt=populated, model=kwargs['model'], clip=kwargs['clip'], seed=kwargs['seed'], processed=processed) + return model, clip, conditioning, processed[0] + + +class ImpactSchedulerAdapter: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "scheduler": (comfy.samplers.KSampler.SCHEDULERS, {"defaultInput": True, }), + "extra_scheduler": (['None', 'AYS SDXL', 'AYS SD1', 'AYS SVD', 'GITS[coeff=1.2]'],), + }} + + CATEGORY = 
"ImpactPack/Util" + + RETURN_TYPES = (core.SCHEDULERS,) + RETURN_NAMES = ("scheduler",) + + FUNCTION = "doit" + + def doit(self, scheduler, extra_scheduler): + if extra_scheduler != 'None': + return (extra_scheduler,) + + return (scheduler,) + diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/impact_sampling.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/impact_sampling.py new file mode 100644 index 0000000000000000000000000000000000000000..ea1eb45f47c7d0e81888e901b3f0367687c17f2c --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/impact_sampling.py @@ -0,0 +1,355 @@ +import nodes +from comfy.k_diffusion import sampling as k_diffusion_sampling +from comfy import samplers +from comfy_extras import nodes_custom_sampler +import latent_preview +import comfy +import torch +import math +import comfy.model_management as mm + + +try: + from comfy_extras.nodes_custom_sampler import Noise_EmptyNoise, Noise_RandomNoise + import node_helpers +except: + print(f"\n#############################################\n[Impact Pack] ComfyUI is an outdated version.\n#############################################\n") + raise Exception("[Impact Pack] ComfyUI is an outdated version.") + + +def calculate_sigmas(model, sampler, scheduler, steps): + discard_penultimate_sigma = False + if sampler in ['dpm_2', 'dpm_2_ancestral', 'uni_pc', 'uni_pc_bh2']: + steps += 1 + discard_penultimate_sigma = True + + if scheduler.startswith('AYS'): + sigmas = nodes.NODE_CLASS_MAPPINGS['AlignYourStepsScheduler']().get_sigmas(scheduler[4:], steps, denoise=1.0)[0] + elif scheduler.startswith('GITS[coeff='): + sigmas = nodes.NODE_CLASS_MAPPINGS['GITSScheduler']().get_sigmas(float(scheduler[11:-1]), steps, denoise=1.0)[0] + else: + sigmas = samplers.calculate_sigmas(model.get_model_object("model_sampling"), scheduler, steps) + + if discard_penultimate_sigma: + sigmas = torch.cat([sigmas[:-2], sigmas[-1:]]) + return sigmas + + +def get_noise_sampler(x, cpu, total_sigmas, **kwargs): + if 'extra_args' in kwargs and 'seed' in kwargs['extra_args']: + sigma_min, sigma_max = total_sigmas[total_sigmas > 0].min(), total_sigmas.max() + seed = kwargs['extra_args'].get("seed", None) + return k_diffusion_sampling.BrownianTreeNoiseSampler(x, sigma_min, sigma_max, seed=seed, cpu=cpu) + return None + + +def ksampler(sampler_name, total_sigmas, extra_options={}, inpaint_options={}): + if sampler_name == "dpmpp_sde": + def sample_dpmpp_sde(model, x, sigmas, **kwargs): + noise_sampler = get_noise_sampler(x, True, total_sigmas, **kwargs) + if noise_sampler is not None: + kwargs['noise_sampler'] = noise_sampler + + return k_diffusion_sampling.sample_dpmpp_sde(model, x, sigmas, **kwargs) + + sampler_function = sample_dpmpp_sde + + elif sampler_name == "dpmpp_sde_gpu": + def sample_dpmpp_sde(model, x, sigmas, **kwargs): + noise_sampler = get_noise_sampler(x, False, total_sigmas, **kwargs) + if noise_sampler is not None: + kwargs['noise_sampler'] = noise_sampler + + return k_diffusion_sampling.sample_dpmpp_sde_gpu(model, x, sigmas, **kwargs) + + sampler_function = sample_dpmpp_sde + + elif sampler_name == "dpmpp_2m_sde": + def sample_dpmpp_sde(model, x, sigmas, **kwargs): + noise_sampler = get_noise_sampler(x, True, total_sigmas, **kwargs) + if noise_sampler is not None: + kwargs['noise_sampler'] = noise_sampler + + return k_diffusion_sampling.sample_dpmpp_2m_sde(model, x, sigmas, **kwargs) + + sampler_function = sample_dpmpp_sde + + elif sampler_name == "dpmpp_2m_sde_gpu": + def 
sample_dpmpp_sde(model, x, sigmas, **kwargs): + noise_sampler = get_noise_sampler(x, False, total_sigmas, **kwargs) + if noise_sampler is not None: + kwargs['noise_sampler'] = noise_sampler + + return k_diffusion_sampling.sample_dpmpp_2m_sde_gpu(model, x, sigmas, **kwargs) + + sampler_function = sample_dpmpp_sde + + elif sampler_name == "dpmpp_3m_sde": + def sample_dpmpp_sde(model, x, sigmas, **kwargs): + noise_sampler = get_noise_sampler(x, True, total_sigmas, **kwargs) + if noise_sampler is not None: + kwargs['noise_sampler'] = noise_sampler + + return k_diffusion_sampling.sample_dpmpp_3m_sde(model, x, sigmas, **kwargs) + + sampler_function = sample_dpmpp_sde + + elif sampler_name == "dpmpp_3m_sde_gpu": + def sample_dpmpp_sde(model, x, sigmas, **kwargs): + noise_sampler = get_noise_sampler(x, False, total_sigmas, **kwargs) + if noise_sampler is not None: + kwargs['noise_sampler'] = noise_sampler + + return k_diffusion_sampling.sample_dpmpp_3m_sde_gpu(model, x, sigmas, **kwargs) + + sampler_function = sample_dpmpp_sde + + else: + return comfy.samplers.sampler_object(sampler_name) + + return samplers.KSAMPLER(sampler_function, extra_options, inpaint_options) + + +# modified version of SamplerCustom.sample +def sample_with_custom_noise(model, add_noise, noise_seed, cfg, positive, negative, sampler, sigmas, latent_image, noise=None, callback=None): + latent = latent_image + latent_image = latent["samples"] + + if hasattr(comfy.sample, 'fix_empty_latent_channels'): + latent_image = comfy.sample.fix_empty_latent_channels(model, latent_image) + + out = latent.copy() + out['samples'] = latent_image + + if noise is None: + if not add_noise: + noise = Noise_EmptyNoise().generate_noise(out) + else: + noise = Noise_RandomNoise(noise_seed).generate_noise(out) + + noise_mask = None + if "noise_mask" in latent: + noise_mask = latent["noise_mask"] + + x0_output = {} + preview_callback = latent_preview.prepare_callback(model, sigmas.shape[-1] - 1, x0_output) + + if callback is not None: + def touched_callback(step, x0, x, total_steps): + callback(step, x0, x, total_steps) + preview_callback(step, x0, x, total_steps) + else: + touched_callback = preview_callback + + disable_pbar = not comfy.utils.PROGRESS_BAR_ENABLED + + device = mm.get_torch_device() + + noise = noise.to(device) + latent_image = latent_image.to(device) + if noise_mask is not None: + noise_mask = noise_mask.to(device) + + if negative != 'NegativePlaceholder': + # This way is incompatible with Advanced ControlNet, yet. + # guider = comfy.samplers.CFGGuider(model) + # guider.set_conds(positive, negative) + # guider.set_cfg(cfg) + samples = comfy.sample.sample_custom(model, noise, cfg, sampler, sigmas, positive, negative, latent_image, + noise_mask=noise_mask, callback=touched_callback, + disable_pbar=disable_pbar, seed=noise_seed) + else: + guider = nodes_custom_sampler.Guider_Basic(model) + positive = node_helpers.conditioning_set_values(positive, {"guidance": cfg}) + guider.set_conds(positive) + samples = guider.sample(noise, latent_image, sampler, sigmas, denoise_mask=noise_mask, callback=touched_callback, disable_pbar=disable_pbar, seed=noise_seed) + + samples = samples.to(comfy.model_management.intermediate_device()) + + out["samples"] = samples + if "x0" in x0_output: + out_denoised = latent.copy() + out_denoised["samples"] = model.model.process_latent_out(x0_output["x0"].cpu()) + else: + out_denoised = out + return out, out_denoised + + +# When sampling one step at a time, it mitigates the problem. 
(especially for _sde series samplers) +def separated_sample(model, add_noise, seed, steps, cfg, sampler_name, scheduler, positive, negative, + latent_image, start_at_step, end_at_step, return_with_leftover_noise, sigma_ratio=1.0, sampler_opt=None, noise=None, callback=None, scheduler_func=None): + + if scheduler_func is not None: + total_sigmas = scheduler_func(model, sampler_name, steps) + else: + if sampler_opt is None: + total_sigmas = calculate_sigmas(model, sampler_name, scheduler, steps) + else: + total_sigmas = calculate_sigmas(model, "", scheduler, steps) + + sigmas = total_sigmas + + if end_at_step is not None and end_at_step < (len(total_sigmas) - 1): + sigmas = total_sigmas[:end_at_step + 1] + if not return_with_leftover_noise: + sigmas[-1] = 0 + + if start_at_step is not None: + if start_at_step < (len(sigmas) - 1): + sigmas = sigmas[start_at_step:] * sigma_ratio + else: + if latent_image is not None: + return latent_image + else: + return {'samples': torch.zeros_like(noise)} + + if sampler_opt is None: + impact_sampler = ksampler(sampler_name, total_sigmas) + else: + impact_sampler = sampler_opt + + if len(sigmas) == 0 or (len(sigmas) == 1 and sigmas[0] == 0): + return latent_image + + res = sample_with_custom_noise(model, add_noise, seed, cfg, positive, negative, impact_sampler, sigmas, latent_image, noise=noise, callback=callback) + + if return_with_leftover_noise: + return res[0] + else: + return res[1] + + +def impact_sample(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise=1.0, sigma_ratio=1.0, sampler_opt=None, noise=None, scheduler_func=None): + advanced_steps = math.floor(steps / denoise) + start_at_step = advanced_steps - steps + end_at_step = start_at_step + steps + return separated_sample(model, True, seed, advanced_steps, cfg, sampler_name, scheduler, positive, negative, latent_image, + start_at_step, end_at_step, False, scheduler_func=scheduler_func) + + +def ksampler_wrapper(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise, + refiner_ratio=None, refiner_model=None, refiner_clip=None, refiner_positive=None, refiner_negative=None, sigma_factor=1.0, noise=None, scheduler_func=None): + + if refiner_ratio is None or refiner_model is None or refiner_clip is None or refiner_positive is None or refiner_negative is None: + # Use separated_sample instead of KSampler for `AYS scheduler` + # refined_latent = nodes.KSampler().sample(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise * sigma_factor)[0] + + advanced_steps = math.floor(steps / denoise) + start_at_step = advanced_steps - steps + end_at_step = start_at_step + steps + + refined_latent = separated_sample(model, True, seed, advanced_steps, cfg, sampler_name, scheduler, + positive, negative, latent_image, start_at_step, end_at_step, False, + sigma_ratio=sigma_factor, noise=noise, scheduler_func=scheduler_func) + else: + advanced_steps = math.floor(steps / denoise) + start_at_step = advanced_steps - steps + end_at_step = start_at_step + math.floor(steps * (1.0 - refiner_ratio)) + + # print(f"pre: {start_at_step} .. 
{end_at_step} / {advanced_steps}") + temp_latent = separated_sample(model, True, seed, advanced_steps, cfg, sampler_name, scheduler, + positive, negative, latent_image, start_at_step, end_at_step, True, + sigma_ratio=sigma_factor, noise=noise, scheduler_func=scheduler_func) + + if 'noise_mask' in latent_image: + # noise_latent = \ + # impact_sampling.separated_sample(refiner_model, "enable", seed, advanced_steps, cfg, sampler_name, + # scheduler, refiner_positive, refiner_negative, latent_image, end_at_step, + # end_at_step, "enable") + + latent_compositor = nodes.NODE_CLASS_MAPPINGS['LatentCompositeMasked']() + temp_latent = latent_compositor.composite(latent_image, temp_latent, 0, 0, False, latent_image['noise_mask'])[0] + + # print(f"post: {end_at_step} .. {advanced_steps + 1} / {advanced_steps}") + refined_latent = separated_sample(refiner_model, False, seed, advanced_steps, cfg, sampler_name, scheduler, + refiner_positive, refiner_negative, temp_latent, end_at_step, advanced_steps + 1, False, + sigma_ratio=sigma_factor, scheduler_func=scheduler_func) + + return refined_latent + + +class KSamplerAdvancedWrapper: + params = None + + def __init__(self, model, cfg, sampler_name, scheduler, positive, negative, sampler_opt=None, sigma_factor=1.0, scheduler_func=None): + self.params = model, cfg, sampler_name, scheduler, positive, negative, sigma_factor + self.sampler_opt = sampler_opt + self.scheduler_func = scheduler_func + + def clone_with_conditionings(self, positive, negative): + model, cfg, sampler_name, scheduler, _, _, _ = self.params + return KSamplerAdvancedWrapper(model, cfg, sampler_name, scheduler, positive, negative, self.sampler_opt) + + def sample_advanced(self, add_noise, seed, steps, latent_image, start_at_step, end_at_step, return_with_leftover_noise, hook=None, + recovery_mode="ratio additional", recovery_sampler="AUTO", recovery_sigma_ratio=1.0, noise=None): + + model, cfg, sampler_name, scheduler, positive, negative, sigma_factor = self.params + # steps, start_at_step, end_at_step = self.compensate_denoise(steps, start_at_step, end_at_step) + + if hook is not None: + model, seed, steps, cfg, sampler_name, scheduler, positive, negative, upscaled_latent = hook.pre_ksample_advanced(model, add_noise, seed, steps, cfg, sampler_name, scheduler, + positive, negative, latent_image, start_at_step, end_at_step, + return_with_leftover_noise) + + if recovery_mode != 'DISABLE' and sampler_name in ['uni_pc', 'uni_pc_bh2', 'dpmpp_sde', 'dpmpp_sde_gpu', 'dpmpp_2m_sde', 'dpmpp_2m_sde_gpu', 'dpmpp_3m_sde', 'dpmpp_3m_sde_gpu']: + base_image = latent_image.copy() + if recovery_mode == "ratio between": + sigma_ratio = 1.0 - recovery_sigma_ratio + else: + sigma_ratio = 1.0 + else: + base_image = None + sigma_ratio = 1.0 + + try: + if sigma_ratio > 0: + latent_image = separated_sample(model, add_noise, seed, steps, cfg, sampler_name, scheduler, + positive, negative, latent_image, start_at_step, end_at_step, + return_with_leftover_noise, sigma_ratio=sigma_ratio * sigma_factor, + sampler_opt=self.sampler_opt, noise=noise, scheduler_func=self.scheduler_func) + except ValueError as e: + if str(e) == 'sigma_min and sigma_max must not be 0': + print(f"\nWARN: sampling skipped - sigma_min and sigma_max are 0") + return latent_image + + if (recovery_sigma_ratio > 0 and recovery_mode != 'DISABLE' and + sampler_name in ['uni_pc', 'uni_pc_bh2', 'dpmpp_sde', 'dpmpp_sde_gpu', 'dpmpp_2m_sde', 'dpmpp_2m_sde_gpu', 'dpmpp_3m_sde', 'dpmpp_3m_sde_gpu']): + compensate = 0 if sampler_name in ['uni_pc', 
'uni_pc_bh2', 'dpmpp_sde', 'dpmpp_sde_gpu', 'dpmpp_2m_sde', 'dpmpp_2m_sde_gpu', 'dpmpp_3m_sde', 'dpmpp_3m_sde_gpu'] else 2 + if recovery_sampler == "AUTO": + recovery_sampler = 'dpm_fast' if sampler_name in ['uni_pc', 'uni_pc_bh2', 'dpmpp_sde', 'dpmpp_sde_gpu'] else 'dpmpp_2m' + + latent_compositor = nodes.NODE_CLASS_MAPPINGS['LatentCompositeMasked']() + + noise_mask = latent_image['noise_mask'] + + if len(noise_mask.shape) == 4: + noise_mask = noise_mask.squeeze(0).squeeze(0) + + latent_image = latent_compositor.composite(base_image, latent_image, 0, 0, False, noise_mask)[0] + + try: + latent_image = separated_sample(model, add_noise, seed, steps, cfg, recovery_sampler, scheduler, + positive, negative, latent_image, start_at_step-compensate, end_at_step, return_with_leftover_noise, + sigma_ratio=recovery_sigma_ratio * sigma_factor, sampler_opt=self.sampler_opt, scheduler_func=self.scheduler_func) + except ValueError as e: + if str(e) == 'sigma_min and sigma_max must not be 0': + print(f"\nWARN: sampling skipped - sigma_min and sigma_max are 0") + + return latent_image + + +class KSamplerWrapper: + params = None + + def __init__(self, model, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise, scheduler_func=None): + self.params = model, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise + self.scheduler_func = scheduler_func + + def sample(self, latent_image, hook=None): + model, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise = self.params + + if hook is not None: + model, seed, steps, cfg, sampler_name, scheduler, positive, negative, upscaled_latent, denoise = \ + hook.pre_ksample(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise) + + return impact_sample(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise, scheduler_func=self.scheduler_func) diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/impact_server.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/impact_server.py new file mode 100644 index 0000000000000000000000000000000000000000..61bd59bcec0dcf5b0983a4c74725d9198fe704e9 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/impact_server.py @@ -0,0 +1,567 @@ +import os +import threading +import traceback + +from aiohttp import web + +import impact +import folder_paths + +import torchvision + +import impact.core as core +import impact.impact_pack as impact_pack +from impact.utils import to_tensor +from segment_anything import SamPredictor, sam_model_registry +import numpy as np +import nodes +from PIL import Image +import io +import impact.wildcards as wildcards +import comfy +from io import BytesIO +import random +from server import PromptServer + + +@PromptServer.instance.routes.post("/upload/temp") +async def upload_image(request): + upload_dir = folder_paths.get_temp_directory() + + if not os.path.exists(upload_dir): + os.makedirs(upload_dir) + + post = await request.post() + image = post.get("image") + + if image and image.file: + filename = image.filename + if not filename: + return web.Response(status=400) + + split = os.path.splitext(filename) + i = 1 + while os.path.exists(os.path.join(upload_dir, filename)): + filename = f"{split[0]} ({i}){split[1]}" + i += 1 + + filepath = os.path.join(upload_dir, filename) + + with open(filepath, "wb") as f: + f.write(image.file.read()) + + return web.json_response({"name": filename}) + else: + return web.Response(status=400) + + 
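# A minimal client-side sketch (illustrative only; not part of the diff) of how the
# `/upload/temp` route defined above can be exercised. The server address, the
# `requests` dependency, and the helper name `upload_temp_image` are assumptions
# introduced here for illustration; ComfyUI listens on 127.0.0.1:8188 by default,
# but any deployment may differ.
import requests

def upload_temp_image(path: str, server: str = "http://127.0.0.1:8188") -> str:
    # Send the file under the "image" form field, matching `post.get("image")`
    # in the handler above; the route deduplicates clashing names as "name (1).ext".
    with open(path, "rb") as f:
        resp = requests.post(f"{server}/upload/temp", files={"image": f})
    resp.raise_for_status()
    # The handler responds with {"name": <final filename in the temp directory>}.
    return resp.json()["name"]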
+sam_predictor = None +default_sam_model_name = os.path.join(impact_pack.model_path, "sams", "sam_vit_b_01ec64.pth") + +sam_lock = threading.Condition() + +last_prepare_data = None + + +def async_prepare_sam(image_dir, model_name, filename): + with sam_lock: + global sam_predictor + + if 'vit_h' in model_name: + model_kind = 'vit_h' + elif 'vit_l' in model_name: + model_kind = 'vit_l' + else: + model_kind = 'vit_b' + + sam_model = sam_model_registry[model_kind](checkpoint=model_name) + sam_predictor = SamPredictor(sam_model) + + image_path = os.path.join(image_dir, filename) + image = nodes.LoadImage().load_image(image_path)[0] + image = np.clip(255. * image.cpu().numpy().squeeze(), 0, 255).astype(np.uint8) + + if impact.config.get_config()['sam_editor_cpu']: + device = 'cpu' + else: + device = comfy.model_management.get_torch_device() + + sam_predictor.model.to(device=device) + sam_predictor.set_image(image, "RGB") + sam_predictor.model.cpu() + + +@PromptServer.instance.routes.post("/sam/prepare") +async def sam_prepare(request): + global sam_predictor + global last_prepare_data + data = await request.json() + + with sam_lock: + if last_prepare_data is not None and last_prepare_data == data: + # already loaded: skip -- prevent redundant loading + return web.Response(status=200) + + last_prepare_data = data + + model_name = 'sam_vit_b_01ec64.pth' + if data['sam_model_name'] == 'auto': + model_name = impact.config.get_config()['sam_editor_model'] + + model_name = os.path.join(impact_pack.model_path, "sams", model_name) + + print(f"[INFO] ComfyUI-Impact-Pack: Loading SAM model '{model_name}'") + + filename, image_dir = folder_paths.annotated_filepath(data["filename"]) + + if image_dir is None: + typ = data['type'] if data['type'] != '' else 'output' + image_dir = folder_paths.get_directory_by_type(typ) + if data['subfolder'] is not None and data['subfolder'] != '': + image_dir += f"/{data['subfolder']}" + + if image_dir is None: + return web.Response(status=400) + + thread = threading.Thread(target=async_prepare_sam, args=(image_dir, model_name, filename,)) + thread.start() + + print(f"[INFO] ComfyUI-Impact-Pack: SAM model load started in the background.") + return web.Response(status=200) + + +@PromptServer.instance.routes.post("/sam/release") +async def release_sam(request): + global sam_predictor + + with sam_lock: + del sam_predictor + sam_predictor = None + + print(f"[INFO] ComfyUI-Impact-Pack: unloading SAM model") + + +@PromptServer.instance.routes.post("/sam/detect") +async def sam_detect(request): + global sam_predictor + with sam_lock: + if sam_predictor is not None: + if impact.config.get_config()['sam_editor_cpu']: + device = 'cpu' + else: + device = comfy.model_management.get_torch_device() + + sam_predictor.model.to(device=device) + try: + data = await request.json() + + positive_points = data['positive_points'] + negative_points = data['negative_points'] + threshold = data['threshold'] + + points = [] + plabs = [] + + for p in positive_points: + points.append(p) + plabs.append(1) + + for p in negative_points: + points.append(p) + plabs.append(0) + + detected_masks = core.sam_predict(sam_predictor, points, plabs, None, threshold) + mask = core.combine_masks2(detected_masks) + + if mask is None: + return web.Response(status=400) + + image = mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])).movedim(1, -1).expand(-1, -1, -1, 3) + i = 255. 
* image.cpu().numpy() + + img = Image.fromarray(np.clip(i[0], 0, 255).astype(np.uint8)) + + img_buffer = io.BytesIO() + img.save(img_buffer, format='png') + + headers = {'Content-Type': 'image/png'} + finally: + sam_predictor.model.to(device="cpu") + + return web.Response(body=img_buffer.getvalue(), headers=headers) + + else: + return web.Response(status=400) + + +@PromptServer.instance.routes.get("/impact/wildcards/refresh") +async def wildcards_refresh(request): + impact.wildcards.wildcard_load() + return web.Response(status=200) + + +@PromptServer.instance.routes.get("/impact/wildcards/list") +async def wildcards_list(request): + data = {'data': impact.wildcards.get_wildcard_list()} + return web.json_response(data) + + +@PromptServer.instance.routes.post("/impact/wildcards") +async def populate_wildcards(request): + data = await request.json() + populated = wildcards.process(data['text'], data.get('seed', None)) + return web.json_response({"text": populated}) + + +segs_picker_map = {} + +@PromptServer.instance.routes.get("/impact/segs/picker/count") +async def segs_picker_count(request): + node_id = request.rel_url.query.get('id', '') + + if node_id in segs_picker_map: + res = len(segs_picker_map[node_id]) + return web.Response(status=200, text=str(res)) + + return web.Response(status=400) + + +@PromptServer.instance.routes.get("/impact/segs/picker/view") +async def segs_picker(request): + node_id = request.rel_url.query.get('id', '') + idx = int(request.rel_url.query.get('idx', '')) + + if node_id in segs_picker_map and idx < len(segs_picker_map[node_id]): + img = to_tensor(segs_picker_map[node_id][idx]).permute(0, 3, 1, 2).squeeze(0) + pil = torchvision.transforms.ToPILImage('RGB')(img) + + image_bytes = BytesIO() + pil.save(image_bytes, format="PNG") + image_bytes.seek(0) + return web.Response(status=200, body=image_bytes, content_type='image/png', headers={"Content-Disposition": f"filename={node_id}{idx}.png"}) + + return web.Response(status=400) + + +@PromptServer.instance.routes.get("/view/validate") +async def view_validate(request): + if "filename" in request.rel_url.query: + filename = request.rel_url.query["filename"] + subfolder = request.rel_url.query["subfolder"] + filename, base_dir = folder_paths.annotated_filepath(filename) + + if filename == '' or filename[0] == '/' or '..' in filename: + return web.Response(status=400) + + if base_dir is None: + base_dir = folder_paths.get_input_directory() + + file = os.path.join(base_dir, subfolder, filename) + + if os.path.isfile(file): + return web.Response(status=200) + + return web.Response(status=400) + + +@PromptServer.instance.routes.get("/impact/validate/pb_id_image") +async def view_validate(request): + if "id" in request.rel_url.query: + pb_id = request.rel_url.query["id"] + + if pb_id not in core.preview_bridge_image_id_map: + return web.Response(status=400) + + file = core.preview_bridge_image_id_map[pb_id] + if os.path.isfile(file): + return web.Response(status=200) + + return web.Response(status=400) + + +@PromptServer.instance.routes.get("/impact/set/pb_id_image") +async def set_previewbridge_image(request): + try: + if "filename" in request.rel_url.query: + node_id = request.rel_url.query["node_id"] + filename = request.rel_url.query["filename"] + path_type = request.rel_url.query["type"] + subfolder = request.rel_url.query["subfolder"] + filename, output_dir = folder_paths.annotated_filepath(filename) + + if filename == '' or filename[0] == '/' or '..' 
in filename: + return web.Response(status=400) + + if output_dir is None: + if path_type == 'input': + output_dir = folder_paths.get_input_directory() + elif path_type == 'output': + output_dir = folder_paths.get_output_directory() + else: + output_dir = folder_paths.get_temp_directory() + + file = os.path.join(output_dir, subfolder, filename) + item = { + 'filename': filename, + 'type': path_type, + 'subfolder': subfolder, + } + pb_id = core.set_previewbridge_image(node_id, file, item) + + return web.Response(status=200, text=pb_id) + except Exception: + traceback.print_exc() + + return web.Response(status=400) + + +@PromptServer.instance.routes.get("/impact/get/pb_id_image") +async def get_previewbridge_image(request): + if "id" in request.rel_url.query: + pb_id = request.rel_url.query["id"] + + if pb_id in core.preview_bridge_image_id_map: + _, path_item = core.preview_bridge_image_id_map[pb_id] + return web.json_response(path_item) + + return web.Response(status=400) + + +@PromptServer.instance.routes.get("/impact/view/pb_id_image") +async def view_previewbridge_image(request): + if "id" in request.rel_url.query: + pb_id = request.rel_url.query["id"] + + if pb_id in core.preview_bridge_image_id_map: + file = core.preview_bridge_image_id_map[pb_id] + + with Image.open(file) as img: + filename = os.path.basename(file) + return web.FileResponse(file, headers={"Content-Disposition": f"filename=\"{filename}\""}) + + return web.Response(status=400) + + +def onprompt_for_switch(json_data): + inversed_switch_info = {} + onprompt_switch_info = {} + onprompt_cond_branch_info = {} + + for k, v in json_data['prompt'].items(): + if 'class_type' not in v: + continue + + cls = v['class_type'] + if cls == 'ImpactInversedSwitch': + if 'sel_mode' in v['inputs'] and v['inputs']['sel_mode'] and 'select' in v['inputs']: + select_input = v['inputs']['select'] + if isinstance(select_input, list) and len(select_input) == 2: + input_node = json_data['prompt'][select_input[0]] + if input_node['class_type'] == 'ImpactInt' and 'inputs' in input_node and 'value' in input_node['inputs']: + inversed_switch_info[k] = input_node['inputs']['value'] + else: + print(f"\n##### ##### #####\n[WARN] {cls}: For the 'select' operation, only 'select_index' of the 'ImpactInversedSwitch', which is not an input, or 'ImpactInt' and 'Primitive' are allowed as inputs if 'select_on_prompt' is selected.\n##### ##### #####\n") + else: + inversed_switch_info[k] = select_input + + elif cls in ['ImpactSwitch', 'LatentSwitch', 'SEGSSwitch', 'ImpactMakeImageList']: + if 'sel_mode' in v['inputs'] and v['inputs']['sel_mode'] and 'select' in v['inputs']: + select_input = v['inputs']['select'] + if isinstance(select_input, list) and len(select_input) == 2: + input_node = json_data['prompt'][select_input[0]] + if input_node['class_type'] == 'ImpactInt' and 'inputs' in input_node and 'value' in input_node['inputs']: + onprompt_switch_info[k] = input_node['inputs']['value'] + if input_node['class_type'] == 'ImpactSwitch' and 'inputs' in input_node and 'select' in input_node['inputs']: + if isinstance(input_node['inputs']['select'], int): + onprompt_switch_info[k] = input_node['inputs']['select'] + else: + print(f"\n##### ##### #####\n[WARN] {cls}: For the 'select' operation, only 'select_index' of the 'ImpactSwitch', which is not an input, or 'ImpactInt' and 'Primitive' are allowed as inputs if 'select_on_prompt' is selected.\n##### ##### #####\n") + else: + onprompt_switch_info[k] = select_input + + elif cls == 'ImpactConditionalBranchSelMode': + 
if 'sel_mode' in v['inputs'] and v['inputs']['sel_mode'] and 'cond' in v['inputs']: + cond_input = v['inputs']['cond'] + if isinstance(cond_input, list) and len(cond_input) == 2: + input_node = json_data['prompt'][cond_input[0]] + if (input_node['class_type'] == 'ImpactValueReceiver' and 'inputs' in input_node + and 'value' in input_node['inputs'] and 'typ' in input_node['inputs']): + if 'BOOLEAN' == input_node['inputs']['typ']: + try: + onprompt_cond_branch_info[k] = input_node['inputs']['value'].lower() == "true" + except: + pass + else: + onprompt_cond_branch_info[k] = cond_input + + for k, v in json_data['prompt'].items(): + disable_targets = set() + + for kk, vv in v['inputs'].items(): + if isinstance(vv, list) and len(vv) == 2: + if vv[0] in inversed_switch_info: + if vv[1] + 1 != inversed_switch_info[vv[0]]: + disable_targets.add(kk) + + if k in onprompt_switch_info: + selected_slot_name = f"input{onprompt_switch_info[k]}" + for kk, vv in v['inputs'].items(): + if kk != selected_slot_name and kk.startswith('input'): + disable_targets.add(kk) + + if k in onprompt_cond_branch_info: + selected_slot_name = "tt_value" if onprompt_cond_branch_info[k] else "ff_value" + for kk, vv in v['inputs'].items(): + if kk in ['tt_value', 'ff_value'] and kk != selected_slot_name: + disable_targets.add(kk) + + for kk in disable_targets: + del v['inputs'][kk] + +def onprompt_for_pickers(json_data): + detected_pickers = set() + + for k, v in json_data['prompt'].items(): + if 'class_type' not in v: + continue + + cls = v['class_type'] + if cls == 'ImpactSEGSPicker': + detected_pickers.add(k) + + # garbage collection + keys_to_remove = [key for key in segs_picker_map if key not in detected_pickers] + for key in keys_to_remove: + del segs_picker_map[key] + + +def gc_preview_bridge_cache(json_data): + prompt_keys = json_data['prompt'].keys() + + for key in list(core.preview_bridge_cache.keys()): + if key not in prompt_keys: + print(f"key deleted: {key}") + del core.preview_bridge_cache[key] + + +def workflow_imagereceiver_update(json_data): + prompt = json_data['prompt'] + + for v in prompt.values(): + if 'class_type' in v and v['class_type'] == 'ImageReceiver': + if v['inputs']['save_to_workflow']: + v['inputs']['image'] = "#DATA" + + +def regional_sampler_seed_update(json_data): + prompt = json_data['prompt'] + + for k, v in prompt.items(): + if 'class_type' in v and v['class_type'] == 'RegionalSampler': + seed_2nd_mode = v['inputs']['seed_2nd_mode'] + + new_seed = None + if seed_2nd_mode == 'increment': + new_seed = v['inputs']['seed_2nd']+1 + if new_seed > 1125899906842624: + new_seed = 0 + elif seed_2nd_mode == 'decrement': + new_seed = v['inputs']['seed_2nd']-1 + if new_seed < 0: + new_seed = 1125899906842624 + elif seed_2nd_mode == 'randomize': + new_seed = random.randint(0, 1125899906842624) + + if new_seed is not None: + PromptServer.instance.send_sync("impact-node-feedback", {"node_id": k, "widget_name": "seed_2nd", "type": "INT", "value": new_seed}) + + +def onprompt_populate_wildcards(json_data): + prompt = json_data['prompt'] + + updated_widget_values = {} + for k, v in prompt.items(): + if 'class_type' in v and (v['class_type'] == 'ImpactWildcardEncode' or v['class_type'] == 'ImpactWildcardProcessor'): + inputs = v['inputs'] + if inputs['mode'] and isinstance(inputs['populated_text'], str): + if isinstance(inputs['seed'], list): + try: + input_node = prompt[inputs['seed'][0]] + if input_node['class_type'] == 'ImpactInt': + input_seed = int(input_node['inputs']['value']) + if not 
isinstance(input_seed, int): + continue + elif input_node['class_type'] == 'Seed (rgthree)': + input_seed = int(input_node['inputs']['seed']) + if not isinstance(input_seed, int): + continue + else: + print(f"[Impact Pack] Only `ImpactInt`, `Seed (rgthree)` and `Primitive` nodes are allowed as the seed for '{v['class_type']}'. It will be ignored.") + continue + except: + continue + else: + input_seed = int(inputs['seed']) + + inputs['populated_text'] = wildcards.process(inputs['wildcard_text'], input_seed) + inputs['mode'] = False + + PromptServer.instance.send_sync("impact-node-feedback", {"node_id": k, "widget_name": "populated_text", "type": "STRING", "value": inputs['populated_text']}) + updated_widget_values[k] = inputs['populated_text'] + + if 'extra_data' in json_data and 'extra_pnginfo' in json_data['extra_data']: + for node in json_data['extra_data']['extra_pnginfo']['workflow']['nodes']: + key = str(node['id']) + if key in updated_widget_values: + node['widgets_values'][1] = updated_widget_values[key] + node['widgets_values'][2] = False + + +def onprompt_for_remote(json_data): + prompt = json_data['prompt'] + + for v in prompt.values(): + if 'class_type' in v: + cls = v['class_type'] + if cls == 'ImpactRemoteBoolean' or cls == 'ImpactRemoteInt': + inputs = v['inputs'] + node_id = str(inputs['node_id']) + + if node_id not in prompt: + continue + + target_inputs = prompt[node_id]['inputs'] + + widget_name = inputs['widget_name'] + if widget_name in target_inputs: + widget_type = None + if cls == 'ImpactRemoteBoolean' and isinstance(target_inputs[widget_name], bool): + widget_type = 'BOOLEAN' + + elif cls == 'ImpactRemoteInt' and (isinstance(target_inputs[widget_name], int) or isinstance(target_inputs[widget_name], float)): + widget_type = 'INT' + + if widget_type is None: + break + + target_inputs[widget_name] = inputs['value'] + PromptServer.instance.send_sync("impact-node-feedback", {"node_id": node_id, "widget_name": widget_name, "type": widget_type, "value": inputs['value']}) + + +def onprompt(json_data): + try: + onprompt_for_remote(json_data) # NOTE: top priority + onprompt_for_switch(json_data) + onprompt_for_pickers(json_data) + onprompt_populate_wildcards(json_data) + gc_preview_bridge_cache(json_data) + workflow_imagereceiver_update(json_data) + regional_sampler_seed_update(json_data) + core.current_prompt = json_data + except Exception as e: + print(f"[WARN] ComfyUI-Impact-Pack: Error on prompt - several features will not work.\n{e}") + + return json_data + + +PromptServer.instance.add_on_prompt_handler(onprompt) diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/legacy_nodes.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/legacy_nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..61709ce09d5410d4e75722d2df0094c1a5e5fe93 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/legacy_nodes.py @@ -0,0 +1,273 @@ +import folder_paths + +import impact.mmdet_nodes as mmdet_nodes +from impact.utils import * +from impact.core import SEG +import impact.core as core +import nodes + +class NO_BBOX_MODEL: + pass + + +class NO_SEGM_MODEL: + pass + + +class MMDetLoader: + @classmethod + def INPUT_TYPES(s): + bboxs = ["bbox/"+x for x in folder_paths.get_filename_list("mmdets_bbox")] + segms = ["segm/"+x for x in folder_paths.get_filename_list("mmdets_segm")] + return {"required": {"model_name": (bboxs + segms, )}} + RETURN_TYPES = ("BBOX_MODEL", "SEGM_MODEL") + FUNCTION = "load_mmdet" + + CATEGORY = 
"ImpactPack/Legacy" + + def load_mmdet(self, model_name): + mmdet_path = folder_paths.get_full_path("mmdets", model_name) + model = mmdet_nodes.load_mmdet(mmdet_path) + + if model_name.startswith("bbox"): + return model, NO_SEGM_MODEL() + else: + return NO_BBOX_MODEL(), model + + +class BboxDetectorForEach: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "bbox_model": ("BBOX_MODEL", ), + "image": ("IMAGE", ), + "threshold": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "dilation": ("INT", {"default": 10, "min": 0, "max": 255, "step": 1}), + "crop_factor": ("FLOAT", {"default": 3.0, "min": 1.0, "max": 100, "step": 0.1}), + } + } + + RETURN_TYPES = ("SEGS", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Legacy" + + @staticmethod + def detect(bbox_model, image, threshold, dilation, crop_factor, drop_size=1, detailer_hook=None): + mmdet_results = mmdet_nodes.inference_bbox(bbox_model, image, threshold) + segmasks = core.create_segmasks(mmdet_results) + + if dilation > 0: + segmasks = dilate_masks(segmasks, dilation) + + items = [] + h = image.shape[1] + w = image.shape[2] + for x in segmasks: + item_bbox = x[0] + item_mask = x[1] + + y1, x1, y2, x2 = item_bbox + + if x2 - x1 > drop_size and y2 - y1 > drop_size: + crop_region = make_crop_region(w, h, item_bbox, crop_factor) + cropped_image = crop_image(image, crop_region) + cropped_mask = crop_ndarray2(item_mask, crop_region) + confidence = x[2] + # bbox_size = (item_bbox[2]-item_bbox[0],item_bbox[3]-item_bbox[1]) # (w,h) + + item = SEG(cropped_image, cropped_mask, confidence, crop_region, item_bbox, None, None) + items.append(item) + + shape = h, w + return shape, items + + def doit(self, bbox_model, image, threshold, dilation, crop_factor): + return (BboxDetectorForEach.detect(bbox_model, image, threshold, dilation, crop_factor), ) + + +class SegmDetectorCombined: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segm_model": ("SEGM_MODEL", ), + "image": ("IMAGE", ), + "threshold": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "dilation": ("INT", {"default": 0, "min": 0, "max": 255, "step": 1}), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Legacy" + + def doit(self, segm_model, image, threshold, dilation): + mmdet_results = mmdet_nodes.inference_segm(image, segm_model, threshold) + segmasks = core.create_segmasks(mmdet_results) + if dilation > 0: + segmasks = dilate_masks(segmasks, dilation) + + mask = combine_masks(segmasks) + return (mask,) + + +class BboxDetectorCombined(SegmDetectorCombined): + @classmethod + def INPUT_TYPES(s): + return {"required": { + "bbox_model": ("BBOX_MODEL", ), + "image": ("IMAGE", ), + "threshold": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "dilation": ("INT", {"default": 4, "min": 0, "max": 255, "step": 1}), + } + } + + def doit(self, bbox_model, image, threshold, dilation): + mmdet_results = mmdet_nodes.inference_bbox(bbox_model, image, threshold) + segmasks = core.create_segmasks(mmdet_results) + if dilation > 0: + segmasks = dilate_masks(segmasks, dilation) + + mask = combine_masks(segmasks) + return (mask,) + + +class SegmDetectorForEach: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segm_model": ("SEGM_MODEL", ), + "image": ("IMAGE", ), + "threshold": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.01}), + "dilation": ("INT", {"default": 10, "min": 0, "max": 255, "step": 1}), + "crop_factor": ("FLOAT", {"default": 3.0, "min": 1.0, "max": 
100, "step": 0.1}), + } + } + + RETURN_TYPES = ("SEGS", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Legacy" + + def doit(self, segm_model, image, threshold, dilation, crop_factor): + mmdet_results = mmdet_nodes.inference_segm(image, segm_model, threshold) + segmasks = core.create_segmasks(mmdet_results) + + if dilation > 0: + segmasks = dilate_masks(segmasks, dilation) + + items = [] + h = image.shape[1] + w = image.shape[2] + for x in segmasks: + item_bbox = x[0] + item_mask = x[1] + + crop_region = make_crop_region(w, h, item_bbox, crop_factor) + cropped_image = crop_image(image, crop_region) + cropped_mask = crop_ndarray2(item_mask, crop_region) + confidence = x[2] + + item = SEG(cropped_image, cropped_mask, confidence, crop_region, item_bbox, None, None) + items.append(item) + + shape = h, w + return ((shape, items), ) + + +class SegsMaskCombine: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS", ), + "image": ("IMAGE", ), + } + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Legacy" + + @staticmethod + def combine(segs, image): + h = image.shape[1] + w = image.shape[2] + + mask = np.zeros((h, w), dtype=np.uint8) + + for seg in segs[1]: + cropped_mask = seg.cropped_mask + crop_region = seg.crop_region + mask[crop_region[1]:crop_region[3], crop_region[0]:crop_region[2]] |= (cropped_mask * 255).astype(np.uint8) + + return torch.from_numpy(mask.astype(np.float32) / 255.0) + + def doit(self, segs, image): + return (SegsMaskCombine.combine(segs, image), ) + + +class MaskPainter(nodes.PreviewImage): + @classmethod + def INPUT_TYPES(s): + return {"required": {"images": ("IMAGE",), }, + "hidden": { + "prompt": "PROMPT", + "extra_pnginfo": "EXTRA_PNGINFO", + }, + "optional": {"mask_image": ("IMAGE_PATH",), "image": (["#placeholder"], )}, + } + + RETURN_TYPES = ("MASK",) + + FUNCTION = "save_painted_images" + + CATEGORY = "ImpactPack/Legacy" + + def save_painted_images(self, images, filename_prefix="impact-mask", + prompt=None, extra_pnginfo=None, mask_image=None, image=None): + if image == "#placeholder" or image['image_hash'] != id(images): + # new input image + res = self.save_images(images, filename_prefix, prompt, extra_pnginfo) + + item = res['ui']['images'][0] + + if not item['filename'].endswith(']'): + filepath = f"{item['filename']} [{item['type']}]" + else: + filepath = item['filename'] + + _, mask = nodes.LoadImage().load_image(filepath) + + res['ui']['aux'] = [id(images), res['ui']['images']] + res['result'] = (mask, ) + + return res + + else: + # new mask + if '0' in image: # fallback + image = image['0'] + + forward = {'filename': image['forward_filename'], + 'subfolder': image['forward_subfolder'], + 'type': image['forward_type'], } + + res = {'ui': {'images': [forward]}} + + imgpath = "" + if 'subfolder' in image and image['subfolder'] != "": + imgpath = image['subfolder'] + "/" + + imgpath += f"{image['filename']}" + + if 'type' in image and image['type'] != "": + imgpath += f" [{image['type']}]" + + res['ui']['aux'] = [id(images), [forward]] + _, mask = nodes.LoadImage().load_image(imgpath) + res['result'] = (mask, ) + + return res diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/logics.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/logics.py new file mode 100644 index 0000000000000000000000000000000000000000..60cb087d4d343dff9ce8ab935865d95418771325 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/logics.py @@ -0,0 +1,732 @@ 
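# A standalone sketch (illustrative only; not part of the diff) of the
# SEGS -> MASK composition that `SegsMaskCombine.combine` in legacy_nodes.py
# above performs: each cropped detection mask is OR-pasted back into a
# full-resolution canvas at its crop region. The helper name and the plain
# (region, ndarray) input form are hypothetical simplifications of the SEG
# structure built by the detectors in that file.
import numpy as np
import torch

def combine_cropped_masks(shape, regions_and_masks):
    """shape: (h, w); regions_and_masks: iterable of ((x1, y1, x2, y2), mask)
    where mask is a 0/1 ndarray of shape (y2 - y1, x2 - x1)."""
    h, w = shape
    canvas = np.zeros((h, w), dtype=np.uint8)
    for (x1, y1, x2, y2), cropped in regions_and_masks:
        # Bitwise OR so overlapping detections merge instead of overwriting.
        canvas[y1:y2, x1:x2] |= (cropped * 255).astype(np.uint8)
    # Match the node's output convention: a float mask in [0, 1] as a tensor.
    return torch.from_numpy(canvas.astype(np.float32) / 255.0)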
+import sys +import time + +import execution +import impact.impact_server +from server import PromptServer +from impact.utils import any_typ +import impact.core as core +import re +import nodes +import traceback + + +class ImpactCompare: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "cmp": (['a = b', 'a <> b', 'a > b', 'a < b', 'a >= b', 'a <= b', 'tt', 'ff'],), + "a": (any_typ, ), + "b": (any_typ, ), + }, + } + + FUNCTION = "doit" + CATEGORY = "ImpactPack/Logic" + + RETURN_TYPES = ("BOOLEAN", ) + + def doit(self, cmp, a, b): + if cmp == "a = b": + return (a == b, ) + elif cmp == "a <> b": + return (a != b, ) + elif cmp == "a > b": + return (a > b, ) + elif cmp == "a < b": + return (a < b, ) + elif cmp == "a >= b": + return (a >= b, ) + elif cmp == "a <= b": + return (a <= b, ) + elif cmp == 'tt': + return (True, ) + else: + return (False, ) + + +class ImpactNotEmptySEGS: + @classmethod + def INPUT_TYPES(cls): + return {"required": {"segs": ("SEGS",)}} + + FUNCTION = "doit" + CATEGORY = "ImpactPack/Logic" + + RETURN_TYPES = ("BOOLEAN", ) + + def doit(self, segs): + return (segs[1] != [], ) + + +class ImpactConditionalBranch: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "cond": ("BOOLEAN",), + "tt_value": (any_typ,), + "ff_value": (any_typ,), + }, + } + + FUNCTION = "doit" + CATEGORY = "ImpactPack/Logic" + + RETURN_TYPES = (any_typ, ) + + def doit(self, cond, tt_value, ff_value): + if cond: + return (tt_value,) + else: + return (ff_value,) + + +class ImpactConditionalBranchSelMode: + @classmethod + def INPUT_TYPES(cls): + if not core.is_execution_model_version_supported(): + required_inputs = { + "cond": ("BOOLEAN",), + "sel_mode": ("BOOLEAN", {"default": True, "label_on": "select_on_prompt", "label_off": "select_on_execution"}), + } + else: + required_inputs = { + "cond": ("BOOLEAN",), + } + + return { + "required": required_inputs, + "optional": { + "tt_value": (any_typ,), + "ff_value": (any_typ,), + }, + } + + FUNCTION = "doit" + CATEGORY = "ImpactPack/Logic" + + RETURN_TYPES = (any_typ, ) + + def doit(self, cond, tt_value=None, ff_value=None, **kwargs): + print(f'tt={tt_value is None}\nff={ff_value is None}') + if cond: + return (tt_value,) + else: + return (ff_value,) + + +class ImpactConvertDataType: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return {"required": {"value": (any_typ,)}} + + RETURN_TYPES = ("STRING", "FLOAT", "INT", "BOOLEAN") + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Logic" + + @staticmethod + def is_number(string): + pattern = re.compile(r'^[-+]?[0-9]*\.?[0-9]+$') + return bool(pattern.match(string)) + + def doit(self, value): + if self.is_number(str(value)): + num = value + else: + if str.lower(str(value)) != "false": + num = 1 + else: + num = 0 + return (str(value), float(num), int(float(num)), bool(float(num)), ) + + +class ImpactIfNone: + def __init__(self): + pass + + @classmethod + def INPUT_TYPES(cls): + return { + "required": {}, + "optional": {"signal": (any_typ,), "any_input": (any_typ,), } + } + + RETURN_TYPES = (any_typ, "BOOLEAN") + RETURN_NAMES = ("signal_opt", "bool") + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Logic" + + def doit(self, signal=None, any_input=None): + if any_input is None: + return (signal, False, ) + else: + return (signal, True, ) + + +class ImpactLogicalOperators: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "operator": (['and', 'or', 'xor'],), + "bool_a": ("BOOLEAN", {"forceInput": True}), + "bool_b": ("BOOLEAN", {"forceInput": 
True}), + }, + } + + FUNCTION = "doit" + CATEGORY = "ImpactPack/Logic" + + RETURN_TYPES = ("BOOLEAN", ) + + def doit(self, operator, bool_a, bool_b): + if operator == "and": + return (bool_a and bool_b, ) + elif operator == "or": + return (bool_a or bool_b, ) + else: + return (bool_a != bool_b, ) + + +class ImpactConditionalStopIteration: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { "cond": ("BOOLEAN", {"forceInput": True}), }, + } + + FUNCTION = "doit" + CATEGORY = "ImpactPack/Logic" + + RETURN_TYPES = () + + OUTPUT_NODE = True + + def doit(self, cond): + if cond: + PromptServer.instance.send_sync("stop-iteration", {}) + return {} + + +class ImpactNeg: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { "value": ("BOOLEAN", {"forceInput": True}), }, + } + + FUNCTION = "doit" + CATEGORY = "ImpactPack/Logic" + + RETURN_TYPES = ("BOOLEAN", ) + + def doit(self, value): + return (not value, ) + + +class ImpactInt: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "value": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + }, + } + + FUNCTION = "doit" + CATEGORY = "ImpactPack/Logic" + + RETURN_TYPES = ("INT", ) + + def doit(self, value): + return (value, ) + + +class ImpactFloat: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "value": ("FLOAT", {"default": 1.0, "min": -3.402823466e+38, "max": 3.402823466e+38}), + }, + } + + FUNCTION = "doit" + CATEGORY = "ImpactPack/Logic" + + RETURN_TYPES = ("FLOAT", ) + + def doit(self, value): + return (value, ) + + +class ImpactValueSender: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "value": (any_typ, ), + "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + }, + "optional": { + "signal_opt": (any_typ,), + } + } + + OUTPUT_NODE = True + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Logic" + + RETURN_TYPES = (any_typ, ) + RETURN_NAMES = ("signal", ) + + def doit(self, value, link_id=0, signal_opt=None): + PromptServer.instance.send_sync("value-send", {"link_id": link_id, "value": value}) + return (signal_opt, ) + + +class ImpactIntConstSender: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "signal": (any_typ, ), + "value": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + }, + } + + OUTPUT_NODE = True + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Logic" + + RETURN_TYPES = () + + def doit(self, signal, value, link_id=0): + PromptServer.instance.send_sync("value-send", {"link_id": link_id, "value": value}) + return {} + + +class ImpactValueReceiver: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "typ": (["STRING", "INT", "FLOAT", "BOOLEAN"], ), + "value": ("STRING", {"default": ""}), + "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + }, + } + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Logic" + + RETURN_TYPES = (any_typ, ) + + def doit(self, typ, value, link_id=0): + if typ == "INT": + return (int(value), ) + elif typ == "FLOAT": + return (float(value), ) + elif typ == "BOOLEAN": + return (value.lower() == "true", ) + else: + return (value, ) + + +class ImpactImageInfo: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "value": ("IMAGE", ), + }, + } + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Logic/_for_test" + + RETURN_TYPES = ("INT", "INT", "INT", "INT") + RETURN_NAMES = ("batch", "height", "width", "channel") + + def doit(self, 
value): + return (value.shape[0], value.shape[1], value.shape[2], value.shape[3]) + + +class ImpactLatentInfo: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "value": ("LATENT", ), + }, + } + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Logic/_for_test" + + RETURN_TYPES = ("INT", "INT", "INT", "INT") + RETURN_NAMES = ("batch", "height", "width", "channel") + + def doit(self, value): + shape = value['samples'].shape + return (shape[0], shape[2] * 8, shape[3] * 8, shape[1]) + + +class ImpactMinMax: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "mode": ("BOOLEAN", {"default": True, "label_on": "max", "label_off": "min"}), + "a": (any_typ,), + "b": (any_typ,), + }, + } + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Logic/_for_test" + + RETURN_TYPES = ("INT", ) + + def doit(self, mode, a, b): + if mode: + return (max(a, b), ) + else: + return (min(a, b),) + + +class ImpactQueueTrigger: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "signal": (any_typ,), + "mode": ("BOOLEAN", {"default": True, "label_on": "Trigger", "label_off": "Don't trigger"}), + } + } + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Logic/_for_test" + RETURN_TYPES = (any_typ,) + RETURN_NAMES = ("signal_opt",) + OUTPUT_NODE = True + + def doit(self, signal, mode): + if(mode): + PromptServer.instance.send_sync("impact-add-queue", {}) + + return (signal,) + + +class ImpactQueueTriggerCountdown: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "count": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "total": ("INT", {"default": 10, "min": 1, "max": 0xffffffffffffffff}), + "mode": ("BOOLEAN", {"default": True, "label_on": "Trigger", "label_off": "Don't trigger"}), + }, + "optional": {"signal": (any_typ,),}, + "hidden": {"unique_id": "UNIQUE_ID"} + } + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Logic/_for_test" + RETURN_TYPES = (any_typ, "INT", "INT") + RETURN_NAMES = ("signal_opt", "count", "total") + OUTPUT_NODE = True + + def doit(self, count, total, mode, unique_id, signal=None): + if (mode): + if count < total - 1: + PromptServer.instance.send_sync("impact-node-feedback", + {"node_id": unique_id, "widget_name": "count", "type": "int", "value": count+1}) + PromptServer.instance.send_sync("impact-add-queue", {}) + if count >= total - 1: + PromptServer.instance.send_sync("impact-node-feedback", + {"node_id": unique_id, "widget_name": "count", "type": "int", "value": 0}) + + return (signal, count, total) + + + +class ImpactSetWidgetValue: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "signal": (any_typ,), + "node_id": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "widget_name": ("STRING", {"multiline": False}), + }, + "optional": { + "boolean_value": ("BOOLEAN", {"forceInput": True}), + "int_value": ("INT", {"forceInput": True}), + "float_value": ("FLOAT", {"forceInput": True}), + "string_value": ("STRING", {"forceInput": True}), + } + } + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Logic/_for_test" + RETURN_TYPES = (any_typ,) + RETURN_NAMES = ("signal_opt",) + OUTPUT_NODE = True + + def doit(self, signal, node_id, widget_name, boolean_value=None, int_value=None, float_value=None, string_value=None, ): + kind = None + if boolean_value is not None: + value = boolean_value + kind = "BOOLEAN" + elif int_value is not None: + value = int_value + kind = "INT" + elif float_value is not None: + value = float_value + kind = "FLOAT" + elif string_value is not None: + value = string_value + kind = "STRING" + 
else: + value = None + + if value is not None: + PromptServer.instance.send_sync("impact-node-feedback", + {"node_id": node_id, "widget_name": widget_name, "type": kind, "value": value}) + + return (signal,) + + +class ImpactNodeSetMuteState: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "signal": (any_typ,), + "node_id": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "set_state": ("BOOLEAN", {"default": True, "label_on": "active", "label_off": "mute"}), + } + } + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Logic/_for_test" + RETURN_TYPES = (any_typ,) + RETURN_NAMES = ("signal_opt",) + OUTPUT_NODE = True + + def doit(self, signal, node_id, set_state): + PromptServer.instance.send_sync("impact-node-mute-state", {"node_id": node_id, "is_active": set_state}) + return (signal,) + + +class ImpactSleep: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "signal": (any_typ,), + "seconds": ("FLOAT", {"default": 0.5, "min": 0, "max": 3600}), + } + } + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Logic/_for_test" + RETURN_TYPES = (any_typ,) + RETURN_NAMES = ("signal_opt",) + OUTPUT_NODE = True + + def doit(self, signal, seconds): + time.sleep(seconds) + return (signal,) + + +error_skip_flag = False +try: + import cm_global + def filter_message(str): + global error_skip_flag + + if "IMPACT-PACK-SIGNAL: STOP CONTROL BRIDGE" in str: + return True + elif error_skip_flag and "ERROR:root:!!! Exception during processing !!!\n" == str: + error_skip_flag = False + return True + else: + return False + + cm_global.try_call(api='cm.register_message_collapse', f=filter_message) + +except Exception as e: + print(f"[WARN] ComfyUI-Impact-Pack: `ComfyUI` or `ComfyUI-Manager` is an outdated version.") + pass + + +def workflow_to_map(workflow): + nodes = {} + links = {} + for link in workflow['links']: + links[link[0]] = link[1:] + for node in workflow['nodes']: + nodes[str(node['id'])] = node + + return nodes, links + + +class ImpactRemoteBoolean: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "node_id": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "widget_name": ("STRING", {"multiline": False}), + "value": ("BOOLEAN", {"default": True, "label_on": "True", "label_off": "False"}), + }} + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Logic/_for_test" + RETURN_TYPES = () + OUTPUT_NODE = True + + def doit(self, **kwargs): + return {} + + +class ImpactRemoteInt: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "node_id": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "widget_name": ("STRING", {"multiline": False}), + "value": ("INT", {"default": 0, "min": -0xffffffffffffffff, "max": 0xffffffffffffffff}), + }} + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Logic/_for_test" + RETURN_TYPES = () + OUTPUT_NODE = True + + def doit(self, **kwargs): + return {} + +class ImpactControlBridge: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "value": (any_typ,), + "mode": ("BOOLEAN", {"default": True, "label_on": "Active", "label_off": "Mute/Bypass"}), + "behavior": ("BOOLEAN", {"default": True, "label_on": "Mute", "label_off": "Bypass"}), + }, + "hidden": {"unique_id": "UNIQUE_ID", "prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO"} + } + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Logic/_for_test" + RETURN_TYPES = (any_typ,) + RETURN_NAMES = ("value",) + OUTPUT_NODE = True + + @classmethod + def IS_CHANGED(self, value, mode, behavior=True, unique_id=None, prompt=None, 
extra_pnginfo=None): + # NOTE: extra_pnginfo is not populated for IS_CHANGED. + # so extra_pnginfo is useless in here + try: + workflow = core.current_prompt['extra_data']['extra_pnginfo']['workflow'] + except: + print(f"[Impact Pack] core.current_prompt['extra_data']['extra_pnginfo']['workflow']") + return 0 + + nodes, links = workflow_to_map(workflow) + next_nodes = [] + + for link in nodes[unique_id]['outputs'][0]['links']: + node_id = str(links[link][2]) + impact.utils.collect_non_reroute_nodes(nodes, links, next_nodes, node_id) + + return next_nodes + + def doit(self, value, mode, behavior=True, unique_id=None, prompt=None, extra_pnginfo=None): + global error_skip_flag + + workflow_nodes, links = workflow_to_map(extra_pnginfo['workflow']) + + active_nodes = [] + mute_nodes = [] + bypass_nodes = [] + + for link in workflow_nodes[unique_id]['outputs'][0]['links']: + node_id = str(links[link][2]) + + next_nodes = [] + impact.utils.collect_non_reroute_nodes(workflow_nodes, links, next_nodes, node_id) + + for next_node_id in next_nodes: + node_mode = workflow_nodes[next_node_id]['mode'] + + if node_mode == 0: + active_nodes.append(next_node_id) + elif node_mode == 2: + mute_nodes.append(next_node_id) + elif node_mode == 4: + bypass_nodes.append(next_node_id) + + if mode: + # active + should_be_active_nodes = mute_nodes + bypass_nodes + if len(should_be_active_nodes) > 0: + PromptServer.instance.send_sync("impact-bridge-continue", {"node_id": unique_id, 'actives': list(should_be_active_nodes)}) + nodes.interrupt_processing() + + elif behavior: + # mute + should_be_mute_nodes = active_nodes + bypass_nodes + if len(should_be_mute_nodes) > 0: + PromptServer.instance.send_sync("impact-bridge-continue", {"node_id": unique_id, 'mutes': list(should_be_mute_nodes)}) + nodes.interrupt_processing() + + else: + # bypass + should_be_bypass_nodes = active_nodes + mute_nodes + if len(should_be_bypass_nodes) > 0: + PromptServer.instance.send_sync("impact-bridge-continue", {"node_id": unique_id, 'bypasses': list(should_be_bypass_nodes)}) + nodes.interrupt_processing() + + return (value, ) + + +class ImpactExecutionOrderController: + @classmethod + def INPUT_TYPES(cls): + return {"required": { + "signal": (any_typ,), + "value": (any_typ,), + }} + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + RETURN_TYPES = (any_typ, any_typ) + RETURN_NAMES = ("signal", "value") + + def doit(self, signal, value): + return signal, value + + +original_handle_execution = execution.PromptExecutor.handle_execution_error + + +def handle_execution_error(**kwargs): + execution.PromptExecutor.handle_execution_error(**kwargs) + diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/mmdet_nodes.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/mmdet_nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..5adb147bc1f5fa30112402d2c5917939c4d83cd3 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/mmdet_nodes.py @@ -0,0 +1,219 @@ +import folder_paths +from impact.core import * +import os + +import mmcv +from mmdet.apis import (inference_detector, init_detector) +from mmdet.evaluation import get_classes + + +def load_mmdet(model_path): + model_config = os.path.splitext(model_path)[0] + ".py" + model = init_detector(model_config, model_path, device="cpu") + return model + + +def inference_segm_old(model, image, conf_threshold): + image = image.numpy()[0] * 255 + mmdet_results = inference_detector(model, image) + + bbox_results, segm_results = 
mmdet_results + label = "A" + + classes = get_classes("coco") + labels = [ + np.full(bbox.shape[0], i, dtype=np.int32) + for i, bbox in enumerate(bbox_results) + ] + n, m = bbox_results[0].shape + if n == 0: + return [[], [], []] + labels = np.concatenate(labels) + bboxes = np.vstack(bbox_results) + segms = mmcv.concat_list(segm_results) + filter_idxs = np.where(bboxes[:, -1] > conf_threshold)[0] + results = [[], [], []] + for i in filter_idxs: + results[0].append(label + "-" + classes[labels[i]]) + results[1].append(bboxes[i]) + results[2].append(segms[i]) + + return results + + +def inference_segm(image, modelname, conf_thres, lab="A"): + image = image.numpy()[0] * 255 + mmdet_results = inference_detector(modelname, image).pred_instances + bboxes = mmdet_results.bboxes.numpy() + segms = mmdet_results.masks.numpy() + scores = mmdet_results.scores.numpy() + + classes = get_classes("coco") + + n, m = bboxes.shape + if n == 0: + return [[], [], [], []] + labels = mmdet_results.labels + filter_inds = np.where(mmdet_results.scores > conf_thres)[0] + results = [[], [], [], []] + for i in filter_inds: + results[0].append(lab + "-" + classes[labels[i]]) + results[1].append(bboxes[i]) + results[2].append(segms[i]) + results[3].append(scores[i]) + + return results + + +def inference_bbox(modelname, image, conf_threshold): + image = image.numpy()[0] * 255 + label = "A" + output = inference_detector(modelname, image).pred_instances + cv2_image = np.array(image) + cv2_image = cv2_image[:, :, ::-1].copy() + cv2_gray = cv2.cvtColor(cv2_image, cv2.COLOR_BGR2GRAY) + + segms = [] + for x0, y0, x1, y1 in output.bboxes: + cv2_mask = np.zeros(cv2_gray.shape, np.uint8) + cv2.rectangle(cv2_mask, (int(x0), int(y0)), (int(x1), int(y1)), 255, -1) + cv2_mask_bool = cv2_mask.astype(bool) + segms.append(cv2_mask_bool) + + n, m = output.bboxes.shape + if n == 0: + return [[], [], [], []] + + bboxes = output.bboxes.numpy() + scores = output.scores.numpy() + filter_idxs = np.where(scores > conf_threshold)[0] + results = [[], [], [], []] + for i in filter_idxs: + results[0].append(label) + results[1].append(bboxes[i]) + results[2].append(segms[i]) + results[3].append(scores[i]) + + return results + + +class BBoxDetector: + bbox_model = None + + def __init__(self, bbox_model): + self.bbox_model = bbox_model + + def detect(self, image, threshold, dilation, crop_factor, drop_size=1, detailer_hook=None): + drop_size = max(drop_size, 1) + mmdet_results = inference_bbox(self.bbox_model, image, threshold) + segmasks = create_segmasks(mmdet_results) + + if dilation > 0: + segmasks = dilate_masks(segmasks, dilation) + + items = [] + h = image.shape[1] + w = image.shape[2] + + for x in segmasks: + item_bbox = x[0] + item_mask = x[1] + + y1, x1, y2, x2 = item_bbox + + if x2 - x1 > drop_size and y2 - y1 > drop_size: # minimum dimension must be (2,2) to avoid squeeze issue + crop_region = make_crop_region(w, h, item_bbox, crop_factor) + cropped_image = crop_image(image, crop_region) + cropped_mask = crop_ndarray2(item_mask, crop_region) + confidence = x[2] + # bbox_size = (item_bbox[2]-item_bbox[0],item_bbox[3]-item_bbox[1]) # (w,h) + + item = SEG(cropped_image, cropped_mask, confidence, crop_region, item_bbox, None, None) + + items.append(item) + + shape = image.shape[1], image.shape[2] + return shape, items + + def detect_combined(self, image, threshold, dilation): + mmdet_results = inference_bbox(self.bbox_model, image, threshold) + segmasks = create_segmasks(mmdet_results) + if dilation > 0: + segmasks = dilate_masks(segmasks, 
dilation) + + return combine_masks(segmasks) + + def setAux(self, x): + pass + + +class SegmDetector(BBoxDetector): + segm_model = None + + def __init__(self, segm_model): + self.segm_model = segm_model + + def detect(self, image, threshold, dilation, crop_factor, drop_size=1, detailer_hook=None): + drop_size = max(drop_size, 1) + mmdet_results = inference_segm(image, self.segm_model, threshold) + segmasks = create_segmasks(mmdet_results) + + if dilation > 0: + segmasks = dilate_masks(segmasks, dilation) + + items = [] + h = image.shape[1] + w = image.shape[2] + for x in segmasks: + item_bbox = x[0] + item_mask = x[1] + + y1, x1, y2, x2 = item_bbox + + if x2 - x1 > drop_size and y2 - y1 > drop_size: # minimum dimension must be (2,2) to avoid squeeze issue + crop_region = make_crop_region(w, h, item_bbox, crop_factor) + cropped_image = crop_image(image, crop_region) + cropped_mask = crop_ndarray2(item_mask, crop_region) + confidence = x[2] + + item = SEG(cropped_image, cropped_mask, confidence, crop_region, item_bbox, None, None) + items.append(item) + + segs = (image.shape[1], image.shape[2]), items # (h, w) header, consistent with BBoxDetector.detect + + if detailer_hook is not None and hasattr(detailer_hook, "post_detection"): + segs = detailer_hook.post_detection(segs) + + return segs + + def detect_combined(self, image, threshold, dilation): + mmdet_results = inference_segm(image, self.segm_model, threshold) + segmasks = create_segmasks(mmdet_results) + if dilation > 0: + segmasks = dilate_masks(segmasks, dilation) + + return combine_masks(segmasks) + + def setAux(self, x): + pass + + +class MMDetDetectorProvider: + @classmethod + def INPUT_TYPES(s): + bboxs = ["bbox/"+x for x in folder_paths.get_filename_list("mmdets_bbox")] + segms = ["segm/"+x for x in folder_paths.get_filename_list("mmdets_segm")] + return {"required": {"model_name": (bboxs + segms, )}} + RETURN_TYPES = ("BBOX_DETECTOR", "SEGM_DETECTOR") + FUNCTION = "load_mmdet" + + CATEGORY = "ImpactPack" + + def load_mmdet(self, model_name): + mmdet_path = folder_paths.get_full_path("mmdets", model_name) + model = load_mmdet(mmdet_path) + + if model_name.startswith("bbox"): + return BBoxDetector(model), NO_SEGM_DETECTOR() + else: + return NO_BBOX_DETECTOR(), SegmDetector(model) \ No newline at end of file diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/onnx.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/onnx.py new file mode 100644 index 0000000000000000000000000000000000000000..91736a1ac4913220ff1255bf0c463523840b4283 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/onnx.py @@ -0,0 +1,38 @@ +import impact.additional_dependencies +from impact.utils import * + +impact.additional_dependencies.ensure_onnx_package() + +try: + import onnxruntime + + def onnx_inference(image, onnx_model): + # prepare image + pil = tensor2pil(image) + image = np.ascontiguousarray(pil) + image = image[:, :, ::-1] # to BGR image + image = image.astype(np.float32) + image -= [103.939, 116.779, 123.68] # 'caffe' mode image preprocessing + + # do detection + onnx_model = onnxruntime.InferenceSession(onnx_model, providers=["CPUExecutionProvider"]) + outputs = onnx_model.run( + [s_i.name for s_i in onnx_model.get_outputs()], + {onnx_model.get_inputs()[0].name: np.expand_dims(image, axis=0)}, + ) + + labels = [op for op in outputs if op.dtype == "int32"][0] + scores = [op for op in outputs if isinstance(op[0][0], np.float32)][0] + boxes = [op for op in outputs if isinstance(op[0][0], np.ndarray)][0] + + # filter-out useless item + idx = np.where(labels[0] == -1)[0][0] + + 
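# keep only the detections before the first -1 sentinel label; the ONNX outputs are padded beyond that point +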
labels = labels[0][:idx] + scores = scores[0][:idx] + boxes = boxes[0][:idx].astype(np.uint32) + + return labels, scores, boxes +except Exception as e: + print("[ERROR] ComfyUI-Impact-Pack: 'onnxruntime' package doesn't support 'python 3.11', yet.") + print(f"\t{e}") diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/pipe.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/pipe.py new file mode 100644 index 0000000000000000000000000000000000000000..2f6ca7ee305de706f511f2323068334111250fa9 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/pipe.py @@ -0,0 +1,422 @@ +import folder_paths +import impact.wildcards + +class ToDetailerPipe: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model": ("MODEL",), + "clip": ("CLIP",), + "vae": ("VAE",), + "positive": ("CONDITIONING",), + "negative": ("CONDITIONING",), + "bbox_detector": ("BBOX_DETECTOR", ), + "wildcard": ("STRING", {"multiline": True, "dynamicPrompts": False}), + "Select to add LoRA": (["Select the LoRA to add to the text"] + folder_paths.get_filename_list("loras"),), + "Select to add Wildcard": (["Select the Wildcard to add to the text"], ), + }, + "optional": { + "sam_model_opt": ("SAM_MODEL",), + "segm_detector_opt": ("SEGM_DETECTOR",), + "detailer_hook": ("DETAILER_HOOK",), + }} + + RETURN_TYPES = ("DETAILER_PIPE", ) + RETURN_NAMES = ("detailer_pipe", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Pipe" + + def doit(self, *args, **kwargs): + pipe = (kwargs['model'], kwargs['clip'], kwargs['vae'], kwargs['positive'], kwargs['negative'], kwargs['wildcard'], kwargs['bbox_detector'], + kwargs.get('segm_detector_opt', None), kwargs.get('sam_model_opt', None), kwargs.get('detailer_hook', None), + kwargs.get('refiner_model', None), kwargs.get('refiner_clip', None), + kwargs.get('refiner_positive', None), kwargs.get('refiner_negative', None)) + return (pipe, ) + + +class ToDetailerPipeSDXL(ToDetailerPipe): + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model": ("MODEL",), + "clip": ("CLIP",), + "vae": ("VAE",), + "positive": ("CONDITIONING",), + "negative": ("CONDITIONING",), + "refiner_model": ("MODEL",), + "refiner_clip": ("CLIP",), + "refiner_positive": ("CONDITIONING",), + "refiner_negative": ("CONDITIONING",), + "bbox_detector": ("BBOX_DETECTOR", ), + "wildcard": ("STRING", {"multiline": True, "dynamicPrompts": False}), + "Select to add LoRA": (["Select the LoRA to add to the text"] + folder_paths.get_filename_list("loras"),), + "Select to add Wildcard": (["Select the Wildcard to add to the text"],), + }, + "optional": { + "sam_model_opt": ("SAM_MODEL",), + "segm_detector_opt": ("SEGM_DETECTOR",), + "detailer_hook": ("DETAILER_HOOK",), + }} + + +class FromDetailerPipe: + @classmethod + def INPUT_TYPES(s): + return {"required": {"detailer_pipe": ("DETAILER_PIPE",), }, } + + RETURN_TYPES = ("MODEL", "CLIP", "VAE", "CONDITIONING", "CONDITIONING", "BBOX_DETECTOR", "SAM_MODEL", "SEGM_DETECTOR", "DETAILER_HOOK") + RETURN_NAMES = ("model", "clip", "vae", "positive", "negative", "bbox_detector", "sam_model_opt", "segm_detector_opt", "detailer_hook") + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Pipe" + + def doit(self, detailer_pipe): + model, clip, vae, positive, negative, wildcard, bbox_detector, segm_detector_opt, sam_model_opt, detailer_hook, _, _, _, _ = detailer_pipe + return model, clip, vae, positive, negative, bbox_detector, sam_model_opt, segm_detector_opt, detailer_hook + + +class FromDetailerPipe_v2: + @classmethod + def 
INPUT_TYPES(s): + return {"required": {"detailer_pipe": ("DETAILER_PIPE",), }, } + + RETURN_TYPES = ("DETAILER_PIPE", "MODEL", "CLIP", "VAE", "CONDITIONING", "CONDITIONING", "BBOX_DETECTOR", "SAM_MODEL", "SEGM_DETECTOR", "DETAILER_HOOK") + RETURN_NAMES = ("detailer_pipe", "model", "clip", "vae", "positive", "negative", "bbox_detector", "sam_model_opt", "segm_detector_opt", "detailer_hook") + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Pipe" + + def doit(self, detailer_pipe): + model, clip, vae, positive, negative, wildcard, bbox_detector, segm_detector_opt, sam_model_opt, detailer_hook, _, _, _, _ = detailer_pipe + return detailer_pipe, model, clip, vae, positive, negative, bbox_detector, sam_model_opt, segm_detector_opt, detailer_hook + + +class FromDetailerPipe_SDXL: + @classmethod + def INPUT_TYPES(s): + return {"required": {"detailer_pipe": ("DETAILER_PIPE",), }, } + + RETURN_TYPES = ("DETAILER_PIPE", "MODEL", "CLIP", "VAE", "CONDITIONING", "CONDITIONING", "BBOX_DETECTOR", "SAM_MODEL", "SEGM_DETECTOR", "DETAILER_HOOK", "MODEL", "CLIP", "CONDITIONING", "CONDITIONING") + RETURN_NAMES = ("detailer_pipe", "model", "clip", "vae", "positive", "negative", "bbox_detector", "sam_model_opt", "segm_detector_opt", "detailer_hook", "refiner_model", "refiner_clip", "refiner_positive", "refiner_negative") + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Pipe" + + def doit(self, detailer_pipe): + model, clip, vae, positive, negative, wildcard, bbox_detector, segm_detector_opt, sam_model_opt, detailer_hook, refiner_model, refiner_clip, refiner_positive, refiner_negative = detailer_pipe + return detailer_pipe, model, clip, vae, positive, negative, bbox_detector, sam_model_opt, segm_detector_opt, detailer_hook, refiner_model, refiner_clip, refiner_positive, refiner_negative + + +class ToBasicPipe: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "model": ("MODEL",), + "clip": ("CLIP",), + "vae": ("VAE",), + "positive": ("CONDITIONING",), + "negative": ("CONDITIONING",), + }, + } + + RETURN_TYPES = ("BASIC_PIPE", ) + RETURN_NAMES = ("basic_pipe", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Pipe" + + def doit(self, model, clip, vae, positive, negative): + pipe = (model, clip, vae, positive, negative) + return (pipe, ) + + +class FromBasicPipe: + @classmethod + def INPUT_TYPES(s): + return {"required": {"basic_pipe": ("BASIC_PIPE",), }, } + + RETURN_TYPES = ("MODEL", "CLIP", "VAE", "CONDITIONING", "CONDITIONING") + RETURN_NAMES = ("model", "clip", "vae", "positive", "negative") + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Pipe" + + def doit(self, basic_pipe): + model, clip, vae, positive, negative = basic_pipe + return model, clip, vae, positive, negative + + +class FromBasicPipe_v2: + @classmethod + def INPUT_TYPES(s): + return {"required": {"basic_pipe": ("BASIC_PIPE",), }, } + + RETURN_TYPES = ("BASIC_PIPE", "MODEL", "CLIP", "VAE", "CONDITIONING", "CONDITIONING") + RETURN_NAMES = ("basic_pipe", "model", "clip", "vae", "positive", "negative") + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Pipe" + + def doit(self, basic_pipe): + model, clip, vae, positive, negative = basic_pipe + return basic_pipe, model, clip, vae, positive, negative + + +class BasicPipeToDetailerPipe: + @classmethod + def INPUT_TYPES(s): + return {"required": {"basic_pipe": ("BASIC_PIPE",), + "bbox_detector": ("BBOX_DETECTOR", ), + "wildcard": ("STRING", {"multiline": True, "dynamicPrompts": False}), + "Select to add LoRA": (["Select the LoRA to add to the text"] + folder_paths.get_filename_list("loras"),), + "Select 
to add Wildcard": (["Select the Wildcard to add to the text"],), + }, + "optional": { + "sam_model_opt": ("SAM_MODEL", ), + "segm_detector_opt": ("SEGM_DETECTOR",), + "detailer_hook": ("DETAILER_HOOK",), + }, + } + + RETURN_TYPES = ("DETAILER_PIPE", ) + RETURN_NAMES = ("detailer_pipe", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Pipe" + + def doit(self, *args, **kwargs): + basic_pipe = kwargs['basic_pipe'] + bbox_detector = kwargs['bbox_detector'] + wildcard = kwargs['wildcard'] + sam_model_opt = kwargs.get('sam_model_opt', None) + segm_detector_opt = kwargs.get('segm_detector_opt', None) + detailer_hook = kwargs.get('detailer_hook', None) + + model, clip, vae, positive, negative = basic_pipe + pipe = model, clip, vae, positive, negative, wildcard, bbox_detector, segm_detector_opt, sam_model_opt, detailer_hook, None, None, None, None + return (pipe, ) + + +class BasicPipeToDetailerPipeSDXL: + @classmethod + def INPUT_TYPES(s): + return {"required": {"base_basic_pipe": ("BASIC_PIPE",), + "refiner_basic_pipe": ("BASIC_PIPE",), + "bbox_detector": ("BBOX_DETECTOR", ), + "wildcard": ("STRING", {"multiline": True, "dynamicPrompts": False}), + "Select to add LoRA": (["Select the LoRA to add to the text"] + folder_paths.get_filename_list("loras"),), + "Select to add Wildcard": (["Select the Wildcard to add to the text"],), + }, + "optional": { + "sam_model_opt": ("SAM_MODEL", ), + "segm_detector_opt": ("SEGM_DETECTOR",), + "detailer_hook": ("DETAILER_HOOK",), + }, + } + + RETURN_TYPES = ("DETAILER_PIPE", ) + RETURN_NAMES = ("detailer_pipe", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Pipe" + + def doit(self, *args, **kwargs): + base_basic_pipe = kwargs['base_basic_pipe'] + refiner_basic_pipe = kwargs['refiner_basic_pipe'] + bbox_detector = kwargs['bbox_detector'] + wildcard = kwargs['wildcard'] + sam_model_opt = kwargs.get('sam_model_opt', None) + segm_detector_opt = kwargs.get('segm_detector_opt', None) + detailer_hook = kwargs.get('detailer_hook', None) + + model, clip, vae, positive, negative = base_basic_pipe + refiner_model, refiner_clip, refiner_vae, refiner_positive, refiner_negative = refiner_basic_pipe + pipe = model, clip, vae, positive, negative, wildcard, bbox_detector, segm_detector_opt, sam_model_opt, detailer_hook, refiner_model, refiner_clip, refiner_positive, refiner_negative + return (pipe, ) + + +class DetailerPipeToBasicPipe: + @classmethod + def INPUT_TYPES(s): + return {"required": {"detailer_pipe": ("DETAILER_PIPE",), }} + + RETURN_TYPES = ("BASIC_PIPE", "BASIC_PIPE") + RETURN_NAMES = ("base_basic_pipe", "refiner_basic_pipe") + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Pipe" + + def doit(self, detailer_pipe): + model, clip, vae, positive, negative, _, _, _, _, _, refiner_model, refiner_clip, refiner_positive, refiner_negative = detailer_pipe + pipe = model, clip, vae, positive, negative + refiner_pipe = refiner_model, refiner_clip, vae, refiner_positive, refiner_negative + return (pipe, refiner_pipe) + + +class EditBasicPipe: + @classmethod + def INPUT_TYPES(s): + return { + "required": {"basic_pipe": ("BASIC_PIPE",), }, + "optional": { + "model": ("MODEL",), + "clip": ("CLIP",), + "vae": ("VAE",), + "positive": ("CONDITIONING",), + "negative": ("CONDITIONING",), + }, + } + + RETURN_TYPES = ("BASIC_PIPE", ) + RETURN_NAMES = ("basic_pipe", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Pipe" + + def doit(self, basic_pipe, model=None, clip=None, vae=None, positive=None, negative=None): + res_model, res_clip, res_vae, res_positive, res_negative = basic_pipe + + 
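# a BASIC_PIPE is the 5-tuple (model, clip, vae, positive, negative); only the explicitly supplied inputs replace the existing components +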
if model is not None: + res_model = model + + if clip is not None: + res_clip = clip + + if vae is not None: + res_vae = vae + + if positive is not None: + res_positive = positive + + if negative is not None: + res_negative = negative + + pipe = res_model, res_clip, res_vae, res_positive, res_negative + + return (pipe, ) + + +class EditDetailerPipe: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "detailer_pipe": ("DETAILER_PIPE",), + "wildcard": ("STRING", {"multiline": True, "dynamicPrompts": False}), + "Select to add LoRA": (["Select the LoRA to add to the text"] + folder_paths.get_filename_list("loras"),), + "Select to add Wildcard": (["Select the Wildcard to add to the text"],), + }, + "optional": { + "model": ("MODEL",), + "clip": ("CLIP",), + "vae": ("VAE",), + "positive": ("CONDITIONING",), + "negative": ("CONDITIONING",), + "bbox_detector": ("BBOX_DETECTOR",), + "sam_model": ("SAM_MODEL",), + "segm_detector": ("SEGM_DETECTOR",), + "detailer_hook": ("DETAILER_HOOK",), + }, + } + + RETURN_TYPES = ("DETAILER_PIPE",) + RETURN_NAMES = ("detailer_pipe",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Pipe" + + def doit(self, *args, **kwargs): + detailer_pipe = kwargs['detailer_pipe'] + wildcard = kwargs['wildcard'] + model = kwargs.get('model', None) + clip = kwargs.get('clip', None) + vae = kwargs.get('vae', None) + positive = kwargs.get('positive', None) + negative = kwargs.get('negative', None) + bbox_detector = kwargs.get('bbox_detector', None) + sam_model = kwargs.get('sam_model', None) + segm_detector = kwargs.get('segm_detector', None) + detailer_hook = kwargs.get('detailer_hook', None) + refiner_model = kwargs.get('refiner_model', None) + refiner_clip = kwargs.get('refiner_clip', None) + refiner_positive = kwargs.get('refiner_positive', None) + refiner_negative = kwargs.get('refiner_negative', None) + + res_model, res_clip, res_vae, res_positive, res_negative, res_wildcard, res_bbox_detector, res_segm_detector, res_sam_model, res_detailer_hook, res_refiner_model, res_refiner_clip, res_refiner_positive, res_refiner_negative = detailer_pipe + + if model is not None: + res_model = model + + if clip is not None: + res_clip = clip + + if vae is not None: + res_vae = vae + + if positive is not None: + res_positive = positive + + if negative is not None: + res_negative = negative + + if bbox_detector is not None: + res_bbox_detector = bbox_detector + + if segm_detector is not None: + res_segm_detector = segm_detector + + if wildcard != "": + res_wildcard = wildcard + + if sam_model is not None: + res_sam_model = sam_model + + if detailer_hook is not None: + res_detailer_hook = detailer_hook + + if refiner_model is not None: + res_refiner_model = refiner_model + + if refiner_clip is not None: + res_refiner_clip = refiner_clip + + if refiner_positive is not None: + res_refiner_positive = refiner_positive + + if refiner_negative is not None: + res_refiner_negative = refiner_negative + + pipe = (res_model, res_clip, res_vae, res_positive, res_negative, res_wildcard, + res_bbox_detector, res_segm_detector, res_sam_model, res_detailer_hook, + res_refiner_model, res_refiner_clip, res_refiner_positive, res_refiner_negative) + + return (pipe, ) + + +class EditDetailerPipeSDXL(EditDetailerPipe): + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "detailer_pipe": ("DETAILER_PIPE",), + "wildcard": ("STRING", {"multiline": True, "dynamicPrompts": False}), + "Select to add LoRA": (["Select the LoRA to add to the text"] + folder_paths.get_filename_list("loras"),), + 
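# the "Select to add ..." combos are frontend helper widgets that append the chosen LoRA/wildcard name into the wildcard text; their values are not read by doit +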
"Select to add Wildcard": (["Select the Wildcard to add to the text"],), + }, + "optional": { + "model": ("MODEL",), + "clip": ("CLIP",), + "vae": ("VAE",), + "positive": ("CONDITIONING",), + "negative": ("CONDITIONING",), + "refiner_model": ("MODEL",), + "refiner_clip": ("CLIP",), + "refiner_positive": ("CONDITIONING",), + "refiner_negative": ("CONDITIONING",), + "bbox_detector": ("BBOX_DETECTOR",), + "sam_model": ("SAM_MODEL",), + "segm_detector": ("SEGM_DETECTOR",), + "detailer_hook": ("DETAILER_HOOK",), + }, + } diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/sample_error_enhancer.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/sample_error_enhancer.py new file mode 100644 index 0000000000000000000000000000000000000000..01b5600671e4bc5620251eab6f0c5a4ffe625571 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/sample_error_enhancer.py @@ -0,0 +1,25 @@ +import comfy.sample +import traceback + +original_sample = comfy.sample.sample + + +def informative_sample(*args, **kwargs): + try: + return original_sample(*args, **kwargs) # This code helps interpret error messages that occur within exceptions but does not have any impact on other operations. + except RuntimeError as e: + is_model_mix_issue = False + try: + if 'mat1 and mat2 shapes cannot be multiplied' in e.args[0]: + if 'torch.nn.functional.linear' in traceback.format_exc().strip().split('\n')[-3]: + is_model_mix_issue = True + except: + pass + + if is_model_mix_issue: + raise RuntimeError("\n\n#### It seems that models and clips are mixed and interconnected between SDXL Base, SDXL Refiner, SD1.x, and SD2.x. Please verify. ####\n\n") + else: + raise e + + +comfy.sample.sample = informative_sample diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/segs_nodes.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/segs_nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..9293917a1614f486e2ec84f47bd24f18d70fca2a --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/segs_nodes.py @@ -0,0 +1,1845 @@ +import os +import sys + +import impact.impact_server +from nodes import MAX_RESOLUTION + +from impact.utils import * +from . import core +from .core import SEG +import impact.utils as utils +from . import defs +from . 
import segs_upscaler +from comfy.cli_args import args +import math + + +class SEGSDetailer: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image": ("IMAGE", ), + "segs": ("SEGS", ), + "guide_size": ("FLOAT", {"default": 512, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "guide_size_for": ("BOOLEAN", {"default": True, "label_on": "bbox", "label_off": "crop_region"}), + "max_size": ("FLOAT", {"default": 768, "min": 64, "max": MAX_RESOLUTION, "step": 8}), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": (core.SCHEDULERS,), + "denoise": ("FLOAT", {"default": 0.5, "min": 0.0001, "max": 1.0, "step": 0.01}), + "noise_mask": ("BOOLEAN", {"default": True, "label_on": "enabled", "label_off": "disabled"}), + "force_inpaint": ("BOOLEAN", {"default": True, "label_on": "enabled", "label_off": "disabled"}), + "basic_pipe": ("BASIC_PIPE",), + "refiner_ratio": ("FLOAT", {"default": 0.2, "min": 0.0, "max": 1.0}), + "batch_size": ("INT", {"default": 1, "min": 1, "max": 100}), + + "cycle": ("INT", {"default": 1, "min": 1, "max": 10, "step": 1}), + }, + "optional": { + "refiner_basic_pipe_opt": ("BASIC_PIPE",), + "inpaint_model": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + "noise_mask_feather": ("INT", {"default": 20, "min": 0, "max": 100, "step": 1}), + "scheduler_func_opt": ("SCHEDULER_FUNC",), + } + } + + RETURN_TYPES = ("SEGS", "IMAGE") + RETURN_NAMES = ("segs", "cnet_images") + OUTPUT_IS_LIST = (False, True) + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detailer" + + @staticmethod + def do_detail(image, segs, guide_size, guide_size_for, max_size, seed, steps, cfg, sampler_name, scheduler, + denoise, noise_mask, force_inpaint, basic_pipe, refiner_ratio=None, batch_size=1, cycle=1, + refiner_basic_pipe_opt=None, inpaint_model=False, noise_mask_feather=0, scheduler_func_opt=None): + + model, clip, vae, positive, negative = basic_pipe + if refiner_basic_pipe_opt is None: + refiner_model, refiner_clip, refiner_positive, refiner_negative = None, None, None, None + else: + refiner_model, refiner_clip, _, refiner_positive, refiner_negative = refiner_basic_pipe_opt + + segs = core.segs_scale_match(segs, image.shape) + + new_segs = [] + cnet_pil_list = [] + + for i in range(batch_size): + seed += 1 + for seg in segs[1]: + cropped_image = seg.cropped_image if seg.cropped_image is not None \ + else crop_ndarray4(image.numpy(), seg.crop_region) + cropped_image = to_tensor(cropped_image) + + is_mask_all_zeros = (seg.cropped_mask == 0).all().item() + if is_mask_all_zeros: + print(f"Detailer: segment skip [empty mask]") + new_segs.append(seg) + continue + + if noise_mask: + cropped_mask = seg.cropped_mask + else: + cropped_mask = None + + cropped_positive = [ + [condition, { + k: core.crop_condition_mask(v, image, seg.crop_region) if k == "mask" else v + for k, v in details.items() + }] + for condition, details in positive + ] + + cropped_negative = [ + [condition, { + k: core.crop_condition_mask(v, image, seg.crop_region) if k == "mask" else v + for k, v in details.items() + }] + for condition, details in negative + ] + + enhanced_image, cnet_pils = core.enhance_detail(cropped_image, model, clip, vae, guide_size, guide_size_for, max_size, + seg.bbox, seed, steps, cfg, sampler_name, scheduler, + cropped_positive, cropped_negative, denoise, 
cropped_mask, force_inpaint, + refiner_ratio=refiner_ratio, refiner_model=refiner_model, + refiner_clip=refiner_clip, refiner_positive=refiner_positive, refiner_negative=refiner_negative, + control_net_wrapper=seg.control_net_wrapper, cycle=cycle, + inpaint_model=inpaint_model, noise_mask_feather=noise_mask_feather, scheduler_func=scheduler_func_opt) + + if cnet_pils is not None: + cnet_pil_list.extend(cnet_pils) + + if enhanced_image is None: + new_cropped_image = cropped_image + else: + new_cropped_image = enhanced_image + + new_seg = SEG(to_numpy(new_cropped_image), seg.cropped_mask, seg.confidence, seg.crop_region, seg.bbox, seg.label, None) + new_segs.append(new_seg) + + return (segs[0], new_segs), cnet_pil_list + + def doit(self, image, segs, guide_size, guide_size_for, max_size, seed, steps, cfg, sampler_name, scheduler, + denoise, noise_mask, force_inpaint, basic_pipe, refiner_ratio=None, batch_size=1, cycle=1, + refiner_basic_pipe_opt=None, inpaint_model=False, noise_mask_feather=0, scheduler_func_opt=None): + + if len(image) > 1: + raise Exception('[Impact Pack] ERROR: SEGSDetailer does not allow image batches.\nPlease refer to https://github.com/ltdrdata/ComfyUI-extension-tutorials/blob/Main/ComfyUI-Impact-Pack/tutorial/batching-detailer.md for more information.') + + segs, cnet_pil_list = SEGSDetailer.do_detail(image, segs, guide_size, guide_size_for, max_size, seed, steps, cfg, sampler_name, + scheduler, denoise, noise_mask, force_inpaint, basic_pipe, refiner_ratio, batch_size, cycle=cycle, + refiner_basic_pipe_opt=refiner_basic_pipe_opt, + inpaint_model=inpaint_model, noise_mask_feather=noise_mask_feather, scheduler_func_opt=scheduler_func_opt) + + # set fallback image + if len(cnet_pil_list) == 0: + cnet_pil_list = [empty_pil_tensor()] + + return segs, cnet_pil_list + + +class SEGSPaste: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "image": ("IMAGE", ), + "segs": ("SEGS", ), + "feather": ("INT", {"default": 5, "min": 0, "max": 100, "step": 1}), + "alpha": ("INT", {"default": 255, "min": 0, "max": 255, "step": 1}), + }, + "optional": {"ref_image_opt": ("IMAGE", ), } + } + + RETURN_TYPES = ("IMAGE", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Detailer" + + @staticmethod + def doit(image, segs, feather, alpha=255, ref_image_opt=None): + + segs = core.segs_scale_match(segs, image.shape) + + result = None + for i, single_image in enumerate(image): + image_i = single_image.unsqueeze(0).clone() + + for seg in segs[1]: + ref_image = None + if ref_image_opt is None and seg.cropped_image is not None: + cropped_image = seg.cropped_image + if isinstance(cropped_image, np.ndarray): + cropped_image = torch.from_numpy(cropped_image) + ref_image = cropped_image[i].unsqueeze(0) + elif ref_image_opt is not None: + ref_tensor = ref_image_opt[i].unsqueeze(0) + ref_image = crop_image(ref_tensor, seg.crop_region) + if ref_image is not None: + if seg.cropped_mask.ndim == 3 and len(seg.cropped_mask) == len(image): + mask = seg.cropped_mask[i] + elif seg.cropped_mask.ndim == 3 and len(seg.cropped_mask) > 1: + print(f"[Impact Pack] WARN: SEGSPaste - The number of the mask batch({len(seg.cropped_mask)}) and the image batch({len(image)}) are different. 
Combine the mask frames and apply.") + combined_mask = (seg.cropped_mask[0] * 255).to(torch.uint8) + + for frame_mask in seg.cropped_mask[1:]: + combined_mask |= (frame_mask * 255).to(torch.uint8) + + combined_mask = (combined_mask/255.0).to(torch.float32) + mask = utils.to_binary_mask(combined_mask, 0.1) + else: # ndim == 2 + mask = seg.cropped_mask + + mask = tensor_gaussian_blur_mask(mask, feather) * (alpha/255) + x, y, *_ = seg.crop_region + + # ensure same device + mask = mask.to(image_i.device) + ref_image = ref_image.to(image_i.device) + + tensor_paste(image_i, ref_image, (x, y), mask) + + if result is None: + result = image_i + else: + result = torch.concat((result, image_i), dim=0) + + if not args.highvram and not args.gpu_only: + result = result.cpu() + + return (result, ) + + +class SEGSPreviewCNet: + def __init__(self): + self.output_dir = folder_paths.get_temp_directory() + self.type = "temp" + + @classmethod + def INPUT_TYPES(s): + return {"required": {"segs": ("SEGS", ),}, } + + RETURN_TYPES = ("IMAGE", ) + OUTPUT_IS_LIST = (True, ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + OUTPUT_NODE = True + + def doit(self, segs): + full_output_folder, filename, counter, subfolder, filename_prefix = \ + folder_paths.get_save_image_path("impact_seg_preview", self.output_dir, segs[0][1], segs[0][0]) + + results = list() + result_image_list = [] + + for seg in segs[1]: + file = f"{filename}_{counter:05}_.webp" + + if seg.control_net_wrapper is not None and seg.control_net_wrapper.control_image is not None: + cnet_image = seg.control_net_wrapper.control_image + result_image_list.append(cnet_image) + else: + cnet_image = empty_pil_tensor(64, 64) + + cnet_pil = utils.tensor2pil(cnet_image) + cnet_pil.save(os.path.join(full_output_folder, file)) + + results.append({ + "filename": file, + "subfolder": subfolder, + "type": self.type + }) + + counter += 1 + + return {"ui": {"images": results}, "result": (result_image_list,)} + + +class SEGSPreview: + def __init__(self): + self.output_dir = folder_paths.get_temp_directory() + self.type = "temp" + + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS", ), + "alpha_mode": ("BOOLEAN", {"default": True, "label_on": "enable", "label_off": "disable"}), + "min_alpha": ("FLOAT", {"default": 0.2, "min": 0.0, "max": 1.0, "step": 0.01}), + }, + "optional": { + "fallback_image_opt": ("IMAGE", ), + } + } + + RETURN_TYPES = ("IMAGE", ) + OUTPUT_IS_LIST = (True, ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + OUTPUT_NODE = True + + def doit(self, segs, alpha_mode=True, min_alpha=0.0, fallback_image_opt=None): + full_output_folder, filename, counter, subfolder, filename_prefix = \ + folder_paths.get_save_image_path("impact_seg_preview", self.output_dir, segs[0][1], segs[0][0]) + + results = list() + result_image_list = [] + + if fallback_image_opt is not None: + segs = core.segs_scale_match(segs, fallback_image_opt.shape) + + if min_alpha != 0: + min_alpha = int(255 * min_alpha) + + if len(segs[1]) > 0: + if segs[1][0].cropped_image is not None: + batch_count = len(segs[1][0].cropped_image) + elif fallback_image_opt is not None: + batch_count = len(fallback_image_opt) + else: + return {"ui": {"images": results}} + + for seg in segs[1]: + result_image_batch = None + cached_mask = None + + def get_combined_mask(): + nonlocal cached_mask + + if cached_mask is not None: + return cached_mask + else: + if isinstance(seg.cropped_mask, np.ndarray): + masks = torch.tensor(seg.cropped_mask) + else: + masks = seg.cropped_mask 
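+ # OR all the mask frames together into one binary mask and cache it, so the combination is computed only once per seg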
+ + cached_mask = (masks[0] * 255).to(torch.uint8) + for x in masks[1:]: + cached_mask |= (x * 255).to(torch.uint8) + cached_mask = (cached_mask/255.0).to(torch.float32) + cached_mask = utils.to_binary_mask(cached_mask, 0.1) + cached_mask = cached_mask.numpy() + + return cached_mask + + def stack_image(image, mask=None): + nonlocal result_image_batch + + if isinstance(image, np.ndarray): + image = torch.from_numpy(image) + + if mask is not None: + image *= torch.tensor(mask)[None, ..., None] + + if result_image_batch is None: + result_image_batch = image + else: + result_image_batch = torch.concat((result_image_batch, image), dim=0) + + for i in range(batch_count): + cropped_image = None + + if seg.cropped_image is not None: + cropped_image = seg.cropped_image[i, None] + elif fallback_image_opt is not None: + # take from original image + ref_image = fallback_image_opt[i].unsqueeze(0) + cropped_image = crop_image(ref_image, seg.crop_region) + + if cropped_image is not None: + if isinstance(cropped_image, np.ndarray): + cropped_image = torch.from_numpy(cropped_image) + + cropped_image = cropped_image.clone() + cropped_pil = to_pil(cropped_image) + + if alpha_mode: + if isinstance(seg.cropped_mask, np.ndarray): + cropped_mask = seg.cropped_mask + else: + if seg.cropped_image is not None and len(seg.cropped_image) != len(seg.cropped_mask): + cropped_mask = get_combined_mask() + else: + cropped_mask = seg.cropped_mask[i].numpy() + + mask_array = (cropped_mask * 255).astype(np.uint8) + + if min_alpha != 0: + mask_array[mask_array < min_alpha] = min_alpha + + mask_pil = Image.fromarray(mask_array, mode='L').resize(cropped_pil.size) + cropped_pil.putalpha(mask_pil) + stack_image(cropped_image, cropped_mask) + else: + stack_image(cropped_image) + + file = f"{filename}_{counter:05}_.webp" + cropped_pil.save(os.path.join(full_output_folder, file)) + results.append({ + "filename": file, + "subfolder": subfolder, + "type": self.type + }) + + counter += 1 + + if result_image_batch is not None: + result_image_list.append(result_image_batch) + + return {"ui": {"images": results}, "result": (result_image_list,) } + + +class SEGSLabelFilter: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS", ), + "preset": (['all'] + defs.detection_labels, ), + "labels": ("STRING", {"multiline": True, "placeholder": "List the types of segments to be allowed, separated by commas"}), + }, + } + + RETURN_TYPES = ("SEGS", "SEGS",) + RETURN_NAMES = ("filtered_SEGS", "remained_SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + @staticmethod + def filter(segs, labels): + labels = set([label.strip() for label in labels]) + + if 'all' in labels: + return (segs, (segs[0], []), ) + else: + res_segs = [] + remained_segs = [] + + for x in segs[1]: + if x.label in labels: + res_segs.append(x) + elif 'eyes' in labels and x.label in ['left_eye', 'right_eye']: + res_segs.append(x) + elif 'eyebrows' in labels and x.label in ['left_eyebrow', 'right_eyebrow']: + res_segs.append(x) + elif 'pupils' in labels and x.label in ['left_pupil', 'right_pupil']: + res_segs.append(x) + else: + remained_segs.append(x) + + return ((segs[0], res_segs), (segs[0], remained_segs), ) + + def doit(self, segs, preset, labels): + labels = labels.split(',') + return SEGSLabelFilter.filter(segs, labels) + + +class SEGSLabelAssign: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS", ), + "labels": ("STRING", {"multiline": True, "placeholder": "List the label to be assigned in order of segs, 
separated by commas"}), + }, + } + + RETURN_TYPES = ("SEGS",) + RETURN_NAMES = ("SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + @staticmethod + def assign(segs, labels): + labels = [label.strip() for label in labels] + + if len(labels) != len(segs[1]): + print(f'Warning (SEGSLabelAssign): length of labels ({len(labels)}) != length of segs ({len(segs[1])})') + + labeled_segs = [] + + idx = 0 + for x in segs[1]: + if len(labels) > idx: + x = x._replace(label=labels[idx]) + labeled_segs.append(x) + idx += 1 + + return ((segs[0], labeled_segs), ) + + def doit(self, segs, labels): + labels = labels.split(',') + return SEGSLabelAssign.assign(segs, labels) + + +class SEGSOrderedFilter: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS", ), + "target": (["area(=w*h)", "width", "height", "x1", "y1", "x2", "y2", "confidence"],), + "order": ("BOOLEAN", {"default": True, "label_on": "descending", "label_off": "ascending"}), + "take_start": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "take_count": ("INT", {"default": 1, "min": 0, "max": sys.maxsize, "step": 1}), + }, + } + + RETURN_TYPES = ("SEGS", "SEGS",) + RETURN_NAMES = ("filtered_SEGS", "remained_SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, segs, target, order, take_start, take_count): + segs_with_order = [] + + for seg in segs[1]: + x1 = seg.crop_region[0] + y1 = seg.crop_region[1] + x2 = seg.crop_region[2] + y2 = seg.crop_region[3] + + if target == "area(=w*h)": + value = (y2 - y1) * (x2 - x1) + elif target == "width": + value = x2 - x1 + elif target == "height": + value = y2 - y1 + elif target == "x1": + value = x1 + elif target == "x2": + value = x2 + elif target == "y1": + value = y1 + elif target == "y2": + value = y2 + elif target == "confidence": + value = seg.confidence + else: + raise Exception(f"[Impact Pack] SEGSOrderedFilter - Unexpected target '{target}'") + + segs_with_order.append((value, seg)) + + if order: + sorted_list = sorted(segs_with_order, key=lambda x: x[0], reverse=True) + else: + sorted_list = sorted(segs_with_order, key=lambda x: x[0], reverse=False) + + result_list = [] + remained_list = [] + + for i, item in enumerate(sorted_list): + if take_start <= i < take_start + take_count: + result_list.append(item[1]) + else: + remained_list.append(item[1]) + + return (segs[0], result_list), (segs[0], remained_list), + + +class SEGSRangeFilter: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS", ), + "target": (["area(=w*h)", "width", "height", "x1", "y1", "x2", "y2", "length_percent", "confidence(0-100)"],), + "mode": ("BOOLEAN", {"default": True, "label_on": "inside", "label_off": "outside"}), + "min_value": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), + "max_value": ("INT", {"default": 67108864, "min": 0, "max": sys.maxsize, "step": 1}), + }, + } + + RETURN_TYPES = ("SEGS", "SEGS",) + RETURN_NAMES = ("filtered_SEGS", "remained_SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, segs, target, mode, min_value, max_value): + new_segs = [] + remained_segs = [] + + for seg in segs[1]: + x1 = seg.crop_region[0] + y1 = seg.crop_region[1] + x2 = seg.crop_region[2] + y2 = seg.crop_region[3] + + if target == "area(=w*h)": + value = (y2 - y1) * (x2 - x1) + elif target == "length_percent": + h = y2 - y1 + w = x2 - x1 + value = max(h/w, w/h)*100 + print(f"value={value}") + elif target == "width": + value = x2 - x1 + elif target == "height": + value = y2 - y1 
+ elif target == "x1": + value = x1 + elif target == "x2": + value = x2 + elif target == "y1": + value = y1 + elif target == "y2": + value = y2 + elif target == "confidence(0-100)": + value = seg.confidence*100 + else: + raise Exception(f"[Impact Pack] SEGSRangeFilter - Unexpected target '{target}'") + + if mode and min_value <= value <= max_value: + print(f"[in] value={value} / {mode}, {min_value}, {max_value}") + new_segs.append(seg) + elif not mode and (value < min_value or value > max_value): + print(f"[out] value={value} / {mode}, {min_value}, {max_value}") + new_segs.append(seg) + else: + remained_segs.append(seg) + print(f"[filter] value={value} / {mode}, {min_value}, {max_value}") + + return (segs[0], new_segs), (segs[0], remained_segs), + + +class SEGSToImageList: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS", ), + }, + "optional": { + "fallback_image_opt": ("IMAGE", ), + } + } + + RETURN_TYPES = ("IMAGE",) + OUTPUT_IS_LIST = (True,) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, segs, fallback_image_opt=None): + results = list() + + if fallback_image_opt is not None: + segs = core.segs_scale_match(segs, fallback_image_opt.shape) + + for seg in segs[1]: + if seg.cropped_image is not None: + cropped_image = to_tensor(seg.cropped_image) + elif fallback_image_opt is not None: + # take from original image + cropped_image = to_tensor(crop_image(fallback_image_opt, seg.crop_region)) + else: + cropped_image = empty_pil_tensor() + + results.append(cropped_image) + + if len(results) == 0: + results.append(empty_pil_tensor()) + + return (results,) + + +class SEGSToMaskList: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS", ), + }, + } + + RETURN_TYPES = ("MASK",) + OUTPUT_IS_LIST = (True,) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, segs): + masks = core.segs_to_masklist(segs) + if len(masks) == 0: + empty_mask = torch.zeros(segs[0], dtype=torch.float32, device="cpu") + masks = [empty_mask] + masks = [utils.make_3d_mask(mask) for mask in masks] + return (masks,) + + +class SEGSToMaskBatch: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS", ), + }, + } + + RETURN_TYPES = ("MASK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, segs): + masks = core.segs_to_masklist(segs) + masks = [utils.make_3d_mask(mask) for mask in masks] + mask_batch = torch.concat(masks) + return (mask_batch,) + + +class SEGSConcat: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs1": ("SEGS", ), + }, + } + + RETURN_TYPES = ("SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, **kwargs): + dim = None + res = None + + for k, v in list(kwargs.items()): + if v[0] == (0, 0) or len(v[1]) == 0: + continue + + if dim is None: + dim = v[0] + res = v[1] + else: + if v[0] == dim: + res = res + v[1] + else: + print(f"ERROR: source shape of 'segs1'{dim} and '{k}'{v[0]} are different. 
'{k}' will be ignored") + + if dim is None: + empty_segs = ((0, 0), []) + return (empty_segs, ) + else: + return ((dim, res), ) + + +class Count_Elts_in_SEGS: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS", ), + }, + } + + RETURN_TYPES = ("INT",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, segs): + return (len(segs[1]), ) + + +class DecomposeSEGS: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS", ), + }, + } + + RETURN_TYPES = ("SEGS_HEADER", "SEG_ELT",) + OUTPUT_IS_LIST = (False, True, ) + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, segs): + return segs + + +class AssembleSEGS: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "seg_header": ("SEGS_HEADER", ), + "seg_elt": ("SEG_ELT", ), + }, + } + + INPUT_IS_LIST = True + + RETURN_TYPES = ("SEGS", ) + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, seg_header, seg_elt): + return ((seg_header[0], seg_elt), ) + + +class From_SEG_ELT: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "seg_elt": ("SEG_ELT", ), + }, + } + + RETURN_TYPES = ("SEG_ELT", "IMAGE", "MASK", "SEG_ELT_crop_region", "SEG_ELT_bbox", "SEG_ELT_control_net_wrapper", "FLOAT", "STRING") + RETURN_NAMES = ("seg_elt", "cropped_image", "cropped_mask", "crop_region", "bbox", "control_net_wrapper", "confidence", "label") + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, seg_elt): + cropped_image = to_tensor(seg_elt.cropped_image) if seg_elt.cropped_image is not None else None + return (seg_elt, cropped_image, to_tensor(seg_elt.cropped_mask), seg_elt.crop_region, seg_elt.bbox, seg_elt.control_net_wrapper, seg_elt.confidence, seg_elt.label,) + + +class From_SEG_ELT_bbox: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "bbox": ("SEG_ELT_bbox", ), + }, + } + + RETURN_TYPES = ("INT", "INT", "INT", "INT") + RETURN_NAMES = ("left", "top", "right", "bottom") + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, bbox): + return bbox + + +class From_SEG_ELT_crop_region: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "crop_region": ("SEG_ELT_crop_region", ), + }, + } + + RETURN_TYPES = ("INT", "INT", "INT", "INT") + RETURN_NAMES = ("left", "top", "right", "bottom") + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, crop_region): + return crop_region + + +class Edit_SEG_ELT: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "seg_elt": ("SEG_ELT", ), + }, + "optional": { + "cropped_image_opt": ("IMAGE", ), + "cropped_mask_opt": ("MASK", ), + "crop_region_opt": ("SEG_ELT_crop_region", ), + "bbox_opt": ("SEG_ELT_bbox", ), + "control_net_wrapper_opt": ("SEG_ELT_control_net_wrapper", ), + "confidence_opt": ("FLOAT", {"min": 0, "max": 1.0, "step": 0.1, "forceInput": True}), + "label_opt": ("STRING", {"multiline": False, "forceInput": True}), + } + } + + RETURN_TYPES = ("SEG_ELT", ) + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, seg_elt, cropped_image_opt=None, cropped_mask_opt=None, confidence_opt=None, crop_region_opt=None, + bbox_opt=None, label_opt=None, control_net_wrapper_opt=None): + + cropped_image = seg_elt.cropped_image if cropped_image_opt is None else cropped_image_opt + cropped_mask = seg_elt.cropped_mask if cropped_mask_opt is None else cropped_mask_opt + confidence = seg_elt.confidence if confidence_opt is None else confidence_opt + crop_region = seg_elt.crop_region if crop_region_opt 
is None else crop_region_opt + bbox = seg_elt.bbox if bbox_opt is None else bbox_opt + label = seg_elt.label if label_opt is None else label_opt + control_net_wrapper = seg_elt.control_net_wrapper if control_net_wrapper_opt is None else control_net_wrapper_opt + + cropped_image = cropped_image.numpy() if cropped_image is not None else None + + if isinstance(cropped_mask, torch.Tensor): + if len(cropped_mask.shape) == 3: + cropped_mask = cropped_mask.squeeze(0) + + cropped_mask = cropped_mask.numpy() + + seg = SEG(cropped_image, cropped_mask, confidence, crop_region, bbox, label, control_net_wrapper) + + return (seg,) + + +class DilateMask: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "mask": ("MASK", ), + "dilation": ("INT", {"default": 10, "min": -512, "max": 512, "step": 1}), + }} + + RETURN_TYPES = ("MASK", ) + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, mask, dilation): + mask = core.dilate_mask(mask.numpy(), dilation) + mask = torch.from_numpy(mask) + mask = utils.make_3d_mask(mask) + return (mask, ) + + +class GaussianBlurMask: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "mask": ("MASK", ), + "kernel_size": ("INT", {"default": 10, "min": 0, "max": 100, "step": 1}), + "sigma": ("FLOAT", {"default": 10.0, "min": 0.1, "max": 100.0, "step": 0.1}), + }} + + RETURN_TYPES = ("MASK", ) + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, mask, kernel_size, sigma): + # Some custom nodes use abnormal 4-dimensional masks in the format of b, c, h, w. In the impact pack, internal 4-dimensional masks are required in the format of b, h, w, c. Therefore, normalization is performed using the normal mask format, which is 3-dimensional, before proceeding with the operation. + mask = make_3d_mask(mask) + mask = torch.unsqueeze(mask, dim=-1) + mask = utils.tensor_gaussian_blur_mask(mask, kernel_size, sigma) + mask = torch.squeeze(mask, dim=-1) + return (mask, ) + + +class DilateMaskInSEGS: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS", ), + "dilation": ("INT", {"default": 10, "min": -512, "max": 512, "step": 1}), + }} + + RETURN_TYPES = ("SEGS", ) + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, segs, dilation): + new_segs = [] + for seg in segs[1]: + mask = core.dilate_mask(seg.cropped_mask, dilation) + seg = SEG(seg.cropped_image, mask, seg.confidence, seg.crop_region, seg.bbox, seg.label, seg.control_net_wrapper) + new_segs.append(seg) + + return ((segs[0], new_segs), ) + + +class GaussianBlurMaskInSEGS: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS", ), + "kernel_size": ("INT", {"default": 10, "min": 0, "max": 100, "step": 1}), + "sigma": ("FLOAT", {"default": 10.0, "min": 0.1, "max": 100.0, "step": 0.1}), + }} + + RETURN_TYPES = ("SEGS", ) + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, segs, kernel_size, sigma): + new_segs = [] + for seg in segs[1]: + mask = utils.tensor_gaussian_blur_mask(seg.cropped_mask, kernel_size, sigma) + mask = torch.squeeze(mask, dim=-1).squeeze(0).numpy() + seg = SEG(seg.cropped_image, mask, seg.confidence, seg.crop_region, seg.bbox, seg.label, seg.control_net_wrapper) + new_segs.append(seg) + + return ((segs[0], new_segs), ) + + +class Dilate_SEG_ELT: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "seg_elt": ("SEG_ELT", ), + "dilation": ("INT", {"default": 10, "min": -512, "max": 512, "step": 1}), + }} + + RETURN_TYPES = ("SEG_ELT", ) + + FUNCTION = "doit" 
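+ # applies core.dilate_mask to a single SEG element; a negative dilation value erodes the mask instead of dilating it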
+ + CATEGORY = "ImpactPack/Util" + + def doit(self, seg_elt, dilation): + # the parameter name must match the "seg_elt" input declared in INPUT_TYPES; ComfyUI passes node inputs as keyword arguments + mask = core.dilate_mask(seg_elt.cropped_mask, dilation) + seg = SEG(seg_elt.cropped_image, mask, seg_elt.confidence, seg_elt.crop_region, seg_elt.bbox, seg_elt.label, seg_elt.control_net_wrapper) + return (seg,) + + +class SEG_ELT_BBOX_ScaleBy: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "seg": ("SEG_ELT", ), + "scale_by": ("FLOAT", {"default": 1.0, "min": 0.01, "max": 8.0, "step": 0.01}), } + } + + RETURN_TYPES = ("SEG_ELT", ) + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + @staticmethod + def fill_zero_outside_bbox(mask, crop_region, bbox): + cx1, cy1, _, _ = crop_region + x1, y1, x2, y2 = bbox + x1, y1, x2, y2 = x1-cx1, y1-cy1, x2-cx1, y2-cy1 + h, w = mask.shape + + x1 = min(w-1, max(0, x1)) + x2 = min(w-1, max(0, x2)) + y1 = min(h-1, max(0, y1)) + y2 = min(h-1, max(0, y2)) + + mask_cropped = mask.copy() + mask_cropped[:, :x1] = 0 # zero fill left side + mask_cropped[:, x2:] = 0 # zero fill right side + mask_cropped[:y1, :] = 0 # zero fill top side + mask_cropped[y2:, :] = 0 # zero fill bottom side + return mask_cropped + + def doit(self, seg, scale_by): + x1, y1, x2, y2 = seg.bbox + w = x2-x1 + h = y2-y1 + + dw = int((w * scale_by - w)/2) + dh = int((h * scale_by - h)/2) + + bbox = (x1-dw, y1-dh, x2+dw, y2+dh) + + cropped_mask = SEG_ELT_BBOX_ScaleBy.fill_zero_outside_bbox(seg.cropped_mask, seg.crop_region, bbox) + seg = SEG(seg.cropped_image, cropped_mask, seg.confidence, seg.crop_region, bbox, seg.label, seg.control_net_wrapper) + return (seg,) + + +class EmptySEGS: + @classmethod + def INPUT_TYPES(s): + return {"required": {}, } + + RETURN_TYPES = ("SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self): + shape = 0, 0 + return ((shape, []),) + + +class SegsToCombinedMask: + @classmethod + def INPUT_TYPES(s): + return {"required": {"segs": ("SEGS",), }} + + RETURN_TYPES = ("MASK",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Operation" + + def doit(self, segs): + mask = core.segs_to_combined_mask(segs) + mask = utils.make_3d_mask(mask) + return (mask,) + + +class MediaPipeFaceMeshToSEGS: + @classmethod + def INPUT_TYPES(s): + bool_true_widget = ("BOOLEAN", {"default": True, "label_on": "Enabled", "label_off": "Disabled"}) + bool_false_widget = ("BOOLEAN", {"default": False, "label_on": "Enabled", "label_off": "Disabled"}) + return {"required": { + "image": ("IMAGE",), + "crop_factor": ("FLOAT", {"default": 3.0, "min": 1.0, "max": 100, "step": 0.1}), + "bbox_fill": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + "crop_min_size": ("INT", {"min": 10, "max": MAX_RESOLUTION, "step": 1, "default": 50}), + "drop_size": ("INT", {"min": 1, "max": MAX_RESOLUTION, "step": 1, "default": 1}), + "dilation": ("INT", {"default": 0, "min": -512, "max": 512, "step": 1}), + "face": bool_true_widget, + "mouth": bool_false_widget, + "left_eyebrow": bool_false_widget, + "left_eye": bool_false_widget, + "left_pupil": bool_false_widget, + "right_eyebrow": bool_false_widget, + "right_eye": bool_false_widget, + "right_pupil": bool_false_widget, + }, + # "optional": {"reference_image_opt": ("IMAGE", ), } + } + + RETURN_TYPES = ("SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Operation" + + def doit(self, image, crop_factor, bbox_fill, crop_min_size, drop_size, dilation, face, mouth, left_eyebrow, left_eye, left_pupil, right_eyebrow, right_eye, right_pupil): + # padding is obsolete now + # 
https://github.com/Fannovel16/comfyui_controlnet_aux/blob/1ec41fceff1ee99596445a0c73392fd91df407dc/utils.py#L33 + # def calc_pad(h_raw, w_raw): + # resolution = normalize_size_base_64(h_raw, w_raw) + # + # def pad64(x): + # return int(np.ceil(float(x) / 64.0) * 64 - x) + # + # k = float(resolution) / float(min(h_raw, w_raw)) + # h_target = int(np.round(float(h_raw) * k)) + # w_target = int(np.round(float(w_raw) * k)) + # + # return pad64(h_target), pad64(w_target) + + # if reference_image_opt is not None: + # if image.shape[1:] != reference_image_opt.shape[1:]: + # scale_by1 = reference_image_opt.shape[1] / image.shape[1] + # scale_by2 = reference_image_opt.shape[2] / image.shape[2] + # scale_by = min(scale_by1, scale_by2) + # + # # padding is obsolete now + # # h_pad, w_pad = calc_pad(reference_image_opt.shape[1], reference_image_opt.shape[2]) + # # if h_pad != 0: + # # # height padded + # # image = image[:, :-h_pad, :, :] + # # elif w_pad != 0: + # # # width padded + # # image = image[:, :, :-w_pad, :] + # + # image = nodes.ImageScaleBy().upscale(image, "bilinear", scale_by)[0] + + result = core.mediapipe_facemesh_to_segs(image, crop_factor, bbox_fill, crop_min_size, drop_size, dilation, face, mouth, left_eyebrow, left_eye, left_pupil, right_eyebrow, right_eye, right_pupil) + return (result, ) + + +class MaskToSEGS: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "mask": ("MASK",), + "combined": ("BOOLEAN", {"default": False, "label_on": "True", "label_off": "False"}), + "crop_factor": ("FLOAT", {"default": 3.0, "min": 1.0, "max": 100, "step": 0.1}), + "bbox_fill": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + "drop_size": ("INT", {"min": 1, "max": MAX_RESOLUTION, "step": 1, "default": 10}), + "contour_fill": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + } + } + + RETURN_TYPES = ("SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Operation" + + @staticmethod + def doit(mask, combined, crop_factor, bbox_fill, drop_size, contour_fill=False): + mask = make_2d_mask(mask) + result = core.mask_to_segs(mask, combined, crop_factor, bbox_fill, drop_size, is_contour=contour_fill) + + return (result, ) + + +class MaskToSEGS_for_AnimateDiff: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "mask": ("MASK",), + "combined": ("BOOLEAN", {"default": False, "label_on": "True", "label_off": "False"}), + "crop_factor": ("FLOAT", {"default": 3.0, "min": 1.0, "max": 100, "step": 0.1}), + "bbox_fill": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + "drop_size": ("INT", {"min": 1, "max": MAX_RESOLUTION, "step": 1, "default": 10}), + "contour_fill": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + } + } + + RETURN_TYPES = ("SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Operation" + + @staticmethod + def doit(mask, combined, crop_factor, bbox_fill, drop_size, contour_fill=False): + if (len(mask.shape) == 4 and mask.shape[1] > 1) or (len(mask.shape) == 3 and mask.shape[0] > 1): + mask = make_3d_mask(mask) + if contour_fill: + print(f"[Impact Pack] MaskToSEGS_for_AnimateDiff: 'contour_fill' is ignored because batch mask 'contour_fill' is not supported.") + result = core.batch_mask_to_segs(mask, combined, crop_factor, bbox_fill, drop_size) + return (result, ) + + mask = make_2d_mask(mask) + segs = core.mask_to_segs(mask, combined, crop_factor, bbox_fill, drop_size, is_contour=contour_fill) + all_masks = 
SEGSToMaskList().doit(segs)[0] + + result_mask = (all_masks[0] * 255).to(torch.uint8) + for mask in all_masks[1:]: + result_mask |= (mask * 255).to(torch.uint8) + + result_mask = (result_mask/255.0).to(torch.float32) + result_mask = utils.to_binary_mask(result_mask, 0.1)[0] + + return MaskToSEGS.doit(result_mask, False, crop_factor, False, drop_size, contour_fill) + + +class IPAdapterApplySEGS: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS",), + "ipadapter_pipe": ("IPADAPTER_PIPE",), + "weight": ("FLOAT", {"default": 0.7, "min": -1, "max": 3, "step": 0.05}), + "noise": ("FLOAT", {"default": 0.4, "min": 0.0, "max": 1.0, "step": 0.01}), + "weight_type": (["original", "linear", "channel penalty"], {"default": 'channel penalty'}), + "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_at": ("FLOAT", {"default": 0.9, "min": 0.0, "max": 1.0, "step": 0.001}), + "unfold_batch": ("BOOLEAN", {"default": False}), + "faceid_v2": ("BOOLEAN", {"default": False}), + "weight_v2": ("FLOAT", {"default": 1.0, "min": -1, "max": 3, "step": 0.05}), + "context_crop_factor": ("FLOAT", {"default": 1.2, "min": 1.0, "max": 100, "step": 0.1}), + "reference_image": ("IMAGE",), + }, + "optional": { + "combine_embeds": (["concat", "add", "subtract", "average", "norm average"],), + "neg_image": ("IMAGE",), + }, + } + + RETURN_TYPES = ("SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + @staticmethod + def doit(segs, ipadapter_pipe, weight, noise, weight_type, start_at, end_at, unfold_batch, faceid_v2, weight_v2, context_crop_factor, reference_image, combine_embeds="concat", neg_image=None): + + if len(ipadapter_pipe) == 4: + print(f"[Impact Pack] IPAdapterApplySEGS: Installed Inspire Pack is outdated.") + raise Exception("Inspire Pack is outdated.") + + new_segs = [] + + h, w = segs[0] + + if reference_image.shape[2] != w or reference_image.shape[1] != h: + reference_image = tensor_resize(reference_image, w, h) + + for seg in segs[1]: + # The context_crop_region sets how much wider the IPAdapter context will reflect compared to the crop_region, not the bbox + context_crop_region = make_crop_region(w, h, seg.crop_region, context_crop_factor) + cropped_image = crop_image(reference_image, context_crop_region) + + control_net_wrapper = core.IPAdapterWrapper(ipadapter_pipe, weight, noise, weight_type, start_at, end_at, unfold_batch, weight_v2, cropped_image, neg_image=neg_image, prev_control_net=seg.control_net_wrapper, combine_embeds=combine_embeds) + new_seg = SEG(seg.cropped_image, seg.cropped_mask, seg.confidence, seg.crop_region, seg.bbox, seg.label, control_net_wrapper) + new_segs.append(new_seg) + + return ((segs[0], new_segs), ) + + +class ControlNetApplySEGS: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS",), + "control_net": ("CONTROL_NET",), + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + }, + "optional": { + "segs_preprocessor": ("SEGS_PREPROCESSOR",), + "control_image": ("IMAGE",) + } + } + + RETURN_TYPES = ("SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + @staticmethod + def doit(segs, control_net, strength, segs_preprocessor=None, control_image=None): + new_segs = [] + + for seg in segs[1]: + control_net_wrapper = core.ControlNetWrapper(control_net, strength, segs_preprocessor, seg.control_net_wrapper, + original_size=segs[0], crop_region=seg.crop_region, control_image=control_image) + new_seg = SEG(seg.cropped_image, seg.cropped_mask, 
seg.confidence, seg.crop_region, seg.bbox, seg.label, control_net_wrapper) + new_segs.append(new_seg) + + return ((segs[0], new_segs), ) + + +class ControlNetApplyAdvancedSEGS: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS",), + "control_net": ("CONTROL_NET",), + "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "start_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}), + "end_percent": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001}) + }, + "optional": { + "segs_preprocessor": ("SEGS_PREPROCESSOR",), + "control_image": ("IMAGE",) + } + } + + RETURN_TYPES = ("SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + @staticmethod + def doit(segs, control_net, strength, start_percent, end_percent, segs_preprocessor=None, control_image=None): + new_segs = [] + + for seg in segs[1]: + control_net_wrapper = core.ControlNetAdvancedWrapper(control_net, strength, start_percent, end_percent, segs_preprocessor, + seg.control_net_wrapper, original_size=segs[0], crop_region=seg.crop_region, + control_image=control_image) + new_seg = SEG(seg.cropped_image, seg.cropped_mask, seg.confidence, seg.crop_region, seg.bbox, seg.label, control_net_wrapper) + new_segs.append(new_seg) + + return ((segs[0], new_segs), ) + + +class ControlNetClearSEGS: + @classmethod + def INPUT_TYPES(s): + return {"required": {"segs": ("SEGS",), }, } + + RETURN_TYPES = ("SEGS",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + @staticmethod + def doit(segs): + new_segs = [] + + for seg in segs[1]: + new_seg = SEG(seg.cropped_image, seg.cropped_mask, seg.confidence, seg.crop_region, seg.bbox, seg.label, None) + new_segs.append(new_seg) + + return ((segs[0], new_segs), ) + + +class SEGSSwitch: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "select": ("INT", {"default": 1, "min": 1, "max": 99999, "step": 1}), + "segs1": ("SEGS",), + }, + } + + RETURN_TYPES = ("SEGS", ) + + OUTPUT_NODE = True + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, *args, **kwargs): + input_name = f"segs{int(kwargs['select'])}" + + if input_name in kwargs: + return (kwargs[input_name],) + else: + print(f"SEGSSwitch: invalid select index ('segs1' is selected)") + return (kwargs['segs1'],) + + +class SEGSPicker: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "picks": ("STRING", {"multiline": True, "dynamicPrompts": False, "pysssss.autocomplete": False}), + "segs": ("SEGS",), + }, + "optional": { + "fallback_image_opt": ("IMAGE", ), + }, + "hidden": {"unique_id": "UNIQUE_ID"}, + } + + RETURN_TYPES = ("SEGS", ) + + OUTPUT_NODE = True + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + @staticmethod + def doit(picks, segs, fallback_image_opt=None, unique_id=None): + if fallback_image_opt is not None: + segs = core.segs_scale_match(segs, fallback_image_opt.shape) + + # generate candidates image + cands = [] + for seg in segs[1]: + if seg.cropped_image is not None: + cropped_image = seg.cropped_image + elif fallback_image_opt is not None: + # take from original image + cropped_image = crop_image(fallback_image_opt, seg.crop_region) + else: + cropped_image = empty_pil_tensor() + + mask_array = seg.cropped_mask.copy() + mask_array[mask_array < 0.3] = 0.3 + mask_array = mask_array[None, ..., None] + cropped_image = cropped_image * mask_array + + cands.append(cropped_image) + + impact.impact_server.segs_picker_map[unique_id] = cands + + # pass only selected + pick_ids = set() + + for 
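Every *ApplySEGS / ClearSEGS node above follows the same pattern: SEGS is a tuple of ((height, width), [SEG, ...]) and SEG is treated as immutable, so attaching or clearing a wrapper means rebuilding each SEG with one field swapped. A sketch of that pattern using a hypothetical namedtuple stand-in for the real SEG class from impact.core:

from collections import namedtuple

# Hypothetical stand-in for the Impact Pack SEG structure (field order as used above).
SEG = namedtuple('SEG', 'cropped_image cropped_mask confidence crop_region bbox label control_net_wrapper')

def clear_wrappers(segs):
    # segs = ((h, w), [SEG, ...]); rebuild every SEG with its wrapper removed.
    size, items = segs
    return (size, [seg._replace(control_net_wrapper=None) for seg in items])

seg = SEG(None, None, 1.0, (0, 0, 64, 64), (8, 8, 56, 56), 'face', 'wrapper')
print(clear_wrappers(((512, 512), [seg]))[1][0].control_net_wrapper)  # None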
pick in picks.split(","): + try: + pick_ids.add(int(pick)-1) + except Exception: + pass + + new_segs = [] + for i in pick_ids: + if 0 <= i < len(segs[1]): + new_segs.append(segs[1][i]) + + return ((segs[0], new_segs),) + + +class DefaultImageForSEGS: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "segs": ("SEGS", ), + "image": ("IMAGE", ), + "override": ("BOOLEAN", {"default": True}), + }} + + RETURN_TYPES = ("SEGS", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + @staticmethod + def doit(segs, image, override): + results = [] + + segs = core.segs_scale_match(segs, image.shape) + + if len(segs[1]) > 0: + if segs[1][0].cropped_image is not None: + batch_count = len(segs[1][0].cropped_image) + else: + batch_count = len(image) + + for seg in segs[1]: + if seg.cropped_image is not None and not override: + cropped_image = seg.cropped_image + else: + cropped_image = None + for i in range(0, batch_count): + # take from original image + ref_image = image[i].unsqueeze(0) + cropped_image2 = crop_image(ref_image, seg.crop_region) + + if cropped_image is None: + cropped_image = cropped_image2 + else: + cropped_image = torch.cat((cropped_image, cropped_image2), dim=0) + + new_seg = SEG(cropped_image, seg.cropped_mask, seg.confidence, seg.crop_region, seg.bbox, seg.label, seg.control_net_wrapper) + results.append(new_seg) + + return ((segs[0], results), ) + else: + return (segs, ) + + +class RemoveImageFromSEGS: + @classmethod + def INPUT_TYPES(s): + return {"required": {"segs": ("SEGS", ), }} + + RETURN_TYPES = ("SEGS", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + @staticmethod + def doit(segs): + results = [] + + if len(segs[1]) > 0: + for seg in segs[1]: + new_seg = SEG(None, seg.cropped_mask, seg.confidence, seg.crop_region, seg.bbox, seg.label, seg.control_net_wrapper) + results.append(new_seg) + + return ((segs[0], results), ) + else: + return (segs, ) + + +class MakeTileSEGS: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "images": ("IMAGE", ), + "bbox_size": ("INT", {"default": 512, "min": 64, "max": 4096, "step": 8}), + "crop_factor": ("FLOAT", {"default": 3.0, "min": 1.0, "max": 10, "step": 0.01}), + "min_overlap": ("INT", {"default": 5, "min": 0, "max": 512, "step": 1}), + "filter_segs_dilation": ("INT", {"default": 20, "min": -255, "max": 255, "step": 1}), + "mask_irregularity": ("FLOAT", {"default": 0, "min": 0, "max": 1.0, "step": 0.01}), + "irregular_mask_mode": (["Reuse fast", "Reuse quality", "All random fast", "All random quality"],) + }, + "optional": { + "filter_in_segs_opt": ("SEGS", ), + "filter_out_segs_opt": ("SEGS", ), + } + } + + RETURN_TYPES = ("SEGS",) + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/__for_testing" + + @staticmethod + def doit(images, bbox_size, crop_factor, min_overlap, filter_segs_dilation, mask_irregularity=0, irregular_mask_mode="Reuse fast", filter_in_segs_opt=None, filter_out_segs_opt=None): + if bbox_size <= 2*min_overlap: + new_min_overlap = bbox_size / 2 + print(f"[MakeTileSEGS] min_overlap should be greater than bbox_size. 
(value changed: {min_overlap} => {new_min_overlap})") + min_overlap = new_min_overlap + + _, ih, iw, _ = images.size() + + mask_cache = None + mask_quality = 512 + if mask_irregularity > 0: + if irregular_mask_mode == "Reuse fast": + mask_quality = 128 + mask_cache = np.zeros((128, 128)).astype(np.float32) + core.random_mask(mask_cache, (0, 0, 128, 128), factor=mask_irregularity, size=mask_quality) + elif irregular_mask_mode == "Reuse quality": + mask_quality = 512 + mask_cache = np.zeros((512, 512)).astype(np.float32) + core.random_mask(mask_cache, (0, 0, 512, 512), factor=mask_irregularity, size=mask_quality) + elif irregular_mask_mode == "All random fast": + mask_quality = 512 + + # compensate overlap/bbox_size for irregular mask + if mask_irregularity > 0: + compensate = max(6, int(mask_quality * mask_irregularity / 4)) + min_overlap += compensate + bbox_size += compensate*2 + + # create exclusion mask + if filter_out_segs_opt is not None: + exclusion_mask = core.segs_to_combined_mask(filter_out_segs_opt) + exclusion_mask = utils.make_3d_mask(exclusion_mask) + exclusion_mask = utils.resize_mask(exclusion_mask, (ih, iw)) + exclusion_mask = dilate_mask(exclusion_mask.cpu().numpy(), filter_segs_dilation) + else: + exclusion_mask = None + + if filter_in_segs_opt is not None: + and_mask = core.segs_to_combined_mask(filter_in_segs_opt) + and_mask = utils.make_3d_mask(and_mask) + and_mask = utils.resize_mask(and_mask, (ih, iw)) + and_mask = dilate_mask(and_mask.cpu().numpy(), filter_segs_dilation) + + a, b = core.mask_to_segs(and_mask, True, 1.0, False, 0) + if len(b) == 0: + return ((a, b),) + + start_x, start_y, c, d = b[0].crop_region + w = c - start_x + h = d - start_y + else: + start_x = 0 + start_y = 0 + h, w = ih, iw + and_mask = None + + # calculate tile factors + if bbox_size > h or bbox_size > w: + new_bbox_size = min(bbox_size, min(w, h)) + print(f"[MaskTileSEGS] bbox_size is greater than resolution (value changed: {bbox_size} => {new_bbox_size}") + bbox_size = new_bbox_size + + n_horizontal = math.ceil(w / (bbox_size - min_overlap)) + n_vertical = math.ceil(h / (bbox_size - min_overlap)) + + w_overlap_sum = (bbox_size * n_horizontal) - w + if w_overlap_sum < 0: + n_horizontal += 1 + w_overlap_sum = (bbox_size * n_horizontal) - w + + w_overlap_size = 0 if n_horizontal == 1 else int(w_overlap_sum/(n_horizontal-1)) + + h_overlap_sum = (bbox_size * n_vertical) - h + if h_overlap_sum < 0: + n_vertical += 1 + h_overlap_sum = (bbox_size * n_vertical) - h + + h_overlap_size = 0 if n_vertical == 1 else int(h_overlap_sum/(n_vertical-1)) + + new_segs = [] + + if w_overlap_size == bbox_size: + n_horizontal = 1 + + if h_overlap_size == bbox_size: + n_vertical = 1 + + y = start_y + for j in range(0, n_vertical): + x = start_x + for i in range(0, n_horizontal): + x1 = x + y1 = y + + if x+bbox_size < iw-1: + x2 = x+bbox_size + else: + x2 = iw + x1 = iw-bbox_size + + if y+bbox_size < ih-1: + y2 = y+bbox_size + else: + y2 = ih + y1 = ih-bbox_size + + bbox = x1, y1, x2, y2 + crop_region = make_crop_region(iw, ih, bbox, crop_factor) + cx1, cy1, cx2, cy2 = crop_region + + mask = np.zeros((cy2 - cy1, cx2 - cx1)).astype(np.float32) + + rel_left = x1 - cx1 + rel_top = y1 - cy1 + rel_right = x2 - cx1 + rel_bot = y2 - cy1 + + if mask_irregularity > 0: + if mask_cache is not None: + core.adaptive_mask_paste(mask, mask_cache, (rel_left, rel_top, rel_right, rel_bot)) + else: + core.random_mask(mask, (rel_left, rel_top, rel_right, rel_bot), factor=mask_irregularity, size=mask_quality) + + # corner filling + if 
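The tile-count arithmetic in MakeTileSEGS is easier to check in isolation: the number of tiles per axis is ceil(extent / (bbox_size - min_overlap)), and the surplus coverage is then spread across the seams as the effective overlap. A standalone sketch of that math:

import math

def tile_layout(extent, bbox_size, min_overlap):
    # Assumes min_overlap < bbox_size / 2, which the node enforces above.
    n = math.ceil(extent / (bbox_size - min_overlap))
    overlap_sum = bbox_size * n - extent
    if overlap_sum < 0:  # the stride left a gap; add one more tile
        n += 1
        overlap_sum = bbox_size * n - extent
    overlap = 0 if n == 1 else overlap_sum // (n - 1)
    return n, overlap  # tiles per axis, overlap applied at each seam

print(tile_layout(1024, 512, 5))  # (3, 256): three 512px tiles with 256px seams cover 1024px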
rel_left == 0: + pad = int((x2 - x1) / 8) + mask[rel_top:rel_bot, :pad] = 1.0 + + if rel_top == 0: + pad = int((y2 - y1) / 8) + mask[:pad, rel_left:rel_right] = 1.0 + + if rel_right == mask.shape[1]: + pad = int((x2 - x1) / 8) + mask[rel_top:rel_bot, -pad:] = 1.0 + + if rel_bot == mask.shape[0]: + pad = int((y2 - y1) / 8) + mask[-pad:, rel_left:rel_right] = 1.0 + else: + mask[rel_top:rel_bot, rel_left:rel_right] = 1.0 + + mask = torch.tensor(mask) + + if exclusion_mask is not None: + exclusion_mask_cropped = exclusion_mask[cy1:cy2, cx1:cx2] + mask[exclusion_mask_cropped != 0] = 0.0 + + if and_mask is not None: + and_mask_cropped = and_mask[cy1:cy2, cx1:cx2] + mask[and_mask_cropped == 0] = 0.0 + + is_mask_zero = torch.all(mask == 0.0).item() + + if not is_mask_zero: + item = SEG(None, mask.numpy(), 1.0, crop_region, bbox, "", None) + new_segs.append(item) + + x += bbox_size - w_overlap_size + y += bbox_size - h_overlap_size + + res = (ih, iw), new_segs # segs + return (res,) + + +class SEGSUpscaler: + @classmethod + def INPUT_TYPES(s): + resampling_methods = ["lanczos", "nearest", "bilinear", "bicubic"] + + return {"required": { + "image": ("IMAGE",), + "segs": ("SEGS",), + "model": ("MODEL",), + "clip": ("CLIP",), + "vae": ("VAE",), + "rescale_factor": ("FLOAT", {"default": 2, "min": 0.01, "max": 100.0, "step": 0.01}), + "resampling_method": (resampling_methods,), + "supersample": (["true", "false"],), + "rounding_modulus": ("INT", {"default": 8, "min": 8, "max": 1024, "step": 8}), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": (core.SCHEDULERS,), + "positive": ("CONDITIONING",), + "negative": ("CONDITIONING",), + "denoise": ("FLOAT", {"default": 0.5, "min": 0.0001, "max": 1.0, "step": 0.01}), + "feather": ("INT", {"default": 5, "min": 0, "max": 100, "step": 1}), + "inpaint_model": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + "noise_mask_feather": ("INT", {"default": 20, "min": 0, "max": 100, "step": 1}), + }, + "optional": { + "upscale_model_opt": ("UPSCALE_MODEL",), + "upscaler_hook_opt": ("UPSCALER_HOOK",), + "scheduler_func_opt": ("SCHEDULER_FUNC",), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Upscale" + + @staticmethod + def doit(image, segs, model, clip, vae, rescale_factor, resampling_method, supersample, rounding_modulus, + seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise, feather, inpaint_model, noise_mask_feather, + upscale_model_opt=None, upscaler_hook_opt=None, scheduler_func_opt=None): + + new_image = segs_upscaler.upscaler(image, upscale_model_opt, rescale_factor, resampling_method, supersample, rounding_modulus) + + segs = core.segs_scale_match(segs, new_image.shape) + + ordered_segs = segs[1] + + for i, seg in enumerate(ordered_segs): + cropped_image = crop_ndarray4(new_image.numpy(), seg.crop_region) + cropped_image = to_tensor(cropped_image) + mask = to_tensor(seg.cropped_mask) + mask = tensor_gaussian_blur_mask(mask, feather) + + is_mask_all_zeros = (seg.cropped_mask == 0).all().item() + if is_mask_all_zeros: + print(f"SEGSUpscaler: segment skip [empty mask]") + continue + + cropped_mask = seg.cropped_mask + + seg_seed = seed + i + + enhanced_image = segs_upscaler.img2img_segs(cropped_image, model, clip, vae, seg_seed, steps, cfg, sampler_name, scheduler, 
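The "corner filling" branch above hardens the irregular tile mask wherever the tile sits flush against the crop border, so edge tiles still blend fully at the image boundary. A compact numpy sketch of that strip-filling rule (assumes the tile is at least 8px on each side so the strips are non-empty):

import numpy as np

def fill_border_strips(mask, rect):
    # rect = (left, top, right, bottom) of the tile inside the cropped mask.
    left, top, right, bottom = rect
    h, w = mask.shape
    x_pad, y_pad = (right - left) // 8, (bottom - top) // 8
    if left == 0:
        mask[top:bottom, :x_pad] = 1.0
    if top == 0:
        mask[:y_pad, left:right] = 1.0
    if right == w:
        mask[top:bottom, -x_pad:] = 1.0
    if bottom == h:
        mask[-y_pad:, left:right] = 1.0
    return mask

m = np.zeros((64, 64), dtype=np.float32)
print(fill_border_strips(m, (0, 16, 32, 48)).sum())  # 128.0: a 32x4 strip along the left edge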
+ positive, negative, denoise, + noise_mask=cropped_mask, control_net_wrapper=seg.control_net_wrapper, + inpaint_model=inpaint_model, noise_mask_feather=noise_mask_feather, scheduler_func_opt=scheduler_func_opt) + if not (enhanced_image is None): + new_image = new_image.cpu() + enhanced_image = enhanced_image.cpu() + left = seg.crop_region[0] + top = seg.crop_region[1] + tensor_paste(new_image, enhanced_image, (left, top), mask) + + if upscaler_hook_opt is not None: + new_image = upscaler_hook_opt.post_paste(new_image) + + enhanced_img = tensor_convert_rgb(new_image) + + return (enhanced_img,) + + +class SEGSUpscalerPipe: + @classmethod + def INPUT_TYPES(s): + resampling_methods = ["lanczos", "nearest", "bilinear", "bicubic"] + + return {"required": { + "image": ("IMAGE",), + "segs": ("SEGS",), + "basic_pipe": ("BASIC_PIPE",), + "rescale_factor": ("FLOAT", {"default": 2, "min": 0.01, "max": 100.0, "step": 0.01}), + "resampling_method": (resampling_methods,), + "supersample": (["true", "false"],), + "rounding_modulus": ("INT", {"default": 8, "min": 8, "max": 1024, "step": 8}), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), + "scheduler": (core.SCHEDULERS,), + "denoise": ("FLOAT", {"default": 0.5, "min": 0.0001, "max": 1.0, "step": 0.01}), + "feather": ("INT", {"default": 5, "min": 0, "max": 100, "step": 1}), + "inpaint_model": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + "noise_mask_feather": ("INT", {"default": 20, "min": 0, "max": 100, "step": 1}), + }, + "optional": { + "upscale_model_opt": ("UPSCALE_MODEL",), + "upscaler_hook_opt": ("UPSCALER_HOOK",), + "scheduler_func_opt": ("SCHEDULER_FUNC",), + } + } + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Upscale" + + @staticmethod + def doit(image, segs, basic_pipe, rescale_factor, resampling_method, supersample, rounding_modulus, + seed, steps, cfg, sampler_name, scheduler, denoise, feather, inpaint_model, noise_mask_feather, + upscale_model_opt=None, upscaler_hook_opt=None, scheduler_func_opt=None): + + model, clip, vae, positive, negative = basic_pipe + + return SEGSUpscaler.doit(image, segs, model, clip, vae, rescale_factor, resampling_method, supersample, rounding_modulus, + seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise, feather, inpaint_model, noise_mask_feather, + upscale_model_opt=upscale_model_opt, upscaler_hook_opt=upscaler_hook_opt, scheduler_func_opt=scheduler_func_opt) diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/segs_upscaler.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/segs_upscaler.py new file mode 100644 index 0000000000000000000000000000000000000000..af33e32da05921f2b630e8f03c3761259535be10 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/segs_upscaler.py @@ -0,0 +1,132 @@ +from impact.utils import * +from impact import impact_sampling +from comfy import model_management +from comfy.cli_args import args +import nodes + +try: + from comfy_extras import nodes_differential_diffusion +except Exception: + print(f"[Impact Pack] ComfyUI is an outdated version. 
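tensor_paste above composites each enhanced crop back into the upscaled canvas, with the feathered mask acting as alpha. This is a plausible reading of that helper on BHWC tensors, not its exact implementation:

import torch

def alpha_paste(canvas, patch, pos, mask):
    # canvas, patch: (1, H, W, C); mask: (1, h, w, 1) in 0..1; pos = (left, top).
    left, top = pos
    h, w = patch.shape[1:3]
    region = canvas[:, top:top + h, left:left + w, :]
    canvas[:, top:top + h, left:left + w, :] = region * (1.0 - mask) + patch * mask
    return canvas

canvas = torch.zeros(1, 64, 64, 3)
patch = torch.ones(1, 16, 16, 3)
mask = torch.full((1, 16, 16, 1), 0.5)
print(alpha_paste(canvas, patch, (8, 8), mask)[0, 10, 10])  # tensor([0.5000, 0.5000, 0.5000])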
The DifferentialDiffusion feature will be disabled.") + + +# Implementation based on `https://github.com/lingondricka2/Upscaler-Detailer` + +# code from comfyroll ---> +# https://github.com/Suzie1/ComfyUI_Comfyroll_CustomNodes/blob/main/nodes/functions_upscale.py + +def upscale_with_model(upscale_model, image): + device = model_management.get_torch_device() + upscale_model.to(device) + in_img = image.movedim(-1, -3).to(device) + free_memory = model_management.get_free_memory(device) + + tile = 512 + overlap = 32 + + oom = True + while oom: + try: + steps = in_img.shape[0] * comfy.utils.get_tiled_scale_steps(in_img.shape[3], in_img.shape[2], tile_x=tile, tile_y=tile, overlap=overlap) + pbar = comfy.utils.ProgressBar(steps) + s = comfy.utils.tiled_scale(in_img, lambda a: upscale_model(a), tile_x=tile, tile_y=tile, overlap=overlap, upscale_amount=upscale_model.scale, pbar=pbar) + oom = False + except model_management.OOM_EXCEPTION as e: + tile //= 2 + if tile < 128: + raise e + + s = torch.clamp(s.movedim(-3, -1), min=0, max=1.0) + return s + + +def apply_resize_image(image: Image.Image, original_width, original_height, rounding_modulus, mode='scale', supersample='true', factor: int = 2, width: int = 1024, height: int = 1024, + resample='bicubic'): + # Calculate the new width and height based on the given mode and parameters + if mode == 'rescale': + new_width, new_height = int(original_width * factor), int(original_height * factor) + else: + m = rounding_modulus + original_ratio = original_height / original_width + height = int(width * original_ratio) + + new_width = width if width % m == 0 else width + (m - width % m) + new_height = height if height % m == 0 else height + (m - height % m) + + # Define a dictionary of resampling filters + resample_filters = {'nearest': 0, 'bilinear': 2, 'bicubic': 3, 'lanczos': 1} + + # Apply supersample + if supersample == 'true': + image = image.resize((new_width * 8, new_height * 8), resample=Image.Resampling(resample_filters[resample])) + + # Resize the image using the given resampling filter + resized_image = image.resize((new_width, new_height), resample=Image.Resampling(resample_filters[resample])) + + return resized_image + + +def upscaler(image, upscale_model, rescale_factor, resampling_method, supersample, rounding_modulus): + if upscale_model is not None: + up_image = upscale_with_model(upscale_model, image) + else: + up_image = image + + pil_img = tensor2pil(image) + original_width, original_height = pil_img.size + scaled_image = pil2tensor(apply_resize_image(tensor2pil(up_image), original_width, original_height, rounding_modulus, 'rescale', + supersample, rescale_factor, 1024, resampling_method)) + return scaled_image + +# <--- + + +def img2img_segs(image, model, clip, vae, seed, steps, cfg, sampler_name, scheduler, + positive, negative, denoise, noise_mask, control_net_wrapper=None, + inpaint_model=False, noise_mask_feather=0, scheduler_func_opt=None): + + original_image_size = image.shape[1:3] + + # Match to original image size + if original_image_size[0] % 8 > 0 or original_image_size[1] % 8 > 0: + scale = 8/min(original_image_size[0], original_image_size[1]) + 1 + w = int(original_image_size[1] * scale) + h = int(original_image_size[0] * scale) + image = tensor_resize(image, w, h) + + if noise_mask is not None: + noise_mask = tensor_gaussian_blur_mask(noise_mask, noise_mask_feather) + noise_mask = noise_mask.squeeze(3) + + if noise_mask_feather > 0: + model = nodes_differential_diffusion.DifferentialDiffusion().apply(model)[0] + + if 
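Two details in the comfyroll-derived helpers above are worth isolating: upscale_with_model halves its tile size on every OOM until it hits 128px, and apply_resize_image rounds target dimensions up to a multiple of rounding_modulus so downstream latent sizes stay aligned. The rounding rule by itself:

def round_up(value, modulus):
    # Round up to the next multiple of `modulus` (identity when already aligned).
    return value if value % modulus == 0 else value + (modulus - value % modulus)

assert round_up(1000, 8) == 1000
assert round_up(1001, 8) == 1008
assert round_up(513, 64) == 576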
control_net_wrapper is not None: + positive, negative, _ = control_net_wrapper.apply(positive, negative, image, noise_mask) + + # prepare mask + if noise_mask is not None and inpaint_model: + positive, negative, latent_image = nodes.InpaintModelConditioning().encode(positive, negative, image, vae, noise_mask) + else: + latent_image = to_latent_image(image, vae) + if noise_mask is not None: + latent_image['noise_mask'] = noise_mask + + refined_latent = latent_image + + # ksampler + refined_latent = impact_sampling.ksampler_wrapper(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, refined_latent, denoise, scheduler_func=scheduler_func_opt) + + # non-latent downscale - latent downscale cause bad quality + refined_image = vae.decode(refined_latent['samples']) + + # prevent mixing of device + refined_image = refined_image.cpu() + + # Match to original image size + if refined_image.shape[1:3] != original_image_size: + refined_image = tensor_resize(refined_image, original_image_size[1], original_image_size[0]) + + # don't convert to latent - latent break image + # preserving pil is much better + return refined_image diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/special_samplers.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/special_samplers.py new file mode 100644 index 0000000000000000000000000000000000000000..c18fd0dfb379858ea36c23226b797214dfbb78ea --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/special_samplers.py @@ -0,0 +1,829 @@ +import math +import impact.core as core +from comfy_extras.nodes_custom_sampler import Noise_RandomNoise +from impact.utils import * +from nodes import MAX_RESOLUTION +import nodes +from impact.impact_sampling import KSamplerWrapper, KSamplerAdvancedWrapper, separated_sample, impact_sample + + +class TiledKSamplerProvider: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, ), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS, ), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "tile_width": ("INT", {"default": 512, "min": 320, "max": MAX_RESOLUTION, "step": 64}), + "tile_height": ("INT", {"default": 512, "min": 320, "max": MAX_RESOLUTION, "step": 64}), + "tiling_strategy": (["random", "padded", 'simple'], ), + "basic_pipe": ("BASIC_PIPE", ) + }} + + TOOLTIPS = { + "input": { + "seed": "Random seed to use for generating CPU noise for sampling.", + "steps": "total sampling steps", + "cfg": "classifier free guidance value", + "sampler_name": "sampler", + "scheduler": "noise schedule", + "denoise": "The amount of noise to remove. This amount is the noise added at the start, and the higher it is, the more the input latent will be modified before being returned.", + "tile_width": "Sets the width of the tile to be used in TiledKSampler.", + "tile_height": "Sets the height of the tile to be used in TiledKSampler.", + "tiling_strategy": "Sets the tiling strategy for TiledKSampler.", + "basic_pipe": "basic_pipe input for sampling", + }, + "output": ("sampler wrapper. 
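Throughout img2img_segs a ComfyUI latent is just a dict, and masked sampling works by attaching a 'noise_mask' entry that samplers consult and that must be removed afterwards (the RemoveNoiseMask node later in this diff does exactly that). A minimal sketch of the convention:

import torch

def with_noise_mask(latent, mask):
    # Shallow-copy so the caller's latent is not mutated, then attach the mask.
    out = latent.copy()
    out['noise_mask'] = mask
    return out

def strip_noise_mask(latent):
    # Equivalent to the dict comprehension used by RemoveNoiseMask.
    return {k: v for k, v in latent.items() if k != 'noise_mask'}

latent = {'samples': torch.zeros(1, 4, 64, 64)}
masked = with_noise_mask(latent, torch.ones(1, 512, 512))
print('noise_mask' in masked, 'noise_mask' in strip_noise_mask(masked))  # True False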
(Can be used when generating a regional_prompt.)", ) + } + + RETURN_TYPES = ("KSAMPLER",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Sampler" + + @staticmethod + def doit(seed, steps, cfg, sampler_name, scheduler, denoise, + tile_width, tile_height, tiling_strategy, basic_pipe): + model, _, _, positive, negative = basic_pipe + sampler = core.TiledKSamplerWrapper(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise, + tile_width, tile_height, tiling_strategy) + return (sampler, ) + + +class KSamplerProvider: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, ), + "scheduler": (core.SCHEDULERS, ), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "basic_pipe": ("BASIC_PIPE", ) + }, + "optional": { + "scheduler_func_opt": ("SCHEDULER_FUNC",), + } + } + + TOOLTIPS = { + "input": { + "seed": "Random seed to use for generating CPU noise for sampling.", + "steps": "total sampling steps", + "cfg": "classifier free guidance value", + "sampler_name": "sampler", + "scheduler": "noise schedule", + "denoise": "The amount of noise to remove. This amount is the noise added at the start, and the higher it is, the more the input latent will be modified before being returned.", + "basic_pipe": "basic_pipe input for sampling", + "scheduler_func_opt": "[OPTIONAL] Noise schedule generation function. If this is set, the scheduler widget will be ignored.", + }, + "output": ("sampler wrapper. (Can be used when generating a regional_prompt.)", ) + } + + RETURN_TYPES = ("KSAMPLER",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Sampler" + + @staticmethod + def doit(seed, steps, cfg, sampler_name, scheduler, denoise, basic_pipe, scheduler_func_opt=None): + model, _, _, positive, negative = basic_pipe + sampler = KSamplerWrapper(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, denoise, scheduler_func=scheduler_func_opt) + return (sampler, ) + + +class KSamplerAdvancedProvider: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, ), + "scheduler": (core.SCHEDULERS, ), + "sigma_factor": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "basic_pipe": ("BASIC_PIPE", ) + }, + "optional": { + "sampler_opt": ("SAMPLER", ), + "scheduler_func_opt": ("SCHEDULER_FUNC",), + } + } + + TOOLTIPS = { + "input": { + "cfg": "classifier free guidance value", + "sampler_name": "sampler", + "scheduler": "noise schedule", + "sigma_factor": "Multiplier of noise schedule", + "basic_pipe": "basic_pipe input for sampling", + "sampler_opt": "[OPTIONAL] Uses the passed sampler instead of internal impact_sampler.", + "scheduler_func_opt": "[OPTIONAL] Noise schedule generation function. If this is set, the scheduler widget will be ignored.", + }, + "output": ("sampler wrapper. 
(Can be used when generating a regional_prompt.)", ) + } + + RETURN_TYPES = ("KSAMPLER_ADVANCED",) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Sampler" + + @staticmethod + def doit(cfg, sampler_name, scheduler, basic_pipe, sigma_factor=1.0, sampler_opt=None, scheduler_func_opt=None): + model, _, _, positive, negative = basic_pipe + sampler = KSamplerAdvancedWrapper(model, cfg, sampler_name, scheduler, positive, negative, sampler_opt=sampler_opt, sigma_factor=sigma_factor, scheduler_func=scheduler_func_opt) + return (sampler, ) + + +class TwoSamplersForMask: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "latent_image": ("LATENT", ), + "base_sampler": ("KSAMPLER", ), + "mask_sampler": ("KSAMPLER", ), + "mask": ("MASK", ) + }, + } + + TOOLTIPS = { + "input": { + "latent_image": "input latent image", + "base_sampler": "Sampler to apply to the region outside the mask.", + "mask_sampler": "Sampler to apply to the masked region.", + "mask": "region mask", + }, + "output": ("result latent", ) + } + + RETURN_TYPES = ("LATENT", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Sampler" + + @staticmethod + def doit(latent_image, base_sampler, mask_sampler, mask): + inv_mask = torch.where(mask != 1.0, torch.tensor(1.0), torch.tensor(0.0)) + + latent_image['noise_mask'] = inv_mask + new_latent_image = base_sampler.sample(latent_image) + + new_latent_image['noise_mask'] = mask + new_latent_image = mask_sampler.sample(new_latent_image) + + del new_latent_image['noise_mask'] + + return (new_latent_image, ) + + +class TwoAdvancedSamplersForMask: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "samples": ("LATENT", ), + "base_sampler": ("KSAMPLER_ADVANCED", ), + "mask_sampler": ("KSAMPLER_ADVANCED", ), + "mask": ("MASK", ), + "overlap_factor": ("INT", {"default": 10, "min": 0, "max": 10000}) + }, + } + + TOOLTIPS = { + "input": { + "seed": "Random seed to use for generating CPU noise for sampling.", + "steps": "total sampling steps", + "denoise": "The amount of noise to remove. 
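TwoSamplersForMask above runs the base sampler on the inverse of the mask and the mask sampler on the mask itself, reusing the noise_mask convention. A runnable sketch of that two-pass flow, with a no-op stand-in for the KSAMPLER wrappers:

import torch

def invert_mask(mask):
    # 1.0 wherever the mask is not fully set, 0.0 where it is, as in the node.
    return torch.where(mask != 1.0, torch.tensor(1.0), torch.tensor(0.0))

class EchoSampler:
    # Stand-in for a KSAMPLER wrapper; a real one denoises latent['samples'].
    def sample(self, latent):
        return dict(latent)

def two_pass_sample(latent, base_sampler, mask_sampler, mask):
    latent = dict(latent, noise_mask=invert_mask(mask))
    latent = base_sampler.sample(latent)   # denoise outside the masked region
    latent = dict(latent, noise_mask=mask)
    latent = mask_sampler.sample(latent)   # denoise inside the masked region
    latent.pop('noise_mask', None)
    return latent

out = two_pass_sample({'samples': torch.zeros(1, 4, 8, 8)}, EchoSampler(), EchoSampler(), torch.tensor([[0.0, 1.0]]))
print(sorted(out))  # ['samples']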
This amount is the noise added at the start, and the higher it is, the more the input latent will be modified before being returned.", + "samples": "input latent image", + "base_sampler": "Sampler to apply to the region outside the mask.", + "mask_sampler": "Sampler to apply to the masked region.", + "mask": "region mask", + "overlap_factor": "To smooth the seams of the region boundaries, expand the mask by the overlap_factor amount to overlap with other regions.", + }, + "output": ("result latent", ) + } + + RETURN_TYPES = ("LATENT", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Sampler" + + @staticmethod + def doit(seed, steps, denoise, samples, base_sampler, mask_sampler, mask, overlap_factor): + regional_prompts = RegionalPrompt().doit(mask=mask, advanced_sampler=mask_sampler)[0] + + return RegionalSampler().doit(seed=seed, seed_2nd=0, seed_2nd_mode="ignore", steps=steps, base_only_steps=1, + denoise=denoise, samples=samples, base_sampler=base_sampler, + regional_prompts=regional_prompts, overlap_factor=overlap_factor, + restore_latent=True, additional_mode="ratio between", + additional_sampler="AUTO", additional_sigma_ratio=0.3) + + +class RegionalPrompt: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "mask": ("MASK", ), + "advanced_sampler": ("KSAMPLER_ADVANCED", ), + }, + "optional": { + "variation_seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "variation_strength": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "variation_method": (["linear", "slerp"],), + } + } + + TOOLTIPS = { + "input": { + "mask": "region mask", + "advanced_sampler": "sampler for specified region", + }, + "output": ("regional prompts. (Can be used in the RegionalSampler.)", ) + } + + RETURN_TYPES = ("REGIONAL_PROMPTS", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Regional" + + @staticmethod + def doit(mask, advanced_sampler, variation_seed=0, variation_strength=0.0, variation_method="linear"): + regional_prompt = core.REGIONAL_PROMPT(mask, advanced_sampler, variation_seed=variation_seed, variation_strength=variation_strength, variation_method=variation_method) + return ([regional_prompt], ) + + +class CombineRegionalPrompts: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "regional_prompts1": ("REGIONAL_PROMPTS", ), + }, + } + + TOOLTIPS = { + "input": { + "regional_prompts1": "input regional_prompts. (Connecting to the input slot increases the number of additional slots.)", + }, + "output": ("Combined REGIONAL_PROMPTS", ) + } + + RETURN_TYPES = ("REGIONAL_PROMPTS", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Regional" + + @staticmethod + def doit(**kwargs): + res = [] + for k, v in kwargs.items(): + res += v + + return (res, ) + + +class CombineConditionings: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "conditioning1": ("CONDITIONING", ), + }, + } + + TOOLTIPS = { + "input": { + "conditioning1": "input conditionings. (Connecting to the input slot increases the number of additional slots.)", + }, + "output": ("Combined conditioning", ) + } + + RETURN_TYPES = ("CONDITIONING", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + @staticmethod + def doit(**kwargs): + res = [] + for k, v in kwargs.items(): + res += v + + return (res, ) + + +class ConcatConditionings: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "conditioning1": ("CONDITIONING", ), + }, + } + + TOOLTIPS = { + "input": { + "conditioning1": "input conditionings. 
(Connecting to the input slot increases the number of additional slots.)", + }, + "output": ("Concatenated conditioning", ) + } + + RETURN_TYPES = ("CONDITIONING", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + @staticmethod + def doit(**kwargs): + conditioning_to = list(kwargs.values())[0] + + for k, conditioning_from in list(kwargs.items())[1:]: + out = [] + if len(conditioning_from) > 1: + print(f"Warning: ConcatConditionings: '{k}' contains more than one cond; only the first one will actually be applied to conditioning1.") + + cond_from = conditioning_from[0][0] + + for i in range(len(conditioning_to)): + t1 = conditioning_to[i][0] + tw = torch.cat((t1, cond_from), 1) + n = [tw, conditioning_to[i][1].copy()] + out.append(n) + + conditioning_to = out + + return (conditioning_to, ) + + +class RegionalSampler: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "seed_2nd": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "seed_2nd_mode": (["ignore", "fixed", "seed+seed_2nd", "seed-seed_2nd", "increment", "decrement", "randomize"], ), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "base_only_steps": ("INT", {"default": 2, "min": 0, "max": 10000}), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + "samples": ("LATENT", ), + "base_sampler": ("KSAMPLER_ADVANCED", ), + "regional_prompts": ("REGIONAL_PROMPTS", ), + "overlap_factor": ("INT", {"default": 10, "min": 0, "max": 10000}), + "restore_latent": ("BOOLEAN", {"default": True, "label_on": "enabled", "label_off": "disabled"}), + "additional_mode": (["DISABLE", "ratio additional", "ratio between"], {"default": "ratio between"}), + "additional_sampler": (["AUTO", "euler", "heun", "heunpp2", "dpm_2", "dpm_fast", "dpmpp_2m", "ddpm"],), + "additional_sigma_ratio": ("FLOAT", {"default": 0.3, "min": 0.0, "max": 1.0, "step": 0.01}), + }, + "hidden": {"unique_id": "UNIQUE_ID"}, + } + + TOOLTIPS = { + "input": { + "seed": "Random seed to use for generating CPU noise for sampling.", + "seed_2nd": "Additional noise seed. The behavior is determined by seed_2nd_mode.", + "seed_2nd_mode": "application method of seed_2nd. 1) ignore: Do not use seed_2nd. In the base only sampling stage, the seed is applied as a noise seed, and in the regional sampling stage, denoising is performed as it is without additional noise. 2) Others: In the base only sampling stage, the seed is applied as a noise seed, and once sampling is finished with no leftover noise, new noise is added with seed_2nd and the regional sampling stage is performed. a) fixed: Use seed_2nd as it is as an additional noise seed. b) seed+seed_2nd: Apply the value of seed+seed_2nd as an additional noise seed. c) seed-seed_2nd: Apply the value of seed-seed_2nd as an additional noise seed. d) increment: Not implemented yet. Same as fixed. e) decrement: Not implemented yet. Same as fixed. f) randomize: Not implemented yet. Same as fixed.", + "steps": "total sampling steps", + "base_only_steps": "The number of initial steps sampled using only the base_sampler, before regional sampling begins.", + "denoise": "The amount of noise to remove. 
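ConcatConditionings above concatenates the first entry of each extra conditioning onto every entry of conditioning1 along the token axis (dim=1). The same operation on bare tensors, returning the accumulator so a single input passes through unchanged:

import torch

def concat_conditionings(*conds):
    # Each conditioning: a list of [tensor(batch, tokens, dim), options] pairs.
    out = list(conds[0])
    for extra in conds[1:]:
        cond_from = extra[0][0]  # only the first entry is applied, as in the node
        out = [[torch.cat((t, cond_from), dim=1), opts.copy()] for t, opts in out]
    return out

c1 = [[torch.zeros(1, 77, 768), {}]]
c2 = [[torch.ones(1, 77, 768), {}]]
print(concat_conditionings(c1, c2)[0][0].shape)  # torch.Size([1, 154, 768])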
This amount is the noise added at the start, and the higher it is, the more the input latent will be modified before being returned.", + "samples": "input latent image", + "base_sampler": "The sampler applied outside the area set by the regional_prompt.", + "regional_prompts": "The prompt applied to each region", + "overlap_factor": "To smooth the seams of the region boundaries, expand the mask set in regional_prompts by the overlap_factor amount to overlap with other regions.", + "restore_latent": "At each step, restore the noise outside the mask area to its original state, as per the principle of inpainting. This option is provided for backward compatibility, and it is recommended to always set it to true.", + "additional_mode": "..._sde or uni_pc and other special samplers are used, the region is not properly denoised, and it causes a phenomenon that destroys the overall harmony. To compensate for this, a recovery operation is performed using another sampler. This requires a longer time for sampling because a second sampling is performed at each step in each region using a special sampler. 1) DISABLE: Disable this feature. 2) ratio additional: After performing the denoise amount to be performed in the step with the sampler set in the region, the recovery sampler is additionally applied by the additional_sigma_ratio. If you use this option, the total denoise amount increases by additional_sigma_ratio. 3) ratio between: The denoise amount to be performed in the step with the sampler set in the region and the denoise amount to be applied to the recovery sampler are divided by additional_sigma_ratio, and denoise is performed for each denoise amount. If you use this option, the total denoise amount does not change.", + "additional_sampler": "1) AUTO: Automatically set the recovery sampler. If the sampler is uni_pc, uni_pc_bh2, dpmpp_sde, dpmpp_sde_gpu, the dpm_fast sampler is selected If the sampler is dpmpp_2m_sde, dpmpp_2m_sde_gpu, dpmpp_3m_sde, dpmpp_3m_sde_gpu, the dpmpp_2m sampler is selected. 
2) Others: Manually set the recovery sampler.", + "additional_sigma_ratio": "Multiplier of noise schedule to be applied according to additional_mode.", + }, + "output": ("result latent", ) + } + + RETURN_TYPES = ("LATENT", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Regional" + + @staticmethod + def separated_sample(*args, **kwargs): + return separated_sample(*args, **kwargs) + + @staticmethod + def mask_erosion(samples, mask, grow_mask_by): + mask = mask.clone() + + w = samples['samples'].shape[3] + h = samples['samples'].shape[2] + + mask2 = torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(w, h), mode="bilinear") + if grow_mask_by == 0: + mask_erosion = mask2 + else: + kernel_tensor = torch.ones((1, 1, grow_mask_by, grow_mask_by)) + padding = math.ceil((grow_mask_by - 1) / 2) + + mask_erosion = torch.clamp(torch.nn.functional.conv2d(mask2.round(), kernel_tensor, padding=padding), 0, 1) + + return mask_erosion[:, :, :w, :h].round() + + @staticmethod + def doit(seed, seed_2nd, seed_2nd_mode, steps, base_only_steps, denoise, samples, base_sampler, regional_prompts, overlap_factor, restore_latent, + additional_mode, additional_sampler, additional_sigma_ratio, unique_id=None): + if restore_latent: + latent_compositor = nodes.NODE_CLASS_MAPPINGS['LatentCompositeMasked']() + else: + latent_compositor = None + + masks = [regional_prompt.mask.numpy() for regional_prompt in regional_prompts] + masks = [np.ceil(mask).astype(np.int32) for mask in masks] + combined_mask = torch.from_numpy(np.bitwise_or.reduce(masks)) + + inv_mask = torch.where(combined_mask == 0, torch.tensor(1.0), torch.tensor(0.0)) + + adv_steps = int(steps / denoise) + start_at_step = adv_steps - steps + + region_len = len(regional_prompts) + total = steps*region_len + + leftover_noise = False + if base_only_steps > 0: + if seed_2nd_mode == 'ignore': + leftover_noise = True + + noise = Noise_RandomNoise(seed).generate_noise(samples) + + for rp in regional_prompts: + noise = rp.touch_noise(noise) + + samples = base_sampler.sample_advanced(True, seed, adv_steps, samples, start_at_step, start_at_step + base_only_steps, leftover_noise, recovery_mode="DISABLE", noise=noise) + + if seed_2nd_mode == "seed+seed_2nd": + seed += seed_2nd + if seed > 1125899906842624: + seed = seed - 1125899906842624 + elif seed_2nd_mode == "seed-seed_2nd": + seed -= seed_2nd + if seed < 0: + seed += 1125899906842624 + elif seed_2nd_mode != 'ignore': + seed = seed_2nd + + new_latent_image = samples.copy() + base_latent_image = None + + if not leftover_noise: + add_noise = True + noise = Noise_RandomNoise(seed).generate_noise(samples) + + for rp in regional_prompts: + noise = rp.touch_noise(noise) + else: + add_noise = False + noise = None + + for i in range(start_at_step+base_only_steps, adv_steps): + core.update_node_status(unique_id, f"{i}/{steps} steps | ", ((i-start_at_step)*region_len)/total) + + new_latent_image['noise_mask'] = inv_mask + new_latent_image = base_sampler.sample_advanced(add_noise, seed, adv_steps, new_latent_image, + start_at_step=i, end_at_step=i + 1, return_with_leftover_noise=True, + recovery_mode=additional_mode, recovery_sampler=additional_sampler, recovery_sigma_ratio=additional_sigma_ratio, noise=noise) + + if restore_latent: + if 'noise_mask' in new_latent_image: + del new_latent_image['noise_mask'] + base_latent_image = new_latent_image.copy() + + j = 1 + for regional_prompt in regional_prompts: + if restore_latent: + new_latent_image = base_latent_image.copy() + + 
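The denoise-to-step-window conversion at the top of RegionalSampler.doit is worth seeing in isolation: with denoise < 1 the sampler pretends there are steps/denoise total steps and runs only the final `steps` of them, which is how partial denoising maps onto an advanced sampler's start/end steps. A sketch of that arithmetic:

def step_window(steps, denoise):
    # Stretch the schedule so the requested steps are its low-noise tail.
    adv_steps = int(steps / denoise)
    start_at_step = adv_steps - steps
    return adv_steps, start_at_step

print(step_window(20, 1.0))  # (20, 0): full denoise runs every step
print(step_window(20, 0.5))  # (40, 20): run steps 20..40 of a 40-step schedule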
core.update_node_status(unique_id, f"{i}/{steps} steps | {j}/{region_len}", ((i-start_at_step)*region_len + j)/total) + + region_mask = regional_prompt.get_mask_erosion(overlap_factor).squeeze(0).squeeze(0) + + new_latent_image['noise_mask'] = region_mask + new_latent_image = regional_prompt.sampler.sample_advanced(False, seed, adv_steps, new_latent_image, i, i + 1, True, + recovery_mode=additional_mode, recovery_sampler=additional_sampler, recovery_sigma_ratio=additional_sigma_ratio) + + if restore_latent: + del new_latent_image['noise_mask'] + base_latent_image = latent_compositor.composite(base_latent_image, new_latent_image, 0, 0, False, region_mask)[0] + new_latent_image = base_latent_image + + j += 1 + + add_noise = False + + # finalize + core.update_node_status(unique_id, f"finalize") + if base_latent_image is not None: + new_latent_image = base_latent_image + else: + base_latent_image = new_latent_image + + new_latent_image['noise_mask'] = inv_mask + new_latent_image = base_sampler.sample_advanced(False, seed, adv_steps, new_latent_image, adv_steps, adv_steps+1, False, + recovery_mode=additional_mode, recovery_sampler=additional_sampler, recovery_sigma_ratio=additional_sigma_ratio) + + core.update_node_status(unique_id, f"{steps}/{steps} steps", total) + core.update_node_status(unique_id, "", None) + + if restore_latent: + new_latent_image = base_latent_image + + if 'noise_mask' in new_latent_image: + del new_latent_image['noise_mask'] + + return (new_latent_image, ) + + +class RegionalSamplerAdvanced: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "add_noise": ("BOOLEAN", {"default": True, "label_on": "enabled", "label_off": "disabled"}), + "noise_seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "start_at_step": ("INT", {"default": 0, "min": 0, "max": 10000}), + "end_at_step": ("INT", {"default": 10000, "min": 0, "max": 10000}), + "overlap_factor": ("INT", {"default": 10, "min": 0, "max": 10000}), + "restore_latent": ("BOOLEAN", {"default": True, "label_on": "enabled", "label_off": "disabled"}), + "return_with_leftover_noise": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}), + "latent_image": ("LATENT", ), + "base_sampler": ("KSAMPLER_ADVANCED", ), + "regional_prompts": ("REGIONAL_PROMPTS", ), + "additional_mode": (["DISABLE", "ratio additional", "ratio between"], {"default": "ratio between"}), + "additional_sampler": (["AUTO", "euler", "heun", "heunpp2", "dpm_2", "dpm_fast", "dpmpp_2m", "ddpm"],), + "additional_sigma_ratio": ("FLOAT", {"default": 0.3, "min": 0.0, "max": 1.0, "step": 0.01}), + }, + "hidden": {"unique_id": "UNIQUE_ID"}, + } + + TOOLTIPS = { + "input": { + "add_noise": "Whether to add noise", + "noise_seed": "Random seed to use for generating CPU noise for sampling.", + "steps": "total sampling steps", + "start_at_step": "The starting step of the sampling to be applied at this node within the range of 'steps'.", + "end_at_step": "The step at which sampling applied at this node will stop within the range of steps (if greater than steps, sampling will continue only up to steps).", + "overlap_factor": "To smooth the seams of the region boundaries, expand the mask set in regional_prompts by the overlap_factor amount to overlap with other regions.", + "restore_latent": "At each step, restore the noise outside the mask area to its original state, as per the principle of inpainting. 
This option is provided for backward compatibility, and it is recommended to always set it to true.", + "return_with_leftover_noise": "Whether to return the latent with noise remaining if the noise has not been completely removed according to the noise schedule, or to completely remove the noise before returning it.", + "latent_image": "input latent image", + "base_sampler": "The sampler applied outside the area set by the regional_prompt.", + "regional_prompts": "The prompt applied to each region", + "additional_mode": "..._sde or uni_pc and other special samplers are used, the region is not properly denoised, and it causes a phenomenon that destroys the overall harmony. To compensate for this, a recovery operation is performed using another sampler. This requires a longer time for sampling because a second sampling is performed at each step in each region using a special sampler. 1) DISABLE: Disable this feature. 2) ratio additional: After performing the denoise amount to be performed in the step with the sampler set in the region, the recovery sampler is additionally applied by the additional_sigma_ratio. If you use this option, the total denoise amount increases by additional_sigma_ratio. 3) ratio between: The denoise amount to be performed in the step with the sampler set in the region and the denoise amount to be applied to the recovery sampler are divided by additional_sigma_ratio, and denoise is performed for each denoise amount. If you use this option, the total denoise amount does not change.", + "additional_sampler": "1) AUTO: Automatically set the recovery sampler. If the sampler is uni_pc, uni_pc_bh2, dpmpp_sde, dpmpp_sde_gpu, the dpm_fast sampler is selected If the sampler is dpmpp_2m_sde, dpmpp_2m_sde_gpu, dpmpp_3m_sde, dpmpp_3m_sde_gpu, the dpmpp_2m sampler is selected. 
2) Others: Manually set the recovery sampler.", + "additional_sigma_ratio": "Multiplier of noise schedule to be applied according to additional_mode.", + }, + "output": ("result latent", ) + } + + + RETURN_TYPES = ("LATENT", ) + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Regional" + + @staticmethod + def doit(add_noise, noise_seed, steps, start_at_step, end_at_step, overlap_factor, restore_latent, return_with_leftover_noise, latent_image, base_sampler, regional_prompts, + additional_mode, additional_sampler, additional_sigma_ratio, unique_id): + + if restore_latent: + latent_compositor = nodes.NODE_CLASS_MAPPINGS['LatentCompositeMasked']() + else: + latent_compositor = None + + masks = [regional_prompt.mask.numpy() for regional_prompt in regional_prompts] + masks = [np.ceil(mask).astype(np.int32) for mask in masks] + combined_mask = torch.from_numpy(np.bitwise_or.reduce(masks)) + + inv_mask = torch.where(combined_mask == 0, torch.tensor(1.0), torch.tensor(0.0)) + + region_len = len(regional_prompts) + end_at_step = min(steps, end_at_step) + total = (end_at_step - start_at_step) * region_len + + new_latent_image = latent_image.copy() + base_latent_image = None + region_masks = {} + + for i in range(start_at_step, end_at_step-1): + core.update_node_status(unique_id, f"{start_at_step+i}/{end_at_step} steps | ", ((i-start_at_step)*region_len)/total) + + cur_add_noise = True if i == start_at_step and add_noise else False + + if cur_add_noise: + noise = Noise_RandomNoise(noise_seed).generate_noise(new_latent_image) + for rp in regional_prompts: + noise = rp.touch_noise(noise) + else: + noise = None + + new_latent_image['noise_mask'] = inv_mask + new_latent_image = base_sampler.sample_advanced(cur_add_noise, noise_seed, steps, new_latent_image, i, i + 1, True, + recovery_mode=additional_mode, recovery_sampler=additional_sampler, recovery_sigma_ratio=additional_sigma_ratio, noise=noise) + + if restore_latent: + del new_latent_image['noise_mask'] + base_latent_image = new_latent_image.copy() + + j = 1 + for regional_prompt in regional_prompts: + if restore_latent: + new_latent_image = base_latent_image.copy() + + core.update_node_status(unique_id, f"{start_at_step+i}/{end_at_step} steps | {j}/{region_len}", ((i-start_at_step)*region_len + j)/total) + + if j not in region_masks: + region_mask = regional_prompt.get_mask_erosion(overlap_factor).squeeze(0).squeeze(0) + region_masks[j] = region_mask + else: + region_mask = region_masks[j] + + new_latent_image['noise_mask'] = region_mask + new_latent_image = regional_prompt.sampler.sample_advanced(False, noise_seed, steps, new_latent_image, i, i + 1, True, + recovery_mode=additional_mode, recovery_sampler=additional_sampler, recovery_sigma_ratio=additional_sigma_ratio) + + if restore_latent: + del new_latent_image['noise_mask'] + base_latent_image = latent_compositor.composite(base_latent_image, new_latent_image, 0, 0, False, region_mask)[0] + new_latent_image = base_latent_image + + j += 1 + + # finalize + core.update_node_status(unique_id, f"finalize") + if base_latent_image is not None: + new_latent_image = base_latent_image + else: + base_latent_image = new_latent_image + + new_latent_image['noise_mask'] = inv_mask + new_latent_image = base_sampler.sample_advanced(False, noise_seed, steps, new_latent_image, end_at_step-1, end_at_step, return_with_leftover_noise, + recovery_mode=additional_mode, recovery_sampler=additional_sampler, recovery_sigma_ratio=additional_sigma_ratio) + + core.update_node_status(unique_id, f"{end_at_step}/{end_at_step} 
steps", total) + core.update_node_status(unique_id, "", None) + + if restore_latent: + new_latent_image = base_latent_image + + if 'noise_mask' in new_latent_image: + del new_latent_image['noise_mask'] + + return (new_latent_image, ) + + +class KSamplerBasicPipe: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"basic_pipe": ("BASIC_PIPE",), + "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, ), + "scheduler": (core.SCHEDULERS, ), + "latent_image": ("LATENT", ), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + }, + "optional": + { + "scheduler_func_opt": ("SCHEDULER_FUNC", ), + } + } + + TOOLTIPS = { + "input": { + "basic_pipe": "basic_pipe input for sampling", + "seed": "Random seed to use for generating CPU noise for sampling.", + "steps": "total sampling steps", + "cfg": "classifier free guidance value", + "sampler_name": "sampler", + "scheduler": "noise schedule", + "latent_image": "input latent image", + "denoise": "The amount of noise to remove. This amount is the noise added at the start, and the higher it is, the more the input latent will be modified before being returned.", + "scheduler_func_opt": "[OPTIONAL] Noise schedule generation function. If this is set, the scheduler widget will be ignored.", + }, + "output": ("passthrough input basic_pipe", "result latent", "VAE in basic_pipe") + } + + RETURN_TYPES = ("BASIC_PIPE", "LATENT", "VAE") + FUNCTION = "sample" + + CATEGORY = "ImpactPack/sampling" + + @staticmethod + def sample(basic_pipe, seed, steps, cfg, sampler_name, scheduler, latent_image, denoise=1.0, scheduler_func_opt=None): + model, clip, vae, positive, negative = basic_pipe + latent = impact_sample(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, denoise, scheduler_func=scheduler_func_opt) + return basic_pipe, latent, vae + + +class KSamplerAdvancedBasicPipe: + @classmethod + def INPUT_TYPES(s): + return {"required": + {"basic_pipe": ("BASIC_PIPE",), + "add_noise": ("BOOLEAN", {"default": True, "label_on": "enable", "label_off": "disable"}), + "noise_seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}), + "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), + "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), + "sampler_name": (comfy.samplers.KSampler.SAMPLERS, ), + "scheduler": (core.SCHEDULERS, ), + "latent_image": ("LATENT", ), + "start_at_step": ("INT", {"default": 0, "min": 0, "max": 10000}), + "end_at_step": ("INT", {"default": 10000, "min": 0, "max": 10000}), + "return_with_leftover_noise": ("BOOLEAN", {"default": False, "label_on": "enable", "label_off": "disable"}), + }, + "optional": + { + "scheduler_func_opt": ("SCHEDULER_FUNC", ), + } + } + + TOOLTIPS = { + "input": { + "basic_pipe": "basic_pipe input for sampling", + "add_noise": "Whether to add noise", + "noise_seed": "Random seed to use for generating CPU noise for sampling.", + "steps": "total sampling steps", + "cfg": "classifier free guidance value", + "sampler_name": "sampler", + "scheduler": "noise schedule", + "latent_image": "input latent image", + "start_at_step": "The starting step of the sampling to be applied at this node within the range of 'steps'.", + "end_at_step": "The step at which sampling applied at this node will stop within the range of steps (if greater than steps, sampling 
will continue only up to steps).", + "return_with_leftover_noise": "Whether to return the latent with noise remaining if the noise has not been completely removed according to the noise schedule, or to completely remove the noise before returning it.", + "scheduler_func_opt": "[OPTIONAL] Noise schedule generation function. If this is set, the scheduler widget will be ignored.", + }, + "output": ("passthrough input basic_pipe", "result latent", "VAE in basic_pipe") + } + + RETURN_TYPES = ("BASIC_PIPE", "LATENT", "VAE") + FUNCTION = "sample" + + CATEGORY = "ImpactPack/sampling" + + @staticmethod + def sample(basic_pipe, add_noise, noise_seed, steps, cfg, sampler_name, scheduler, latent_image, start_at_step, end_at_step, return_with_leftover_noise, denoise=1.0, scheduler_func_opt=None): + model, clip, vae, positive, negative = basic_pipe + + latent = separated_sample(model, add_noise, noise_seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image, start_at_step, end_at_step, return_with_leftover_noise, scheduler_func=scheduler_func_opt) + return basic_pipe, latent, vae + + +class GITSSchedulerFuncProvider: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "coeff": ("FLOAT", {"default": 1.20, "min": 0.80, "max": 1.50, "step": 0.05}), + "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), + } + } + + TOOLTIPS = { + "input": { + "coeff": "coeff factor of GITS Scheduler", + "denoise": "denoise amount for noise schedule", + }, + "output": ("Returns a function that generates a noise schedule using GITSScheduler. This can be used in place of a predetermined noise schedule to dynamically generate a noise schedule based on the steps.",) + } + + RETURN_TYPES = ("SCHEDULER_FUNC",) + CATEGORY = "ImpactPack/sampling" + + FUNCTION = "doit" + + @staticmethod + def doit(coeff, denoise): + def f(model, sampler, steps): + if 'GITSScheduler' not in nodes.NODE_CLASS_MAPPINGS: + raise Exception("[Impact Pack] ComfyUI is an outdated version. 
Cannot use GITSScheduler.") + + scheduler = nodes.NODE_CLASS_MAPPINGS['GITSScheduler']() + return scheduler.get_sigmas(coeff, steps, denoise)[0] + + return (f, ) + + +class NegativeConditioningPlaceholder: + @classmethod + def INPUT_TYPES(s): + return {"required": {}} + + TOOLTIPS = { + "output": ("This is a Placeholder for the FLUX model that does not use Negative Conditioning.",) + } + + RETURN_TYPES = ("CONDITIONING",) + CATEGORY = "ImpactPack/sampling" + + FUNCTION = "doit" + + @staticmethod + def doit(): + return ("NegativePlaceholder", ) diff --git a/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/util_nodes.py b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/util_nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..925514ff93b8728e5b3a89c4c53645e4f401ef80 --- /dev/null +++ b/src/comfyui/custom_nodes/ComfyUI-Impact-Pack/modules/impact/util_nodes.py @@ -0,0 +1,605 @@ +from impact.utils import any_typ, ByPassTypeTuple, make_3d_mask +import comfy_extras.nodes_mask +from nodes import MAX_RESOLUTION +import torch +import comfy +import sys +import nodes +import re +import impact.core as core +from server import PromptServer +import inspect + + +class GeneralSwitch: + @classmethod + def INPUT_TYPES(s): + dyn_inputs = {"input1": (any_typ, {"lazy": True}), } + if core.is_execution_model_version_supported(): + stack = inspect.stack() + if stack[2].function == 'get_input_info' and stack[3].function == 'add_node': + for x in range(2, 200): + dyn_inputs[f"input{x}"] = (any_typ, {"lazy": True}) + + inputs = {"required": { + "select": ("INT", {"default": 1, "min": 1, "max": 999999, "step": 1}), + "sel_mode": ("BOOLEAN", {"default": False, "label_on": "select_on_prompt", "label_off": "select_on_execution", "forceInput": False}), + }, + "optional": dyn_inputs, + "hidden": {"unique_id": "UNIQUE_ID", "extra_pnginfo": "EXTRA_PNGINFO"} + } + + return inputs + + RETURN_TYPES = (any_typ, "STRING", "INT") + RETURN_NAMES = ("selected_value", "selected_label", "selected_index") + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def check_lazy_status(self, *args, **kwargs): + selected_index = int(kwargs['select']) + input_name = f"input{selected_index}" + + print(f"SELECTED: {input_name}") + + return [input_name] + + @staticmethod + def doit(*args, **kwargs): + selected_index = int(kwargs['select']) + input_name = f"input{selected_index}" + + selected_label = input_name + node_id = kwargs['unique_id'] + + if 'extra_pnginfo' in kwargs and kwargs['extra_pnginfo'] is not None: + nodelist = kwargs['extra_pnginfo']['workflow']['nodes'] + for node in nodelist: + if str(node['id']) == node_id: + inputs = node['inputs'] + + for slot in inputs: + if slot['name'] == input_name and 'label' in slot: + selected_label = slot['label'] + + break + else: + print(f"[Impact-Pack] The switch node does not guarantee proper functioning in API mode.") + + if input_name in kwargs: + return kwargs[input_name], selected_label, selected_index + else: + print(f"ImpactSwitch: invalid select index (ignored)") + return None, "", selected_index + +class LatentSwitch: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "select": ("INT", {"default": 1, "min": 1, "max": 99999, "step": 1}), + "latent1": ("LATENT",), + }, + } + + RETURN_TYPES = ("LATENT", ) + + OUTPUT_NODE = True + + FUNCTION = "doit" + + CATEGORY = "ImpactPack/Util" + + def doit(self, *args, **kwargs): + input_name = f"latent{int(kwargs['select'])}" + + if input_name in kwargs: + return (kwargs[input_name],) + 
+
+
+class ImageMaskSwitch:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {
+                    "select": ("INT", {"default": 1, "min": 1, "max": 4, "step": 1}),
+                    "images1": ("IMAGE",),
+                },
+                "optional": {
+                    "mask1_opt": ("MASK",),
+                    "images2_opt": ("IMAGE",),
+                    "mask2_opt": ("MASK",),
+                    "images3_opt": ("IMAGE",),
+                    "mask3_opt": ("MASK",),
+                    "images4_opt": ("IMAGE",),
+                    "mask4_opt": ("MASK",),
+                },
+                }
+
+    RETURN_TYPES = ("IMAGE", "MASK",)
+
+    OUTPUT_NODE = True
+
+    FUNCTION = "doit"
+
+    CATEGORY = "ImpactPack/Util"
+
+    def doit(self, select, images1, mask1_opt=None, images2_opt=None, mask2_opt=None, images3_opt=None, mask3_opt=None,
+             images4_opt=None, mask4_opt=None):
+        if select == 1:
+            return images1, mask1_opt
+        elif select == 2:
+            return images2_opt, mask2_opt
+        elif select == 3:
+            return images3_opt, mask3_opt
+        else:
+            return images4_opt, mask4_opt
+
+
+class GeneralInversedSwitch:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {
+                    "select": ("INT", {"default": 1, "min": 1, "max": 999999, "step": 1}),
+                    "input": (any_typ,),
+                },
+                "optional": {
+                    "sel_mode": ("BOOLEAN", {"default": False, "label_on": "select_on_prompt", "label_off": "select_on_execution", "forceInput": False}),
+                },
+                }
+
+    RETURN_TYPES = ByPassTypeTuple((any_typ, ))
+    FUNCTION = "doit"
+
+    CATEGORY = "ImpactPack/Util"
+
+    def doit(self, select, input, **kwargs):
+        res = []
+
+        # route the input to the select-th output slot; every other slot receives None
+        for i in range(select):
+            if select == i+1:
+                res.append(input)
+            else:
+                res.append(None)
+
+        return res
+
+
+class RemoveNoiseMask:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {"samples": ("LATENT",)}}
+
+    RETURN_TYPES = ("LATENT",)
+    FUNCTION = "doit"
+
+    CATEGORY = "ImpactPack/Util"
+
+    def doit(self, samples):
+        res = {key: value for key, value in samples.items() if key != 'noise_mask'}
+        return (res, )
+
+
+class ImagePasteMasked:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {
+            "required": {
+                "destination": ("IMAGE",),
+                "source": ("IMAGE",),
+                "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}),
+                "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}),
+                "resize_source": ("BOOLEAN", {"default": False}),
+            },
+            "optional": {
+                "mask": ("MASK",),
+            }
+        }
+    RETURN_TYPES = ("IMAGE",)
+    FUNCTION = "composite"
+
+    CATEGORY = "image"
+
+    def composite(self, destination, source, x, y, resize_source, mask=None):
+        destination = destination.clone().movedim(-1, 1)
+        output = comfy_extras.nodes_mask.composite(destination, source.movedim(-1, 1), x, y, mask, 1, resize_source).movedim(1, -1)
+        return (output,)
+
+
+class ImpactLogger:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {
+                    "data": (any_typ,),
+                    "text": ("STRING", {"multiline": True}),
+                },
+                "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "unique_id": "UNIQUE_ID"},
+                }
+
+    CATEGORY = "ImpactPack/Debug"
+
+    OUTPUT_NODE = True
+
+    RETURN_TYPES = ()
+    FUNCTION = "doit"
+
+    def doit(self, data, text, prompt, extra_pnginfo, unique_id):
+        shape = ""
+        if hasattr(data, "shape"):
+            shape = f"{data.shape} / "
+
+        print(f"[IMPACT LOGGER]: {shape}{data}")
+
+        print(f"         PROMPT: {prompt}")
+
+        # for x in prompt:
+        #     if 'inputs' in x and 'populated_text' in x['inputs']:
+        #         print(f"PROMPT: {x['10']['inputs']['populated_text']}")
+        #
+        # for x in extra_pnginfo['workflow']['nodes']:
+        #     if x['type'] == 'ImpactWildcardProcessor':
+        #         print(f"    WV : {x['widgets_values'][1]}\n")
+
+        PromptServer.instance.send_sync("impact-node-feedback", {"node_id": unique_id, "widget_name": "text", "type": "TEXT", "value": f"{data}"})
+        return {}
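
Since LATENT values are plain dicts, RemoveNoiseMask only has to rebuild the dict without the 'noise_mask' key. A small sketch (editorial, with assumed tensor shapes):

import torch

latent = {"samples": torch.zeros((1, 4, 64, 64)),
          "noise_mask": torch.ones((1, 1, 512, 512))}
clean = RemoveNoiseMask().doit(latent)[0]
assert "noise_mask" not in clean
assert clean["samples"] is latent["samples"]  # tensors are shared, not copied
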
+
+
+class ImpactDummyInput:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {}}
+
+    CATEGORY = "ImpactPack/Debug"
+
+    RETURN_TYPES = (any_typ,)
+    FUNCTION = "doit"
+
+    def doit(self):
+        return ("DUMMY",)
+
+
+class MasksToMaskList:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {
+                    "masks": ("MASK", ),
+                }}
+
+    RETURN_TYPES = ("MASK", )
+    OUTPUT_IS_LIST = (True, )
+    FUNCTION = "doit"
+
+    CATEGORY = "ImpactPack/Operation"
+
+    def doit(self, masks):
+        if masks is None:
+            empty_mask = torch.zeros((64, 64), dtype=torch.float32, device="cpu")
+            return ([empty_mask], )
+
+        res = [make_3d_mask(mask) for mask in masks]
+        print(f"mask len: {len(res)}")
+
+        return (res, )
+
+
+class MaskListToMaskBatch:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {
+                    "mask": ("MASK", ),
+                }}
+
+    INPUT_IS_LIST = True
+
+    RETURN_TYPES = ("MASK", )
+    FUNCTION = "doit"
+
+    CATEGORY = "ImpactPack/Operation"
+
+    def doit(self, mask):
+        if len(mask) == 1:
+            mask = make_3d_mask(mask[0])
+            return (mask,)
+        elif len(mask) > 1:
+            mask1 = make_3d_mask(mask[0])
+
+            for mask2 in mask[1:]:
+                mask2 = make_3d_mask(mask2)
+                if mask1.shape[1:] != mask2.shape[1:]:
+                    mask2 = comfy.utils.common_upscale(mask2.movedim(-1, 1), mask1.shape[2], mask1.shape[1], "lanczos", "center").movedim(1, -1)
+                mask1 = torch.cat((mask1, mask2), dim=0)
+
+            return (mask1,)
+        else:
+            # (1, 64, 64) matches the 3D MASK batch convention
+            empty_mask = torch.zeros((1, 64, 64), dtype=torch.float32, device="cpu")
+            return (empty_mask,)
+
+
+class ImageListToImageBatch:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {
+                    "images": ("IMAGE", ),
+                }}
+
+    INPUT_IS_LIST = True
+
+    RETURN_TYPES = ("IMAGE", )
+    FUNCTION = "doit"
+
+    CATEGORY = "ImpactPack/Operation"
+
+    def doit(self, images):
+        if len(images) <= 1:
+            # unwrap the lone tensor so the output is an IMAGE batch rather than a list
+            return (images[0],)
+        else:
+            image1 = images[0]
+            for image2 in images[1:]:
+                if image1.shape[1:] != image2.shape[1:]:
+                    image2 = comfy.utils.common_upscale(image2.movedim(-1, 1), image1.shape[2], image1.shape[1], "lanczos", "center").movedim(1, -1)
+                image1 = torch.cat((image1, image2), dim=0)
+            return (image1,)
+
+
+class ImageBatchToImageList:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {"image": ("IMAGE",), }}
+
+    RETURN_TYPES = ("IMAGE",)
+    OUTPUT_IS_LIST = (True,)
+    FUNCTION = "doit"
+
+    CATEGORY = "ImpactPack/Util"
+
+    def doit(self, image):
+        images = [image[i:i + 1, ...] for i in range(image.shape[0])]
+        return (images, )
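
A round-trip sketch for the two converters above (editorial; the shapes are assumptions). IMAGE tensors are (B, H, W, C), and slicing with i:i+1 keeps the batch dimension on every list entry, which is why the list can be concatenated straight back into a batch:

import torch

batch = torch.rand((3, 512, 512, 3))
items = ImageBatchToImageList().doit(batch)[0]   # list of 3 tensors, each (1, 512, 512, 3)
merged = ImageListToImageBatch().doit(items)[0]  # back to (3, 512, 512, 3)
assert merged.shape == batch.shape
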
+
+
+class MakeImageList:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {"image1": ("IMAGE",), }}
+
+    RETURN_TYPES = ("IMAGE",)
+    OUTPUT_IS_LIST = (True,)
+    FUNCTION = "doit"
+
+    CATEGORY = "ImpactPack/Util"
+
+    def doit(self, **kwargs):
+        # additional imageN inputs beyond image1 are attached dynamically by the frontend
+        images = list(kwargs.values())
+        return (images, )
+
+
+class MakeImageBatch:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {"image1": ("IMAGE",), }}
+
+    RETURN_TYPES = ("IMAGE",)
+    FUNCTION = "doit"
+
+    CATEGORY = "ImpactPack/Util"
+
+    def doit(self, **kwargs):
+        image1 = kwargs['image1']
+        del kwargs['image1']
+        images = list(kwargs.values())
+
+        if len(images) == 0:
+            return (image1,)
+        else:
+            for image2 in images:
+                if image1.shape[1:] != image2.shape[1:]:
+                    image2 = comfy.utils.common_upscale(image2.movedim(-1, 1), image1.shape[2], image1.shape[1], "lanczos", "center").movedim(1, -1)
+                image1 = torch.cat((image1, image2), dim=0)
+            return (image1,)
+
+
+class ReencodeLatent:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {
+                    "samples": ("LATENT", ),
+                    "tile_mode": (["None", "Both", "Decode(input) only", "Encode(output) only"],),
+                    "input_vae": ("VAE", ),
+                    "output_vae": ("VAE", ),
+                    "tile_size": ("INT", {"default": 512, "min": 320, "max": 4096, "step": 64}),
+                }}
+
+    CATEGORY = "ImpactPack/Util"
+
+    RETURN_TYPES = ("LATENT", )
+    FUNCTION = "doit"
+
+    def doit(self, samples, tile_mode, input_vae, output_vae, tile_size=512):
+        if tile_mode in ["Both", "Decode(input) only"]:
+            pixels = nodes.VAEDecodeTiled().decode(input_vae, samples, tile_size)[0]
+        else:
+            pixels = nodes.VAEDecode().decode(input_vae, samples)[0]
+
+        if tile_mode in ["Both", "Encode(output) only"]:
+            return nodes.VAEEncodeTiled().encode(output_vae, pixels, tile_size)
+        else:
+            return nodes.VAEEncode().encode(output_vae, pixels)
+
+
+class ReencodeLatentPipe:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {
+                    "samples": ("LATENT", ),
+                    "tile_mode": (["None", "Both", "Decode(input) only", "Encode(output) only"],),
+                    "input_basic_pipe": ("BASIC_PIPE", ),
+                    "output_basic_pipe": ("BASIC_PIPE", ),
+                }}
+
+    CATEGORY = "ImpactPack/Util"
+
+    RETURN_TYPES = ("LATENT", )
+    FUNCTION = "doit"
+
+    def doit(self, samples, tile_mode, input_basic_pipe, output_basic_pipe):
+        _, _, input_vae, _, _ = input_basic_pipe
+        _, _, output_vae, _, _ = output_basic_pipe
+        return ReencodeLatent().doit(samples, tile_mode, input_vae, output_vae)
+
+
+class StringSelector:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": {
+                    "strings": ("STRING", {"multiline": True}),
+                    "multiline": ("BOOLEAN", {"default": False, "label_on": "enabled", "label_off": "disabled"}),
+                    "select": ("INT", {"min": 0, "max": sys.maxsize, "step": 1, "default": 0}),
+                }}
+
+    RETURN_TYPES = ("STRING",)
+    FUNCTION = "doit"
+
+    CATEGORY = "ImpactPack/Util"
+
+    def doit(self, strings, multiline, select):
+        lines = strings.split('\n')
+
+        if multiline:
+            result = []
+            current_string = ""
+
+            for line in lines:
+                if line.startswith("#"):
+                    if current_string:
+                        result.append(current_string.strip())
+                        current_string = ""
+                current_string += line + "\n"
+
+            if current_string:
+                result.append(current_string.strip())
+
+            if len(result) == 0:
+                selected = strings
+            else:
+                selected = result[select % len(result)]
+
+            if selected.startswith('#'):
+                selected = selected[1:]
+        else:
+            if len(lines) == 0:
+                selected = strings
+            else:
+                selected = lines[select % len(lines)]
+
+        return (selected, )
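
StringSelector's two modes in action (editorial example; the input text is an assumption). In single-line mode, select indexes individual lines; in multiline mode, blocks are delimited by lines starting with '#', and a leading '#' on the selected block is stripped:

s = "#first\nline a\n#second\nline b"
single = StringSelector().doit(s, multiline=False, select=1)[0]  # -> "line a"
block = StringSelector().doit(s, multiline=True, select=1)[0]    # -> "second\nline b"
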
+
+
+class StringListToString:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {
+            "required": {
+                "join_with": ("STRING", {"default": "\\n"}),
+                "string_list": ("STRING", {"forceInput": True}),
+            }
+        }
+
+    INPUT_IS_LIST = True
+    RETURN_TYPES = ("STRING",)
+    FUNCTION = "doit"
+
+    CATEGORY = "ImpactPack/Util"
+
+    def doit(self, join_with, string_list):
+        # INPUT_IS_LIST makes every input arrive as a list, so the widget value is join_with[0];
+        # convert the literal "\n" to a newline character
+        if join_with[0] == "\\n":
+            join_with[0] = "\n"
+
+        joined_text = join_with[0].join(string_list)
+
+        return (joined_text,)
+
+
+class WildcardPromptFromString:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {
+            "required": {
+                "string": ("STRING", {"forceInput": True}),
+                "delimiter": ("STRING", {"multiline": False, "default": "\\n"}),
+                "prefix_all": ("STRING", {"multiline": False}),
+                "postfix_all": ("STRING", {"multiline": False}),
+                "restrict_to_tags": ("STRING", {"multiline": False}),
+                "exclude_tags": ("STRING", {"multiline": False})
+            },
+        }
+
+    RETURN_TYPES = ("STRING", "STRING",)
+    RETURN_NAMES = ("wildcard", "segs_labels",)
+    FUNCTION = "doit"
+
+    CATEGORY = "ImpactPack/Util"
+
+    def doit(self, string, delimiter, prefix_all, postfix_all, restrict_to_tags, exclude_tags):
+        # convert the literal "\n" to a newline character
+        if delimiter == "\\n":
+            delimiter = "\n"
+
+        # sanity checks and normalization for later processing
+        if prefix_all is None:
+            prefix_all = ""
+        if postfix_all is None:
+            postfix_all = ""
+        if restrict_to_tags is None:
+            restrict_to_tags = ""
+        if exclude_tags is None:
+            exclude_tags = ""
+
+        restrict_to_tags = restrict_to_tags.split(", ")
+        exclude_tags = exclude_tags.split(", ")
+
+        # build the wildcard prompt per list entry
+        output = ["[LAB]"]
+        labels = []
+        for x in string.split(delimiter):
+            label = str(len(labels) + 1)
+            labels.append(label)
+            x = x.split(", ")
+            # restrict to tags
+            if restrict_to_tags != [""]:
+                x = list(set(x) & set(restrict_to_tags))
+            # remove tags
+            if exclude_tags != [""]:
+                x = list(set(x) - set(exclude_tags))
+            # next row: