nucleuseru committed
Commit 87f7762 · verified · 1 Parent(s): 4f5ed38

Upload folder using huggingface_hub

Files changed (45)
  1. .gitattributes +6 -0
  2. LICENSE.md +53 -0
  3. README.md +84 -3
  4. ae.safetensors +3 -0
  5. comfy/Flux2_00030_.png +3 -0
  6. comfy/Flux_2-Turbo-LoRA_comfyui.safetensors +3 -0
  7. comfy/Workflow.json +1395 -0
  8. example_edit.png +3 -0
  9. example_t2i.png +3 -0
  10. flux.2-turbo-lora.safetensors +3 -0
  11. flux2-dev.safetensors +3 -0
  12. model_index.json +24 -0
  13. scheduler/scheduler_config.json +18 -0
  14. teaser_editing.png +3 -0
  15. teaser_generation.png +3 -0
  16. text_encoder/config.json +48 -0
  17. text_encoder/generation_config.json +8 -0
  18. text_encoder/model-00001-of-00010.safetensors +3 -0
  19. text_encoder/model-00002-of-00010.safetensors +3 -0
  20. text_encoder/model-00003-of-00010.safetensors +3 -0
  21. text_encoder/model-00004-of-00010.safetensors +3 -0
  22. text_encoder/model-00005-of-00010.safetensors +3 -0
  23. text_encoder/model-00006-of-00010.safetensors +3 -0
  24. text_encoder/model-00007-of-00010.safetensors +3 -0
  25. text_encoder/model-00008-of-00010.safetensors +3 -0
  26. text_encoder/model-00009-of-00010.safetensors +3 -0
  27. text_encoder/model-00010-of-00010.safetensors +3 -0
  28. text_encoder/model.safetensors.index.json +593 -0
  29. tokenizer/chat_template.jinja +51 -0
  30. tokenizer/preprocessor_config.json +34 -0
  31. tokenizer/processor_config.json +8 -0
  32. tokenizer/special_tokens_map.json +1032 -0
  33. tokenizer/tokenizer.json +3 -0
  34. tokenizer/tokenizer_config.json +0 -0
  35. transformer/config.json +22 -0
  36. transformer/diffusion_pytorch_model-00001-of-00007.safetensors +3 -0
  37. transformer/diffusion_pytorch_model-00002-of-00007.safetensors +3 -0
  38. transformer/diffusion_pytorch_model-00003-of-00007.safetensors +3 -0
  39. transformer/diffusion_pytorch_model-00004-of-00007.safetensors +3 -0
  40. transformer/diffusion_pytorch_model-00005-of-00007.safetensors +3 -0
  41. transformer/diffusion_pytorch_model-00006-of-00007.safetensors +3 -0
  42. transformer/diffusion_pytorch_model-00007-of-00007.safetensors +3 -0
  43. transformer/diffusion_pytorch_model.safetensors.index.json +338 -0
  44. vae/config.json +39 -0
  45. vae/diffusion_pytorch_model.safetensors +3 -0
.gitattributes CHANGED
@@ -33,3 +33,9 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ example_edit.png filter=lfs diff=lfs merge=lfs -text
+ example_t2i.png filter=lfs diff=lfs merge=lfs -text
+ comfy/Flux2_00030_.png filter=lfs diff=lfs merge=lfs -text
+ teaser_editing.png filter=lfs diff=lfs merge=lfs -text
+ teaser_generation.png filter=lfs diff=lfs merge=lfs -text
+ tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
LICENSE.md ADDED
@@ -0,0 +1,53 @@
+ FLUX Non-Commercial License v2.1
+
+ Black Forest Labs Inc. (“we” or “our” or “Company”) is pleased to make the weights, parameters, and inference code for the FLUX Model (as defined below) freely available for your non-commercial and non-production use as set forth in this FLUX Non-Commercial License (“License”). “FLUX Model” includes, individually and collectively, the models denoted as FLUX.x [dev], where “.x” denotes the FLUX Model version number, and models made available under the License, as indicated by a license notice that is included in or attached to the work, and their elements which includes algorithms, software, checkpoints, parameters, source code (inference code, evaluation code, and if applicable, fine-tuning code) and any other materials associated with the FLUX AI models made available by Company under this License, including if any, the technical documentation, manuals, and instructions for the use and operation thereof. Note that we may also make available certain elements of what is included in the definition of “FLUX Model” under a separate license, such as the inference code, and nothing in this License will be deemed to restrict or limit any other licenses granted by us in such elements.
+
+ By downloading, accessing, using, Distributing (as defined below), or creating a Derivative (as defined below) of the FLUX Model, you agree to the terms of this License. If you do not agree to this License, then you do not have any rights to access, use, Distribute or create a Derivative of the FLUX Model and you must immediately cease using the FLUX Model. If you are agreeing to be bound by the terms of this License on behalf of your employer or other entity, you represent and warrant to us that you have full legal authority to bind your employer or such entity to this License. If you do not have the requisite authority, you may not accept the License or access the FLUX Model on behalf of your employer or other entity.
+
+ 1. Definitions.
+ - a. “Derivative” means any (i) modified version of the FLUX Model (including but not limited to any customized or fine-tuned version thereof), (ii) work based on the FLUX Model, or (iii) any other derivative work thereof. For the avoidance of doubt, Outputs are not considered Derivatives under this License.
+ - b. “Distribution” or “Distribute” or “Distributing” means providing or making available, by any means, a copy of the FLUX Model and/or the Derivatives as the case may be.
+ - c. “Non-Commercial Purpose” means any of the following uses, but only so far as you do not receive any direct or indirect payment arising from the use of the FLUX Model, Derivatives, or Content Filters (as defined below): (i) personal use for research, experimentation, and testing for the benefit of public knowledge, personal study, private entertainment, hobby projects, or otherwise not directly or indirectly connected to any commercial activities, business operations, or employment responsibilities; (ii) use by commercial or for-profit entities for testing, evaluation, or non-commercial research and development in a non-production environment; and (iii) use by any charitable organization for charitable purposes, or for testing or evaluation. For clarity, use (a) for revenue-generating activity, (b) in direct interactions with or that has impact on end users, or (c) to train, fine tune, or distill other models for commercial use, in each case, is not a Non-Commercial Purpose.
+ - d. “Outputs” means any content generated by the operation of the FLUX Model or Derivatives from an input (such as an image input) or prompt (i.e., text instructions) provided by users. For the avoidance of doubt, Outputs do not include any components of the FLUX Model, such as any fine-tuned versions of the FLUX Model, the weights, or parameters.
+ - e. “you” or “your” means the individual or entity entering into this License with Company.
+
+ 2. License Grant.
+ - a. License. Subject to your compliance with this License, Company grants you a non-exclusive, worldwide, non-transferable, non-sublicensable, revocable, royalty free, and limited license to access, use, create Derivatives of, and Distribute the FLUX Model and Derivatives solely for your Non-Commercial Purposes. The foregoing license is personal to you, and you may not assign or sublicense this License or any other rights or obligations under this License without Company’s prior written consent; any such assignment or sublicense will be void and will automatically and immediately terminate this License. Any restrictions set forth herein regarding the FLUX Model also apply to any Derivative you create or that are created on your behalf.
+ - b. Non-Commercial Use Only. You may only access, use, Distribute, or create Derivatives of the FLUX Model or Derivatives for Non-Commercial Purposes. If you want to use a FLUX Model or a Derivative for any purpose that is not expressly authorized under this License, such as for a commercial activity, you must request a license from Company, which Company may grant to you in Company’s sole discretion and which additional use may be subject to a fee, royalty or other revenue share. Please see https://bfl.ai/licensing if you would like a commercial license.
+ - c. Reserved Rights. The grant of rights expressly set forth in this License are the complete grant of rights to use the FLUX Model, and no other licenses are granted, whether by waiver, estoppel, implication, equity, or otherwise. Company and its licensors reserve all rights not expressly granted by this License.
+ - d. Outputs. We claim no ownership rights in and to the Outputs. You are solely responsible for the Outputs you generate and their subsequent uses in accordance with this License. You may use Output for any purpose (including for commercial purposes), except as expressly prohibited herein. You may not use the Output to train, fine-tune, or distill a model that is competitive with a FLUX Model.
+ - e. You may access, use, Distribute, or create Output of the FLUX Model or Derivatives if you: (i) (A) implement and maintain content filtering measures (“Content Filters”) for your use of the FLUX Model or Derivatives to prevent the creation, display, transmission, generation, or dissemination of unlawful or infringing content, which may include Content Filters that we may make available for use with the FLUX Model (“Provided Content Filters”), or (B) ensure Output undergoes review for unlawful or infringing content before public or non-public distribution, display, transmission or dissemination; and (ii) ensure Output includes disclosure (or other indication) that the Output was generated or modified using artificial intelligence technologies to the extent required under applicable law.
+
+ 3. Distribution. Subject to this License, you may Distribute copies of the FLUX Model and/or Derivatives made by you, under the following conditions:
+ - a. you must make available a copy of this License to third-party recipients of the FLUX Model and/or Derivatives you Distribute, and specify that any rights to use the FLUX Model and/or Derivatives shall be directly granted by Company to said third-party recipients pursuant to this License;
+ - b. you must prominently display the following notice alongside the Distribution of the FLUX Model or Derivative (such as via a “Notice” text file distributed as part of such FLUX Model or Derivative) (the “Attribution Notice”):
+
+ > This FLUX Model is licensed by Black Forest Labs Inc. under the FLUX Non-Commercial License. Copyright Black Forest Labs Inc. IN NO EVENT SHALL BLACK FOREST LABS INC. BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH USE OF THIS MODEL.
+
+ - c. in the case of Distribution of Derivatives made by you: (i) you must also include in the Attribution Notice a statement that you have modified the applicable FLUX Model; (ii) any terms and conditions you impose on any third-party recipients relating to Derivatives made by or for you shall neither limit such third-party recipients’ use of the FLUX Model or any Derivatives made by or for Company in accordance with this License nor conflict with any of its terms and conditions and must include disclaimer of warranties and limitation of liability provisions that are at least as protective of Company as those set forth herein; and (iii) you must not misrepresent or imply, through any means, that the Derivatives made by or for you and/or any modified version of the FLUX Model you Distribute under your name and responsibility is an official product of the Company or has been endorsed, approved or validated by the Company, unless you are authorized by Company to do so in writing.
+
+ 4. Restrictions. You will not, and will not permit, assist or cause any third party to:
+ - a. use, modify, copy, reproduce, create Derivatives of, or Distribute the FLUX Model (or any Derivative thereof, or any data produced by the FLUX Model), in whole or in part, (i) for any commercial or production purposes, (ii) military purposes, (iii) purposes of surveillance, including any research or development relating to surveillance, (iv) biometric processing, (v) in any manner that infringes, misappropriates, or otherwise violates (or is likely to infringe, misappropriate, or otherwise violate) any third party’s legal rights, including rights of publicity or “digital replica” rights, (vi) in any unlawful, fraudulent, defamatory, or abusive activity, (vii) to generate unlawful content, including child sexual abuse material, or non-consensual intimate images; or (viii) in any manner that violates any applicable law and any privacy or security laws, rules, regulations, directives, or governmental requirements (including the General Data Privacy Regulation (Regulation (EU) 2016/679), the California Consumer Privacy Act, any and all laws governing the processing of biometric information, and the EU Artificial Intelligence Act (Regulation (EU) 2024/1689), as well as all amendments and successor laws to any of the foregoing);
+ - b. alter or remove copyright and other proprietary notices which appear on or in any portion of the FLUX Model;
+ - c. utilize any equipment, device, software, or other means to circumvent or remove any security or protection used by Company in connection with the FLUX Model, or to circumvent or remove any usage restrictions, or to enable functionality disabled by FLUX Model;
+ - d. offer or impose any terms on the FLUX Model that alter, restrict, or are inconsistent with the terms of this License;
+ - e. violate any applicable U.S. and non-U.S. export control and trade sanctions laws (“Export Laws”) in connection with your use or Distribution of any FLUX Model;
+ - f. directly or indirectly Distribute, export, or otherwise transfer FLUX Model (i) to any individual, entity, or country prohibited by Export Laws; (ii) to anyone on U.S. or non-U.S. government restricted parties lists; (iii) for any purpose prohibited by Export Laws, including nuclear, chemical or biological weapons, or missile technology applications; (iv) use or download FLUX Model if you or they are (a) located in a comprehensively sanctioned jurisdiction, (b) currently listed on any U.S. or non-U.S. restricted parties list, or (c) for any purpose prohibited by Export Laws; and (v) will not disguise your location through IP proxying or other methods.
+
+ 5. DISCLAIMERS. THE FLUX MODEL AND PROVIDED CONTENT FILTERS ARE PROVIDED “AS IS” AND “WITH ALL FAULTS” WITH NO WARRANTY OF ANY KIND, EXPRESS OR IMPLIED. COMPANY EXPRESSLY DISCLAIMS ALL REPRESENTATIONS AND WARRANTIES, EXPRESS OR IMPLIED, WHETHER BY STATUTE, CUSTOM, USAGE OR OTHERWISE AS TO ANY MATTERS RELATED TO THE FLUX MODEL AND PROVIDED CONTENT FILTERS, INCLUDING BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE, SATISFACTORY QUALITY, OR NON-INFRINGEMENT. COMPANY MAKES NO WARRANTIES OR REPRESENTATIONS THAT THE FLUX MODEL AND PROVIDED CONTENT FILTERS WILL BE ERROR FREE OR FREE OF VIRUSES OR OTHER HARMFUL COMPONENTS, OR PRODUCE ANY PARTICULAR RESULTS.
+
+ 6. LIMITATION OF LIABILITY. TO THE FULLEST EXTENT PERMITTED BY LAW, IN NO EVENT WILL COMPANY BE LIABLE TO YOU OR YOUR EMPLOYEES, AFFILIATES, USERS, OFFICERS OR DIRECTORS (A) UNDER ANY THEORY OF LIABILITY, WHETHER BASED IN CONTRACT, TORT, NEGLIGENCE, STRICT LIABILITY, WARRANTY, OR OTHERWISE UNDER THIS LICENSE, OR (B) FOR ANY INDIRECT, CONSEQUENTIAL, EXEMPLARY, INCIDENTAL, PUNITIVE OR SPECIAL DAMAGES OR LOST PROFITS, EVEN IF COMPANY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. THE FLUX MODEL, ITS CONSTITUENT COMPONENTS, PROVIDED CONTENT FILTERS, AND ANY OUTPUT (COLLECTIVELY, “MODEL MATERIALS”) ARE NOT DESIGNED OR INTENDED FOR USE IN ANY APPLICATION OR SITUATION WHERE FAILURE OR FAULT OF THE MODEL MATERIALS COULD REASONABLY BE ANTICIPATED TO LEAD TO SERIOUS INJURY OF ANY PERSON, INCLUDING POTENTIAL DISCRIMINATION OR VIOLATION OF AN INDIVIDUAL’S PRIVACY RIGHTS, OR TO SEVERE PHYSICAL, PROPERTY, OR ENVIRONMENTAL DAMAGE (EACH, A “HIGH-RISK USE”). IF YOU ELECT TO USE ANY OF THE MODEL MATERIALS FOR A HIGH-RISK USE, YOU DO SO AT YOUR OWN RISK. YOU AGREE TO DESIGN AND IMPLEMENT APPROPRIATE DECISION-MAKING AND RISK-MITIGATION PROCEDURES AND POLICIES IN CONNECTION WITH A HIGH-RISK USE SUCH THAT EVEN IF THERE IS A FAILURE OR FAULT IN ANY OF THE MODEL MATERIALS, THE SAFETY OF PERSONS OR PROPERTY AFFECTED BY THE ACTIVITY STAYS AT A LEVEL THAT IS REASONABLE, APPROPRIATE, AND LAWFUL FOR THE FIELD OF THE HIGH-RISK USE.
+
+ 7. INDEMNIFICATION. You will indemnify, defend and hold harmless Company and our subsidiaries and affiliates, and each of our respective shareholders, directors, officers, employees, agents, successors, and assigns (collectively, the “Company Parties”) from and against any losses, liabilities, damages, fines, penalties, and expenses (including reasonable attorneys’ fees) incurred by any Company Party in connection with any claim, demand, allegation, lawsuit, proceeding, or investigation (collectively, “Claims”) arising out of or related to (a) your access to or use of the FLUX Model (including in connection with any Output, results or data generated from such access or use, or from your access or use of any Content Filters), including any High-Risk Use; (b) your Content Filters, including your failure to implement any Content Filters where required by this License such as in Section 2(e); (c) your violation of this License; or (d) your violation, misappropriation or infringement of any rights of another (including intellectual property or other proprietary rights and privacy rights). You will promptly notify the Company Parties of any such Claims, and cooperate with Company Parties in defending such Claims. You will also grant the Company Parties sole control of the defense or settlement, at Company’s sole option, of any Claims. This indemnity is in addition to, and not in lieu of, any other indemnities or remedies set forth in a written agreement between you and Company or the other Company Parties.
+
+ 8. Termination; Survival.
+ - a. This License will automatically terminate upon any breach by you of the terms of this License.
+ - b. We may terminate this License, in whole or in part, at any time upon notice (including electronic) to you.
+ - c. If you initiate any legal action or proceedings against Company or any other entity (including a cross-claim or counterclaim in a lawsuit), alleging that the FLUX Model, any Derivative, or Provided Content Filters, or any part thereof, infringe upon intellectual property or other rights owned or licensable by you, then any licenses granted to you under this License will immediately terminate as of the date such legal action or claim is filed or initiated.
+ - d. Upon termination of this License, you must cease all use, access or Distribution of the FLUX Model, any Derivatives, and any Provided Content Filters. The following sections survive termination of this License: 2(c), 2(d), 4-11.
+
+ 9. Third Party Materials. The FLUX Model and Provided Content Filters may contain third-party software or other components (including free and open source software) (all of the foregoing, “Third Party Materials”), which are subject to the license terms of the respective third-party licensors. Your dealings or correspondence with third parties and your use of or interaction with any Third Party Materials are solely between you and the third party. Company does not control or endorse, and makes no representations or warranties regarding, any Third Party Materials, and your access to and use of such Third Party Materials are at your own risk.
+
+ 10. Trademarks. You have not been granted any trademark license as part of this License and may not use any name, logo or trademark associated with Company without the prior written permission of Company, except to the extent necessary to make the reference required in the Attribution Notice as specified above or as is reasonably necessary in describing the FLUX Model and its creators.
+
+ 11. General. This License will be governed and construed under the laws of the State of Delaware without regard to conflicts of law provisions. If any provision or part of a provision of this License is unlawful, void or unenforceable, that provision or part of the provision is deemed severed from this License, and will not affect the validity and enforceability of any remaining provisions. The failure of Company to exercise or enforce any right or provision of this License will not operate as a waiver of such right or provision. This License does not confer any third-party beneficiary rights upon any other person or entity. This License, together with the documentation, contains the entire understanding between you and Company regarding the subject matter of this License, and supersedes all other written or oral agreements and understandings between you and Company regarding such subject matter.
README.md CHANGED
@@ -1,3 +1,84 @@
- ---
- license: apache-2.0
- ---
+ ---
+ language:
+ - en
+ license: other
+ license_name: flux-dev-non-commercial-license
+ license_link: https://huggingface.co/black-forest-labs/FLUX.2-dev/blob/main/LICENSE.txt
+ base_model: black-forest-labs/FLUX.2-dev
+ tags:
+ - image-generation
+ - flux
+ - lora
+ - distillation
+ - turbo
+ pipeline_tag: text-to-image
+ library_name: diffusers
+ ---
+
+ # FLUX.2 [dev] Turbo LoRA
+
+ **FLUX.2 [dev] Turbo** is a distilled LoRA adapter for [FLUX.2 [dev]](https://huggingface.co/black-forest-labs/FLUX.2-dev) that enables high-quality image generation in just **8 inference steps**.
+
+ ![Example Text-to-Image](./example_t2i.png)
+
+ ---
+
+ ![Example Image Editing](./example_edit.png)
+
+ ## Key Features
+
+ - ⚡ **8-step inference** — 6x faster than the base model's typical 50 steps
+ - 🎨 **Quality preserved** — matches or surpasses the original FLUX.2 [dev] quality
+ - 🔌 **LoRA adapter** — lightweight and easy to integrate with existing FLUX.2 workflows
+ - 🖼️ **Multiple modes** — supports both text-to-image and image editing
+
+ ## Hosted API Endpoints
+
+ FLUX.2 [dev] Turbo is available through fal.ai hosted endpoints (see the client sketch after this list):
+ - [Text-to-Image](https://fal.ai/models/fal-ai/flux-2/turbo)
+ - [Image Editing](https://fal.ai/models/fal-ai/flux-2/turbo/edit)
+
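For quick tests against the hosted endpoints, the official `fal-client` package can be used. A minimal sketch; the argument names and response shape are assumptions based on typical fal.ai image endpoints, so check the endpoint page for the exact schema:

```python
import fal_client  # pip install fal-client; reads the FAL_KEY environment variable

# Submit a text-to-image request to the hosted turbo endpoint and wait for the result.
result = fal_client.subscribe(
    "fal-ai/flux-2/turbo",
    arguments={"prompt": "a chrome turbocharger, studio lighting"},  # assumed schema
)
print(result["images"][0]["url"])  # assumed response shape for fal image endpoints
```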
+
+ ## ComfyUI
+
+ ComfyUI-compatible weights are available in the `comfy/` directory, converted by [ByteZSzn](https://huggingface.co/ByteZSzn/Flux.2-Turbo-ComfyUI); a ready-made workflow is included as `comfy/Workflow.json`.
+
+ ## Usage
+
+ The snippet below covers text-to-image; an image-editing sketch follows it.
+
+ ```python
+ import torch
+ from diffusers import Flux2Pipeline
+
+ # Pre-shifted custom sigmas for 8-step turbo inference
+ TURBO_SIGMAS = [1.0, 0.6509, 0.4374, 0.2932, 0.1893, 0.1108, 0.0495, 0.00031]
+
+ pipe = Flux2Pipeline.from_pretrained(
+     "black-forest-labs/FLUX.2-dev",
+     torch_dtype=torch.bfloat16
+ ).to("cuda")
+
+ pipe.load_lora_weights(
+     "fal/FLUX.2-dev-Turbo",
+     weight_name="flux.2-turbo-lora.safetensors"
+ )
+
+ prompt = "Industrial product shot of a chrome turbocharger with glowing hot exhaust manifold, engraved text 'FLUX.2 [dev] Turbo by fal' on the compressor housing and 'fal' on the turbine wheel, gradient heat glow from orange to electric blue, studio lighting with dramatic shadows, shallow depth of field, engineering blueprint pattern in background."
+
+ image = pipe(
+     prompt=prompt,
+     sigmas=TURBO_SIGMAS,
+     guidance_scale=2.5,
+     height=1024,
+     width=1024,
+     num_inference_steps=8,
+ ).images[0]
+
+ image.save("output.png")
+ ```
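The card lists image editing among the supported modes but only demonstrates text-to-image. Below is a minimal editing sketch, assuming this diffusers build's `Flux2Pipeline` accepts an `image` argument carrying reference images (the same reference conditioning the bundled ComfyUI workflow wires through its ReferenceLatent nodes); verify the argument name against your pipeline's signature:

```python
from diffusers.utils import load_image

# Reuses `pipe` and TURBO_SIGMAS from the snippet above.
# ASSUMPTION: this Flux2Pipeline build takes `image=` for reference conditioning.
reference = load_image("./example_t2i.png")  # any local path or URL works

edited = pipe(
    prompt="Give the turbocharger a matte black finish, keep the engraved text",
    image=[reference],
    sigmas=TURBO_SIGMAS,
    guidance_scale=2.5,
    num_inference_steps=8,
).images[0]
edited.save("output_edit.png")
```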
+
+ ## License
+
+ This model inherits the [FLUX Non-Commercial License](https://huggingface.co/black-forest-labs/FLUX.2-dev/blob/main/LICENSE.txt) from the base model; a copy is included in this repository as `LICENSE.md`.
ae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:868fe7b343cc8f3a19dbcfcafbc3d5f888802be3f89bd81b65b3621a066ce8f3
+ size 336211292
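Each `ADDED` safetensors entry in this commit is a three-line Git LFS pointer (version, oid, size) rather than the binary itself. A small parser sketch for that format:

```python
def parse_lfs_pointer(text: str) -> dict:
    """Parse the three-line Git LFS pointer format shown above."""
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {"oid": fields["oid"].removeprefix("sha256:"), "size": int(fields["size"])}

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:868fe7b343cc8f3a19dbcfcafbc3d5f888802be3f89bd81b65b3621a066ce8f3
size 336211292"""
print(parse_lfs_pointer(pointer))  # {'oid': '868fe7...', 'size': 336211292}
```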
comfy/Flux2_00030_.png ADDED

Git LFS Details

  • SHA256: 66eedd6c3e9671b95b14181b7ef69f9d4670fe66950b6c7ff6f4a3b3a3aaf3f1
  • Pointer size: 132 Bytes
  • Size of remote file: 1.41 MB
comfy/Flux_2-Turbo-LoRA_comfyui.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:011487390b8020baf22a9d543930c90d74a4809b7241bee6b0622777b17b413b
+ size 2760814880
comfy/Workflow.json ADDED
@@ -0,0 +1,1395 @@
1
+ {
2
+ "id": "7c048efb-a059-44e2-970a-43e1eb472d0d",
3
+ "revision": 0,
4
+ "last_node_id": 67,
5
+ "last_link_id": 165,
6
+ "nodes": [
7
+ {
8
+ "id": 22,
9
+ "type": "BasicGuider",
10
+ "pos": [
11
+ -160,
12
+ 70
13
+ ],
14
+ "size": [
15
+ 222.3482666015625,
16
+ 46
17
+ ],
18
+ "flags": {},
19
+ "order": 22,
20
+ "mode": 0,
21
+ "inputs": [
22
+ {
23
+ "name": "model",
24
+ "type": "MODEL",
25
+ "link": 165
26
+ },
27
+ {
28
+ "name": "conditioning",
29
+ "type": "CONDITIONING",
30
+ "link": 153
31
+ }
32
+ ],
33
+ "outputs": [
34
+ {
35
+ "name": "GUIDER",
36
+ "type": "GUIDER",
37
+ "slot_index": 0,
38
+ "links": [
39
+ 30
40
+ ]
41
+ }
42
+ ],
43
+ "properties": {
44
+ "cnr_id": "comfy-core",
45
+ "ver": "0.3.71",
46
+ "Node name for S&R": "BasicGuider"
47
+ },
48
+ "widgets_values": []
49
+ },
50
+ {
51
+ "id": 40,
52
+ "type": "VAEEncode",
53
+ "pos": [
54
+ -405.919921875,
55
+ 880
56
+ ],
57
+ "size": [
58
+ 140,
59
+ 46
60
+ ],
61
+ "flags": {
62
+ "collapsed": true
63
+ },
64
+ "order": 17,
65
+ "mode": 4,
66
+ "inputs": [
67
+ {
68
+ "name": "pixels",
69
+ "type": "IMAGE",
70
+ "link": 122
71
+ },
72
+ {
73
+ "name": "vae",
74
+ "type": "VAE",
75
+ "link": 120
76
+ }
77
+ ],
78
+ "outputs": [
79
+ {
80
+ "name": "LATENT",
81
+ "type": "LATENT",
82
+ "links": [
83
+ 121
84
+ ]
85
+ }
86
+ ],
87
+ "properties": {
88
+ "cnr_id": "comfy-core",
89
+ "ver": "0.3.71",
90
+ "Node name for S&R": "VAEEncode"
91
+ },
92
+ "widgets_values": []
93
+ },
94
+ {
95
+ "id": 42,
96
+ "type": "LoadImage",
97
+ "pos": [
98
+ -540,
99
+ 400
100
+ ],
101
+ "size": [
102
+ 274.080078125,
103
+ 314
104
+ ],
105
+ "flags": {},
106
+ "order": 0,
107
+ "mode": 4,
108
+ "inputs": [],
109
+ "outputs": [
110
+ {
111
+ "name": "IMAGE",
112
+ "type": "IMAGE",
113
+ "links": [
114
+ 123
115
+ ]
116
+ },
117
+ {
118
+ "name": "MASK",
119
+ "type": "MASK",
120
+ "links": null
121
+ }
122
+ ],
123
+ "properties": {
124
+ "cnr_id": "comfy-core",
125
+ "ver": "0.3.71",
126
+ "Node name for S&R": "LoadImage"
127
+ },
128
+ "widgets_values": [
129
+ "image_flux2_input_image.png",
130
+ "image"
131
+ ]
132
+ },
133
+ {
134
+ "id": 44,
135
+ "type": "VAEEncode",
136
+ "pos": [
137
+ -840,
138
+ 880
139
+ ],
140
+ "size": [
141
+ 140,
142
+ 46
143
+ ],
144
+ "flags": {
145
+ "collapsed": true
146
+ },
147
+ "order": 18,
148
+ "mode": 4,
149
+ "inputs": [
150
+ {
151
+ "name": "pixels",
152
+ "type": "IMAGE",
153
+ "link": 126
154
+ },
155
+ {
156
+ "name": "vae",
157
+ "type": "VAE",
158
+ "link": 127
159
+ }
160
+ ],
161
+ "outputs": [
162
+ {
163
+ "name": "LATENT",
164
+ "type": "LATENT",
165
+ "links": [
166
+ 125
167
+ ]
168
+ }
169
+ ],
170
+ "properties": {
171
+ "cnr_id": "comfy-core",
172
+ "ver": "0.3.71",
173
+ "Node name for S&R": "VAEEncode"
174
+ },
175
+ "widgets_values": []
176
+ },
177
+ {
178
+ "id": 39,
179
+ "type": "ReferenceLatent",
180
+ "pos": [
181
+ -463.6328125,
182
+ 920
183
+ ],
184
+ "size": [
185
+ 204.134765625,
186
+ 46
187
+ ],
188
+ "flags": {},
189
+ "order": 21,
190
+ "mode": 4,
191
+ "inputs": [
192
+ {
193
+ "name": "conditioning",
194
+ "type": "CONDITIONING",
195
+ "link": 145
196
+ },
197
+ {
198
+ "name": "latent",
199
+ "shape": 7,
200
+ "type": "LATENT",
201
+ "link": 121
202
+ }
203
+ ],
204
+ "outputs": [
205
+ {
206
+ "name": "CONDITIONING",
207
+ "type": "CONDITIONING",
208
+ "links": [
209
+ 153
210
+ ]
211
+ }
212
+ ],
213
+ "properties": {
214
+ "cnr_id": "comfy-core",
215
+ "ver": "0.3.71",
216
+ "Node name for S&R": "ReferenceLatent"
217
+ },
218
+ "widgets_values": []
219
+ },
220
+ {
221
+ "id": 26,
222
+ "type": "FluxGuidance",
223
+ "pos": [
224
+ -520,
225
+ 220
226
+ ],
227
+ "size": [
228
+ 317.4000244140625,
229
+ 58
230
+ ],
231
+ "flags": {},
232
+ "order": 19,
233
+ "mode": 0,
234
+ "inputs": [
235
+ {
236
+ "name": "conditioning",
237
+ "type": "CONDITIONING",
238
+ "link": 41
239
+ }
240
+ ],
241
+ "outputs": [
242
+ {
243
+ "name": "CONDITIONING",
244
+ "type": "CONDITIONING",
245
+ "slot_index": 0,
246
+ "links": [
247
+ 144
248
+ ]
249
+ }
250
+ ],
251
+ "properties": {
252
+ "cnr_id": "comfy-core",
253
+ "ver": "0.3.71",
254
+ "Node name for S&R": "FluxGuidance"
255
+ },
256
+ "widgets_values": [
257
+ 4
258
+ ],
259
+ "color": "#233",
260
+ "bgcolor": "#355"
261
+ },
262
+ {
263
+ "id": 16,
264
+ "type": "KSamplerSelect",
265
+ "pos": [
266
+ -160,
267
+ 160
268
+ ],
269
+ "size": [
270
+ 222.3482666015625,
271
+ 58
272
+ ],
273
+ "flags": {},
274
+ "order": 1,
275
+ "mode": 0,
276
+ "inputs": [],
277
+ "outputs": [
278
+ {
279
+ "name": "SAMPLER",
280
+ "type": "SAMPLER",
281
+ "links": [
282
+ 19
283
+ ]
284
+ }
285
+ ],
286
+ "properties": {
287
+ "cnr_id": "comfy-core",
288
+ "ver": "0.3.71",
289
+ "Node name for S&R": "KSamplerSelect"
290
+ },
291
+ "widgets_values": [
292
+ "euler"
293
+ ]
294
+ },
295
+ {
296
+ "id": 43,
297
+ "type": "ReferenceLatent",
298
+ "pos": [
299
+ -910,
300
+ 920
301
+ ],
302
+ "size": [
303
+ 204.134765625,
304
+ 46
305
+ ],
306
+ "flags": {},
307
+ "order": 20,
308
+ "mode": 4,
309
+ "inputs": [
310
+ {
311
+ "name": "conditioning",
312
+ "type": "CONDITIONING",
313
+ "link": 144
314
+ },
315
+ {
316
+ "name": "latent",
317
+ "shape": 7,
318
+ "type": "LATENT",
319
+ "link": 125
320
+ }
321
+ ],
322
+ "outputs": [
323
+ {
324
+ "name": "CONDITIONING",
325
+ "type": "CONDITIONING",
326
+ "links": [
327
+ 145
328
+ ]
329
+ }
330
+ ],
331
+ "properties": {
332
+ "cnr_id": "comfy-core",
333
+ "ver": "0.3.71",
334
+ "Node name for S&R": "ReferenceLatent"
335
+ },
336
+ "widgets_values": []
337
+ },
338
+ {
339
+ "id": 50,
340
+ "type": "PrimitiveNode",
341
+ "pos": [
342
+ -160,
343
+ 470
344
+ ],
345
+ "size": [
346
+ 210,
347
+ 82
348
+ ],
349
+ "flags": {},
350
+ "order": 2,
351
+ "mode": 0,
352
+ "inputs": [],
353
+ "outputs": [
354
+ {
355
+ "name": "INT",
356
+ "type": "INT",
357
+ "widget": {
358
+ "name": "width"
359
+ },
360
+ "links": [
361
+ 135,
362
+ 136
363
+ ]
364
+ }
365
+ ],
366
+ "title": "width",
367
+ "properties": {
368
+ "Run widget replace on values": false
369
+ },
370
+ "widgets_values": [
371
+ 1248,
372
+ "fixed"
373
+ ],
374
+ "color": "#223",
375
+ "bgcolor": "#335"
376
+ },
377
+ {
378
+ "id": 51,
379
+ "type": "PrimitiveNode",
380
+ "pos": [
381
+ -160,
382
+ 590
383
+ ],
384
+ "size": [
385
+ 210,
386
+ 82
387
+ ],
388
+ "flags": {},
389
+ "order": 3,
390
+ "mode": 0,
391
+ "inputs": [],
392
+ "outputs": [
393
+ {
394
+ "name": "INT",
395
+ "type": "INT",
396
+ "widget": {
397
+ "name": "height"
398
+ },
399
+ "links": [
400
+ 137,
401
+ 138
402
+ ]
403
+ }
404
+ ],
405
+ "title": "height",
406
+ "properties": {
407
+ "Run widget replace on values": false
408
+ },
409
+ "widgets_values": [
410
+ 832,
411
+ "fixed"
412
+ ],
413
+ "color": "#223",
414
+ "bgcolor": "#335"
415
+ },
416
+ {
417
+ "id": 10,
418
+ "type": "VAELoader",
419
+ "pos": [
420
+ -973.1827364834872,
421
+ 230
422
+ ],
423
+ "size": [
424
+ 298.1818181818182,
425
+ 60.429901123046875
426
+ ],
427
+ "flags": {},
428
+ "order": 4,
429
+ "mode": 0,
430
+ "inputs": [],
431
+ "outputs": [
432
+ {
433
+ "name": "VAE",
434
+ "type": "VAE",
435
+ "slot_index": 0,
436
+ "links": [
437
+ 120,
438
+ 127,
439
+ 159
440
+ ]
441
+ }
442
+ ],
443
+ "properties": {
444
+ "cnr_id": "comfy-core",
445
+ "ver": "0.3.71",
446
+ "Node name for S&R": "VAELoader",
447
+ "models": [
448
+ {
449
+ "name": "flux2-vae.safetensors",
450
+ "url": "https://huggingface.co/Comfy-Org/flux2-dev/resolve/main/split_files/vae/flux2-vae.safetensors",
451
+ "directory": "vae"
452
+ }
453
+ ]
454
+ },
455
+ "widgets_values": [
456
+ "flux2-vae.safetensors"
457
+ ]
458
+ },
459
+ {
460
+ "id": 47,
461
+ "type": "EmptyFlux2LatentImage",
462
+ "pos": [
463
+ 110,
464
+ 500
465
+ ],
466
+ "size": [
467
+ 270,
468
+ 106
469
+ ],
470
+ "flags": {},
471
+ "order": 12,
472
+ "mode": 0,
473
+ "inputs": [
474
+ {
475
+ "name": "width",
476
+ "type": "INT",
477
+ "widget": {
478
+ "name": "width"
479
+ },
480
+ "link": 135
481
+ },
482
+ {
483
+ "name": "height",
484
+ "type": "INT",
485
+ "widget": {
486
+ "name": "height"
487
+ },
488
+ "link": 137
489
+ }
490
+ ],
491
+ "outputs": [
492
+ {
493
+ "name": "LATENT",
494
+ "type": "LATENT",
495
+ "links": [
496
+ 161
497
+ ]
498
+ }
499
+ ],
500
+ "properties": {
501
+ "cnr_id": "comfy-core",
502
+ "ver": "0.3.71",
503
+ "Node name for S&R": "EmptyFlux2LatentImage"
504
+ },
505
+ "widgets_values": [
506
+ 1248,
507
+ 832,
508
+ 1
509
+ ]
510
+ },
511
+ {
512
+ "id": 13,
513
+ "type": "SamplerCustomAdvanced",
514
+ "pos": [
515
+ 90,
516
+ -40
517
+ ],
518
+ "size": [
519
+ 272.3617858886719,
520
+ 124.53733825683594
521
+ ],
522
+ "flags": {},
523
+ "order": 23,
524
+ "mode": 0,
525
+ "inputs": [
526
+ {
527
+ "name": "noise",
528
+ "type": "NOISE",
529
+ "link": 37
530
+ },
531
+ {
532
+ "name": "guider",
533
+ "type": "GUIDER",
534
+ "link": 30
535
+ },
536
+ {
537
+ "name": "sampler",
538
+ "type": "SAMPLER",
539
+ "link": 19
540
+ },
541
+ {
542
+ "name": "sigmas",
543
+ "type": "SIGMAS",
544
+ "link": 132
545
+ },
546
+ {
547
+ "name": "latent_image",
548
+ "type": "LATENT",
549
+ "link": 161
550
+ }
551
+ ],
552
+ "outputs": [
553
+ {
554
+ "name": "output",
555
+ "type": "LATENT",
556
+ "slot_index": 0,
557
+ "links": [
558
+ 24
559
+ ]
560
+ },
561
+ {
562
+ "name": "denoised_output",
563
+ "type": "LATENT",
564
+ "links": null
565
+ }
566
+ ],
567
+ "properties": {
568
+ "cnr_id": "comfy-core",
569
+ "ver": "0.3.71",
570
+ "Node name for S&R": "SamplerCustomAdvanced"
571
+ },
572
+ "widgets_values": []
573
+ },
574
+ {
575
+ "id": 41,
576
+ "type": "ImageScaleToTotalPixels",
577
+ "pos": [
578
+ -535.919921875,
579
+ 750
580
+ ],
581
+ "size": [
582
+ 270,
583
+ 106
584
+ ],
585
+ "flags": {},
586
+ "order": 11,
587
+ "mode": 4,
588
+ "inputs": [
589
+ {
590
+ "name": "image",
591
+ "type": "IMAGE",
592
+ "link": 123
593
+ }
594
+ ],
595
+ "outputs": [
596
+ {
597
+ "name": "IMAGE",
598
+ "type": "IMAGE",
599
+ "links": [
600
+ 122
601
+ ]
602
+ }
603
+ ],
604
+ "properties": {
605
+ "cnr_id": "comfy-core",
606
+ "ver": "0.3.71",
607
+ "Node name for S&R": "ImageScaleToTotalPixels"
608
+ },
609
+ "widgets_values": [
610
+ "lanczos",
611
+ 1,
612
+ 1
613
+ ]
614
+ },
615
+ {
616
+ "id": 8,
617
+ "type": "VAEDecode",
618
+ "pos": [
619
+ -140,
620
+ 750
621
+ ],
622
+ "size": [
623
+ 210,
624
+ 46
625
+ ],
626
+ "flags": {},
627
+ "order": 24,
628
+ "mode": 0,
629
+ "inputs": [
630
+ {
631
+ "name": "samples",
632
+ "type": "LATENT",
633
+ "link": 24
634
+ },
635
+ {
636
+ "name": "vae",
637
+ "type": "VAE",
638
+ "link": 159
639
+ }
640
+ ],
641
+ "outputs": [
642
+ {
643
+ "name": "IMAGE",
644
+ "type": "IMAGE",
645
+ "slot_index": 0,
646
+ "links": [
647
+ 9
648
+ ]
649
+ }
650
+ ],
651
+ "properties": {
652
+ "cnr_id": "comfy-core",
653
+ "ver": "0.3.71",
654
+ "Node name for S&R": "VAEDecode"
655
+ },
656
+ "widgets_values": []
657
+ },
658
+ {
659
+ "id": 45,
660
+ "type": "ImageScaleToTotalPixels",
661
+ "pos": [
662
+ -970,
663
+ 750
664
+ ],
665
+ "size": [
666
+ 270,
667
+ 106
668
+ ],
669
+ "flags": {},
670
+ "order": 14,
671
+ "mode": 4,
672
+ "inputs": [
673
+ {
674
+ "name": "image",
675
+ "type": "IMAGE",
676
+ "link": 128
677
+ }
678
+ ],
679
+ "outputs": [
680
+ {
681
+ "name": "IMAGE",
682
+ "type": "IMAGE",
683
+ "links": [
684
+ 126
685
+ ]
686
+ }
687
+ ],
688
+ "properties": {
689
+ "cnr_id": "comfy-core",
690
+ "ver": "0.3.71",
691
+ "Node name for S&R": "ImageScaleToTotalPixels"
692
+ },
693
+ "widgets_values": [
694
+ "lanczos",
695
+ 1,
696
+ 1
697
+ ]
698
+ },
699
+ {
700
+ "id": 60,
701
+ "type": "Note",
702
+ "pos": [
703
+ -980,
704
+ 1070
705
+ ],
706
+ "size": [
707
+ 460,
708
+ 130
709
+ ],
710
+ "flags": {},
711
+ "order": 5,
712
+ "mode": 0,
713
+ "inputs": [],
714
+ "outputs": [],
715
+ "properties": {},
716
+ "widgets_values": [
717
+ "Unbypass (CTRL-B) the ReferenceLatent nodes to give ref images.\n\nIf you want to add more reference images, you can add multiple reference images by following the pattern.\n\nIf you don't want any reference image, just select all ReferenceLatent nodes and then use CTRL-B to bypass them, turning the workflow into a Text to Image workflow."
718
+ ],
719
+ "color": "#432",
720
+ "bgcolor": "#653"
721
+ },
722
+ {
723
+ "id": 6,
724
+ "type": "CLIPTextEncode",
725
+ "pos": [
726
+ -630,
727
+ -50
728
+ ],
729
+ "size": [
730
+ 430,
731
+ 200
732
+ ],
733
+ "flags": {},
734
+ "order": 16,
735
+ "mode": 0,
736
+ "inputs": [
737
+ {
738
+ "name": "clip",
739
+ "type": "CLIP",
740
+ "link": 117
741
+ }
742
+ ],
743
+ "outputs": [
744
+ {
745
+ "name": "CONDITIONING",
746
+ "type": "CONDITIONING",
747
+ "slot_index": 0,
748
+ "links": [
749
+ 41
750
+ ]
751
+ }
752
+ ],
753
+ "title": "CLIP Text Encode (Positive Prompt)",
754
+ "properties": {
755
+ "cnr_id": "comfy-core",
756
+ "ver": "0.3.71",
757
+ "Node name for S&R": "CLIPTextEncode"
758
+ },
759
+ "widgets_values": [
760
+ "The woman is wearing a small pale yellow knitted beanie, with a white fabric patch on the front right, embroidered with big gray text “FLUX.2 COMFY.” Keep the face"
761
+ ],
762
+ "color": "#232",
763
+ "bgcolor": "#353"
764
+ },
765
+ {
766
+ "id": 46,
767
+ "type": "LoadImage",
768
+ "pos": [
769
+ -970,
770
+ 390
771
+ ],
772
+ "size": [
773
+ 274.080078125,
774
+ 314
775
+ ],
776
+ "flags": {},
777
+ "order": 6,
778
+ "mode": 4,
779
+ "inputs": [],
780
+ "outputs": [
781
+ {
782
+ "name": "IMAGE",
783
+ "type": "IMAGE",
784
+ "links": [
785
+ 128
786
+ ]
787
+ },
788
+ {
789
+ "name": "MASK",
790
+ "type": "MASK",
791
+ "links": null
792
+ }
793
+ ],
794
+ "properties": {
795
+ "cnr_id": "comfy-core",
796
+ "ver": "0.3.71",
797
+ "Node name for S&R": "LoadImage"
798
+ },
799
+ "widgets_values": [
800
+ "image_flux2_input_image.png",
801
+ "image"
802
+ ]
803
+ },
804
+ {
805
+ "id": 9,
806
+ "type": "SaveImage",
807
+ "pos": [
808
+ 410,
809
+ -90
810
+ ],
811
+ "size": [
812
+ 985.3012084960938,
813
+ 1060.3828125
814
+ ],
815
+ "flags": {},
816
+ "order": 25,
817
+ "mode": 0,
818
+ "inputs": [
819
+ {
820
+ "name": "images",
821
+ "type": "IMAGE",
822
+ "link": 9
823
+ }
824
+ ],
825
+ "outputs": [],
826
+ "properties": {
827
+ "cnr_id": "comfy-core",
828
+ "ver": "0.3.71",
829
+ "Node name for S&R": "SaveImage"
830
+ },
831
+ "widgets_values": [
832
+ "Flux2"
833
+ ]
834
+ },
835
+ {
836
+ "id": 12,
837
+ "type": "UNETLoader",
838
+ "pos": [
839
+ -973.1827364834872,
840
+ -42
841
+ ],
842
+ "size": [
843
+ 298.1818181818182,
844
+ 82
845
+ ],
846
+ "flags": {},
847
+ "order": 7,
848
+ "mode": 0,
849
+ "inputs": [],
850
+ "outputs": [
851
+ {
852
+ "name": "MODEL",
853
+ "type": "MODEL",
854
+ "slot_index": 0,
855
+ "links": [
856
+ 164
857
+ ]
858
+ }
859
+ ],
860
+ "properties": {
861
+ "cnr_id": "comfy-core",
862
+ "ver": "0.3.71",
863
+ "Node name for S&R": "UNETLoader",
864
+ "models": [
865
+ {
866
+ "name": "flux2_dev_fp8mixed.safetensors",
867
+ "url": "https://huggingface.co/Comfy-Org/flux2-dev/resolve/main/split_files/diffusion_models/flux2_dev_fp8mixed.safetensors",
868
+ "directory": "diffusion_models"
869
+ }
870
+ ]
871
+ },
872
+ "widgets_values": [
873
+ "flux2_dev_fp8mixed.safetensors",
874
+ "default"
875
+ ]
876
+ },
877
+ {
878
+ "id": 66,
879
+ "type": "MarkdownNote",
880
+ "pos": [
881
+ -1520,
882
+ -90
883
+ ],
884
+ "size": [
885
+ 520,
886
+ 500
887
+ ],
888
+ "flags": {
889
+ "collapsed": false
890
+ },
891
+ "order": 8,
892
+ "mode": 0,
893
+ "inputs": [],
894
+ "outputs": [],
895
+ "title": "Model links",
896
+ "properties": {},
897
+ "widgets_values": [
898
+ "We are using quantized weights in this workflow, the original flux 2 repo is [here](https://huggingface.co/black-forest-labs/FLUX.2-dev/)\n## Report issue\n\nIf you found any issues when running this workflow, [report template issue here](https://github.com/Comfy-Org/workflow_templates/issues)\n\n\n## Model links\n\n**text_encoders**\n\n- [mistral_3_small_flux2_bf16.safetensors](https://huggingface.co/Comfy-Org/flux2-dev/resolve/main/split_files/text_encoders/mistral_3_small_flux2_bf16.safetensors)\n\n**loras**\n\n- [flux2_berthe_morisot.safetensors](https://huggingface.co/ostris/flux2_berthe_morisot/resolve/main/flux2_berthe_morisot.safetensors)\n\n**diffusion_models**\n\n- [flux2_dev_fp8mixed.safetensors](https://huggingface.co/Comfy-Org/flux2-dev/resolve/main/split_files/diffusion_models/flux2_dev_fp8mixed.safetensors)\n\n**vae**\n\n- [flux2-vae.safetensors](https://huggingface.co/Comfy-Org/flux2-dev/resolve/main/split_files/vae/flux2-vae.safetensors)\n\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 text_encoders/\n│ │ └── mistral_3_small_flux2_bf16.safetensors\n│ ├── 📂 loras/\n│ │ └── flux2_berthe_morisot.safetensors\n│ ├── 📂 diffusion_models/\n│ │ └── flux2_dev_fp8mixed.safetensors\n│ └── 📂 vae/\n│ └── flux2-vae.safetensors\n```\n"
899
+ ],
900
+ "color": "#432",
901
+ "bgcolor": "#653"
902
+ },
903
+ {
904
+ "id": 38,
905
+ "type": "CLIPLoader",
906
+ "pos": [
907
+ -973.1827364834872,
908
+ 82
909
+ ],
910
+ "size": [
911
+ 298.1818181818182,
912
+ 106
913
+ ],
914
+ "flags": {},
915
+ "order": 9,
916
+ "mode": 0,
917
+ "inputs": [],
918
+ "outputs": [
919
+ {
920
+ "name": "CLIP",
921
+ "type": "CLIP",
922
+ "links": [
923
+ 117
924
+ ]
925
+ }
926
+ ],
927
+ "properties": {
928
+ "cnr_id": "comfy-core",
929
+ "ver": "0.3.71",
930
+ "Node name for S&R": "CLIPLoader",
931
+ "models": [
932
+ {
933
+ "name": "mistral_3_small_flux2_bf16.safetensors",
934
+ "url": "https://huggingface.co/Comfy-Org/flux2-dev/resolve/main/split_files/text_encoders/mistral_3_small_flux2_bf16.safetensors",
935
+ "directory": "text_encoders"
936
+ }
937
+ ]
938
+ },
939
+ "widgets_values": [
940
+ "mistral_3_small_flux2_fp8.safetensors",
941
+ "flux2",
942
+ "default"
943
+ ]
944
+ },
945
+ {
946
+ "id": 67,
947
+ "type": "LoraLoaderModelOnly",
948
+ "pos": [
949
+ -550,
950
+ -230
951
+ ],
952
+ "size": [
953
+ 270,
954
+ 82
955
+ ],
956
+ "flags": {},
957
+ "order": 15,
958
+ "mode": 0,
959
+ "inputs": [
960
+ {
961
+ "name": "model",
962
+ "type": "MODEL",
963
+ "link": 164
964
+ }
965
+ ],
966
+ "outputs": [
967
+ {
968
+ "name": "MODEL",
969
+ "type": "MODEL",
970
+ "links": [
971
+ 165
972
+ ]
973
+ }
974
+ ],
975
+ "properties": {
976
+ "cnr_id": "comfy-core",
977
+ "ver": "0.6.0",
978
+ "Node name for S&R": "LoraLoaderModelOnly",
979
+ "models": [
980
+ {
981
+ "name": "flux2_berthe_morisot.safetensors",
982
+ "url": "https://huggingface.co/ostris/flux2_berthe_morisot/resolve/main/flux2_berthe_morisot.safetensors",
983
+ "directory": "loras"
984
+ }
985
+ ]
986
+ },
987
+ "widgets_values": [
988
+ "Flux_2-Turbo-LoRA_comfyui.safetensors",
989
+ 1
990
+ ]
991
+ },
992
+ {
993
+ "id": 25,
994
+ "type": "RandomNoise",
995
+ "pos": [
996
+ -160,
997
+ -50
998
+ ],
999
+ "size": [
1000
+ 222.3482666015625,
1001
+ 82
1002
+ ],
1003
+ "flags": {},
1004
+ "order": 10,
1005
+ "mode": 0,
1006
+ "inputs": [],
1007
+ "outputs": [
1008
+ {
1009
+ "name": "NOISE",
1010
+ "type": "NOISE",
1011
+ "links": [
1012
+ 37
1013
+ ]
1014
+ }
1015
+ ],
1016
+ "properties": {
1017
+ "cnr_id": "comfy-core",
1018
+ "ver": "0.3.71",
1019
+ "Node name for S&R": "RandomNoise"
1020
+ },
1021
+ "widgets_values": [
1022
+ 108865988415532,
1023
+ "randomize"
1024
+ ]
1025
+ },
1026
+ {
1027
+ "id": 48,
1028
+ "type": "Flux2Scheduler",
1029
+ "pos": [
1030
+ -160,
1031
+ 260
1032
+ ],
1033
+ "size": [
1034
+ 222.3482666015625,
1035
+ 106
1036
+ ],
1037
+ "flags": {},
1038
+ "order": 13,
1039
+ "mode": 0,
1040
+ "inputs": [
1041
+ {
1042
+ "name": "width",
1043
+ "type": "INT",
1044
+ "widget": {
1045
+ "name": "width"
1046
+ },
1047
+ "link": 136
1048
+ },
1049
+ {
1050
+ "name": "height",
1051
+ "type": "INT",
1052
+ "widget": {
1053
+ "name": "height"
1054
+ },
1055
+ "link": 138
1056
+ }
1057
+ ],
1058
+ "outputs": [
1059
+ {
1060
+ "name": "SIGMAS",
1061
+ "type": "SIGMAS",
1062
+ "links": [
1063
+ 132
1064
+ ]
1065
+ }
1066
+ ],
1067
+ "properties": {
1068
+ "cnr_id": "comfy-core",
1069
+ "ver": "0.3.71",
1070
+ "Node name for S&R": "Flux2Scheduler"
1071
+ },
1072
+ "widgets_values": [
1073
+ 8,
1074
+ 1248,
1075
+ 832
1076
+ ]
1077
+ }
1078
+ ],
1079
+ "links": [
1080
+ [
1081
+ 9,
1082
+ 8,
1083
+ 0,
1084
+ 9,
1085
+ 0,
1086
+ "IMAGE"
1087
+ ],
1088
+ [
1089
+ 19,
1090
+ 16,
1091
+ 0,
1092
+ 13,
1093
+ 2,
1094
+ "SAMPLER"
1095
+ ],
1096
+ [
1097
+ 24,
1098
+ 13,
1099
+ 0,
1100
+ 8,
1101
+ 0,
1102
+ "LATENT"
1103
+ ],
1104
+ [
1105
+ 30,
1106
+ 22,
1107
+ 0,
1108
+ 13,
1109
+ 1,
1110
+ "GUIDER"
1111
+ ],
1112
+ [
1113
+ 37,
1114
+ 25,
1115
+ 0,
1116
+ 13,
1117
+ 0,
1118
+ "NOISE"
1119
+ ],
1120
+ [
1121
+ 41,
1122
+ 6,
1123
+ 0,
1124
+ 26,
1125
+ 0,
1126
+ "CONDITIONING"
1127
+ ],
1128
+ [
1129
+ 117,
1130
+ 38,
1131
+ 0,
1132
+ 6,
1133
+ 0,
1134
+ "CLIP"
1135
+ ],
1136
+ [
1137
+ 120,
1138
+ 10,
1139
+ 0,
1140
+ 40,
1141
+ 1,
1142
+ "VAE"
1143
+ ],
1144
+ [
1145
+ 121,
1146
+ 40,
1147
+ 0,
1148
+ 39,
1149
+ 1,
1150
+ "LATENT"
1151
+ ],
1152
+ [
1153
+ 122,
1154
+ 41,
1155
+ 0,
1156
+ 40,
1157
+ 0,
1158
+ "IMAGE"
1159
+ ],
1160
+ [
1161
+ 123,
1162
+ 42,
1163
+ 0,
1164
+ 41,
1165
+ 0,
1166
+ "IMAGE"
1167
+ ],
1168
+ [
1169
+ 125,
1170
+ 44,
1171
+ 0,
1172
+ 43,
1173
+ 1,
1174
+ "LATENT"
1175
+ ],
1176
+ [
1177
+ 126,
1178
+ 45,
1179
+ 0,
1180
+ 44,
1181
+ 0,
1182
+ "IMAGE"
1183
+ ],
1184
+ [
1185
+ 127,
1186
+ 10,
1187
+ 0,
1188
+ 44,
1189
+ 1,
1190
+ "VAE"
1191
+ ],
1192
+ [
1193
+ 128,
1194
+ 46,
1195
+ 0,
1196
+ 45,
1197
+ 0,
1198
+ "IMAGE"
1199
+ ],
1200
+ [
1201
+ 132,
1202
+ 48,
1203
+ 0,
1204
+ 13,
1205
+ 3,
1206
+ "SIGMAS"
1207
+ ],
1208
+ [
1209
+ 135,
1210
+ 50,
1211
+ 0,
1212
+ 47,
1213
+ 0,
1214
+ "INT"
1215
+ ],
1216
+ [
1217
+ 136,
1218
+ 50,
1219
+ 0,
1220
+ 48,
1221
+ 0,
1222
+ "INT"
1223
+ ],
1224
+ [
1225
+ 137,
1226
+ 51,
1227
+ 0,
1228
+ 47,
1229
+ 1,
1230
+ "INT"
1231
+ ],
1232
+ [
1233
+ 138,
1234
+ 51,
1235
+ 0,
1236
+ 48,
1237
+ 1,
1238
+ "INT"
1239
+ ],
1240
+ [
1241
+ 144,
1242
+ 26,
1243
+ 0,
1244
+ 43,
1245
+ 0,
1246
+ "CONDITIONING"
1247
+ ],
1248
+ [
1249
+ 145,
1250
+ 43,
1251
+ 0,
1252
+ 39,
1253
+ 0,
1254
+ "CONDITIONING"
1255
+ ],
1256
+ [
1257
+ 153,
1258
+ 39,
1259
+ 0,
1260
+ 22,
1261
+ 1,
1262
+ "CONDITIONING"
1263
+ ],
1264
+ [
1265
+ 159,
1266
+ 10,
1267
+ 0,
1268
+ 8,
1269
+ 1,
1270
+ "VAE"
1271
+ ],
1272
+ [
1273
+ 161,
1274
+ 47,
1275
+ 0,
1276
+ 13,
1277
+ 4,
1278
+ "LATENT"
1279
+ ],
1280
+ [
1281
+ 164,
1282
+ 12,
1283
+ 0,
1284
+ 67,
1285
+ 0,
1286
+ "MODEL"
1287
+ ],
1288
+ [
1289
+ 165,
1290
+ 67,
1291
+ 0,
1292
+ 22,
1293
+ 0,
1294
+ "MODEL"
1295
+ ]
1296
+ ],
1297
+ "groups": [
1298
+ {
1299
+ "id": 1,
1300
+ "title": "Step 1 - Upload models",
1301
+ "bounding": [
1302
+ -980,
1303
+ -120,
1304
+ 318.18181818181813,
1305
+ 416.0299011230469
1306
+ ],
1307
+ "color": "#3f789e",
1308
+ "font_size": 24,
1309
+ "flags": {}
1310
+ },
1311
+ {
1312
+ "id": 2,
1313
+ "title": "Custom sampler",
1314
+ "bounding": [
1315
+ -170,
1316
+ -120,
1317
+ 558.5359191894531,
1318
+ 501.6
1319
+ ],
1320
+ "color": "#3f789e",
1321
+ "font_size": 24,
1322
+ "flags": {}
1323
+ },
1324
+ {
1325
+ "id": 3,
1326
+ "title": "Step 4- Image size",
1327
+ "bounding": [
1328
+ -170,
1329
+ 400,
1330
+ 560,
1331
+ 285.6
1332
+ ],
1333
+ "color": "#3f789e",
1334
+ "font_size": 24,
1335
+ "flags": {}
1336
+ },
1337
+ {
1338
+ "id": 4,
1339
+ "title": "Step2 - Prompt",
1340
+ "bounding": [
1341
+ -640,
1342
+ -120,
1343
+ 450,
1344
+ 420
1345
+ ],
1346
+ "color": "#3f789e",
1347
+ "font_size": 24,
1348
+ "flags": {}
1349
+ },
1350
+ {
1351
+ "id": 5,
1352
+ "title": "Step 3- Reference images",
1353
+ "bounding": [
1354
+ -980,
1355
+ 320,
1356
+ 790,
1357
+ 700
1358
+ ],
1359
+ "color": "#3f789e",
1360
+ "font_size": 24,
1361
+ "flags": {}
1362
+ },
1363
+ {
1364
+ "id": 6,
1365
+ "title": "Ctrl-B to Enable",
1366
+ "bounding": [
1367
+ -640,
1368
+ -310,
1369
+ 450,
1370
+ 170
1371
+ ],
1372
+ "color": "#3f789e",
1373
+ "font_size": 24,
1374
+ "flags": {}
1375
+ }
1376
+ ],
1377
+ "config": {},
1378
+ "extra": {
1379
+ "ds": {
1380
+ "scale": 0.8907171599668258,
1381
+ "offset": [
1382
+ 1485.8806212299676,
1383
+ 483.18657780300765
1384
+ ]
1385
+ },
1386
+ "frontendVersion": "1.34.9",
1387
+ "workflowRendererVersion": "LG",
1388
+ "groupNodes": {},
1389
+ "VHS_latentpreview": false,
1390
+ "VHS_latentpreviewrate": 0,
1391
+ "VHS_MetadataImage": true,
1392
+ "VHS_KeepIntermediate": true
1393
+ },
1394
+ "version": 0.4
1395
+ }
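The workflow above is a standard ComfyUI graph: a JSON object whose `nodes` array carries typed nodes and whose `links` array wires their slots together. A quick way to check that a local ComfyUI install provides every node type the graph needs:

```python
import json

# List the node types comfy/Workflow.json depends on, so missing custom nodes
# can be spotted before loading the graph into ComfyUI.
with open("comfy/Workflow.json") as f:
    workflow = json.load(f)

print(sorted({node["type"] for node in workflow["nodes"]}))
```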
example_edit.png ADDED

Git LFS Details

  • SHA256: 0372a83c07acc7bba7e7d92d08b5e6a8d96771e46962d59f99f107a62498c0f1
  • Pointer size: 132 Bytes
  • Size of remote file: 5.01 MB
example_t2i.png ADDED

Git LFS Details

  • SHA256: 0559a27f45fb3ca0c007d787b37744e21f7e44bf7b06f35f0485f4a12e2d907c
  • Pointer size: 132 Bytes
  • Size of remote file: 6.52 MB
flux.2-turbo-lora.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f76cf9c2cc546ddca878799136434a1098477af3f4b0adff2cfd79f2ebe4aa01
+ size 2760818216
flux2-dev.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6159a3f19f829c8e84ba6e9996b7afaf7c0a5f3428677f5b37445778a320d275
+ size 64446596128
model_index.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "_class_name": "Flux2Pipeline",
+   "_diffusers_version": "0.36.0.dev0",
+   "scheduler": [
+     "diffusers",
+     "FlowMatchEulerDiscreteScheduler"
+   ],
+   "text_encoder": [
+     "transformers",
+     "Mistral3ForConditionalGeneration"
+   ],
+   "tokenizer": [
+     "transformers",
+     "PixtralProcessor"
+   ],
+   "transformer": [
+     "diffusers",
+     "Flux2Transformer2DModel"
+   ],
+   "vae": [
+     "diffusers",
+     "AutoencoderKLFlux2"
+   ]
+ }
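`model_index.json` tells diffusers which library and class load each subfolder, which also means single components can be pulled out with the usual `from_pretrained(..., subfolder=...)` pattern. A sketch, assuming the `0.36.0.dev0` diffusers build named above exports the listed classes:

```python
import torch
from diffusers import AutoencoderKLFlux2, Flux2Transformer2DModel

# Load individual components from their subfolders instead of the full pipeline,
# e.g. to quantize the transformer separately or reuse the VAE elsewhere.
transformer = Flux2Transformer2DModel.from_pretrained(
    "black-forest-labs/FLUX.2-dev", subfolder="transformer", torch_dtype=torch.bfloat16
)
vae = AutoencoderKLFlux2.from_pretrained(
    "black-forest-labs/FLUX.2-dev", subfolder="vae", torch_dtype=torch.bfloat16
)
```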
scheduler/scheduler_config.json ADDED
@@ -0,0 +1,18 @@
+ {
+   "_class_name": "FlowMatchEulerDiscreteScheduler",
+   "_diffusers_version": "0.36.0.dev0",
+   "base_image_seq_len": 256,
+   "base_shift": 0.5,
+   "invert_sigmas": false,
+   "max_image_seq_len": 4096,
+   "max_shift": 1.15,
+   "num_train_timesteps": 1000,
+   "shift": 3.0,
+   "shift_terminal": null,
+   "stochastic_sampling": false,
+   "time_shift_type": "exponential",
+   "use_beta_sigmas": false,
+   "use_dynamic_shifting": true,
+   "use_exponential_sigmas": false,
+   "use_karras_sigmas": false
+ }
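With `use_dynamic_shifting` enabled, the FLUX pipelines in diffusers derive a resolution-dependent time shift `mu` from these values and hand it to `set_timesteps`. A sketch of that calculation, mirroring diffusers' `calculate_shift` helper; the 16-pixel-per-latent-token sequence length is an assumption carried over from FLUX.1:

```python
import math

def calculate_shift(image_seq_len, base_seq_len=256, max_seq_len=4096,
                    base_shift=0.5, max_shift=1.15):
    # Linear interpolation between base_shift and max_shift over the seq-len range,
    # using the values from scheduler_config.json above.
    m = (max_shift - base_shift) / (max_seq_len - base_seq_len)
    return image_seq_len * m + (base_shift - m * base_seq_len)

def shift_sigma(sigma, mu):
    # "time_shift_type": "exponential" -> exp(mu) / (exp(mu) + (1/sigma - 1))
    return math.exp(mu) / (math.exp(mu) + (1.0 / sigma - 1.0))

mu = calculate_shift(64 * 64)          # 1024x1024 -> 4096 tokens -> mu == max_shift
print(round(shift_sigma(0.5, mu), 4))  # midpoint sigma 0.5 is pushed up to ~0.76
```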
teaser_editing.png ADDED

Git LFS Details

  • SHA256: c6c1aa92b4b4a3b61f43c42e3d7aa0a6c29d1a6ce150b0a17282a40ec119acdc
  • Pointer size: 133 Bytes
  • Size of remote file: 11.3 MB
teaser_generation.png ADDED

Git LFS Details

  • SHA256: 4df742c27729c240828ca676401f9bbee20ce1baa1b8868b7e50ead7f837d61a
  • Pointer size: 133 Bytes
  • Size of remote file: 23.3 MB
text_encoder/config.json ADDED
@@ -0,0 +1,48 @@
+ {
+   "architectures": [
+     "Mistral3ForConditionalGeneration"
+   ],
+   "dtype": "bfloat16",
+   "image_token_index": 10,
+   "model_type": "mistral3",
+   "multimodal_projector_bias": false,
+   "projector_hidden_act": "gelu",
+   "spatial_merge_size": 2,
+   "text_config": {
+     "attention_dropout": 0.0,
+     "dtype": "bfloat16",
+     "head_dim": 128,
+     "hidden_act": "silu",
+     "hidden_size": 5120,
+     "initializer_range": 0.02,
+     "intermediate_size": 32768,
+     "max_position_embeddings": 131072,
+     "model_type": "mistral",
+     "num_attention_heads": 32,
+     "num_hidden_layers": 40,
+     "num_key_value_heads": 8,
+     "rms_norm_eps": 1e-05,
+     "rope_theta": 1000000000.0,
+     "sliding_window": null,
+     "use_cache": true,
+     "vocab_size": 131072
+   },
+   "transformers_version": "4.57.1",
+   "vision_config": {
+     "attention_dropout": 0.0,
+     "dtype": "bfloat16",
+     "head_dim": 64,
+     "hidden_act": "silu",
+     "hidden_size": 1024,
+     "image_size": 1540,
+     "initializer_range": 0.02,
+     "intermediate_size": 4096,
+     "model_type": "pixtral",
+     "num_attention_heads": 16,
+     "num_channels": 3,
+     "num_hidden_layers": 24,
+     "patch_size": 14,
+     "rope_theta": 10000.0
+   },
+   "vision_feature_layer": -1
+ }
text_encoder/generation_config.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "do_sample": true,
+   "eos_token_id": 2,
+   "temperature": 0.15,
+   "transformers_version": "4.57.1"
+ }
text_encoder/model-00001-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:91831c2ce219df0ce63bc33c6249e5cb01db8d93816bcebf975f1c406286520e
+ size 4883550696
text_encoder/model-00002-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8ffe80706a66b2f5ef1fb058806ccf09f124ec4ad38af7a377e44ab1ee2fd664
+ size 4781593336
text_encoder/model-00003-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:99ec66e891f9563f568734eadfc5b7701e04620e8e163d4d5755277a3b50cf2f
+ size 4886472224
text_encoder/model-00004-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e1df1527b12b1eb5cbd9a50914f9e6eb24e885ec830a3c16b5eed6ad0b53a396
+ size 4781593376
text_encoder/model-00005-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3556ac03f47c24eb8ad27c237e25baad639c651d9596fd72cb1523137bf56163
+ size 4781593368
text_encoder/model-00006-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2c41e6f80f2b5ca384ce703eac048a13daf2aff689c3acca66a8943f45338aae
+ size 4886472248
text_encoder/model-00007-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:62a725f154f6ba942a36b5cc450db2b2df32f434e3224558c789bc04fa05fd36
+ size 4781593376
text_encoder/model-00008-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3a1a6ac77e6434418bb7273b68a7b3534fed5217c990061c92a8f990dd6ab20e
+ size 4781593368
text_encoder/model-00009-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e1fffc9bb2b77d4d2382c1bd9053e9d017741d67ca00cc6f77034a294f2f5cfd
+ size 4886472248
text_encoder/model-00010-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:116ef7ae6fa0fd46b478324e4aa6a49f448afed900ca9f71d4fbd3d02289bbd4
+ size 4571866320
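
Each *.safetensors entry above is a Git LFS pointer: the tracked file stores only the spec version, a sha256 oid, and a byte size, and the real blob is fetched at download time. Note also that in the index that follows, metadata.total_size (48,022,722,560) is exactly total_parameters × 2 bytes for bf16, while the shard file sizes sum slightly higher because each shard carries its own safetensors header. A stdlib sketch for checking a downloaded shard against its pointer (the path is hypothetical; run from the repo root after download):

```python
# Verify a downloaded shard against the oid recorded in its LFS pointer.
import hashlib

def lfs_sha256(path: str, chunk: int = 1 << 20) -> str:
    # Stream the file in 1 MiB chunks to avoid loading ~4.5 GB at once.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while blob := f.read(chunk):
            h.update(blob)
    return h.hexdigest()

digest = lfs_sha256("text_encoder/model-00010-of-00010.safetensors")
assert digest == "116ef7ae6fa0fd46b478324e4aa6a49f448afed900ca9f71d4fbd3d02289bbd4"
```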
text_encoder/model.safetensors.index.json ADDED
@@ -0,0 +1,593 @@
+ {
+   "metadata": {
+     "total_parameters": 24011361280,
+     "total_size": 48022722560
+   },
+   "weight_map": {
+     "language_model.lm_head.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.embed_tokens.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.0.input_layernorm.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.0.mlp.down_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.0.mlp.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.0.mlp.up_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.0.post_attention_layernorm.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.0.self_attn.k_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.0.self_attn.o_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.0.self_attn.q_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.0.self_attn.v_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.1.input_layernorm.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.1.mlp.down_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.1.mlp.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.1.mlp.up_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.1.post_attention_layernorm.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.1.self_attn.k_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.1.self_attn.o_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.1.self_attn.q_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.1.self_attn.v_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.10.input_layernorm.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.10.mlp.down_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.10.mlp.gate_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.10.mlp.up_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.10.post_attention_layernorm.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.10.self_attn.k_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.10.self_attn.o_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.10.self_attn.q_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.10.self_attn.v_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.11.input_layernorm.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.11.mlp.down_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.11.mlp.gate_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.11.mlp.up_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.11.post_attention_layernorm.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.11.self_attn.k_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.11.self_attn.o_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.11.self_attn.q_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.11.self_attn.v_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.12.input_layernorm.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.12.mlp.down_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.12.mlp.gate_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.12.mlp.up_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.12.post_attention_layernorm.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.12.self_attn.k_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.12.self_attn.o_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.12.self_attn.q_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.12.self_attn.v_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.13.input_layernorm.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.13.mlp.down_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.13.mlp.gate_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.13.mlp.up_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.13.post_attention_layernorm.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.13.self_attn.k_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.13.self_attn.o_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.13.self_attn.q_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.13.self_attn.v_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.14.input_layernorm.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.14.mlp.down_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.14.mlp.gate_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.14.mlp.up_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.14.post_attention_layernorm.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.14.self_attn.k_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.14.self_attn.o_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.14.self_attn.q_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.14.self_attn.v_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.15.input_layernorm.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.15.mlp.down_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.15.mlp.gate_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.15.mlp.up_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.15.post_attention_layernorm.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.15.self_attn.k_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.15.self_attn.o_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.15.self_attn.q_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.15.self_attn.v_proj.weight": "model-00004-of-00010.safetensors",
+     "language_model.model.layers.16.input_layernorm.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.16.mlp.down_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.16.mlp.gate_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.16.mlp.up_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.16.post_attention_layernorm.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.16.self_attn.k_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.16.self_attn.o_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.16.self_attn.q_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.16.self_attn.v_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.17.input_layernorm.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.17.mlp.down_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.17.mlp.gate_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.17.mlp.up_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.17.post_attention_layernorm.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.17.self_attn.k_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.17.self_attn.o_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.17.self_attn.q_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.17.self_attn.v_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.18.input_layernorm.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.18.mlp.down_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.18.mlp.gate_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.18.mlp.up_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.18.post_attention_layernorm.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.18.self_attn.k_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.18.self_attn.o_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.18.self_attn.q_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.18.self_attn.v_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.19.input_layernorm.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.19.mlp.down_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.19.mlp.gate_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.19.mlp.up_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.19.post_attention_layernorm.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.19.self_attn.k_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.19.self_attn.o_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.19.self_attn.q_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.19.self_attn.v_proj.weight": "model-00005-of-00010.safetensors",
+     "language_model.model.layers.2.input_layernorm.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.2.mlp.down_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.2.mlp.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.2.mlp.up_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.2.post_attention_layernorm.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.2.self_attn.k_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.2.self_attn.o_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.2.self_attn.q_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.2.self_attn.v_proj.weight": "model-00001-of-00010.safetensors",
+     "language_model.model.layers.20.input_layernorm.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.20.mlp.down_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.20.mlp.gate_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.20.mlp.up_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.20.post_attention_layernorm.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.20.self_attn.k_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.20.self_attn.o_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.20.self_attn.q_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.20.self_attn.v_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.21.input_layernorm.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.21.mlp.down_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.21.mlp.gate_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.21.mlp.up_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.21.post_attention_layernorm.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.21.self_attn.k_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.21.self_attn.o_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.21.self_attn.q_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.21.self_attn.v_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.22.input_layernorm.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.22.mlp.down_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.22.mlp.gate_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.22.mlp.up_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.22.post_attention_layernorm.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.22.self_attn.k_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.22.self_attn.o_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.22.self_attn.q_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.22.self_attn.v_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.23.input_layernorm.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.23.mlp.down_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.23.mlp.gate_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.23.mlp.up_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.23.post_attention_layernorm.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.23.self_attn.k_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.23.self_attn.o_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.23.self_attn.q_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.23.self_attn.v_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.24.input_layernorm.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.24.mlp.down_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.24.mlp.gate_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.24.mlp.up_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.24.post_attention_layernorm.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.24.self_attn.k_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.24.self_attn.o_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.24.self_attn.q_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.24.self_attn.v_proj.weight": "model-00006-of-00010.safetensors",
+     "language_model.model.layers.25.input_layernorm.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.25.mlp.down_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.25.mlp.gate_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.25.mlp.up_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.25.post_attention_layernorm.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.25.self_attn.k_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.25.self_attn.o_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.25.self_attn.q_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.25.self_attn.v_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.26.input_layernorm.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.26.mlp.down_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.26.mlp.gate_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.26.mlp.up_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.26.post_attention_layernorm.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.26.self_attn.k_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.26.self_attn.o_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.26.self_attn.q_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.26.self_attn.v_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.27.input_layernorm.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.27.mlp.down_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.27.mlp.gate_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.27.mlp.up_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.27.post_attention_layernorm.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.27.self_attn.k_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.27.self_attn.o_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.27.self_attn.q_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.27.self_attn.v_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.28.input_layernorm.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.28.mlp.down_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.28.mlp.gate_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.28.mlp.up_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.28.post_attention_layernorm.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.28.self_attn.k_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.28.self_attn.o_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.28.self_attn.q_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.28.self_attn.v_proj.weight": "model-00007-of-00010.safetensors",
+     "language_model.model.layers.29.input_layernorm.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.29.mlp.down_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.29.mlp.gate_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.29.mlp.up_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.29.post_attention_layernorm.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.29.self_attn.k_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.29.self_attn.o_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.29.self_attn.q_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.29.self_attn.v_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.3.input_layernorm.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.3.mlp.down_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.3.mlp.gate_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.3.mlp.up_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.3.post_attention_layernorm.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.3.self_attn.k_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.3.self_attn.o_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.3.self_attn.q_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.3.self_attn.v_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.30.input_layernorm.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.30.mlp.down_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.30.mlp.gate_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.30.mlp.up_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.30.post_attention_layernorm.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.30.self_attn.k_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.30.self_attn.o_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.30.self_attn.q_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.30.self_attn.v_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.31.input_layernorm.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.31.mlp.down_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.31.mlp.gate_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.31.mlp.up_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.31.post_attention_layernorm.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.31.self_attn.k_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.31.self_attn.o_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.31.self_attn.q_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.31.self_attn.v_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.32.input_layernorm.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.32.mlp.down_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.32.mlp.gate_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.32.mlp.up_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.32.post_attention_layernorm.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.32.self_attn.k_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.32.self_attn.o_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.32.self_attn.q_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.32.self_attn.v_proj.weight": "model-00008-of-00010.safetensors",
+     "language_model.model.layers.33.input_layernorm.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.33.mlp.down_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.33.mlp.gate_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.33.mlp.up_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.33.post_attention_layernorm.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.33.self_attn.k_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.33.self_attn.o_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.33.self_attn.q_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.33.self_attn.v_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.34.input_layernorm.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.34.mlp.down_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.34.mlp.gate_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.34.mlp.up_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.34.post_attention_layernorm.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.34.self_attn.k_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.34.self_attn.o_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.34.self_attn.q_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.34.self_attn.v_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.35.input_layernorm.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.35.mlp.down_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.35.mlp.gate_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.35.mlp.up_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.35.post_attention_layernorm.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.35.self_attn.k_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.35.self_attn.o_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.35.self_attn.q_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.35.self_attn.v_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.36.input_layernorm.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.36.mlp.down_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.36.mlp.gate_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.36.mlp.up_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.36.post_attention_layernorm.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.36.self_attn.k_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.36.self_attn.o_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.36.self_attn.q_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.36.self_attn.v_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.37.input_layernorm.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.37.mlp.down_proj.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.37.mlp.gate_proj.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.37.mlp.up_proj.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.37.post_attention_layernorm.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.37.self_attn.k_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.37.self_attn.o_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.37.self_attn.q_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.37.self_attn.v_proj.weight": "model-00009-of-00010.safetensors",
+     "language_model.model.layers.38.input_layernorm.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.38.mlp.down_proj.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.38.mlp.gate_proj.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.38.mlp.up_proj.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.38.post_attention_layernorm.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.38.self_attn.k_proj.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.38.self_attn.o_proj.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.38.self_attn.q_proj.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.38.self_attn.v_proj.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.39.input_layernorm.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.39.mlp.down_proj.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.39.mlp.gate_proj.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.39.mlp.up_proj.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.39.post_attention_layernorm.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.39.self_attn.k_proj.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.39.self_attn.o_proj.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.39.self_attn.q_proj.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.39.self_attn.v_proj.weight": "model-00010-of-00010.safetensors",
+     "language_model.model.layers.4.input_layernorm.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.4.mlp.down_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.4.mlp.gate_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.4.mlp.up_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.4.post_attention_layernorm.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.4.self_attn.k_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.4.self_attn.o_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.4.self_attn.q_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.4.self_attn.v_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.5.input_layernorm.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.5.mlp.down_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.5.mlp.gate_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.5.mlp.up_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.5.post_attention_layernorm.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.5.self_attn.k_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.5.self_attn.o_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.5.self_attn.q_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.5.self_attn.v_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.6.input_layernorm.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.6.mlp.down_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.6.mlp.gate_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.6.mlp.up_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.6.post_attention_layernorm.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.6.self_attn.k_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.6.self_attn.o_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.6.self_attn.q_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.6.self_attn.v_proj.weight": "model-00002-of-00010.safetensors",
+     "language_model.model.layers.7.input_layernorm.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.7.mlp.down_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.7.mlp.gate_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.7.mlp.up_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.7.post_attention_layernorm.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.7.self_attn.k_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.7.self_attn.o_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.7.self_attn.q_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.7.self_attn.v_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.8.input_layernorm.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.8.mlp.down_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.8.mlp.gate_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.8.mlp.up_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.8.post_attention_layernorm.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.8.self_attn.k_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.8.self_attn.o_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.8.self_attn.q_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.8.self_attn.v_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.9.input_layernorm.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.9.mlp.down_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.9.mlp.gate_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.9.mlp.up_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.9.post_attention_layernorm.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.9.self_attn.k_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.9.self_attn.o_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.9.self_attn.q_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.layers.9.self_attn.v_proj.weight": "model-00003-of-00010.safetensors",
+     "language_model.model.norm.weight": "model-00010-of-00010.safetensors",
+     "multi_modal_projector.linear_1.weight": "model-00001-of-00010.safetensors",
+     "multi_modal_projector.linear_2.weight": "model-00001-of-00010.safetensors",
+     "multi_modal_projector.norm.weight": "model-00001-of-00010.safetensors",
+     "multi_modal_projector.patch_merger.merging_layer.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.ln_pre.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.patch_conv.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.0.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.0.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.0.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.0.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.0.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.0.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.0.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.0.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.0.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.1.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.1.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.1.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.1.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.1.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.1.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.1.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.1.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.1.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.10.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.10.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.10.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.10.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.10.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.10.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.10.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.10.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.10.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.11.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.11.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.11.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.11.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.11.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.11.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.11.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.11.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.11.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.12.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.12.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.12.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.12.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.12.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.12.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.12.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.12.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.12.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.13.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.13.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.13.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.13.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.13.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.13.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.13.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.13.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.13.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.14.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.14.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.14.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.14.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.14.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.14.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.14.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.14.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.14.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.15.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.15.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.15.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.15.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.15.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.15.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.15.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.15.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.15.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.16.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.16.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.16.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.16.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.16.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.16.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.16.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.16.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.16.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.17.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.17.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.17.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.17.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.17.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.17.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.17.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.17.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.17.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.18.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.18.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.18.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.18.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.18.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.18.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.18.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.18.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.18.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.19.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.19.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.19.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.19.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.19.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.19.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.19.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.19.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.19.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.2.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.2.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.2.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.2.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.2.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.2.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.2.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.2.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.2.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.20.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.20.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.20.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.20.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.20.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.20.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.20.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.20.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.20.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.21.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.21.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.21.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.21.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.21.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.21.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.21.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.21.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.21.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.22.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.22.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.22.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.22.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.22.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.22.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.22.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.22.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.22.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.23.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.23.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.23.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.23.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.23.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.23.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.23.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.23.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.23.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.3.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.3.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.3.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.3.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.3.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.3.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.3.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.3.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.3.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.4.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.4.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.4.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.4.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.4.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.4.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.4.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.4.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.4.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.5.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.5.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.5.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.5.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.5.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.5.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.5.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.5.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.5.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.6.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.6.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.6.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.6.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.6.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.6.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.6.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.6.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.6.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.7.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.7.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.7.attention.q_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.7.attention.v_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.7.attention_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.7.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.7.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.7.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.7.ffn_norm.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.8.attention.k_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.8.attention.o_proj.weight": "model-00001-of-00010.safetensors",
+     "vision_tower.transformer.layers.8.attention.q_proj.weight": "model-00001-of-00010.safetensors",
577
+ "vision_tower.transformer.layers.8.attention.v_proj.weight": "model-00001-of-00010.safetensors",
578
+ "vision_tower.transformer.layers.8.attention_norm.weight": "model-00001-of-00010.safetensors",
579
+ "vision_tower.transformer.layers.8.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
580
+ "vision_tower.transformer.layers.8.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
581
+ "vision_tower.transformer.layers.8.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
582
+ "vision_tower.transformer.layers.8.ffn_norm.weight": "model-00001-of-00010.safetensors",
583
+ "vision_tower.transformer.layers.9.attention.k_proj.weight": "model-00001-of-00010.safetensors",
584
+ "vision_tower.transformer.layers.9.attention.o_proj.weight": "model-00001-of-00010.safetensors",
585
+ "vision_tower.transformer.layers.9.attention.q_proj.weight": "model-00001-of-00010.safetensors",
586
+ "vision_tower.transformer.layers.9.attention.v_proj.weight": "model-00001-of-00010.safetensors",
587
+ "vision_tower.transformer.layers.9.attention_norm.weight": "model-00001-of-00010.safetensors",
588
+ "vision_tower.transformer.layers.9.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
589
+ "vision_tower.transformer.layers.9.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
590
+ "vision_tower.transformer.layers.9.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
591
+ "vision_tower.transformer.layers.9.ffn_norm.weight": "model-00001-of-00010.safetensors"
592
+ }
593
+ }
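The weight map above is the standard safetensors sharding index: a flat JSON object mapping each tensor name to the shard file that stores it. A minimal inspection sketch, assuming the index file has been downloaded to the path shown:

import json
from collections import Counter

# Hypothetical local path to the index file shown above.
with open("text_encoder/model.safetensors.index.json") as f:
    index = json.load(f)

weight_map = index["weight_map"]

# Which shard holds a given vision-tower tensor?
name = "vision_tower.transformer.layers.21.feed_forward.down_proj.weight"
print(name, "->", weight_map[name])  # model-00001-of-00010.safetensors

# How many tensors live in each of the ten shards?
print(Counter(weight_map.values()))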
tokenizer/chat_template.jinja ADDED
@@ -0,0 +1,51 @@
+ {%- set today = strftime_now("%Y-%m-%d") %}
+ {%- set default_system_message = "You are Mistral Small 3, a Large Language Model (LLM) created by Mistral AI, a French startup headquartered in Paris.\nYour knowledge base was last updated on 2023-10-01. The current date is " + today + ".\n\nWhen you're not sure about some information, you say that you don't have the information and don't make up anything.\nIf the user's question is not clear, ambiguous, or does not provide enough context for you to accurately answer the question, you do not try to answer it right away and you rather ask the user to clarify their request (e.g. \"What are some good restaurants around me?\" => \"Where are you?\" or \"When is the next flight to Tokyo\" => \"Where do you travel from?\")" %}
+
+ {{- bos_token }}
+
+ {%- if messages[0]['role'] == 'system' %}
+ {%- if messages[0]['content'] is string %}
+ {%- set system_message = messages[0]['content'] %}
+ {%- else %}
+ {%- set system_message = messages[0]['content'][0]['text'] %}
+ {%- endif %}
+ {%- set loop_messages = messages[1:] %}
+ {%- else %}
+ {%- set system_message = default_system_message %}
+ {%- set loop_messages = messages %}
+ {%- endif %}
+ {{- '[SYSTEM_PROMPT]' + system_message + '[/SYSTEM_PROMPT]' }}
+
+ {%- for message in loop_messages %}
+ {%- if message['role'] == 'user' %}
+ {%- if message['content'] is string %}
+ {{- '[INST]' + message['content'] + '[/INST]' }}
+ {%- else %}
+ {{- '[INST]' }}
+ {%- for block in message['content'] %}
+ {%- if block['type'] == 'text' %}
+ {{- block['text'] }}
+ {%- elif block['type'] in ['image', 'image_url'] %}
+ {{- '[IMG]' }}
+ {%- else %}
+ {{- raise_exception('Only text and image blocks are supported in message content!') }}
+ {%- endif %}
+ {%- endfor %}
+ {{- '[/INST]' }}
+ {%- endif %}
+ {%- elif message['role'] == 'system' %}
+ {%- if message['content'] is string %}
+ {{- '[SYSTEM_PROMPT]' + message['content'] + '[/SYSTEM_PROMPT]' }}
+ {%- else %}
+ {{- '[SYSTEM_PROMPT]' + message['content'][0]['text'] + '[/SYSTEM_PROMPT]' }}
+ {%- endif %}
+ {%- elif message['role'] == 'assistant' %}
+ {%- if message['content'] is string %}
+ {{- message['content'] + eos_token }}
+ {%- else %}
+ {{- message['content'][0]['text'] + eos_token }}
+ {%- endif %}
+ {%- else %}
+ {{- raise_exception('Only user, system and assistant roles are supported!') }}
+ {%- endif %}
+ {%- endfor %}
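This is the Mistral-style chat template shipped with the text encoder's tokenizer; it wraps system text in [SYSTEM_PROMPT] tags, user turns in [INST] tags, and replaces image blocks with the [IMG] placeholder. A hedged rendering sketch via transformers' apply_chat_template (the repo id and subfolder are assumptions; any checkpoint carrying this template behaves the same):

from transformers import AutoTokenizer

# Assumed repo id / subfolder; point this at wherever the tokenizer lives.
tok = AutoTokenizer.from_pretrained("black-forest-labs/FLUX.2-dev", subfolder="tokenizer")

messages = [
    {"role": "user", "content": [
        {"type": "text", "text": "Describe this image."},
        {"type": "image"},
    ]},
]

# With no explicit system message the template injects the dated default one, then
# renders: [SYSTEM_PROMPT]...[/SYSTEM_PROMPT][INST]Describe this image.[IMG][/INST]
print(tok.apply_chat_template(messages, tokenize=False))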
tokenizer/preprocessor_config.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "crop_size": null,
+ "data_format": "channels_first",
+ "default_to_square": true,
+ "device": null,
+ "disable_grouping": null,
+ "do_center_crop": null,
+ "do_convert_rgb": true,
+ "do_normalize": true,
+ "do_pad": null,
+ "do_rescale": true,
+ "do_resize": true,
+ "image_mean": [
+ 0.48145466,
+ 0.4578275,
+ 0.40821073
+ ],
+ "image_processor_type": "PixtralImageProcessorFast",
+ "image_std": [
+ 0.26862954,
+ 0.26130258,
+ 0.27577711
+ ],
+ "input_data_format": null,
+ "pad_size": null,
+ "patch_size": 14,
+ "processor_class": "PixtralProcessor",
+ "resample": 3,
+ "rescale_factor": 0.00392156862745098,
+ "return_tensors": null,
+ "size": {
+ "longest_edge": 1540
+ }
+ }
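With do_resize bounded by longest_edge = 1540 and patch_size = 14, the vision patch grid follows directly from the input shape. A rough sketch of the arithmetic, assuming the usual Pixtral behaviour of an aspect-preserving downscale followed by 14-pixel patching (exact rounding inside PixtralImageProcessorFast may differ):

import math

LONGEST_EDGE, PATCH = 1540, 14

def patch_grid(height, width):
    # Downscale only if the longest side exceeds the limit, keeping aspect ratio.
    scale = min(1.0, LONGEST_EDGE / max(height, width))
    h, w = round(height * scale), round(width * scale)
    # Cover the (possibly resized) image with 14x14 patches.
    return math.ceil(h / PATCH), math.ceil(w / PATCH)

print(patch_grid(2048, 1536))  # -> (110, 83): 1540x1155 after resize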
tokenizer/processor_config.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "image_break_token": "[IMG_BREAK]",
+ "image_end_token": "[IMG_END]",
+ "image_token": "[IMG]",
+ "patch_size": 14,
+ "processor_class": "PixtralProcessor",
+ "spatial_merge_size": 2
+ }
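processor_config.json names the three image placeholder tokens and sets spatial_merge_size to 2, i.e. 2x2 groups of patches collapse into one [IMG] token before reaching the language model. A sketch of the row-wise token layout, assuming the Pixtral convention of [IMG_BREAK] after each row with the final break replaced by [IMG_END]:

def image_token_string(rows, cols):
    # One [IMG] per merged patch; each row ends with [IMG_BREAK],
    # and the last break is swapped for [IMG_END] (assumed Pixtral layout).
    body = "".join("[IMG]" * cols + "[IMG_BREAK]" for _ in range(rows))
    return body[: -len("[IMG_BREAK]")] + "[IMG_END]"

print(image_token_string(2, 3))
# [IMG][IMG][IMG][IMG_BREAK][IMG][IMG][IMG][IMG_END]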
tokenizer/special_tokens_map.json ADDED
@@ -0,0 +1,1032 @@
+ {
+ "additional_special_tokens": [
+ "<unk>",
+ "<s>",
+ "</s>",
+ "[INST]",
+ "[/INST]",
+ "[AVAILABLE_TOOLS]",
+ "[/AVAILABLE_TOOLS]",
+ "[TOOL_RESULTS]",
+ "[/TOOL_RESULTS]",
+ "[TOOL_CALLS]",
+ "[IMG]",
+ "<pad>",
+ "[IMG_BREAK]",
+ "[IMG_END]",
+ "[PREFIX]",
+ "[MIDDLE]",
+ "[SUFFIX]",
+ "[SYSTEM_PROMPT]",
+ "[/SYSTEM_PROMPT]",
+ "[TOOL_CONTENT]",
+ "<SPECIAL_20>", "<SPECIAL_21>", "<SPECIAL_22>", "<SPECIAL_23>", "<SPECIAL_24>", "<SPECIAL_25>", "<SPECIAL_26>", "<SPECIAL_27>", "<SPECIAL_28>", "<SPECIAL_29>",
+ "<SPECIAL_30>", "<SPECIAL_31>", "<SPECIAL_32>", "<SPECIAL_33>", "<SPECIAL_34>", "<SPECIAL_35>", "<SPECIAL_36>", "<SPECIAL_37>", "<SPECIAL_38>", "<SPECIAL_39>",
+ "<SPECIAL_40>", "<SPECIAL_41>", "<SPECIAL_42>", "<SPECIAL_43>", "<SPECIAL_44>", "<SPECIAL_45>", "<SPECIAL_46>", "<SPECIAL_47>", "<SPECIAL_48>", "<SPECIAL_49>",
+ "<SPECIAL_50>", "<SPECIAL_51>", "<SPECIAL_52>", "<SPECIAL_53>", "<SPECIAL_54>", "<SPECIAL_55>", "<SPECIAL_56>", "<SPECIAL_57>", "<SPECIAL_58>", "<SPECIAL_59>",
+ "<SPECIAL_60>", "<SPECIAL_61>", "<SPECIAL_62>", "<SPECIAL_63>", "<SPECIAL_64>", "<SPECIAL_65>", "<SPECIAL_66>", "<SPECIAL_67>", "<SPECIAL_68>", "<SPECIAL_69>",
+ "<SPECIAL_70>", "<SPECIAL_71>", "<SPECIAL_72>", "<SPECIAL_73>", "<SPECIAL_74>", "<SPECIAL_75>", "<SPECIAL_76>", "<SPECIAL_77>", "<SPECIAL_78>", "<SPECIAL_79>",
+ "<SPECIAL_80>", "<SPECIAL_81>", "<SPECIAL_82>", "<SPECIAL_83>", "<SPECIAL_84>", "<SPECIAL_85>", "<SPECIAL_86>", "<SPECIAL_87>", "<SPECIAL_88>", "<SPECIAL_89>",
+ "<SPECIAL_90>", "<SPECIAL_91>", "<SPECIAL_92>", "<SPECIAL_93>", "<SPECIAL_94>", "<SPECIAL_95>", "<SPECIAL_96>", "<SPECIAL_97>", "<SPECIAL_98>", "<SPECIAL_99>",
+ "<SPECIAL_100>", "<SPECIAL_101>", "<SPECIAL_102>", "<SPECIAL_103>", "<SPECIAL_104>", "<SPECIAL_105>", "<SPECIAL_106>", "<SPECIAL_107>", "<SPECIAL_108>", "<SPECIAL_109>",
+ "<SPECIAL_110>", "<SPECIAL_111>", "<SPECIAL_112>", "<SPECIAL_113>", "<SPECIAL_114>", "<SPECIAL_115>", "<SPECIAL_116>", "<SPECIAL_117>", "<SPECIAL_118>", "<SPECIAL_119>",
+ "<SPECIAL_120>", "<SPECIAL_121>", "<SPECIAL_122>", "<SPECIAL_123>", "<SPECIAL_124>", "<SPECIAL_125>", "<SPECIAL_126>", "<SPECIAL_127>", "<SPECIAL_128>", "<SPECIAL_129>",
+ "<SPECIAL_130>", "<SPECIAL_131>", "<SPECIAL_132>", "<SPECIAL_133>", "<SPECIAL_134>", "<SPECIAL_135>", "<SPECIAL_136>", "<SPECIAL_137>", "<SPECIAL_138>", "<SPECIAL_139>",
+ "<SPECIAL_140>", "<SPECIAL_141>", "<SPECIAL_142>", "<SPECIAL_143>", "<SPECIAL_144>", "<SPECIAL_145>", "<SPECIAL_146>", "<SPECIAL_147>", "<SPECIAL_148>", "<SPECIAL_149>",
+ "<SPECIAL_150>", "<SPECIAL_151>", "<SPECIAL_152>", "<SPECIAL_153>", "<SPECIAL_154>", "<SPECIAL_155>", "<SPECIAL_156>", "<SPECIAL_157>", "<SPECIAL_158>", "<SPECIAL_159>",
+ "<SPECIAL_160>", "<SPECIAL_161>", "<SPECIAL_162>", "<SPECIAL_163>", "<SPECIAL_164>", "<SPECIAL_165>", "<SPECIAL_166>", "<SPECIAL_167>", "<SPECIAL_168>", "<SPECIAL_169>",
+ "<SPECIAL_170>", "<SPECIAL_171>", "<SPECIAL_172>", "<SPECIAL_173>", "<SPECIAL_174>", "<SPECIAL_175>", "<SPECIAL_176>", "<SPECIAL_177>", "<SPECIAL_178>", "<SPECIAL_179>",
+ "<SPECIAL_180>", "<SPECIAL_181>", "<SPECIAL_182>", "<SPECIAL_183>", "<SPECIAL_184>", "<SPECIAL_185>", "<SPECIAL_186>", "<SPECIAL_187>", "<SPECIAL_188>", "<SPECIAL_189>",
+ "<SPECIAL_190>", "<SPECIAL_191>", "<SPECIAL_192>", "<SPECIAL_193>", "<SPECIAL_194>", "<SPECIAL_195>", "<SPECIAL_196>", "<SPECIAL_197>", "<SPECIAL_198>", "<SPECIAL_199>",
+ "<SPECIAL_200>", "<SPECIAL_201>", "<SPECIAL_202>", "<SPECIAL_203>", "<SPECIAL_204>", "<SPECIAL_205>", "<SPECIAL_206>", "<SPECIAL_207>", "<SPECIAL_208>", "<SPECIAL_209>",
+ "<SPECIAL_210>", "<SPECIAL_211>", "<SPECIAL_212>", "<SPECIAL_213>", "<SPECIAL_214>", "<SPECIAL_215>", "<SPECIAL_216>", "<SPECIAL_217>", "<SPECIAL_218>", "<SPECIAL_219>",
+ "<SPECIAL_220>", "<SPECIAL_221>", "<SPECIAL_222>", "<SPECIAL_223>", "<SPECIAL_224>", "<SPECIAL_225>", "<SPECIAL_226>", "<SPECIAL_227>", "<SPECIAL_228>", "<SPECIAL_229>",
+ "<SPECIAL_230>", "<SPECIAL_231>", "<SPECIAL_232>", "<SPECIAL_233>", "<SPECIAL_234>", "<SPECIAL_235>", "<SPECIAL_236>", "<SPECIAL_237>", "<SPECIAL_238>", "<SPECIAL_239>",
+ "<SPECIAL_240>", "<SPECIAL_241>", "<SPECIAL_242>", "<SPECIAL_243>", "<SPECIAL_244>", "<SPECIAL_245>", "<SPECIAL_246>", "<SPECIAL_247>", "<SPECIAL_248>", "<SPECIAL_249>",
+ "<SPECIAL_250>", "<SPECIAL_251>", "<SPECIAL_252>", "<SPECIAL_253>", "<SPECIAL_254>", "<SPECIAL_255>", "<SPECIAL_256>", "<SPECIAL_257>", "<SPECIAL_258>", "<SPECIAL_259>",
+ "<SPECIAL_260>", "<SPECIAL_261>", "<SPECIAL_262>", "<SPECIAL_263>", "<SPECIAL_264>", "<SPECIAL_265>", "<SPECIAL_266>", "<SPECIAL_267>", "<SPECIAL_268>", "<SPECIAL_269>",
+ "<SPECIAL_270>", "<SPECIAL_271>", "<SPECIAL_272>", "<SPECIAL_273>", "<SPECIAL_274>", "<SPECIAL_275>", "<SPECIAL_276>", "<SPECIAL_277>", "<SPECIAL_278>", "<SPECIAL_279>",
+ "<SPECIAL_280>", "<SPECIAL_281>", "<SPECIAL_282>", "<SPECIAL_283>", "<SPECIAL_284>", "<SPECIAL_285>", "<SPECIAL_286>", "<SPECIAL_287>", "<SPECIAL_288>", "<SPECIAL_289>",
+ "<SPECIAL_290>", "<SPECIAL_291>", "<SPECIAL_292>", "<SPECIAL_293>", "<SPECIAL_294>", "<SPECIAL_295>", "<SPECIAL_296>", "<SPECIAL_297>", "<SPECIAL_298>", "<SPECIAL_299>",
+ "<SPECIAL_300>", "<SPECIAL_301>", "<SPECIAL_302>", "<SPECIAL_303>", "<SPECIAL_304>", "<SPECIAL_305>", "<SPECIAL_306>", "<SPECIAL_307>", "<SPECIAL_308>", "<SPECIAL_309>",
+ "<SPECIAL_310>", "<SPECIAL_311>", "<SPECIAL_312>", "<SPECIAL_313>", "<SPECIAL_314>", "<SPECIAL_315>", "<SPECIAL_316>", "<SPECIAL_317>", "<SPECIAL_318>", "<SPECIAL_319>",
+ "<SPECIAL_320>", "<SPECIAL_321>", "<SPECIAL_322>", "<SPECIAL_323>", "<SPECIAL_324>", "<SPECIAL_325>", "<SPECIAL_326>", "<SPECIAL_327>", "<SPECIAL_328>", "<SPECIAL_329>",
+ "<SPECIAL_330>", "<SPECIAL_331>", "<SPECIAL_332>", "<SPECIAL_333>", "<SPECIAL_334>", "<SPECIAL_335>", "<SPECIAL_336>", "<SPECIAL_337>", "<SPECIAL_338>", "<SPECIAL_339>",
+ "<SPECIAL_340>", "<SPECIAL_341>", "<SPECIAL_342>", "<SPECIAL_343>", "<SPECIAL_344>", "<SPECIAL_345>", "<SPECIAL_346>", "<SPECIAL_347>", "<SPECIAL_348>", "<SPECIAL_349>",
+ "<SPECIAL_350>", "<SPECIAL_351>", "<SPECIAL_352>", "<SPECIAL_353>", "<SPECIAL_354>", "<SPECIAL_355>", "<SPECIAL_356>", "<SPECIAL_357>", "<SPECIAL_358>", "<SPECIAL_359>",
+ "<SPECIAL_360>", "<SPECIAL_361>", "<SPECIAL_362>", "<SPECIAL_363>", "<SPECIAL_364>", "<SPECIAL_365>", "<SPECIAL_366>", "<SPECIAL_367>", "<SPECIAL_368>", "<SPECIAL_369>",
+ "<SPECIAL_370>", "<SPECIAL_371>", "<SPECIAL_372>", "<SPECIAL_373>", "<SPECIAL_374>", "<SPECIAL_375>", "<SPECIAL_376>", "<SPECIAL_377>", "<SPECIAL_378>", "<SPECIAL_379>",
+ "<SPECIAL_380>", "<SPECIAL_381>", "<SPECIAL_382>", "<SPECIAL_383>", "<SPECIAL_384>", "<SPECIAL_385>", "<SPECIAL_386>", "<SPECIAL_387>", "<SPECIAL_388>", "<SPECIAL_389>",
+ "<SPECIAL_390>", "<SPECIAL_391>", "<SPECIAL_392>", "<SPECIAL_393>", "<SPECIAL_394>", "<SPECIAL_395>", "<SPECIAL_396>", "<SPECIAL_397>", "<SPECIAL_398>", "<SPECIAL_399>",
+ "<SPECIAL_400>", "<SPECIAL_401>", "<SPECIAL_402>", "<SPECIAL_403>", "<SPECIAL_404>", "<SPECIAL_405>", "<SPECIAL_406>", "<SPECIAL_407>", "<SPECIAL_408>", "<SPECIAL_409>",
+ "<SPECIAL_410>", "<SPECIAL_411>", "<SPECIAL_412>", "<SPECIAL_413>", "<SPECIAL_414>", "<SPECIAL_415>", "<SPECIAL_416>", "<SPECIAL_417>", "<SPECIAL_418>", "<SPECIAL_419>",
+ "<SPECIAL_420>", "<SPECIAL_421>", "<SPECIAL_422>", "<SPECIAL_423>", "<SPECIAL_424>", "<SPECIAL_425>", "<SPECIAL_426>", "<SPECIAL_427>", "<SPECIAL_428>", "<SPECIAL_429>",
+ "<SPECIAL_430>", "<SPECIAL_431>", "<SPECIAL_432>", "<SPECIAL_433>", "<SPECIAL_434>", "<SPECIAL_435>", "<SPECIAL_436>", "<SPECIAL_437>", "<SPECIAL_438>", "<SPECIAL_439>",
+ "<SPECIAL_440>", "<SPECIAL_441>", "<SPECIAL_442>", "<SPECIAL_443>", "<SPECIAL_444>", "<SPECIAL_445>", "<SPECIAL_446>", "<SPECIAL_447>", "<SPECIAL_448>", "<SPECIAL_449>",
+ "<SPECIAL_450>", "<SPECIAL_451>", "<SPECIAL_452>", "<SPECIAL_453>", "<SPECIAL_454>", "<SPECIAL_455>", "<SPECIAL_456>", "<SPECIAL_457>", "<SPECIAL_458>", "<SPECIAL_459>",
+ "<SPECIAL_460>", "<SPECIAL_461>", "<SPECIAL_462>", "<SPECIAL_463>", "<SPECIAL_464>", "<SPECIAL_465>", "<SPECIAL_466>", "<SPECIAL_467>", "<SPECIAL_468>", "<SPECIAL_469>",
+ "<SPECIAL_470>", "<SPECIAL_471>", "<SPECIAL_472>", "<SPECIAL_473>", "<SPECIAL_474>", "<SPECIAL_475>", "<SPECIAL_476>", "<SPECIAL_477>", "<SPECIAL_478>", "<SPECIAL_479>",
+ "<SPECIAL_480>", "<SPECIAL_481>", "<SPECIAL_482>", "<SPECIAL_483>", "<SPECIAL_484>", "<SPECIAL_485>", "<SPECIAL_486>", "<SPECIAL_487>", "<SPECIAL_488>", "<SPECIAL_489>",
+ "<SPECIAL_490>", "<SPECIAL_491>", "<SPECIAL_492>", "<SPECIAL_493>", "<SPECIAL_494>", "<SPECIAL_495>", "<SPECIAL_496>", "<SPECIAL_497>", "<SPECIAL_498>", "<SPECIAL_499>",
+ "<SPECIAL_500>", "<SPECIAL_501>", "<SPECIAL_502>", "<SPECIAL_503>", "<SPECIAL_504>", "<SPECIAL_505>", "<SPECIAL_506>", "<SPECIAL_507>", "<SPECIAL_508>", "<SPECIAL_509>",
+ "<SPECIAL_510>", "<SPECIAL_511>", "<SPECIAL_512>", "<SPECIAL_513>", "<SPECIAL_514>", "<SPECIAL_515>", "<SPECIAL_516>", "<SPECIAL_517>", "<SPECIAL_518>", "<SPECIAL_519>",
+ "<SPECIAL_520>", "<SPECIAL_521>", "<SPECIAL_522>", "<SPECIAL_523>", "<SPECIAL_524>", "<SPECIAL_525>", "<SPECIAL_526>", "<SPECIAL_527>", "<SPECIAL_528>", "<SPECIAL_529>",
+ "<SPECIAL_530>", "<SPECIAL_531>", "<SPECIAL_532>", "<SPECIAL_533>", "<SPECIAL_534>", "<SPECIAL_535>", "<SPECIAL_536>", "<SPECIAL_537>", "<SPECIAL_538>", "<SPECIAL_539>",
+ "<SPECIAL_540>", "<SPECIAL_541>", "<SPECIAL_542>", "<SPECIAL_543>", "<SPECIAL_544>", "<SPECIAL_545>", "<SPECIAL_546>", "<SPECIAL_547>", "<SPECIAL_548>", "<SPECIAL_549>",
+ "<SPECIAL_550>", "<SPECIAL_551>", "<SPECIAL_552>", "<SPECIAL_553>", "<SPECIAL_554>", "<SPECIAL_555>", "<SPECIAL_556>", "<SPECIAL_557>", "<SPECIAL_558>", "<SPECIAL_559>",
+ "<SPECIAL_560>", "<SPECIAL_561>", "<SPECIAL_562>", "<SPECIAL_563>", "<SPECIAL_564>", "<SPECIAL_565>", "<SPECIAL_566>", "<SPECIAL_567>", "<SPECIAL_568>", "<SPECIAL_569>",
+ "<SPECIAL_570>", "<SPECIAL_571>", "<SPECIAL_572>", "<SPECIAL_573>", "<SPECIAL_574>", "<SPECIAL_575>", "<SPECIAL_576>", "<SPECIAL_577>", "<SPECIAL_578>", "<SPECIAL_579>",
+ "<SPECIAL_580>", "<SPECIAL_581>", "<SPECIAL_582>", "<SPECIAL_583>", "<SPECIAL_584>", "<SPECIAL_585>", "<SPECIAL_586>", "<SPECIAL_587>", "<SPECIAL_588>", "<SPECIAL_589>",
+ "<SPECIAL_590>", "<SPECIAL_591>", "<SPECIAL_592>", "<SPECIAL_593>", "<SPECIAL_594>", "<SPECIAL_595>", "<SPECIAL_596>", "<SPECIAL_597>", "<SPECIAL_598>", "<SPECIAL_599>",
+ "<SPECIAL_600>", "<SPECIAL_601>", "<SPECIAL_602>", "<SPECIAL_603>", "<SPECIAL_604>", "<SPECIAL_605>", "<SPECIAL_606>", "<SPECIAL_607>", "<SPECIAL_608>", "<SPECIAL_609>",
+ "<SPECIAL_610>", "<SPECIAL_611>", "<SPECIAL_612>", "<SPECIAL_613>", "<SPECIAL_614>", "<SPECIAL_615>", "<SPECIAL_616>", "<SPECIAL_617>", "<SPECIAL_618>", "<SPECIAL_619>",
+ "<SPECIAL_620>", "<SPECIAL_621>", "<SPECIAL_622>", "<SPECIAL_623>", "<SPECIAL_624>", "<SPECIAL_625>", "<SPECIAL_626>", "<SPECIAL_627>", "<SPECIAL_628>", "<SPECIAL_629>",
+ "<SPECIAL_630>", "<SPECIAL_631>", "<SPECIAL_632>", "<SPECIAL_633>", "<SPECIAL_634>", "<SPECIAL_635>", "<SPECIAL_636>", "<SPECIAL_637>", "<SPECIAL_638>", "<SPECIAL_639>",
+ "<SPECIAL_640>", "<SPECIAL_641>", "<SPECIAL_642>", "<SPECIAL_643>", "<SPECIAL_644>", "<SPECIAL_645>", "<SPECIAL_646>", "<SPECIAL_647>", "<SPECIAL_648>", "<SPECIAL_649>",
+ "<SPECIAL_650>", "<SPECIAL_651>", "<SPECIAL_652>", "<SPECIAL_653>", "<SPECIAL_654>", "<SPECIAL_655>", "<SPECIAL_656>", "<SPECIAL_657>", "<SPECIAL_658>", "<SPECIAL_659>",
+ "<SPECIAL_660>", "<SPECIAL_661>", "<SPECIAL_662>", "<SPECIAL_663>", "<SPECIAL_664>", "<SPECIAL_665>", "<SPECIAL_666>", "<SPECIAL_667>", "<SPECIAL_668>", "<SPECIAL_669>",
+ "<SPECIAL_670>", "<SPECIAL_671>", "<SPECIAL_672>", "<SPECIAL_673>", "<SPECIAL_674>", "<SPECIAL_675>", "<SPECIAL_676>", "<SPECIAL_677>", "<SPECIAL_678>", "<SPECIAL_679>",
+ "<SPECIAL_680>", "<SPECIAL_681>", "<SPECIAL_682>", "<SPECIAL_683>", "<SPECIAL_684>", "<SPECIAL_685>", "<SPECIAL_686>", "<SPECIAL_687>", "<SPECIAL_688>", "<SPECIAL_689>",
+ "<SPECIAL_690>", "<SPECIAL_691>", "<SPECIAL_692>", "<SPECIAL_693>", "<SPECIAL_694>", "<SPECIAL_695>", "<SPECIAL_696>", "<SPECIAL_697>", "<SPECIAL_698>", "<SPECIAL_699>",
+ "<SPECIAL_700>", "<SPECIAL_701>", "<SPECIAL_702>", "<SPECIAL_703>", "<SPECIAL_704>", "<SPECIAL_705>", "<SPECIAL_706>", "<SPECIAL_707>", "<SPECIAL_708>", "<SPECIAL_709>",
+ "<SPECIAL_710>", "<SPECIAL_711>", "<SPECIAL_712>", "<SPECIAL_713>", "<SPECIAL_714>", "<SPECIAL_715>", "<SPECIAL_716>", "<SPECIAL_717>", "<SPECIAL_718>", "<SPECIAL_719>",
+ "<SPECIAL_720>", "<SPECIAL_721>", "<SPECIAL_722>", "<SPECIAL_723>", "<SPECIAL_724>", "<SPECIAL_725>", "<SPECIAL_726>", "<SPECIAL_727>", "<SPECIAL_728>", "<SPECIAL_729>",
+ "<SPECIAL_730>", "<SPECIAL_731>", "<SPECIAL_732>", "<SPECIAL_733>", "<SPECIAL_734>", "<SPECIAL_735>", "<SPECIAL_736>", "<SPECIAL_737>", "<SPECIAL_738>", "<SPECIAL_739>",
+ "<SPECIAL_740>", "<SPECIAL_741>", "<SPECIAL_742>", "<SPECIAL_743>", "<SPECIAL_744>", "<SPECIAL_745>", "<SPECIAL_746>", "<SPECIAL_747>", "<SPECIAL_748>", "<SPECIAL_749>",
+ "<SPECIAL_750>", "<SPECIAL_751>", "<SPECIAL_752>", "<SPECIAL_753>", "<SPECIAL_754>", "<SPECIAL_755>", "<SPECIAL_756>", "<SPECIAL_757>", "<SPECIAL_758>", "<SPECIAL_759>",
+ "<SPECIAL_760>", "<SPECIAL_761>", "<SPECIAL_762>", "<SPECIAL_763>", "<SPECIAL_764>", "<SPECIAL_765>", "<SPECIAL_766>", "<SPECIAL_767>", "<SPECIAL_768>", "<SPECIAL_769>",
+ "<SPECIAL_770>", "<SPECIAL_771>", "<SPECIAL_772>", "<SPECIAL_773>", "<SPECIAL_774>", "<SPECIAL_775>", "<SPECIAL_776>", "<SPECIAL_777>", "<SPECIAL_778>", "<SPECIAL_779>",
+ "<SPECIAL_780>", "<SPECIAL_781>", "<SPECIAL_782>", "<SPECIAL_783>", "<SPECIAL_784>", "<SPECIAL_785>", "<SPECIAL_786>", "<SPECIAL_787>", "<SPECIAL_788>", "<SPECIAL_789>",
+ "<SPECIAL_790>", "<SPECIAL_791>", "<SPECIAL_792>", "<SPECIAL_793>", "<SPECIAL_794>", "<SPECIAL_795>", "<SPECIAL_796>", "<SPECIAL_797>", "<SPECIAL_798>", "<SPECIAL_799>",
+ "<SPECIAL_800>", "<SPECIAL_801>", "<SPECIAL_802>", "<SPECIAL_803>", "<SPECIAL_804>", "<SPECIAL_805>", "<SPECIAL_806>", "<SPECIAL_807>", "<SPECIAL_808>", "<SPECIAL_809>",
+ "<SPECIAL_810>", "<SPECIAL_811>", "<SPECIAL_812>", "<SPECIAL_813>", "<SPECIAL_814>", "<SPECIAL_815>", "<SPECIAL_816>", "<SPECIAL_817>", "<SPECIAL_818>", "<SPECIAL_819>",
+ "<SPECIAL_820>", "<SPECIAL_821>", "<SPECIAL_822>", "<SPECIAL_823>", "<SPECIAL_824>", "<SPECIAL_825>", "<SPECIAL_826>", "<SPECIAL_827>", "<SPECIAL_828>", "<SPECIAL_829>",
+ "<SPECIAL_830>", "<SPECIAL_831>", "<SPECIAL_832>", "<SPECIAL_833>", "<SPECIAL_834>", "<SPECIAL_835>", "<SPECIAL_836>", "<SPECIAL_837>", "<SPECIAL_838>", "<SPECIAL_839>",
+ "<SPECIAL_840>", "<SPECIAL_841>", "<SPECIAL_842>", "<SPECIAL_843>", "<SPECIAL_844>", "<SPECIAL_845>", "<SPECIAL_846>", "<SPECIAL_847>", "<SPECIAL_848>", "<SPECIAL_849>",
+ "<SPECIAL_850>", "<SPECIAL_851>", "<SPECIAL_852>", "<SPECIAL_853>", "<SPECIAL_854>", "<SPECIAL_855>", "<SPECIAL_856>", "<SPECIAL_857>", "<SPECIAL_858>", "<SPECIAL_859>",
+ "<SPECIAL_860>", "<SPECIAL_861>", "<SPECIAL_862>", "<SPECIAL_863>", "<SPECIAL_864>", "<SPECIAL_865>", "<SPECIAL_866>", "<SPECIAL_867>", "<SPECIAL_868>", "<SPECIAL_869>",
+ "<SPECIAL_870>", "<SPECIAL_871>", "<SPECIAL_872>", "<SPECIAL_873>", "<SPECIAL_874>", "<SPECIAL_875>", "<SPECIAL_876>", "<SPECIAL_877>", "<SPECIAL_878>", "<SPECIAL_879>",
+ "<SPECIAL_880>", "<SPECIAL_881>", "<SPECIAL_882>", "<SPECIAL_883>", "<SPECIAL_884>", "<SPECIAL_885>", "<SPECIAL_886>", "<SPECIAL_887>", "<SPECIAL_888>", "<SPECIAL_889>",
+ "<SPECIAL_890>", "<SPECIAL_891>", "<SPECIAL_892>", "<SPECIAL_893>", "<SPECIAL_894>", "<SPECIAL_895>", "<SPECIAL_896>", "<SPECIAL_897>", "<SPECIAL_898>", "<SPECIAL_899>",
+ "<SPECIAL_900>", "<SPECIAL_901>", "<SPECIAL_902>", "<SPECIAL_903>", "<SPECIAL_904>", "<SPECIAL_905>", "<SPECIAL_906>", "<SPECIAL_907>", "<SPECIAL_908>", "<SPECIAL_909>",
+ "<SPECIAL_910>", "<SPECIAL_911>", "<SPECIAL_912>", "<SPECIAL_913>", "<SPECIAL_914>", "<SPECIAL_915>", "<SPECIAL_916>", "<SPECIAL_917>", "<SPECIAL_918>", "<SPECIAL_919>",
+ "<SPECIAL_920>", "<SPECIAL_921>", "<SPECIAL_922>", "<SPECIAL_923>", "<SPECIAL_924>", "<SPECIAL_925>", "<SPECIAL_926>", "<SPECIAL_927>", "<SPECIAL_928>", "<SPECIAL_929>",
+ "<SPECIAL_930>", "<SPECIAL_931>", "<SPECIAL_932>", "<SPECIAL_933>", "<SPECIAL_934>", "<SPECIAL_935>", "<SPECIAL_936>", "<SPECIAL_937>", "<SPECIAL_938>", "<SPECIAL_939>",
+ "<SPECIAL_940>", "<SPECIAL_941>", "<SPECIAL_942>", "<SPECIAL_943>", "<SPECIAL_944>", "<SPECIAL_945>", "<SPECIAL_946>", "<SPECIAL_947>", "<SPECIAL_948>", "<SPECIAL_949>",
+ "<SPECIAL_950>", "<SPECIAL_951>", "<SPECIAL_952>", "<SPECIAL_953>", "<SPECIAL_954>", "<SPECIAL_955>", "<SPECIAL_956>", "<SPECIAL_957>", "<SPECIAL_958>", "<SPECIAL_959>",
+ "<SPECIAL_960>", "<SPECIAL_961>", "<SPECIAL_962>", "<SPECIAL_963>", "<SPECIAL_964>", "<SPECIAL_965>", "<SPECIAL_966>", "<SPECIAL_967>", "<SPECIAL_968>", "<SPECIAL_969>",
+ "<SPECIAL_970>", "<SPECIAL_971>", "<SPECIAL_972>", "<SPECIAL_973>", "<SPECIAL_974>", "<SPECIAL_975>", "<SPECIAL_976>", "<SPECIAL_977>", "<SPECIAL_978>", "<SPECIAL_979>",
+ "<SPECIAL_980>", "<SPECIAL_981>", "<SPECIAL_982>", "<SPECIAL_983>", "<SPECIAL_984>", "<SPECIAL_985>", "<SPECIAL_986>", "<SPECIAL_987>", "<SPECIAL_988>", "<SPECIAL_989>",
+ "<SPECIAL_990>", "<SPECIAL_991>", "<SPECIAL_992>", "<SPECIAL_993>", "<SPECIAL_994>", "<SPECIAL_995>", "<SPECIAL_996>", "<SPECIAL_997>", "<SPECIAL_998>", "<SPECIAL_999>"
+ ],
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<pad>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
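Everything past the twenty named control tokens is a contiguous run of reserved placeholders, <SPECIAL_20> through <SPECIAL_999>, padding the special-token set out to exactly 1000 entries. The full list can be reconstructed programmatically:

named = [
    "<unk>", "<s>", "</s>", "[INST]", "[/INST]",
    "[AVAILABLE_TOOLS]", "[/AVAILABLE_TOOLS]", "[TOOL_RESULTS]", "[/TOOL_RESULTS]",
    "[TOOL_CALLS]", "[IMG]", "<pad>", "[IMG_BREAK]", "[IMG_END]",
    "[PREFIX]", "[MIDDLE]", "[SUFFIX]", "[SYSTEM_PROMPT]", "[/SYSTEM_PROMPT]",
    "[TOOL_CONTENT]",
]
additional_special_tokens = named + [f"<SPECIAL_{i}>" for i in range(20, 1000)]
assert len(additional_special_tokens) == 1000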
tokenizer/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b76085f9923309d873994d444989f7eb6ec074b06f25b58f1e8d7b7741070949
+ size 17078037
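Binary files in this commit are tracked through Git LFS, so the repository only stores three-line pointer stanzas like the one above (spec version, SHA-256 oid, byte size); the ~17 MB tokenizer.json itself lives in LFS storage. A tiny parser sketch:

def parse_lfs_pointer(text: str) -> tuple[str, int]:
    # Each pointer line is "key value"; split once and collect into a dict.
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return fields["oid"], int(fields["size"])

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:b76085f9923309d873994d444989f7eb6ec074b06f25b58f1e8d7b7741070949
size 17078037"""
print(parse_lfs_pointer(pointer))  # ('sha256:b76085f9...', 17078037)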
tokenizer/tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
 
transformer/config.json ADDED
@@ -0,0 +1,22 @@
+ {
+ "_class_name": "Flux2Transformer2DModel",
+ "_diffusers_version": "0.36.0.dev0",
+ "attention_head_dim": 128,
+ "axes_dims_rope": [
+ 32,
+ 32,
+ 32,
+ 32
+ ],
+ "eps": 1e-06,
+ "in_channels": 128,
+ "joint_attention_dim": 15360,
+ "mlp_ratio": 3.0,
+ "num_attention_heads": 48,
+ "num_layers": 8,
+ "num_single_layers": 48,
+ "out_channels": null,
+ "patch_size": 1,
+ "rope_theta": 2000,
+ "timestep_guidance_channels": 256
+ }
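A few consistency checks fall straight out of this config: 48 heads of dim 128 give a 6144-wide inner stream, mlp_ratio 3.0 implies an 18432-wide MLP, and the four RoPE axes of 32 dims each exactly span one attention head:

attention_head_dim, num_attention_heads = 128, 48
axes_dims_rope, mlp_ratio = [32, 32, 32, 32], 3.0

inner_dim = attention_head_dim * num_attention_heads
assert inner_dim == 6144
assert int(inner_dim * mlp_ratio) == 18432          # single-block MLP width
assert sum(axes_dims_rope) == attention_head_dim    # RoPE covers the full head dim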
transformer/diffusion_pytorch_model-00001-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9d9b85f75f72fb17c7d29dacf7c430e924da93122d578a559a36a7635e153714
+ size 9935797200
transformer/diffusion_pytorch_model-00002-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86adf6f41474b00bd57afbb29a09f008be7d6af8ae914956585ba5bc6bf97c28
+ size 9890181048
transformer/diffusion_pytorch_model-00003-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a14e26e8f305dd26d7881f333e6e6ce5b562cbb55282538f46d38e1ff2715179
+ size 9814681480
transformer/diffusion_pytorch_model-00004-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5c4f38976fd8d7e5fb2d4cd20562d74eebba3264566987e3ef938d807c75be90
+ size 9814681536
transformer/diffusion_pytorch_model-00005-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4d7a74d916fc22117cde8bad76aa4b561e6dc92368cddc23bdae06dbc586ad95
+ size 9814681536
transformer/diffusion_pytorch_model-00006-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:08f3ad03610651f9d630177ac3a4770d532fa72d788d3b36c39a0301b1595447
+ size 9814681536
transformer/diffusion_pytorch_model-00007-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:789b9bacb607e9b97597f77c86056fa6cbb747c2a6016588e6e196814b5f9733
+ size 5361898792
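The seven shard sizes in the pointers above sum to 64,446,603,128 bytes, closely matching the total_size of 64,446,562,304 recorded in the index that follows; the ~40 KB gap is plausibly the per-shard safetensors header overhead, so the numbers cross-check:

shard_bytes = [
    9935797200, 9890181048, 9814681480, 9814681536,
    9814681536, 9814681536, 5361898792,
]
total = sum(shard_bytes)
print(total)                # 64446603128
print(total - 64446562304)  # 40824 bytes of overhead across 7 shards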
transformer/diffusion_pytorch_model.safetensors.index.json ADDED
@@ -0,0 +1,338 @@
+ {
+ "metadata": {
+ "total_size": 64446562304
+ },
+ "weight_map": {
+ "context_embedder.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
+ "double_stream_modulation_img.linear.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
+ "double_stream_modulation_txt.linear.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
+ "norm_out.linear.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "proj_out.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_stream_modulation.linear.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
+ "single_transformer_blocks.0.attn.norm_k.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
+ "single_transformer_blocks.0.attn.norm_q.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
+ "single_transformer_blocks.0.attn.to_out.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
+ "single_transformer_blocks.0.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
+ "single_transformer_blocks.1.attn.norm_k.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
+ "single_transformer_blocks.1.attn.norm_q.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
+ "single_transformer_blocks.1.attn.to_out.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
+ "single_transformer_blocks.1.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
+ "single_transformer_blocks.10.attn.norm_k.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.10.attn.norm_q.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.10.attn.to_out.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.10.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.11.attn.norm_k.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.11.attn.norm_q.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.11.attn.to_out.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.11.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.12.attn.norm_k.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.12.attn.norm_q.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.12.attn.to_out.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.12.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.13.attn.norm_k.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.13.attn.norm_q.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.13.attn.to_out.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.13.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.14.attn.norm_k.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.14.attn.norm_q.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.14.attn.to_out.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.14.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.15.attn.norm_k.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.15.attn.norm_q.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.15.attn.to_out.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.15.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.16.attn.norm_k.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.16.attn.norm_q.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.16.attn.to_out.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.16.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.17.attn.norm_k.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.17.attn.norm_q.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.17.attn.to_out.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.17.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.18.attn.norm_k.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.18.attn.norm_q.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.18.attn.to_out.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.18.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.19.attn.norm_k.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.19.attn.norm_q.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.19.attn.to_out.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.19.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.2.attn.norm_k.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
+ "single_transformer_blocks.2.attn.norm_q.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
+ "single_transformer_blocks.2.attn.to_out.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.2.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
+ "single_transformer_blocks.20.attn.norm_k.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.20.attn.norm_q.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.20.attn.to_out.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.20.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.21.attn.norm_k.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.21.attn.norm_q.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.21.attn.to_out.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.21.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.22.attn.norm_k.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.22.attn.norm_q.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.22.attn.to_out.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.22.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00004-of-00007.safetensors",
+ "single_transformer_blocks.23.attn.norm_k.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.23.attn.norm_q.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.23.attn.to_out.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.23.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.24.attn.norm_k.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.24.attn.norm_q.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.24.attn.to_out.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.24.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.25.attn.norm_k.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.25.attn.norm_q.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.25.attn.to_out.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.25.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.26.attn.norm_k.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.26.attn.norm_q.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.26.attn.to_out.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.26.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.27.attn.norm_k.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.27.attn.norm_q.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.27.attn.to_out.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.27.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.28.attn.norm_k.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.28.attn.norm_q.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.28.attn.to_out.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.28.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.29.attn.norm_k.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.29.attn.norm_q.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.29.attn.to_out.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.29.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.3.attn.norm_k.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.3.attn.norm_q.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.3.attn.to_out.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.3.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.30.attn.norm_k.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.30.attn.norm_q.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.30.attn.to_out.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.30.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.31.attn.norm_k.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.31.attn.norm_q.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.31.attn.to_out.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.31.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.32.attn.norm_k.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.32.attn.norm_q.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.32.attn.to_out.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.32.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00005-of-00007.safetensors",
+ "single_transformer_blocks.33.attn.norm_k.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.33.attn.norm_q.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.33.attn.to_out.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.33.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.34.attn.norm_k.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.34.attn.norm_q.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.34.attn.to_out.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.34.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.35.attn.norm_k.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.35.attn.norm_q.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.35.attn.to_out.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.35.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.36.attn.norm_k.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.36.attn.norm_q.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.36.attn.to_out.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.36.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.37.attn.norm_k.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.37.attn.norm_q.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.37.attn.to_out.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.37.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.38.attn.norm_k.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.38.attn.norm_q.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.38.attn.to_out.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.38.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.39.attn.norm_k.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.39.attn.norm_q.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.39.attn.to_out.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.39.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.4.attn.norm_k.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.4.attn.norm_q.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.4.attn.to_out.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.4.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.40.attn.norm_k.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.40.attn.norm_q.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.40.attn.to_out.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.40.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.41.attn.norm_k.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.41.attn.norm_q.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.41.attn.to_out.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.41.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.42.attn.norm_k.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.42.attn.norm_q.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.42.attn.to_out.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.42.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00006-of-00007.safetensors",
+ "single_transformer_blocks.43.attn.norm_k.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.43.attn.norm_q.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.43.attn.to_out.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.43.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.44.attn.norm_k.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.44.attn.norm_q.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.44.attn.to_out.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.44.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.45.attn.norm_k.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.45.attn.norm_q.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.45.attn.to_out.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.45.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.46.attn.norm_k.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.46.attn.norm_q.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.46.attn.to_out.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.46.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.47.attn.norm_k.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.47.attn.norm_q.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.47.attn.to_out.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.47.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00007-of-00007.safetensors",
+ "single_transformer_blocks.5.attn.norm_k.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.5.attn.norm_q.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.5.attn.to_out.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.5.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.6.attn.norm_k.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.6.attn.norm_q.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.6.attn.to_out.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.6.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.7.attn.norm_k.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.7.attn.norm_q.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.7.attn.to_out.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.7.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.8.attn.norm_k.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.8.attn.norm_q.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.8.attn.to_out.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.8.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.9.attn.norm_k.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.9.attn.norm_q.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.9.attn.to_out.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "single_transformer_blocks.9.attn.to_qkv_mlp_proj.weight": "diffusion_pytorch_model-00003-of-00007.safetensors",
+ "time_guidance_embed.guidance_embedder.linear_1.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
+ "time_guidance_embed.guidance_embedder.linear_2.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
+ "time_guidance_embed.timestep_embedder.linear_1.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
207
+ "time_guidance_embed.timestep_embedder.linear_2.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
208
+ "transformer_blocks.0.attn.add_k_proj.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
209
+ "transformer_blocks.0.attn.add_q_proj.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
210
+ "transformer_blocks.0.attn.add_v_proj.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
211
+ "transformer_blocks.0.attn.norm_added_k.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
212
+ "transformer_blocks.0.attn.norm_added_q.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
213
+ "transformer_blocks.0.attn.norm_k.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
214
+ "transformer_blocks.0.attn.norm_q.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
215
+ "transformer_blocks.0.attn.to_add_out.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
216
+ "transformer_blocks.0.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
217
+ "transformer_blocks.0.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
218
+ "transformer_blocks.0.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
219
+ "transformer_blocks.0.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
220
+ "transformer_blocks.0.ff.linear_in.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
221
+ "transformer_blocks.0.ff.linear_out.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
222
+ "transformer_blocks.0.ff_context.linear_in.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
223
+ "transformer_blocks.0.ff_context.linear_out.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
224
+ "transformer_blocks.1.attn.add_k_proj.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
225
+ "transformer_blocks.1.attn.add_q_proj.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
226
+ "transformer_blocks.1.attn.add_v_proj.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
227
+ "transformer_blocks.1.attn.norm_added_k.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
228
+ "transformer_blocks.1.attn.norm_added_q.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
229
+ "transformer_blocks.1.attn.norm_k.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
230
+ "transformer_blocks.1.attn.norm_q.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
231
+ "transformer_blocks.1.attn.to_add_out.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
232
+ "transformer_blocks.1.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
233
+ "transformer_blocks.1.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
234
+ "transformer_blocks.1.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
235
+ "transformer_blocks.1.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
236
+ "transformer_blocks.1.ff.linear_in.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
237
+ "transformer_blocks.1.ff.linear_out.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
238
+ "transformer_blocks.1.ff_context.linear_in.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
239
+ "transformer_blocks.1.ff_context.linear_out.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
240
+ "transformer_blocks.2.attn.add_k_proj.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
241
+ "transformer_blocks.2.attn.add_q_proj.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
242
+ "transformer_blocks.2.attn.add_v_proj.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
243
+ "transformer_blocks.2.attn.norm_added_k.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
244
+ "transformer_blocks.2.attn.norm_added_q.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
245
+ "transformer_blocks.2.attn.norm_k.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
246
+ "transformer_blocks.2.attn.norm_q.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
247
+ "transformer_blocks.2.attn.to_add_out.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
248
+ "transformer_blocks.2.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
249
+ "transformer_blocks.2.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
250
+ "transformer_blocks.2.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
251
+ "transformer_blocks.2.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
252
+ "transformer_blocks.2.ff.linear_in.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
253
+ "transformer_blocks.2.ff.linear_out.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
254
+ "transformer_blocks.2.ff_context.linear_in.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
255
+ "transformer_blocks.2.ff_context.linear_out.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
256
+ "transformer_blocks.3.attn.add_k_proj.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
257
+ "transformer_blocks.3.attn.add_q_proj.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
258
+ "transformer_blocks.3.attn.add_v_proj.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
259
+ "transformer_blocks.3.attn.norm_added_k.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
260
+ "transformer_blocks.3.attn.norm_added_q.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
261
+ "transformer_blocks.3.attn.norm_k.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
262
+ "transformer_blocks.3.attn.norm_q.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
263
+ "transformer_blocks.3.attn.to_add_out.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
264
+ "transformer_blocks.3.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
265
+ "transformer_blocks.3.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
266
+ "transformer_blocks.3.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
267
+ "transformer_blocks.3.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
268
+ "transformer_blocks.3.ff.linear_in.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
269
+ "transformer_blocks.3.ff.linear_out.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
270
+ "transformer_blocks.3.ff_context.linear_in.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
271
+ "transformer_blocks.3.ff_context.linear_out.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
272
+ "transformer_blocks.4.attn.add_k_proj.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
273
+ "transformer_blocks.4.attn.add_q_proj.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
274
+ "transformer_blocks.4.attn.add_v_proj.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
275
+ "transformer_blocks.4.attn.norm_added_k.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
276
+ "transformer_blocks.4.attn.norm_added_q.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
277
+ "transformer_blocks.4.attn.norm_k.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
278
+ "transformer_blocks.4.attn.norm_q.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
279
+ "transformer_blocks.4.attn.to_add_out.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
280
+ "transformer_blocks.4.attn.to_k.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
281
+ "transformer_blocks.4.attn.to_out.0.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
282
+ "transformer_blocks.4.attn.to_q.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
283
+ "transformer_blocks.4.attn.to_v.weight": "diffusion_pytorch_model-00001-of-00007.safetensors",
284
+ "transformer_blocks.4.ff.linear_in.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
285
+ "transformer_blocks.4.ff.linear_out.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
286
+ "transformer_blocks.4.ff_context.linear_in.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
287
+ "transformer_blocks.4.ff_context.linear_out.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
288
+ "transformer_blocks.5.attn.add_k_proj.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
289
+ "transformer_blocks.5.attn.add_q_proj.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
290
+ "transformer_blocks.5.attn.add_v_proj.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
291
+ "transformer_blocks.5.attn.norm_added_k.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
292
+ "transformer_blocks.5.attn.norm_added_q.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
293
+ "transformer_blocks.5.attn.norm_k.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
294
+ "transformer_blocks.5.attn.norm_q.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
295
+ "transformer_blocks.5.attn.to_add_out.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
296
+ "transformer_blocks.5.attn.to_k.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
297
+ "transformer_blocks.5.attn.to_out.0.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
298
+ "transformer_blocks.5.attn.to_q.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
299
+ "transformer_blocks.5.attn.to_v.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
300
+ "transformer_blocks.5.ff.linear_in.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
301
+ "transformer_blocks.5.ff.linear_out.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
302
+ "transformer_blocks.5.ff_context.linear_in.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
303
+ "transformer_blocks.5.ff_context.linear_out.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
304
+ "transformer_blocks.6.attn.add_k_proj.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
305
+ "transformer_blocks.6.attn.add_q_proj.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
306
+ "transformer_blocks.6.attn.add_v_proj.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
307
+ "transformer_blocks.6.attn.norm_added_k.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
308
+ "transformer_blocks.6.attn.norm_added_q.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
309
+ "transformer_blocks.6.attn.norm_k.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
310
+ "transformer_blocks.6.attn.norm_q.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
311
+ "transformer_blocks.6.attn.to_add_out.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
312
+ "transformer_blocks.6.attn.to_k.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
313
+ "transformer_blocks.6.attn.to_out.0.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
314
+ "transformer_blocks.6.attn.to_q.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
315
+ "transformer_blocks.6.attn.to_v.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
316
+ "transformer_blocks.6.ff.linear_in.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
317
+ "transformer_blocks.6.ff.linear_out.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
318
+ "transformer_blocks.6.ff_context.linear_in.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
319
+ "transformer_blocks.6.ff_context.linear_out.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
320
+ "transformer_blocks.7.attn.add_k_proj.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
321
+ "transformer_blocks.7.attn.add_q_proj.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
322
+ "transformer_blocks.7.attn.add_v_proj.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
323
+ "transformer_blocks.7.attn.norm_added_k.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
324
+ "transformer_blocks.7.attn.norm_added_q.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
325
+ "transformer_blocks.7.attn.norm_k.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
326
+ "transformer_blocks.7.attn.norm_q.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
327
+ "transformer_blocks.7.attn.to_add_out.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
328
+ "transformer_blocks.7.attn.to_k.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
329
+ "transformer_blocks.7.attn.to_out.0.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
330
+ "transformer_blocks.7.attn.to_q.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
331
+ "transformer_blocks.7.attn.to_v.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
332
+ "transformer_blocks.7.ff.linear_in.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
333
+ "transformer_blocks.7.ff.linear_out.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
334
+ "transformer_blocks.7.ff_context.linear_in.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
335
+ "transformer_blocks.7.ff_context.linear_out.weight": "diffusion_pytorch_model-00002-of-00007.safetensors",
336
+ "x_embedder.weight": "diffusion_pytorch_model-00001-of-00007.safetensors"
337
+ }
338
+ }
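The `weight_map` above is the standard sharded-safetensors index: each parameter name points at the shard file that stores its tensor, so a loader only has to open the shards it actually needs. diffusers' `from_pretrained` resolves this index automatically; the sketch below walks it by hand to make the format concrete. The local path `flux2-dev/` is illustrative and assumes a full snapshot of this repository.

```python
# Minimal sketch: resolving the sharded transformer weights via the index.
# Assumption: a local snapshot of this repo lives at "flux2-dev/".
import json
from collections import defaultdict

from safetensors.torch import load_file

root = "flux2-dev/transformer"
with open(f"{root}/diffusion_pytorch_model.safetensors.index.json") as f:
    index = json.load(f)

# Invert weight_map ({param name -> shard file}) so each shard opens once.
params_per_shard = defaultdict(list)
for param_name, shard_file in index["weight_map"].items():
    params_per_shard[shard_file].append(param_name)

state_dict = {}
for shard_file, param_names in params_per_shard.items():
    shard = load_file(f"{root}/{shard_file}")
    state_dict.update({name: shard[name] for name in param_names})

print(f"loaded {len(state_dict)} tensors from {len(params_per_shard)} shards")
```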
vae/config.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "_class_name": "AutoencoderKLFlux2",
+ "_diffusers_version": "0.36.0.dev0",
+ "act_fn": "silu",
+ "batch_norm_eps": 0.0001,
+ "batch_norm_momentum": 0.1,
+ "block_out_channels": [
+ 128,
+ 256,
+ 512,
+ 512
+ ],
+ "down_block_types": [
+ "DownEncoderBlock2D",
+ "DownEncoderBlock2D",
+ "DownEncoderBlock2D",
+ "DownEncoderBlock2D"
+ ],
+ "force_upcast": true,
+ "in_channels": 3,
+ "latent_channels": 32,
+ "layers_per_block": 2,
+ "mid_block_add_attention": true,
+ "norm_num_groups": 32,
+ "out_channels": 3,
+ "patch_size": [
+ 2,
+ 2
+ ],
+ "sample_size": 1024,
+ "up_block_types": [
+ "UpDecoderBlock2D",
+ "UpDecoderBlock2D",
+ "UpDecoderBlock2D",
+ "UpDecoderBlock2D"
+ ],
+ "use_post_quant_conv": true,
+ "use_quant_conv": true
+ }
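Read together, the config pins down the autoencoder geometry: four encoder stages (of which three downsample, following the usual diffusers convention that the final `DownEncoderBlock2D` keeps resolution) plus the 2×2 `patch_size` imply a 16× spatial reduction with `latent_channels` of 32. A hedged sketch for loading and probing it: the class name comes straight from `_class_name` but requires a diffusers build that exports it (the config pins `0.36.0.dev0`), and the `encode(...).latent_dist` interface is assumed to match the standard `AutoencoderKL` API.

```python
# Minimal sketch: load the VAE and inspect its latent geometry empirically.
# Assumptions: local snapshot at "flux2-dev/"; diffusers exports
# AutoencoderKLFlux2 (per "_class_name"); encode() mirrors AutoencoderKL.
import torch
from diffusers import AutoencoderKLFlux2

vae = AutoencoderKLFlux2.from_pretrained("flux2-dev", subfolder="vae")

image = torch.randn(1, 3, 1024, 1024)  # sample_size from the config
with torch.no_grad():
    latents = vae.encode(image).latent_dist.sample()

# Printing the shape shows how the 2x2 patching is folded into the channel
# and spatial dimensions on top of the three stride-2 down blocks.
print(latents.shape)
```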
vae/diffusion_pytorch_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d64f3a68e1cc4f9f4e29b6e0da38a0204fe9a49f2d4053f0ec1fa1ca02f9c4b5
+ size 336213556
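The weight file itself lives in Git LFS, so the diff only shows the three-line pointer: the spec version, the SHA-256 `oid` of the real contents, and its `size` in bytes. Those two fields are enough to verify a download out of band; in this illustrative sketch the local path is an assumption, while the oid and size come straight from the pointer.

```python
# Minimal sketch: verify a downloaded file against the LFS pointer above.
import hashlib
import os

path = "flux2-dev/vae/diffusion_pytorch_model.safetensors"  # illustrative
expected_oid = "d64f3a68e1cc4f9f4e29b6e0da38a0204fe9a49f2d4053f0ec1fa1ca02f9c4b5"
expected_size = 336213556

assert os.path.getsize(path) == expected_size, "size mismatch"

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)
assert digest.hexdigest() == expected_oid, "sha256 mismatch"
print("LFS pointer verified")
```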