diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000000000000000000000000000000000000..e98058ee30350a2be1da90c47bf2f335ec21457b
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,21 @@
+# The .dockerignore file excludes files from the container build process.
+#
+# https://docs.docker.com/engine/reference/builder/#dockerignore-file
+
+# Exclude Git files
+.git
+.github
+.gitignore
+
+# Exclude Python cache files
+__pycache__
+.mypy_cache
+.pytest_cache
+.ruff_cache
+
+# Exclude Python virtual environment
+/venv
+
+# Exclude some weights
+/openai
+/liuhaotian
diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000000000000000000000000000000000000..d99a490bee397f969e93faa0c083b69674435ee8
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,18 @@
+root = true
+
+# Unix-style newlines with a newline ending every file
+[*]
+end_of_line = lf
+insert_final_newline = true
+trim_trailing_whitespace = true
+charset = utf-8
+
+# 4 space indentation
+[*.{py,json}]
+indent_style = space
+indent_size = 4
+
+# 2 space indentation
+[*.{md,sh,yaml,yml}]
+indent_style = space
+indent_size = 2
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index 1ef325f1b111266a6b26e0196871bd78baa8c2f3..5462cde720b76950382f4f83eb14d08ac438edaa 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1,59 +1,29 @@
-*.7z filter=lfs diff=lfs merge=lfs -text
-*.arrow filter=lfs diff=lfs merge=lfs -text
-*.bin filter=lfs diff=lfs merge=lfs -text
-*.bz2 filter=lfs diff=lfs merge=lfs -text
-*.ckpt filter=lfs diff=lfs merge=lfs -text
-*.ftz filter=lfs diff=lfs merge=lfs -text
-*.gz filter=lfs diff=lfs merge=lfs -text
-*.h5 filter=lfs diff=lfs merge=lfs -text
-*.joblib filter=lfs diff=lfs merge=lfs -text
-*.lfs.* filter=lfs diff=lfs merge=lfs -text
-*.lz4 filter=lfs diff=lfs merge=lfs -text
-*.mds filter=lfs diff=lfs merge=lfs -text
-*.mlmodel filter=lfs diff=lfs merge=lfs -text
-*.model filter=lfs diff=lfs merge=lfs -text
-*.msgpack filter=lfs diff=lfs merge=lfs -text
-*.npy filter=lfs diff=lfs merge=lfs -text
-*.npz filter=lfs diff=lfs merge=lfs -text
-*.onnx filter=lfs diff=lfs merge=lfs -text
-*.ot filter=lfs diff=lfs merge=lfs -text
-*.parquet filter=lfs diff=lfs merge=lfs -text
-*.pb filter=lfs diff=lfs merge=lfs -text
-*.pickle filter=lfs diff=lfs merge=lfs -text
-*.pkl filter=lfs diff=lfs merge=lfs -text
-*.pt filter=lfs diff=lfs merge=lfs -text
-*.pth filter=lfs diff=lfs merge=lfs -text
-*.rar filter=lfs diff=lfs merge=lfs -text
-*.safetensors filter=lfs diff=lfs merge=lfs -text
-saved_model/**/* filter=lfs diff=lfs merge=lfs -text
-*.tar.* filter=lfs diff=lfs merge=lfs -text
-*.tar filter=lfs diff=lfs merge=lfs -text
-*.tflite filter=lfs diff=lfs merge=lfs -text
-*.tgz filter=lfs diff=lfs merge=lfs -text
-*.wasm filter=lfs diff=lfs merge=lfs -text
-*.xz filter=lfs diff=lfs merge=lfs -text
-*.zip filter=lfs diff=lfs merge=lfs -text
-*.zst filter=lfs diff=lfs merge=lfs -text
-*tfevents* filter=lfs diff=lfs merge=lfs -text
-# Audio files - uncompressed
-*.pcm filter=lfs diff=lfs merge=lfs -text
-*.sam filter=lfs diff=lfs merge=lfs -text
-*.raw filter=lfs diff=lfs merge=lfs -text
-# Audio files - compressed
-*.aac filter=lfs diff=lfs merge=lfs -text
-*.flac filter=lfs diff=lfs merge=lfs -text
-*.mp3 filter=lfs diff=lfs merge=lfs -text
-*.ogg filter=lfs diff=lfs merge=lfs -text
-*.wav filter=lfs diff=lfs merge=lfs -text
-# Image files - uncompressed
-*.bmp filter=lfs diff=lfs merge=lfs -text
-*.gif filter=lfs diff=lfs merge=lfs -text
-*.png filter=lfs diff=lfs merge=lfs -text
-*.tiff filter=lfs diff=lfs merge=lfs -text
-# Image files - compressed
-*.jpg filter=lfs diff=lfs merge=lfs -text
-*.jpeg filter=lfs diff=lfs merge=lfs -text
-*.webp filter=lfs diff=lfs merge=lfs -text
-# Video files - compressed
-*.mp4 filter=lfs diff=lfs merge=lfs -text
-*.webm filter=lfs diff=lfs merge=lfs -text
+# https://git-scm.com/docs/gitattributes
+
+# Set the default behavior, in case people don't have core.autocrlf set.
+# https://git-scm.com/docs/gitattributes#_end_of_line_conversion
+* text=auto
+
+# Common Python attributes, taken from https://github.com/alexkaratarakis/gitattributes/blob/710900479a2bedeec7003d381719521ffbb18bf8/Python.gitattributes
+# Source files
+# ============
+*.pxd text diff=python
+*.py text diff=python
+*.py3 text diff=python
+*.pyw text diff=python
+*.pyx text diff=python
+*.pyz text diff=python
+*.pyi text diff=python
+
+# Binary files
+# ============
+*.db binary
+*.p binary
+*.pkl binary
+*.pickle binary
+*.pyc binary export-ignore
+*.pyo binary export-ignore
+*.pyd binary
+
+# Jupyter notebook
+*.ipynb text eol=lf
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..747e3b55f71de1a8ffbfcdff8665d6cec882c386
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,81 @@
+# Python
+__pycache__
+*.pyc
+*.egg-info
+dist
+
+# Log
+*.log
+*.log.*
+# *.json
+# *.jsonl
+
+# Data
+!**/alpaca-data-conversation.json
+# Editor
+.idea
+*.swp
+.vscode
+
+# Other
+.DS_Store
+wandb
+output
+llavavid
+
+checkpoints
+project_checkpoints
+debug_checkpoints
+playground/data
+playground/cc3m_llava34b_cap
+ckpts*
+
+.ipynb_checkpoints
+chunyl_scripts
+*.ipynb
+
+# DevContainer
+!.devcontainer/*
+
+# Demo
+serve_images/
+notebooks/
+logs
+scripts/dist_*
+logs/
+submissions/
+cn_scripts/
+internal_project_checkpoints/
+work_dirs
+workdir
+.venv
+venv
+scripts/i18n/*
+playground/.nfs028b000000010add00000001
+HIP
+playground/.nfs028b0000017bff2c00000012
+scripts/qwen
+scripts/vicuna
+scripts/mistral
+scripts/baseline_rep
+scripts/cn_boli01_hl
+scripts/cn_boli01_lf
+scripts/cn_lf
+scripts/cn_lq
+scripts/cn_yg
+scripts/cn_yg_hao
+scripts/eva_encoder
+scripts/i18n
+scripts/i18n_higher_res
+scripts/multi-images
+scratchpad
+build/
+playground/*.json
+mlx_configs/
+data_processing/
+# demo/
+.python-version
+uv.lock
+# nas
+.nfs*
+data/
\ No newline at end of file
diff --git a/.vscode/launch.json b/.vscode/launch.json
new file mode 100644
index 0000000000000000000000000000000000000000..a1ef1adb30856bdb2baa064011c4b92e25f906fc
--- /dev/null
+++ b/.vscode/launch.json
@@ -0,0 +1,26 @@
+{
+ // Use IntelliSense to learn about possible attributes.
+ // Hover to view descriptions of existing attributes.
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+ "version": "0.2.0",
+ "configurations": [
+ {
+ "name": "sh_file_debug",
+ "type": "debugpy",
+ "request": "attach",
+ "connect": {
+ "host": "localhost",
+ "port": 9518
+ },
+ "justMyCode": false
+ },
+ {
+ "name": "Python 调试程序: 包含参数的当前文件",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${file}",
+ "console": "integratedTerminal",
+ "args": "${command:pickArgs}"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/11video_r1_training_model_keys.json b/11video_r1_training_model_keys.json
new file mode 100644
index 0000000000000000000000000000000000000000..a1b158b94eccdee7eae7fcec4717c6abcc2e093f
--- /dev/null
+++ b/11video_r1_training_model_keys.json
@@ -0,0 +1,842 @@
+{
+ "total": 825,
+ "first_10": [
+ "visual.patch_embed.proj.weight",
+ "visual.blocks.0.norm1.weight",
+ "visual.blocks.0.norm2.weight",
+ "visual.blocks.0.attn.qkv.weight",
+ "visual.blocks.0.attn.qkv.bias",
+ "visual.blocks.0.attn.proj.weight",
+ "visual.blocks.0.attn.proj.bias",
+ "visual.blocks.0.mlp.gate_proj.weight",
+ "visual.blocks.0.mlp.gate_proj.bias",
+ "visual.blocks.0.mlp.up_proj.weight"
+ ],
+ "all_keys": [
+ "visual.patch_embed.proj.weight",
+ "visual.blocks.0.norm1.weight",
+ "visual.blocks.0.norm2.weight",
+ "visual.blocks.0.attn.qkv.weight",
+ "visual.blocks.0.attn.qkv.bias",
+ "visual.blocks.0.attn.proj.weight",
+ "visual.blocks.0.attn.proj.bias",
+ "visual.blocks.0.mlp.gate_proj.weight",
+ "visual.blocks.0.mlp.gate_proj.bias",
+ "visual.blocks.0.mlp.up_proj.weight",
+ "visual.blocks.0.mlp.up_proj.bias",
+ "visual.blocks.0.mlp.down_proj.weight",
+ "visual.blocks.0.mlp.down_proj.bias",
+ "visual.blocks.1.norm1.weight",
+ "visual.blocks.1.norm2.weight",
+ "visual.blocks.1.attn.qkv.weight",
+ "visual.blocks.1.attn.qkv.bias",
+ "visual.blocks.1.attn.proj.weight",
+ "visual.blocks.1.attn.proj.bias",
+ "visual.blocks.1.mlp.gate_proj.weight",
+ "visual.blocks.1.mlp.gate_proj.bias",
+ "visual.blocks.1.mlp.up_proj.weight",
+ "visual.blocks.1.mlp.up_proj.bias",
+ "visual.blocks.1.mlp.down_proj.weight",
+ "visual.blocks.1.mlp.down_proj.bias",
+ "visual.blocks.2.norm1.weight",
+ "visual.blocks.2.norm2.weight",
+ "visual.blocks.2.attn.qkv.weight",
+ "visual.blocks.2.attn.qkv.bias",
+ "visual.blocks.2.attn.proj.weight",
+ "visual.blocks.2.attn.proj.bias",
+ "visual.blocks.2.mlp.gate_proj.weight",
+ "visual.blocks.2.mlp.gate_proj.bias",
+ "visual.blocks.2.mlp.up_proj.weight",
+ "visual.blocks.2.mlp.up_proj.bias",
+ "visual.blocks.2.mlp.down_proj.weight",
+ "visual.blocks.2.mlp.down_proj.bias",
+ "visual.blocks.3.norm1.weight",
+ "visual.blocks.3.norm2.weight",
+ "visual.blocks.3.attn.qkv.weight",
+ "visual.blocks.3.attn.qkv.bias",
+ "visual.blocks.3.attn.proj.weight",
+ "visual.blocks.3.attn.proj.bias",
+ "visual.blocks.3.mlp.gate_proj.weight",
+ "visual.blocks.3.mlp.gate_proj.bias",
+ "visual.blocks.3.mlp.up_proj.weight",
+ "visual.blocks.3.mlp.up_proj.bias",
+ "visual.blocks.3.mlp.down_proj.weight",
+ "visual.blocks.3.mlp.down_proj.bias",
+ "visual.blocks.4.norm1.weight",
+ "visual.blocks.4.norm2.weight",
+ "visual.blocks.4.attn.qkv.weight",
+ "visual.blocks.4.attn.qkv.bias",
+ "visual.blocks.4.attn.proj.weight",
+ "visual.blocks.4.attn.proj.bias",
+ "visual.blocks.4.mlp.gate_proj.weight",
+ "visual.blocks.4.mlp.gate_proj.bias",
+ "visual.blocks.4.mlp.up_proj.weight",
+ "visual.blocks.4.mlp.up_proj.bias",
+ "visual.blocks.4.mlp.down_proj.weight",
+ "visual.blocks.4.mlp.down_proj.bias",
+ "visual.blocks.5.norm1.weight",
+ "visual.blocks.5.norm2.weight",
+ "visual.blocks.5.attn.qkv.weight",
+ "visual.blocks.5.attn.qkv.bias",
+ "visual.blocks.5.attn.proj.weight",
+ "visual.blocks.5.attn.proj.bias",
+ "visual.blocks.5.mlp.gate_proj.weight",
+ "visual.blocks.5.mlp.gate_proj.bias",
+ "visual.blocks.5.mlp.up_proj.weight",
+ "visual.blocks.5.mlp.up_proj.bias",
+ "visual.blocks.5.mlp.down_proj.weight",
+ "visual.blocks.5.mlp.down_proj.bias",
+ "visual.blocks.6.norm1.weight",
+ "visual.blocks.6.norm2.weight",
+ "visual.blocks.6.attn.qkv.weight",
+ "visual.blocks.6.attn.qkv.bias",
+ "visual.blocks.6.attn.proj.weight",
+ "visual.blocks.6.attn.proj.bias",
+ "visual.blocks.6.mlp.gate_proj.weight",
+ "visual.blocks.6.mlp.gate_proj.bias",
+ "visual.blocks.6.mlp.up_proj.weight",
+ "visual.blocks.6.mlp.up_proj.bias",
+ "visual.blocks.6.mlp.down_proj.weight",
+ "visual.blocks.6.mlp.down_proj.bias",
+ "visual.blocks.7.norm1.weight",
+ "visual.blocks.7.norm2.weight",
+ "visual.blocks.7.attn.qkv.weight",
+ "visual.blocks.7.attn.qkv.bias",
+ "visual.blocks.7.attn.proj.weight",
+ "visual.blocks.7.attn.proj.bias",
+ "visual.blocks.7.mlp.gate_proj.weight",
+ "visual.blocks.7.mlp.gate_proj.bias",
+ "visual.blocks.7.mlp.up_proj.weight",
+ "visual.blocks.7.mlp.up_proj.bias",
+ "visual.blocks.7.mlp.down_proj.weight",
+ "visual.blocks.7.mlp.down_proj.bias",
+ "visual.blocks.8.norm1.weight",
+ "visual.blocks.8.norm2.weight",
+ "visual.blocks.8.attn.qkv.weight",
+ "visual.blocks.8.attn.qkv.bias",
+ "visual.blocks.8.attn.proj.weight",
+ "visual.blocks.8.attn.proj.bias",
+ "visual.blocks.8.mlp.gate_proj.weight",
+ "visual.blocks.8.mlp.gate_proj.bias",
+ "visual.blocks.8.mlp.up_proj.weight",
+ "visual.blocks.8.mlp.up_proj.bias",
+ "visual.blocks.8.mlp.down_proj.weight",
+ "visual.blocks.8.mlp.down_proj.bias",
+ "visual.blocks.9.norm1.weight",
+ "visual.blocks.9.norm2.weight",
+ "visual.blocks.9.attn.qkv.weight",
+ "visual.blocks.9.attn.qkv.bias",
+ "visual.blocks.9.attn.proj.weight",
+ "visual.blocks.9.attn.proj.bias",
+ "visual.blocks.9.mlp.gate_proj.weight",
+ "visual.blocks.9.mlp.gate_proj.bias",
+ "visual.blocks.9.mlp.up_proj.weight",
+ "visual.blocks.9.mlp.up_proj.bias",
+ "visual.blocks.9.mlp.down_proj.weight",
+ "visual.blocks.9.mlp.down_proj.bias",
+ "visual.blocks.10.norm1.weight",
+ "visual.blocks.10.norm2.weight",
+ "visual.blocks.10.attn.qkv.weight",
+ "visual.blocks.10.attn.qkv.bias",
+ "visual.blocks.10.attn.proj.weight",
+ "visual.blocks.10.attn.proj.bias",
+ "visual.blocks.10.mlp.gate_proj.weight",
+ "visual.blocks.10.mlp.gate_proj.bias",
+ "visual.blocks.10.mlp.up_proj.weight",
+ "visual.blocks.10.mlp.up_proj.bias",
+ "visual.blocks.10.mlp.down_proj.weight",
+ "visual.blocks.10.mlp.down_proj.bias",
+ "visual.blocks.11.norm1.weight",
+ "visual.blocks.11.norm2.weight",
+ "visual.blocks.11.attn.qkv.weight",
+ "visual.blocks.11.attn.qkv.bias",
+ "visual.blocks.11.attn.proj.weight",
+ "visual.blocks.11.attn.proj.bias",
+ "visual.blocks.11.mlp.gate_proj.weight",
+ "visual.blocks.11.mlp.gate_proj.bias",
+ "visual.blocks.11.mlp.up_proj.weight",
+ "visual.blocks.11.mlp.up_proj.bias",
+ "visual.blocks.11.mlp.down_proj.weight",
+ "visual.blocks.11.mlp.down_proj.bias",
+ "visual.blocks.12.norm1.weight",
+ "visual.blocks.12.norm2.weight",
+ "visual.blocks.12.attn.qkv.weight",
+ "visual.blocks.12.attn.qkv.bias",
+ "visual.blocks.12.attn.proj.weight",
+ "visual.blocks.12.attn.proj.bias",
+ "visual.blocks.12.mlp.gate_proj.weight",
+ "visual.blocks.12.mlp.gate_proj.bias",
+ "visual.blocks.12.mlp.up_proj.weight",
+ "visual.blocks.12.mlp.up_proj.bias",
+ "visual.blocks.12.mlp.down_proj.weight",
+ "visual.blocks.12.mlp.down_proj.bias",
+ "visual.blocks.13.norm1.weight",
+ "visual.blocks.13.norm2.weight",
+ "visual.blocks.13.attn.qkv.weight",
+ "visual.blocks.13.attn.qkv.bias",
+ "visual.blocks.13.attn.proj.weight",
+ "visual.blocks.13.attn.proj.bias",
+ "visual.blocks.13.mlp.gate_proj.weight",
+ "visual.blocks.13.mlp.gate_proj.bias",
+ "visual.blocks.13.mlp.up_proj.weight",
+ "visual.blocks.13.mlp.up_proj.bias",
+ "visual.blocks.13.mlp.down_proj.weight",
+ "visual.blocks.13.mlp.down_proj.bias",
+ "visual.blocks.14.norm1.weight",
+ "visual.blocks.14.norm2.weight",
+ "visual.blocks.14.attn.qkv.weight",
+ "visual.blocks.14.attn.qkv.bias",
+ "visual.blocks.14.attn.proj.weight",
+ "visual.blocks.14.attn.proj.bias",
+ "visual.blocks.14.mlp.gate_proj.weight",
+ "visual.blocks.14.mlp.gate_proj.bias",
+ "visual.blocks.14.mlp.up_proj.weight",
+ "visual.blocks.14.mlp.up_proj.bias",
+ "visual.blocks.14.mlp.down_proj.weight",
+ "visual.blocks.14.mlp.down_proj.bias",
+ "visual.blocks.15.norm1.weight",
+ "visual.blocks.15.norm2.weight",
+ "visual.blocks.15.attn.qkv.weight",
+ "visual.blocks.15.attn.qkv.bias",
+ "visual.blocks.15.attn.proj.weight",
+ "visual.blocks.15.attn.proj.bias",
+ "visual.blocks.15.mlp.gate_proj.weight",
+ "visual.blocks.15.mlp.gate_proj.bias",
+ "visual.blocks.15.mlp.up_proj.weight",
+ "visual.blocks.15.mlp.up_proj.bias",
+ "visual.blocks.15.mlp.down_proj.weight",
+ "visual.blocks.15.mlp.down_proj.bias",
+ "visual.blocks.16.norm1.weight",
+ "visual.blocks.16.norm2.weight",
+ "visual.blocks.16.attn.qkv.weight",
+ "visual.blocks.16.attn.qkv.bias",
+ "visual.blocks.16.attn.proj.weight",
+ "visual.blocks.16.attn.proj.bias",
+ "visual.blocks.16.mlp.gate_proj.weight",
+ "visual.blocks.16.mlp.gate_proj.bias",
+ "visual.blocks.16.mlp.up_proj.weight",
+ "visual.blocks.16.mlp.up_proj.bias",
+ "visual.blocks.16.mlp.down_proj.weight",
+ "visual.blocks.16.mlp.down_proj.bias",
+ "visual.blocks.17.norm1.weight",
+ "visual.blocks.17.norm2.weight",
+ "visual.blocks.17.attn.qkv.weight",
+ "visual.blocks.17.attn.qkv.bias",
+ "visual.blocks.17.attn.proj.weight",
+ "visual.blocks.17.attn.proj.bias",
+ "visual.blocks.17.mlp.gate_proj.weight",
+ "visual.blocks.17.mlp.gate_proj.bias",
+ "visual.blocks.17.mlp.up_proj.weight",
+ "visual.blocks.17.mlp.up_proj.bias",
+ "visual.blocks.17.mlp.down_proj.weight",
+ "visual.blocks.17.mlp.down_proj.bias",
+ "visual.blocks.18.norm1.weight",
+ "visual.blocks.18.norm2.weight",
+ "visual.blocks.18.attn.qkv.weight",
+ "visual.blocks.18.attn.qkv.bias",
+ "visual.blocks.18.attn.proj.weight",
+ "visual.blocks.18.attn.proj.bias",
+ "visual.blocks.18.mlp.gate_proj.weight",
+ "visual.blocks.18.mlp.gate_proj.bias",
+ "visual.blocks.18.mlp.up_proj.weight",
+ "visual.blocks.18.mlp.up_proj.bias",
+ "visual.blocks.18.mlp.down_proj.weight",
+ "visual.blocks.18.mlp.down_proj.bias",
+ "visual.blocks.19.norm1.weight",
+ "visual.blocks.19.norm2.weight",
+ "visual.blocks.19.attn.qkv.weight",
+ "visual.blocks.19.attn.qkv.bias",
+ "visual.blocks.19.attn.proj.weight",
+ "visual.blocks.19.attn.proj.bias",
+ "visual.blocks.19.mlp.gate_proj.weight",
+ "visual.blocks.19.mlp.gate_proj.bias",
+ "visual.blocks.19.mlp.up_proj.weight",
+ "visual.blocks.19.mlp.up_proj.bias",
+ "visual.blocks.19.mlp.down_proj.weight",
+ "visual.blocks.19.mlp.down_proj.bias",
+ "visual.blocks.20.norm1.weight",
+ "visual.blocks.20.norm2.weight",
+ "visual.blocks.20.attn.qkv.weight",
+ "visual.blocks.20.attn.qkv.bias",
+ "visual.blocks.20.attn.proj.weight",
+ "visual.blocks.20.attn.proj.bias",
+ "visual.blocks.20.mlp.gate_proj.weight",
+ "visual.blocks.20.mlp.gate_proj.bias",
+ "visual.blocks.20.mlp.up_proj.weight",
+ "visual.blocks.20.mlp.up_proj.bias",
+ "visual.blocks.20.mlp.down_proj.weight",
+ "visual.blocks.20.mlp.down_proj.bias",
+ "visual.blocks.21.norm1.weight",
+ "visual.blocks.21.norm2.weight",
+ "visual.blocks.21.attn.qkv.weight",
+ "visual.blocks.21.attn.qkv.bias",
+ "visual.blocks.21.attn.proj.weight",
+ "visual.blocks.21.attn.proj.bias",
+ "visual.blocks.21.mlp.gate_proj.weight",
+ "visual.blocks.21.mlp.gate_proj.bias",
+ "visual.blocks.21.mlp.up_proj.weight",
+ "visual.blocks.21.mlp.up_proj.bias",
+ "visual.blocks.21.mlp.down_proj.weight",
+ "visual.blocks.21.mlp.down_proj.bias",
+ "visual.blocks.22.norm1.weight",
+ "visual.blocks.22.norm2.weight",
+ "visual.blocks.22.attn.qkv.weight",
+ "visual.blocks.22.attn.qkv.bias",
+ "visual.blocks.22.attn.proj.weight",
+ "visual.blocks.22.attn.proj.bias",
+ "visual.blocks.22.mlp.gate_proj.weight",
+ "visual.blocks.22.mlp.gate_proj.bias",
+ "visual.blocks.22.mlp.up_proj.weight",
+ "visual.blocks.22.mlp.up_proj.bias",
+ "visual.blocks.22.mlp.down_proj.weight",
+ "visual.blocks.22.mlp.down_proj.bias",
+ "visual.blocks.23.norm1.weight",
+ "visual.blocks.23.norm2.weight",
+ "visual.blocks.23.attn.qkv.weight",
+ "visual.blocks.23.attn.qkv.bias",
+ "visual.blocks.23.attn.proj.weight",
+ "visual.blocks.23.attn.proj.bias",
+ "visual.blocks.23.mlp.gate_proj.weight",
+ "visual.blocks.23.mlp.gate_proj.bias",
+ "visual.blocks.23.mlp.up_proj.weight",
+ "visual.blocks.23.mlp.up_proj.bias",
+ "visual.blocks.23.mlp.down_proj.weight",
+ "visual.blocks.23.mlp.down_proj.bias",
+ "visual.blocks.24.norm1.weight",
+ "visual.blocks.24.norm2.weight",
+ "visual.blocks.24.attn.qkv.weight",
+ "visual.blocks.24.attn.qkv.bias",
+ "visual.blocks.24.attn.proj.weight",
+ "visual.blocks.24.attn.proj.bias",
+ "visual.blocks.24.mlp.gate_proj.weight",
+ "visual.blocks.24.mlp.gate_proj.bias",
+ "visual.blocks.24.mlp.up_proj.weight",
+ "visual.blocks.24.mlp.up_proj.bias",
+ "visual.blocks.24.mlp.down_proj.weight",
+ "visual.blocks.24.mlp.down_proj.bias",
+ "visual.blocks.25.norm1.weight",
+ "visual.blocks.25.norm2.weight",
+ "visual.blocks.25.attn.qkv.weight",
+ "visual.blocks.25.attn.qkv.bias",
+ "visual.blocks.25.attn.proj.weight",
+ "visual.blocks.25.attn.proj.bias",
+ "visual.blocks.25.mlp.gate_proj.weight",
+ "visual.blocks.25.mlp.gate_proj.bias",
+ "visual.blocks.25.mlp.up_proj.weight",
+ "visual.blocks.25.mlp.up_proj.bias",
+ "visual.blocks.25.mlp.down_proj.weight",
+ "visual.blocks.25.mlp.down_proj.bias",
+ "visual.blocks.26.norm1.weight",
+ "visual.blocks.26.norm2.weight",
+ "visual.blocks.26.attn.qkv.weight",
+ "visual.blocks.26.attn.qkv.bias",
+ "visual.blocks.26.attn.proj.weight",
+ "visual.blocks.26.attn.proj.bias",
+ "visual.blocks.26.mlp.gate_proj.weight",
+ "visual.blocks.26.mlp.gate_proj.bias",
+ "visual.blocks.26.mlp.up_proj.weight",
+ "visual.blocks.26.mlp.up_proj.bias",
+ "visual.blocks.26.mlp.down_proj.weight",
+ "visual.blocks.26.mlp.down_proj.bias",
+ "visual.blocks.27.norm1.weight",
+ "visual.blocks.27.norm2.weight",
+ "visual.blocks.27.attn.qkv.weight",
+ "visual.blocks.27.attn.qkv.bias",
+ "visual.blocks.27.attn.proj.weight",
+ "visual.blocks.27.attn.proj.bias",
+ "visual.blocks.27.mlp.gate_proj.weight",
+ "visual.blocks.27.mlp.gate_proj.bias",
+ "visual.blocks.27.mlp.up_proj.weight",
+ "visual.blocks.27.mlp.up_proj.bias",
+ "visual.blocks.27.mlp.down_proj.weight",
+ "visual.blocks.27.mlp.down_proj.bias",
+ "visual.blocks.28.norm1.weight",
+ "visual.blocks.28.norm2.weight",
+ "visual.blocks.28.attn.qkv.weight",
+ "visual.blocks.28.attn.qkv.bias",
+ "visual.blocks.28.attn.proj.weight",
+ "visual.blocks.28.attn.proj.bias",
+ "visual.blocks.28.mlp.gate_proj.weight",
+ "visual.blocks.28.mlp.gate_proj.bias",
+ "visual.blocks.28.mlp.up_proj.weight",
+ "visual.blocks.28.mlp.up_proj.bias",
+ "visual.blocks.28.mlp.down_proj.weight",
+ "visual.blocks.28.mlp.down_proj.bias",
+ "visual.blocks.29.norm1.weight",
+ "visual.blocks.29.norm2.weight",
+ "visual.blocks.29.attn.qkv.weight",
+ "visual.blocks.29.attn.qkv.bias",
+ "visual.blocks.29.attn.proj.weight",
+ "visual.blocks.29.attn.proj.bias",
+ "visual.blocks.29.mlp.gate_proj.weight",
+ "visual.blocks.29.mlp.gate_proj.bias",
+ "visual.blocks.29.mlp.up_proj.weight",
+ "visual.blocks.29.mlp.up_proj.bias",
+ "visual.blocks.29.mlp.down_proj.weight",
+ "visual.blocks.29.mlp.down_proj.bias",
+ "visual.blocks.30.norm1.weight",
+ "visual.blocks.30.norm2.weight",
+ "visual.blocks.30.attn.qkv.weight",
+ "visual.blocks.30.attn.qkv.bias",
+ "visual.blocks.30.attn.proj.weight",
+ "visual.blocks.30.attn.proj.bias",
+ "visual.blocks.30.mlp.gate_proj.weight",
+ "visual.blocks.30.mlp.gate_proj.bias",
+ "visual.blocks.30.mlp.up_proj.weight",
+ "visual.blocks.30.mlp.up_proj.bias",
+ "visual.blocks.30.mlp.down_proj.weight",
+ "visual.blocks.30.mlp.down_proj.bias",
+ "visual.blocks.31.norm1.weight",
+ "visual.blocks.31.norm2.weight",
+ "visual.blocks.31.attn.qkv.weight",
+ "visual.blocks.31.attn.qkv.bias",
+ "visual.blocks.31.attn.proj.weight",
+ "visual.blocks.31.attn.proj.bias",
+ "visual.blocks.31.mlp.gate_proj.weight",
+ "visual.blocks.31.mlp.gate_proj.bias",
+ "visual.blocks.31.mlp.up_proj.weight",
+ "visual.blocks.31.mlp.up_proj.bias",
+ "visual.blocks.31.mlp.down_proj.weight",
+ "visual.blocks.31.mlp.down_proj.bias",
+ "visual.merger.ln_q.weight",
+ "visual.merger.mlp.0.weight",
+ "visual.merger.mlp.0.bias",
+ "visual.merger.mlp.2.weight",
+ "visual.merger.mlp.2.bias",
+ "model.embed_tokens.weight",
+ "model.layers.0.self_attn.q_proj.weight",
+ "model.layers.0.self_attn.q_proj.bias",
+ "model.layers.0.self_attn.k_proj.weight",
+ "model.layers.0.self_attn.k_proj.bias",
+ "model.layers.0.self_attn.v_proj.weight",
+ "model.layers.0.self_attn.v_proj.bias",
+ "model.layers.0.self_attn.o_proj.weight",
+ "model.layers.0.mlp.gate_proj.weight",
+ "model.layers.0.mlp.up_proj.weight",
+ "model.layers.0.mlp.down_proj.weight",
+ "model.layers.0.input_layernorm.weight",
+ "model.layers.0.post_attention_layernorm.weight",
+ "model.layers.1.self_attn.q_proj.weight",
+ "model.layers.1.self_attn.q_proj.bias",
+ "model.layers.1.self_attn.k_proj.weight",
+ "model.layers.1.self_attn.k_proj.bias",
+ "model.layers.1.self_attn.v_proj.weight",
+ "model.layers.1.self_attn.v_proj.bias",
+ "model.layers.1.self_attn.o_proj.weight",
+ "model.layers.1.mlp.gate_proj.weight",
+ "model.layers.1.mlp.up_proj.weight",
+ "model.layers.1.mlp.down_proj.weight",
+ "model.layers.1.input_layernorm.weight",
+ "model.layers.1.post_attention_layernorm.weight",
+ "model.layers.2.self_attn.q_proj.weight",
+ "model.layers.2.self_attn.q_proj.bias",
+ "model.layers.2.self_attn.k_proj.weight",
+ "model.layers.2.self_attn.k_proj.bias",
+ "model.layers.2.self_attn.v_proj.weight",
+ "model.layers.2.self_attn.v_proj.bias",
+ "model.layers.2.self_attn.o_proj.weight",
+ "model.layers.2.mlp.gate_proj.weight",
+ "model.layers.2.mlp.up_proj.weight",
+ "model.layers.2.mlp.down_proj.weight",
+ "model.layers.2.input_layernorm.weight",
+ "model.layers.2.post_attention_layernorm.weight",
+ "model.layers.3.self_attn.q_proj.weight",
+ "model.layers.3.self_attn.q_proj.bias",
+ "model.layers.3.self_attn.k_proj.weight",
+ "model.layers.3.self_attn.k_proj.bias",
+ "model.layers.3.self_attn.v_proj.weight",
+ "model.layers.3.self_attn.v_proj.bias",
+ "model.layers.3.self_attn.o_proj.weight",
+ "model.layers.3.mlp.gate_proj.weight",
+ "model.layers.3.mlp.up_proj.weight",
+ "model.layers.3.mlp.down_proj.weight",
+ "model.layers.3.input_layernorm.weight",
+ "model.layers.3.post_attention_layernorm.weight",
+ "model.layers.4.self_attn.q_proj.weight",
+ "model.layers.4.self_attn.q_proj.bias",
+ "model.layers.4.self_attn.k_proj.weight",
+ "model.layers.4.self_attn.k_proj.bias",
+ "model.layers.4.self_attn.v_proj.weight",
+ "model.layers.4.self_attn.v_proj.bias",
+ "model.layers.4.self_attn.o_proj.weight",
+ "model.layers.4.mlp.gate_proj.weight",
+ "model.layers.4.mlp.up_proj.weight",
+ "model.layers.4.mlp.down_proj.weight",
+ "model.layers.4.input_layernorm.weight",
+ "model.layers.4.post_attention_layernorm.weight",
+ "model.layers.5.self_attn.q_proj.weight",
+ "model.layers.5.self_attn.q_proj.bias",
+ "model.layers.5.self_attn.k_proj.weight",
+ "model.layers.5.self_attn.k_proj.bias",
+ "model.layers.5.self_attn.v_proj.weight",
+ "model.layers.5.self_attn.v_proj.bias",
+ "model.layers.5.self_attn.o_proj.weight",
+ "model.layers.5.mlp.gate_proj.weight",
+ "model.layers.5.mlp.up_proj.weight",
+ "model.layers.5.mlp.down_proj.weight",
+ "model.layers.5.input_layernorm.weight",
+ "model.layers.5.post_attention_layernorm.weight",
+ "model.layers.6.self_attn.q_proj.weight",
+ "model.layers.6.self_attn.q_proj.bias",
+ "model.layers.6.self_attn.k_proj.weight",
+ "model.layers.6.self_attn.k_proj.bias",
+ "model.layers.6.self_attn.v_proj.weight",
+ "model.layers.6.self_attn.v_proj.bias",
+ "model.layers.6.self_attn.o_proj.weight",
+ "model.layers.6.mlp.gate_proj.weight",
+ "model.layers.6.mlp.up_proj.weight",
+ "model.layers.6.mlp.down_proj.weight",
+ "model.layers.6.input_layernorm.weight",
+ "model.layers.6.post_attention_layernorm.weight",
+ "model.layers.7.self_attn.q_proj.weight",
+ "model.layers.7.self_attn.q_proj.bias",
+ "model.layers.7.self_attn.k_proj.weight",
+ "model.layers.7.self_attn.k_proj.bias",
+ "model.layers.7.self_attn.v_proj.weight",
+ "model.layers.7.self_attn.v_proj.bias",
+ "model.layers.7.self_attn.o_proj.weight",
+ "model.layers.7.mlp.gate_proj.weight",
+ "model.layers.7.mlp.up_proj.weight",
+ "model.layers.7.mlp.down_proj.weight",
+ "model.layers.7.input_layernorm.weight",
+ "model.layers.7.post_attention_layernorm.weight",
+ "model.layers.8.self_attn.q_proj.weight",
+ "model.layers.8.self_attn.q_proj.bias",
+ "model.layers.8.self_attn.k_proj.weight",
+ "model.layers.8.self_attn.k_proj.bias",
+ "model.layers.8.self_attn.v_proj.weight",
+ "model.layers.8.self_attn.v_proj.bias",
+ "model.layers.8.self_attn.o_proj.weight",
+ "model.layers.8.mlp.gate_proj.weight",
+ "model.layers.8.mlp.up_proj.weight",
+ "model.layers.8.mlp.down_proj.weight",
+ "model.layers.8.input_layernorm.weight",
+ "model.layers.8.post_attention_layernorm.weight",
+ "model.layers.9.self_attn.q_proj.weight",
+ "model.layers.9.self_attn.q_proj.bias",
+ "model.layers.9.self_attn.k_proj.weight",
+ "model.layers.9.self_attn.k_proj.bias",
+ "model.layers.9.self_attn.v_proj.weight",
+ "model.layers.9.self_attn.v_proj.bias",
+ "model.layers.9.self_attn.o_proj.weight",
+ "model.layers.9.mlp.gate_proj.weight",
+ "model.layers.9.mlp.up_proj.weight",
+ "model.layers.9.mlp.down_proj.weight",
+ "model.layers.9.input_layernorm.weight",
+ "model.layers.9.post_attention_layernorm.weight",
+ "model.layers.10.self_attn.q_proj.weight",
+ "model.layers.10.self_attn.q_proj.bias",
+ "model.layers.10.self_attn.k_proj.weight",
+ "model.layers.10.self_attn.k_proj.bias",
+ "model.layers.10.self_attn.v_proj.weight",
+ "model.layers.10.self_attn.v_proj.bias",
+ "model.layers.10.self_attn.o_proj.weight",
+ "model.layers.10.mlp.gate_proj.weight",
+ "model.layers.10.mlp.up_proj.weight",
+ "model.layers.10.mlp.down_proj.weight",
+ "model.layers.10.input_layernorm.weight",
+ "model.layers.10.post_attention_layernorm.weight",
+ "model.layers.11.self_attn.q_proj.weight",
+ "model.layers.11.self_attn.q_proj.bias",
+ "model.layers.11.self_attn.k_proj.weight",
+ "model.layers.11.self_attn.k_proj.bias",
+ "model.layers.11.self_attn.v_proj.weight",
+ "model.layers.11.self_attn.v_proj.bias",
+ "model.layers.11.self_attn.o_proj.weight",
+ "model.layers.11.mlp.gate_proj.weight",
+ "model.layers.11.mlp.up_proj.weight",
+ "model.layers.11.mlp.down_proj.weight",
+ "model.layers.11.input_layernorm.weight",
+ "model.layers.11.post_attention_layernorm.weight",
+ "model.layers.12.self_attn.q_proj.weight",
+ "model.layers.12.self_attn.q_proj.bias",
+ "model.layers.12.self_attn.k_proj.weight",
+ "model.layers.12.self_attn.k_proj.bias",
+ "model.layers.12.self_attn.v_proj.weight",
+ "model.layers.12.self_attn.v_proj.bias",
+ "model.layers.12.self_attn.o_proj.weight",
+ "model.layers.12.mlp.gate_proj.weight",
+ "model.layers.12.mlp.up_proj.weight",
+ "model.layers.12.mlp.down_proj.weight",
+ "model.layers.12.input_layernorm.weight",
+ "model.layers.12.post_attention_layernorm.weight",
+ "model.layers.13.self_attn.q_proj.weight",
+ "model.layers.13.self_attn.q_proj.bias",
+ "model.layers.13.self_attn.k_proj.weight",
+ "model.layers.13.self_attn.k_proj.bias",
+ "model.layers.13.self_attn.v_proj.weight",
+ "model.layers.13.self_attn.v_proj.bias",
+ "model.layers.13.self_attn.o_proj.weight",
+ "model.layers.13.mlp.gate_proj.weight",
+ "model.layers.13.mlp.up_proj.weight",
+ "model.layers.13.mlp.down_proj.weight",
+ "model.layers.13.input_layernorm.weight",
+ "model.layers.13.post_attention_layernorm.weight",
+ "model.layers.14.self_attn.q_proj.weight",
+ "model.layers.14.self_attn.q_proj.bias",
+ "model.layers.14.self_attn.k_proj.weight",
+ "model.layers.14.self_attn.k_proj.bias",
+ "model.layers.14.self_attn.v_proj.weight",
+ "model.layers.14.self_attn.v_proj.bias",
+ "model.layers.14.self_attn.o_proj.weight",
+ "model.layers.14.mlp.gate_proj.weight",
+ "model.layers.14.mlp.up_proj.weight",
+ "model.layers.14.mlp.down_proj.weight",
+ "model.layers.14.input_layernorm.weight",
+ "model.layers.14.post_attention_layernorm.weight",
+ "model.layers.15.self_attn.q_proj.weight",
+ "model.layers.15.self_attn.q_proj.bias",
+ "model.layers.15.self_attn.k_proj.weight",
+ "model.layers.15.self_attn.k_proj.bias",
+ "model.layers.15.self_attn.v_proj.weight",
+ "model.layers.15.self_attn.v_proj.bias",
+ "model.layers.15.self_attn.o_proj.weight",
+ "model.layers.15.mlp.gate_proj.weight",
+ "model.layers.15.mlp.up_proj.weight",
+ "model.layers.15.mlp.down_proj.weight",
+ "model.layers.15.input_layernorm.weight",
+ "model.layers.15.post_attention_layernorm.weight",
+ "model.layers.16.self_attn.q_proj.weight",
+ "model.layers.16.self_attn.q_proj.bias",
+ "model.layers.16.self_attn.k_proj.weight",
+ "model.layers.16.self_attn.k_proj.bias",
+ "model.layers.16.self_attn.v_proj.weight",
+ "model.layers.16.self_attn.v_proj.bias",
+ "model.layers.16.self_attn.o_proj.weight",
+ "model.layers.16.mlp.gate_proj.weight",
+ "model.layers.16.mlp.up_proj.weight",
+ "model.layers.16.mlp.down_proj.weight",
+ "model.layers.16.input_layernorm.weight",
+ "model.layers.16.post_attention_layernorm.weight",
+ "model.layers.17.self_attn.q_proj.weight",
+ "model.layers.17.self_attn.q_proj.bias",
+ "model.layers.17.self_attn.k_proj.weight",
+ "model.layers.17.self_attn.k_proj.bias",
+ "model.layers.17.self_attn.v_proj.weight",
+ "model.layers.17.self_attn.v_proj.bias",
+ "model.layers.17.self_attn.o_proj.weight",
+ "model.layers.17.mlp.gate_proj.weight",
+ "model.layers.17.mlp.up_proj.weight",
+ "model.layers.17.mlp.down_proj.weight",
+ "model.layers.17.input_layernorm.weight",
+ "model.layers.17.post_attention_layernorm.weight",
+ "model.layers.18.self_attn.q_proj.weight",
+ "model.layers.18.self_attn.q_proj.bias",
+ "model.layers.18.self_attn.k_proj.weight",
+ "model.layers.18.self_attn.k_proj.bias",
+ "model.layers.18.self_attn.v_proj.weight",
+ "model.layers.18.self_attn.v_proj.bias",
+ "model.layers.18.self_attn.o_proj.weight",
+ "model.layers.18.mlp.gate_proj.weight",
+ "model.layers.18.mlp.up_proj.weight",
+ "model.layers.18.mlp.down_proj.weight",
+ "model.layers.18.input_layernorm.weight",
+ "model.layers.18.post_attention_layernorm.weight",
+ "model.layers.19.self_attn.q_proj.weight",
+ "model.layers.19.self_attn.q_proj.bias",
+ "model.layers.19.self_attn.k_proj.weight",
+ "model.layers.19.self_attn.k_proj.bias",
+ "model.layers.19.self_attn.v_proj.weight",
+ "model.layers.19.self_attn.v_proj.bias",
+ "model.layers.19.self_attn.o_proj.weight",
+ "model.layers.19.mlp.gate_proj.weight",
+ "model.layers.19.mlp.up_proj.weight",
+ "model.layers.19.mlp.down_proj.weight",
+ "model.layers.19.input_layernorm.weight",
+ "model.layers.19.post_attention_layernorm.weight",
+ "model.layers.20.self_attn.q_proj.weight",
+ "model.layers.20.self_attn.q_proj.bias",
+ "model.layers.20.self_attn.k_proj.weight",
+ "model.layers.20.self_attn.k_proj.bias",
+ "model.layers.20.self_attn.v_proj.weight",
+ "model.layers.20.self_attn.v_proj.bias",
+ "model.layers.20.self_attn.o_proj.weight",
+ "model.layers.20.mlp.gate_proj.weight",
+ "model.layers.20.mlp.up_proj.weight",
+ "model.layers.20.mlp.down_proj.weight",
+ "model.layers.20.input_layernorm.weight",
+ "model.layers.20.post_attention_layernorm.weight",
+ "model.layers.21.self_attn.q_proj.weight",
+ "model.layers.21.self_attn.q_proj.bias",
+ "model.layers.21.self_attn.k_proj.weight",
+ "model.layers.21.self_attn.k_proj.bias",
+ "model.layers.21.self_attn.v_proj.weight",
+ "model.layers.21.self_attn.v_proj.bias",
+ "model.layers.21.self_attn.o_proj.weight",
+ "model.layers.21.mlp.gate_proj.weight",
+ "model.layers.21.mlp.up_proj.weight",
+ "model.layers.21.mlp.down_proj.weight",
+ "model.layers.21.input_layernorm.weight",
+ "model.layers.21.post_attention_layernorm.weight",
+ "model.layers.22.self_attn.q_proj.weight",
+ "model.layers.22.self_attn.q_proj.bias",
+ "model.layers.22.self_attn.k_proj.weight",
+ "model.layers.22.self_attn.k_proj.bias",
+ "model.layers.22.self_attn.v_proj.weight",
+ "model.layers.22.self_attn.v_proj.bias",
+ "model.layers.22.self_attn.o_proj.weight",
+ "model.layers.22.mlp.gate_proj.weight",
+ "model.layers.22.mlp.up_proj.weight",
+ "model.layers.22.mlp.down_proj.weight",
+ "model.layers.22.input_layernorm.weight",
+ "model.layers.22.post_attention_layernorm.weight",
+ "model.layers.23.self_attn.q_proj.weight",
+ "model.layers.23.self_attn.q_proj.bias",
+ "model.layers.23.self_attn.k_proj.weight",
+ "model.layers.23.self_attn.k_proj.bias",
+ "model.layers.23.self_attn.v_proj.weight",
+ "model.layers.23.self_attn.v_proj.bias",
+ "model.layers.23.self_attn.o_proj.weight",
+ "model.layers.23.mlp.gate_proj.weight",
+ "model.layers.23.mlp.up_proj.weight",
+ "model.layers.23.mlp.down_proj.weight",
+ "model.layers.23.input_layernorm.weight",
+ "model.layers.23.post_attention_layernorm.weight",
+ "model.layers.24.self_attn.q_proj.weight",
+ "model.layers.24.self_attn.q_proj.bias",
+ "model.layers.24.self_attn.k_proj.weight",
+ "model.layers.24.self_attn.k_proj.bias",
+ "model.layers.24.self_attn.v_proj.weight",
+ "model.layers.24.self_attn.v_proj.bias",
+ "model.layers.24.self_attn.o_proj.weight",
+ "model.layers.24.mlp.gate_proj.weight",
+ "model.layers.24.mlp.up_proj.weight",
+ "model.layers.24.mlp.down_proj.weight",
+ "model.layers.24.input_layernorm.weight",
+ "model.layers.24.post_attention_layernorm.weight",
+ "model.layers.25.self_attn.q_proj.weight",
+ "model.layers.25.self_attn.q_proj.bias",
+ "model.layers.25.self_attn.k_proj.weight",
+ "model.layers.25.self_attn.k_proj.bias",
+ "model.layers.25.self_attn.v_proj.weight",
+ "model.layers.25.self_attn.v_proj.bias",
+ "model.layers.25.self_attn.o_proj.weight",
+ "model.layers.25.mlp.gate_proj.weight",
+ "model.layers.25.mlp.up_proj.weight",
+ "model.layers.25.mlp.down_proj.weight",
+ "model.layers.25.input_layernorm.weight",
+ "model.layers.25.post_attention_layernorm.weight",
+ "model.layers.26.self_attn.q_proj.weight",
+ "model.layers.26.self_attn.q_proj.bias",
+ "model.layers.26.self_attn.k_proj.weight",
+ "model.layers.26.self_attn.k_proj.bias",
+ "model.layers.26.self_attn.v_proj.weight",
+ "model.layers.26.self_attn.v_proj.bias",
+ "model.layers.26.self_attn.o_proj.weight",
+ "model.layers.26.mlp.gate_proj.weight",
+ "model.layers.26.mlp.up_proj.weight",
+ "model.layers.26.mlp.down_proj.weight",
+ "model.layers.26.input_layernorm.weight",
+ "model.layers.26.post_attention_layernorm.weight",
+ "model.layers.27.self_attn.q_proj.weight",
+ "model.layers.27.self_attn.q_proj.bias",
+ "model.layers.27.self_attn.k_proj.weight",
+ "model.layers.27.self_attn.k_proj.bias",
+ "model.layers.27.self_attn.v_proj.weight",
+ "model.layers.27.self_attn.v_proj.bias",
+ "model.layers.27.self_attn.o_proj.weight",
+ "model.layers.27.mlp.gate_proj.weight",
+ "model.layers.27.mlp.up_proj.weight",
+ "model.layers.27.mlp.down_proj.weight",
+ "model.layers.27.input_layernorm.weight",
+ "model.layers.27.post_attention_layernorm.weight",
+ "model.layers.28.self_attn.q_proj.weight",
+ "model.layers.28.self_attn.q_proj.bias",
+ "model.layers.28.self_attn.k_proj.weight",
+ "model.layers.28.self_attn.k_proj.bias",
+ "model.layers.28.self_attn.v_proj.weight",
+ "model.layers.28.self_attn.v_proj.bias",
+ "model.layers.28.self_attn.o_proj.weight",
+ "model.layers.28.mlp.gate_proj.weight",
+ "model.layers.28.mlp.up_proj.weight",
+ "model.layers.28.mlp.down_proj.weight",
+ "model.layers.28.input_layernorm.weight",
+ "model.layers.28.post_attention_layernorm.weight",
+ "model.layers.29.self_attn.q_proj.weight",
+ "model.layers.29.self_attn.q_proj.bias",
+ "model.layers.29.self_attn.k_proj.weight",
+ "model.layers.29.self_attn.k_proj.bias",
+ "model.layers.29.self_attn.v_proj.weight",
+ "model.layers.29.self_attn.v_proj.bias",
+ "model.layers.29.self_attn.o_proj.weight",
+ "model.layers.29.mlp.gate_proj.weight",
+ "model.layers.29.mlp.up_proj.weight",
+ "model.layers.29.mlp.down_proj.weight",
+ "model.layers.29.input_layernorm.weight",
+ "model.layers.29.post_attention_layernorm.weight",
+ "model.layers.30.self_attn.q_proj.weight",
+ "model.layers.30.self_attn.q_proj.bias",
+ "model.layers.30.self_attn.k_proj.weight",
+ "model.layers.30.self_attn.k_proj.bias",
+ "model.layers.30.self_attn.v_proj.weight",
+ "model.layers.30.self_attn.v_proj.bias",
+ "model.layers.30.self_attn.o_proj.weight",
+ "model.layers.30.mlp.gate_proj.weight",
+ "model.layers.30.mlp.up_proj.weight",
+ "model.layers.30.mlp.down_proj.weight",
+ "model.layers.30.input_layernorm.weight",
+ "model.layers.30.post_attention_layernorm.weight",
+ "model.layers.31.self_attn.q_proj.weight",
+ "model.layers.31.self_attn.q_proj.bias",
+ "model.layers.31.self_attn.k_proj.weight",
+ "model.layers.31.self_attn.k_proj.bias",
+ "model.layers.31.self_attn.v_proj.weight",
+ "model.layers.31.self_attn.v_proj.bias",
+ "model.layers.31.self_attn.o_proj.weight",
+ "model.layers.31.mlp.gate_proj.weight",
+ "model.layers.31.mlp.up_proj.weight",
+ "model.layers.31.mlp.down_proj.weight",
+ "model.layers.31.input_layernorm.weight",
+ "model.layers.31.post_attention_layernorm.weight",
+ "model.layers.32.self_attn.q_proj.weight",
+ "model.layers.32.self_attn.q_proj.bias",
+ "model.layers.32.self_attn.k_proj.weight",
+ "model.layers.32.self_attn.k_proj.bias",
+ "model.layers.32.self_attn.v_proj.weight",
+ "model.layers.32.self_attn.v_proj.bias",
+ "model.layers.32.self_attn.o_proj.weight",
+ "model.layers.32.mlp.gate_proj.weight",
+ "model.layers.32.mlp.up_proj.weight",
+ "model.layers.32.mlp.down_proj.weight",
+ "model.layers.32.input_layernorm.weight",
+ "model.layers.32.post_attention_layernorm.weight",
+ "model.layers.33.self_attn.q_proj.weight",
+ "model.layers.33.self_attn.q_proj.bias",
+ "model.layers.33.self_attn.k_proj.weight",
+ "model.layers.33.self_attn.k_proj.bias",
+ "model.layers.33.self_attn.v_proj.weight",
+ "model.layers.33.self_attn.v_proj.bias",
+ "model.layers.33.self_attn.o_proj.weight",
+ "model.layers.33.mlp.gate_proj.weight",
+ "model.layers.33.mlp.up_proj.weight",
+ "model.layers.33.mlp.down_proj.weight",
+ "model.layers.33.input_layernorm.weight",
+ "model.layers.33.post_attention_layernorm.weight",
+ "model.layers.34.self_attn.q_proj.weight",
+ "model.layers.34.self_attn.q_proj.bias",
+ "model.layers.34.self_attn.k_proj.weight",
+ "model.layers.34.self_attn.k_proj.bias",
+ "model.layers.34.self_attn.v_proj.weight",
+ "model.layers.34.self_attn.v_proj.bias",
+ "model.layers.34.self_attn.o_proj.weight",
+ "model.layers.34.mlp.gate_proj.weight",
+ "model.layers.34.mlp.up_proj.weight",
+ "model.layers.34.mlp.down_proj.weight",
+ "model.layers.34.input_layernorm.weight",
+ "model.layers.34.post_attention_layernorm.weight",
+ "model.layers.35.self_attn.q_proj.weight",
+ "model.layers.35.self_attn.q_proj.bias",
+ "model.layers.35.self_attn.k_proj.weight",
+ "model.layers.35.self_attn.k_proj.bias",
+ "model.layers.35.self_attn.v_proj.weight",
+ "model.layers.35.self_attn.v_proj.bias",
+ "model.layers.35.self_attn.o_proj.weight",
+ "model.layers.35.mlp.gate_proj.weight",
+ "model.layers.35.mlp.up_proj.weight",
+ "model.layers.35.mlp.down_proj.weight",
+ "model.layers.35.input_layernorm.weight",
+ "model.layers.35.post_attention_layernorm.weight",
+ "model.norm.weight",
+ "lm_head.weight"
+ ]
+}
\ No newline at end of file
diff --git a/11video_r1_vllm_model_keys.json b/11video_r1_vllm_model_keys.json
new file mode 100644
index 0000000000000000000000000000000000000000..ed2154d8d5d86cb0f1403584844d8b898af58a8b
--- /dev/null
+++ b/11video_r1_vllm_model_keys.json
@@ -0,0 +1,661 @@
+{
+ "total": 644,
+ "first_10": [
+ "visual.patch_embed.proj.weight",
+ "visual.blocks.0.norm1.weight",
+ "visual.blocks.0.norm2.weight",
+ "visual.blocks.0.attn.qkv.weight",
+ "visual.blocks.0.attn.qkv.bias",
+ "visual.blocks.0.attn.proj.weight",
+ "visual.blocks.0.attn.proj.bias",
+ "visual.blocks.0.mlp.gate_proj.weight",
+ "visual.blocks.0.mlp.gate_proj.bias",
+ "visual.blocks.0.mlp.up_proj.weight"
+ ],
+ "all_keys": [
+ "visual.patch_embed.proj.weight",
+ "visual.blocks.0.norm1.weight",
+ "visual.blocks.0.norm2.weight",
+ "visual.blocks.0.attn.qkv.weight",
+ "visual.blocks.0.attn.qkv.bias",
+ "visual.blocks.0.attn.proj.weight",
+ "visual.blocks.0.attn.proj.bias",
+ "visual.blocks.0.mlp.gate_proj.weight",
+ "visual.blocks.0.mlp.gate_proj.bias",
+ "visual.blocks.0.mlp.up_proj.weight",
+ "visual.blocks.0.mlp.up_proj.bias",
+ "visual.blocks.0.mlp.down_proj.weight",
+ "visual.blocks.0.mlp.down_proj.bias",
+ "visual.blocks.1.norm1.weight",
+ "visual.blocks.1.norm2.weight",
+ "visual.blocks.1.attn.qkv.weight",
+ "visual.blocks.1.attn.qkv.bias",
+ "visual.blocks.1.attn.proj.weight",
+ "visual.blocks.1.attn.proj.bias",
+ "visual.blocks.1.mlp.gate_proj.weight",
+ "visual.blocks.1.mlp.gate_proj.bias",
+ "visual.blocks.1.mlp.up_proj.weight",
+ "visual.blocks.1.mlp.up_proj.bias",
+ "visual.blocks.1.mlp.down_proj.weight",
+ "visual.blocks.1.mlp.down_proj.bias",
+ "visual.blocks.2.norm1.weight",
+ "visual.blocks.2.norm2.weight",
+ "visual.blocks.2.attn.qkv.weight",
+ "visual.blocks.2.attn.qkv.bias",
+ "visual.blocks.2.attn.proj.weight",
+ "visual.blocks.2.attn.proj.bias",
+ "visual.blocks.2.mlp.gate_proj.weight",
+ "visual.blocks.2.mlp.gate_proj.bias",
+ "visual.blocks.2.mlp.up_proj.weight",
+ "visual.blocks.2.mlp.up_proj.bias",
+ "visual.blocks.2.mlp.down_proj.weight",
+ "visual.blocks.2.mlp.down_proj.bias",
+ "visual.blocks.3.norm1.weight",
+ "visual.blocks.3.norm2.weight",
+ "visual.blocks.3.attn.qkv.weight",
+ "visual.blocks.3.attn.qkv.bias",
+ "visual.blocks.3.attn.proj.weight",
+ "visual.blocks.3.attn.proj.bias",
+ "visual.blocks.3.mlp.gate_proj.weight",
+ "visual.blocks.3.mlp.gate_proj.bias",
+ "visual.blocks.3.mlp.up_proj.weight",
+ "visual.blocks.3.mlp.up_proj.bias",
+ "visual.blocks.3.mlp.down_proj.weight",
+ "visual.blocks.3.mlp.down_proj.bias",
+ "visual.blocks.4.norm1.weight",
+ "visual.blocks.4.norm2.weight",
+ "visual.blocks.4.attn.qkv.weight",
+ "visual.blocks.4.attn.qkv.bias",
+ "visual.blocks.4.attn.proj.weight",
+ "visual.blocks.4.attn.proj.bias",
+ "visual.blocks.4.mlp.gate_proj.weight",
+ "visual.blocks.4.mlp.gate_proj.bias",
+ "visual.blocks.4.mlp.up_proj.weight",
+ "visual.blocks.4.mlp.up_proj.bias",
+ "visual.blocks.4.mlp.down_proj.weight",
+ "visual.blocks.4.mlp.down_proj.bias",
+ "visual.blocks.5.norm1.weight",
+ "visual.blocks.5.norm2.weight",
+ "visual.blocks.5.attn.qkv.weight",
+ "visual.blocks.5.attn.qkv.bias",
+ "visual.blocks.5.attn.proj.weight",
+ "visual.blocks.5.attn.proj.bias",
+ "visual.blocks.5.mlp.gate_proj.weight",
+ "visual.blocks.5.mlp.gate_proj.bias",
+ "visual.blocks.5.mlp.up_proj.weight",
+ "visual.blocks.5.mlp.up_proj.bias",
+ "visual.blocks.5.mlp.down_proj.weight",
+ "visual.blocks.5.mlp.down_proj.bias",
+ "visual.blocks.6.norm1.weight",
+ "visual.blocks.6.norm2.weight",
+ "visual.blocks.6.attn.qkv.weight",
+ "visual.blocks.6.attn.qkv.bias",
+ "visual.blocks.6.attn.proj.weight",
+ "visual.blocks.6.attn.proj.bias",
+ "visual.blocks.6.mlp.gate_proj.weight",
+ "visual.blocks.6.mlp.gate_proj.bias",
+ "visual.blocks.6.mlp.up_proj.weight",
+ "visual.blocks.6.mlp.up_proj.bias",
+ "visual.blocks.6.mlp.down_proj.weight",
+ "visual.blocks.6.mlp.down_proj.bias",
+ "visual.blocks.7.norm1.weight",
+ "visual.blocks.7.norm2.weight",
+ "visual.blocks.7.attn.qkv.weight",
+ "visual.blocks.7.attn.qkv.bias",
+ "visual.blocks.7.attn.proj.weight",
+ "visual.blocks.7.attn.proj.bias",
+ "visual.blocks.7.mlp.gate_proj.weight",
+ "visual.blocks.7.mlp.gate_proj.bias",
+ "visual.blocks.7.mlp.up_proj.weight",
+ "visual.blocks.7.mlp.up_proj.bias",
+ "visual.blocks.7.mlp.down_proj.weight",
+ "visual.blocks.7.mlp.down_proj.bias",
+ "visual.blocks.8.norm1.weight",
+ "visual.blocks.8.norm2.weight",
+ "visual.blocks.8.attn.qkv.weight",
+ "visual.blocks.8.attn.qkv.bias",
+ "visual.blocks.8.attn.proj.weight",
+ "visual.blocks.8.attn.proj.bias",
+ "visual.blocks.8.mlp.gate_proj.weight",
+ "visual.blocks.8.mlp.gate_proj.bias",
+ "visual.blocks.8.mlp.up_proj.weight",
+ "visual.blocks.8.mlp.up_proj.bias",
+ "visual.blocks.8.mlp.down_proj.weight",
+ "visual.blocks.8.mlp.down_proj.bias",
+ "visual.blocks.9.norm1.weight",
+ "visual.blocks.9.norm2.weight",
+ "visual.blocks.9.attn.qkv.weight",
+ "visual.blocks.9.attn.qkv.bias",
+ "visual.blocks.9.attn.proj.weight",
+ "visual.blocks.9.attn.proj.bias",
+ "visual.blocks.9.mlp.gate_proj.weight",
+ "visual.blocks.9.mlp.gate_proj.bias",
+ "visual.blocks.9.mlp.up_proj.weight",
+ "visual.blocks.9.mlp.up_proj.bias",
+ "visual.blocks.9.mlp.down_proj.weight",
+ "visual.blocks.9.mlp.down_proj.bias",
+ "visual.blocks.10.norm1.weight",
+ "visual.blocks.10.norm2.weight",
+ "visual.blocks.10.attn.qkv.weight",
+ "visual.blocks.10.attn.qkv.bias",
+ "visual.blocks.10.attn.proj.weight",
+ "visual.blocks.10.attn.proj.bias",
+ "visual.blocks.10.mlp.gate_proj.weight",
+ "visual.blocks.10.mlp.gate_proj.bias",
+ "visual.blocks.10.mlp.up_proj.weight",
+ "visual.blocks.10.mlp.up_proj.bias",
+ "visual.blocks.10.mlp.down_proj.weight",
+ "visual.blocks.10.mlp.down_proj.bias",
+ "visual.blocks.11.norm1.weight",
+ "visual.blocks.11.norm2.weight",
+ "visual.blocks.11.attn.qkv.weight",
+ "visual.blocks.11.attn.qkv.bias",
+ "visual.blocks.11.attn.proj.weight",
+ "visual.blocks.11.attn.proj.bias",
+ "visual.blocks.11.mlp.gate_proj.weight",
+ "visual.blocks.11.mlp.gate_proj.bias",
+ "visual.blocks.11.mlp.up_proj.weight",
+ "visual.blocks.11.mlp.up_proj.bias",
+ "visual.blocks.11.mlp.down_proj.weight",
+ "visual.blocks.11.mlp.down_proj.bias",
+ "visual.blocks.12.norm1.weight",
+ "visual.blocks.12.norm2.weight",
+ "visual.blocks.12.attn.qkv.weight",
+ "visual.blocks.12.attn.qkv.bias",
+ "visual.blocks.12.attn.proj.weight",
+ "visual.blocks.12.attn.proj.bias",
+ "visual.blocks.12.mlp.gate_proj.weight",
+ "visual.blocks.12.mlp.gate_proj.bias",
+ "visual.blocks.12.mlp.up_proj.weight",
+ "visual.blocks.12.mlp.up_proj.bias",
+ "visual.blocks.12.mlp.down_proj.weight",
+ "visual.blocks.12.mlp.down_proj.bias",
+ "visual.blocks.13.norm1.weight",
+ "visual.blocks.13.norm2.weight",
+ "visual.blocks.13.attn.qkv.weight",
+ "visual.blocks.13.attn.qkv.bias",
+ "visual.blocks.13.attn.proj.weight",
+ "visual.blocks.13.attn.proj.bias",
+ "visual.blocks.13.mlp.gate_proj.weight",
+ "visual.blocks.13.mlp.gate_proj.bias",
+ "visual.blocks.13.mlp.up_proj.weight",
+ "visual.blocks.13.mlp.up_proj.bias",
+ "visual.blocks.13.mlp.down_proj.weight",
+ "visual.blocks.13.mlp.down_proj.bias",
+ "visual.blocks.14.norm1.weight",
+ "visual.blocks.14.norm2.weight",
+ "visual.blocks.14.attn.qkv.weight",
+ "visual.blocks.14.attn.qkv.bias",
+ "visual.blocks.14.attn.proj.weight",
+ "visual.blocks.14.attn.proj.bias",
+ "visual.blocks.14.mlp.gate_proj.weight",
+ "visual.blocks.14.mlp.gate_proj.bias",
+ "visual.blocks.14.mlp.up_proj.weight",
+ "visual.blocks.14.mlp.up_proj.bias",
+ "visual.blocks.14.mlp.down_proj.weight",
+ "visual.blocks.14.mlp.down_proj.bias",
+ "visual.blocks.15.norm1.weight",
+ "visual.blocks.15.norm2.weight",
+ "visual.blocks.15.attn.qkv.weight",
+ "visual.blocks.15.attn.qkv.bias",
+ "visual.blocks.15.attn.proj.weight",
+ "visual.blocks.15.attn.proj.bias",
+ "visual.blocks.15.mlp.gate_proj.weight",
+ "visual.blocks.15.mlp.gate_proj.bias",
+ "visual.blocks.15.mlp.up_proj.weight",
+ "visual.blocks.15.mlp.up_proj.bias",
+ "visual.blocks.15.mlp.down_proj.weight",
+ "visual.blocks.15.mlp.down_proj.bias",
+ "visual.blocks.16.norm1.weight",
+ "visual.blocks.16.norm2.weight",
+ "visual.blocks.16.attn.qkv.weight",
+ "visual.blocks.16.attn.qkv.bias",
+ "visual.blocks.16.attn.proj.weight",
+ "visual.blocks.16.attn.proj.bias",
+ "visual.blocks.16.mlp.gate_proj.weight",
+ "visual.blocks.16.mlp.gate_proj.bias",
+ "visual.blocks.16.mlp.up_proj.weight",
+ "visual.blocks.16.mlp.up_proj.bias",
+ "visual.blocks.16.mlp.down_proj.weight",
+ "visual.blocks.16.mlp.down_proj.bias",
+ "visual.blocks.17.norm1.weight",
+ "visual.blocks.17.norm2.weight",
+ "visual.blocks.17.attn.qkv.weight",
+ "visual.blocks.17.attn.qkv.bias",
+ "visual.blocks.17.attn.proj.weight",
+ "visual.blocks.17.attn.proj.bias",
+ "visual.blocks.17.mlp.gate_proj.weight",
+ "visual.blocks.17.mlp.gate_proj.bias",
+ "visual.blocks.17.mlp.up_proj.weight",
+ "visual.blocks.17.mlp.up_proj.bias",
+ "visual.blocks.17.mlp.down_proj.weight",
+ "visual.blocks.17.mlp.down_proj.bias",
+ "visual.blocks.18.norm1.weight",
+ "visual.blocks.18.norm2.weight",
+ "visual.blocks.18.attn.qkv.weight",
+ "visual.blocks.18.attn.qkv.bias",
+ "visual.blocks.18.attn.proj.weight",
+ "visual.blocks.18.attn.proj.bias",
+ "visual.blocks.18.mlp.gate_proj.weight",
+ "visual.blocks.18.mlp.gate_proj.bias",
+ "visual.blocks.18.mlp.up_proj.weight",
+ "visual.blocks.18.mlp.up_proj.bias",
+ "visual.blocks.18.mlp.down_proj.weight",
+ "visual.blocks.18.mlp.down_proj.bias",
+ "visual.blocks.19.norm1.weight",
+ "visual.blocks.19.norm2.weight",
+ "visual.blocks.19.attn.qkv.weight",
+ "visual.blocks.19.attn.qkv.bias",
+ "visual.blocks.19.attn.proj.weight",
+ "visual.blocks.19.attn.proj.bias",
+ "visual.blocks.19.mlp.gate_proj.weight",
+ "visual.blocks.19.mlp.gate_proj.bias",
+ "visual.blocks.19.mlp.up_proj.weight",
+ "visual.blocks.19.mlp.up_proj.bias",
+ "visual.blocks.19.mlp.down_proj.weight",
+ "visual.blocks.19.mlp.down_proj.bias",
+ "visual.blocks.20.norm1.weight",
+ "visual.blocks.20.norm2.weight",
+ "visual.blocks.20.attn.qkv.weight",
+ "visual.blocks.20.attn.qkv.bias",
+ "visual.blocks.20.attn.proj.weight",
+ "visual.blocks.20.attn.proj.bias",
+ "visual.blocks.20.mlp.gate_proj.weight",
+ "visual.blocks.20.mlp.gate_proj.bias",
+ "visual.blocks.20.mlp.up_proj.weight",
+ "visual.blocks.20.mlp.up_proj.bias",
+ "visual.blocks.20.mlp.down_proj.weight",
+ "visual.blocks.20.mlp.down_proj.bias",
+ "visual.blocks.21.norm1.weight",
+ "visual.blocks.21.norm2.weight",
+ "visual.blocks.21.attn.qkv.weight",
+ "visual.blocks.21.attn.qkv.bias",
+ "visual.blocks.21.attn.proj.weight",
+ "visual.blocks.21.attn.proj.bias",
+ "visual.blocks.21.mlp.gate_proj.weight",
+ "visual.blocks.21.mlp.gate_proj.bias",
+ "visual.blocks.21.mlp.up_proj.weight",
+ "visual.blocks.21.mlp.up_proj.bias",
+ "visual.blocks.21.mlp.down_proj.weight",
+ "visual.blocks.21.mlp.down_proj.bias",
+ "visual.blocks.22.norm1.weight",
+ "visual.blocks.22.norm2.weight",
+ "visual.blocks.22.attn.qkv.weight",
+ "visual.blocks.22.attn.qkv.bias",
+ "visual.blocks.22.attn.proj.weight",
+ "visual.blocks.22.attn.proj.bias",
+ "visual.blocks.22.mlp.gate_proj.weight",
+ "visual.blocks.22.mlp.gate_proj.bias",
+ "visual.blocks.22.mlp.up_proj.weight",
+ "visual.blocks.22.mlp.up_proj.bias",
+ "visual.blocks.22.mlp.down_proj.weight",
+ "visual.blocks.22.mlp.down_proj.bias",
+ "visual.blocks.23.norm1.weight",
+ "visual.blocks.23.norm2.weight",
+ "visual.blocks.23.attn.qkv.weight",
+ "visual.blocks.23.attn.qkv.bias",
+ "visual.blocks.23.attn.proj.weight",
+ "visual.blocks.23.attn.proj.bias",
+ "visual.blocks.23.mlp.gate_proj.weight",
+ "visual.blocks.23.mlp.gate_proj.bias",
+ "visual.blocks.23.mlp.up_proj.weight",
+ "visual.blocks.23.mlp.up_proj.bias",
+ "visual.blocks.23.mlp.down_proj.weight",
+ "visual.blocks.23.mlp.down_proj.bias",
+ "visual.blocks.24.norm1.weight",
+ "visual.blocks.24.norm2.weight",
+ "visual.blocks.24.attn.qkv.weight",
+ "visual.blocks.24.attn.qkv.bias",
+ "visual.blocks.24.attn.proj.weight",
+ "visual.blocks.24.attn.proj.bias",
+ "visual.blocks.24.mlp.gate_proj.weight",
+ "visual.blocks.24.mlp.gate_proj.bias",
+ "visual.blocks.24.mlp.up_proj.weight",
+ "visual.blocks.24.mlp.up_proj.bias",
+ "visual.blocks.24.mlp.down_proj.weight",
+ "visual.blocks.24.mlp.down_proj.bias",
+ "visual.blocks.25.norm1.weight",
+ "visual.blocks.25.norm2.weight",
+ "visual.blocks.25.attn.qkv.weight",
+ "visual.blocks.25.attn.qkv.bias",
+ "visual.blocks.25.attn.proj.weight",
+ "visual.blocks.25.attn.proj.bias",
+ "visual.blocks.25.mlp.gate_proj.weight",
+ "visual.blocks.25.mlp.gate_proj.bias",
+ "visual.blocks.25.mlp.up_proj.weight",
+ "visual.blocks.25.mlp.up_proj.bias",
+ "visual.blocks.25.mlp.down_proj.weight",
+ "visual.blocks.25.mlp.down_proj.bias",
+ "visual.blocks.26.norm1.weight",
+ "visual.blocks.26.norm2.weight",
+ "visual.blocks.26.attn.qkv.weight",
+ "visual.blocks.26.attn.qkv.bias",
+ "visual.blocks.26.attn.proj.weight",
+ "visual.blocks.26.attn.proj.bias",
+ "visual.blocks.26.mlp.gate_proj.weight",
+ "visual.blocks.26.mlp.gate_proj.bias",
+ "visual.blocks.26.mlp.up_proj.weight",
+ "visual.blocks.26.mlp.up_proj.bias",
+ "visual.blocks.26.mlp.down_proj.weight",
+ "visual.blocks.26.mlp.down_proj.bias",
+ "visual.blocks.27.norm1.weight",
+ "visual.blocks.27.norm2.weight",
+ "visual.blocks.27.attn.qkv.weight",
+ "visual.blocks.27.attn.qkv.bias",
+ "visual.blocks.27.attn.proj.weight",
+ "visual.blocks.27.attn.proj.bias",
+ "visual.blocks.27.mlp.gate_proj.weight",
+ "visual.blocks.27.mlp.gate_proj.bias",
+ "visual.blocks.27.mlp.up_proj.weight",
+ "visual.blocks.27.mlp.up_proj.bias",
+ "visual.blocks.27.mlp.down_proj.weight",
+ "visual.blocks.27.mlp.down_proj.bias",
+ "visual.blocks.28.norm1.weight",
+ "visual.blocks.28.norm2.weight",
+ "visual.blocks.28.attn.qkv.weight",
+ "visual.blocks.28.attn.qkv.bias",
+ "visual.blocks.28.attn.proj.weight",
+ "visual.blocks.28.attn.proj.bias",
+ "visual.blocks.28.mlp.gate_proj.weight",
+ "visual.blocks.28.mlp.gate_proj.bias",
+ "visual.blocks.28.mlp.up_proj.weight",
+ "visual.blocks.28.mlp.up_proj.bias",
+ "visual.blocks.28.mlp.down_proj.weight",
+ "visual.blocks.28.mlp.down_proj.bias",
+ "visual.blocks.29.norm1.weight",
+ "visual.blocks.29.norm2.weight",
+ "visual.blocks.29.attn.qkv.weight",
+ "visual.blocks.29.attn.qkv.bias",
+ "visual.blocks.29.attn.proj.weight",
+ "visual.blocks.29.attn.proj.bias",
+ "visual.blocks.29.mlp.gate_proj.weight",
+ "visual.blocks.29.mlp.gate_proj.bias",
+ "visual.blocks.29.mlp.up_proj.weight",
+ "visual.blocks.29.mlp.up_proj.bias",
+ "visual.blocks.29.mlp.down_proj.weight",
+ "visual.blocks.29.mlp.down_proj.bias",
+ "visual.blocks.30.norm1.weight",
+ "visual.blocks.30.norm2.weight",
+ "visual.blocks.30.attn.qkv.weight",
+ "visual.blocks.30.attn.qkv.bias",
+ "visual.blocks.30.attn.proj.weight",
+ "visual.blocks.30.attn.proj.bias",
+ "visual.blocks.30.mlp.gate_proj.weight",
+ "visual.blocks.30.mlp.gate_proj.bias",
+ "visual.blocks.30.mlp.up_proj.weight",
+ "visual.blocks.30.mlp.up_proj.bias",
+ "visual.blocks.30.mlp.down_proj.weight",
+ "visual.blocks.30.mlp.down_proj.bias",
+ "visual.blocks.31.norm1.weight",
+ "visual.blocks.31.norm2.weight",
+ "visual.blocks.31.attn.qkv.weight",
+ "visual.blocks.31.attn.qkv.bias",
+ "visual.blocks.31.attn.proj.weight",
+ "visual.blocks.31.attn.proj.bias",
+ "visual.blocks.31.mlp.gate_proj.weight",
+ "visual.blocks.31.mlp.gate_proj.bias",
+ "visual.blocks.31.mlp.up_proj.weight",
+ "visual.blocks.31.mlp.up_proj.bias",
+ "visual.blocks.31.mlp.down_proj.weight",
+ "visual.blocks.31.mlp.down_proj.bias",
+ "visual.merger.ln_q.weight",
+ "visual.merger.mlp.0.weight",
+ "visual.merger.mlp.0.bias",
+ "visual.merger.mlp.2.weight",
+ "visual.merger.mlp.2.bias",
+ "language_model.model.embed_tokens.weight",
+ "language_model.model.layers.0.self_attn.qkv_proj.weight",
+ "language_model.model.layers.0.self_attn.qkv_proj.bias",
+ "language_model.model.layers.0.self_attn.o_proj.weight",
+ "language_model.model.layers.0.mlp.gate_up_proj.weight",
+ "language_model.model.layers.0.mlp.down_proj.weight",
+ "language_model.model.layers.0.input_layernorm.weight",
+ "language_model.model.layers.0.post_attention_layernorm.weight",
+ "language_model.model.layers.1.self_attn.qkv_proj.weight",
+ "language_model.model.layers.1.self_attn.qkv_proj.bias",
+ "language_model.model.layers.1.self_attn.o_proj.weight",
+ "language_model.model.layers.1.mlp.gate_up_proj.weight",
+ "language_model.model.layers.1.mlp.down_proj.weight",
+ "language_model.model.layers.1.input_layernorm.weight",
+ "language_model.model.layers.1.post_attention_layernorm.weight",
+ "language_model.model.layers.2.self_attn.qkv_proj.weight",
+ "language_model.model.layers.2.self_attn.qkv_proj.bias",
+ "language_model.model.layers.2.self_attn.o_proj.weight",
+ "language_model.model.layers.2.mlp.gate_up_proj.weight",
+ "language_model.model.layers.2.mlp.down_proj.weight",
+ "language_model.model.layers.2.input_layernorm.weight",
+ "language_model.model.layers.2.post_attention_layernorm.weight",
+ "language_model.model.layers.3.self_attn.qkv_proj.weight",
+ "language_model.model.layers.3.self_attn.qkv_proj.bias",
+ "language_model.model.layers.3.self_attn.o_proj.weight",
+ "language_model.model.layers.3.mlp.gate_up_proj.weight",
+ "language_model.model.layers.3.mlp.down_proj.weight",
+ "language_model.model.layers.3.input_layernorm.weight",
+ "language_model.model.layers.3.post_attention_layernorm.weight",
+ "language_model.model.layers.4.self_attn.qkv_proj.weight",
+ "language_model.model.layers.4.self_attn.qkv_proj.bias",
+ "language_model.model.layers.4.self_attn.o_proj.weight",
+ "language_model.model.layers.4.mlp.gate_up_proj.weight",
+ "language_model.model.layers.4.mlp.down_proj.weight",
+ "language_model.model.layers.4.input_layernorm.weight",
+ "language_model.model.layers.4.post_attention_layernorm.weight",
+ "language_model.model.layers.5.self_attn.qkv_proj.weight",
+ "language_model.model.layers.5.self_attn.qkv_proj.bias",
+ "language_model.model.layers.5.self_attn.o_proj.weight",
+ "language_model.model.layers.5.mlp.gate_up_proj.weight",
+ "language_model.model.layers.5.mlp.down_proj.weight",
+ "language_model.model.layers.5.input_layernorm.weight",
+ "language_model.model.layers.5.post_attention_layernorm.weight",
+ "language_model.model.layers.6.self_attn.qkv_proj.weight",
+ "language_model.model.layers.6.self_attn.qkv_proj.bias",
+ "language_model.model.layers.6.self_attn.o_proj.weight",
+ "language_model.model.layers.6.mlp.gate_up_proj.weight",
+ "language_model.model.layers.6.mlp.down_proj.weight",
+ "language_model.model.layers.6.input_layernorm.weight",
+ "language_model.model.layers.6.post_attention_layernorm.weight",
+ "language_model.model.layers.7.self_attn.qkv_proj.weight",
+ "language_model.model.layers.7.self_attn.qkv_proj.bias",
+ "language_model.model.layers.7.self_attn.o_proj.weight",
+ "language_model.model.layers.7.mlp.gate_up_proj.weight",
+ "language_model.model.layers.7.mlp.down_proj.weight",
+ "language_model.model.layers.7.input_layernorm.weight",
+ "language_model.model.layers.7.post_attention_layernorm.weight",
+ "language_model.model.layers.8.self_attn.qkv_proj.weight",
+ "language_model.model.layers.8.self_attn.qkv_proj.bias",
+ "language_model.model.layers.8.self_attn.o_proj.weight",
+ "language_model.model.layers.8.mlp.gate_up_proj.weight",
+ "language_model.model.layers.8.mlp.down_proj.weight",
+ "language_model.model.layers.8.input_layernorm.weight",
+ "language_model.model.layers.8.post_attention_layernorm.weight",
+ "language_model.model.layers.9.self_attn.qkv_proj.weight",
+ "language_model.model.layers.9.self_attn.qkv_proj.bias",
+ "language_model.model.layers.9.self_attn.o_proj.weight",
+ "language_model.model.layers.9.mlp.gate_up_proj.weight",
+ "language_model.model.layers.9.mlp.down_proj.weight",
+ "language_model.model.layers.9.input_layernorm.weight",
+ "language_model.model.layers.9.post_attention_layernorm.weight",
+ "language_model.model.layers.10.self_attn.qkv_proj.weight",
+ "language_model.model.layers.10.self_attn.qkv_proj.bias",
+ "language_model.model.layers.10.self_attn.o_proj.weight",
+ "language_model.model.layers.10.mlp.gate_up_proj.weight",
+ "language_model.model.layers.10.mlp.down_proj.weight",
+ "language_model.model.layers.10.input_layernorm.weight",
+ "language_model.model.layers.10.post_attention_layernorm.weight",
+ "language_model.model.layers.11.self_attn.qkv_proj.weight",
+ "language_model.model.layers.11.self_attn.qkv_proj.bias",
+ "language_model.model.layers.11.self_attn.o_proj.weight",
+ "language_model.model.layers.11.mlp.gate_up_proj.weight",
+ "language_model.model.layers.11.mlp.down_proj.weight",
+ "language_model.model.layers.11.input_layernorm.weight",
+ "language_model.model.layers.11.post_attention_layernorm.weight",
+ "language_model.model.layers.12.self_attn.qkv_proj.weight",
+ "language_model.model.layers.12.self_attn.qkv_proj.bias",
+ "language_model.model.layers.12.self_attn.o_proj.weight",
+ "language_model.model.layers.12.mlp.gate_up_proj.weight",
+ "language_model.model.layers.12.mlp.down_proj.weight",
+ "language_model.model.layers.12.input_layernorm.weight",
+ "language_model.model.layers.12.post_attention_layernorm.weight",
+ "language_model.model.layers.13.self_attn.qkv_proj.weight",
+ "language_model.model.layers.13.self_attn.qkv_proj.bias",
+ "language_model.model.layers.13.self_attn.o_proj.weight",
+ "language_model.model.layers.13.mlp.gate_up_proj.weight",
+ "language_model.model.layers.13.mlp.down_proj.weight",
+ "language_model.model.layers.13.input_layernorm.weight",
+ "language_model.model.layers.13.post_attention_layernorm.weight",
+ "language_model.model.layers.14.self_attn.qkv_proj.weight",
+ "language_model.model.layers.14.self_attn.qkv_proj.bias",
+ "language_model.model.layers.14.self_attn.o_proj.weight",
+ "language_model.model.layers.14.mlp.gate_up_proj.weight",
+ "language_model.model.layers.14.mlp.down_proj.weight",
+ "language_model.model.layers.14.input_layernorm.weight",
+ "language_model.model.layers.14.post_attention_layernorm.weight",
+ "language_model.model.layers.15.self_attn.qkv_proj.weight",
+ "language_model.model.layers.15.self_attn.qkv_proj.bias",
+ "language_model.model.layers.15.self_attn.o_proj.weight",
+ "language_model.model.layers.15.mlp.gate_up_proj.weight",
+ "language_model.model.layers.15.mlp.down_proj.weight",
+ "language_model.model.layers.15.input_layernorm.weight",
+ "language_model.model.layers.15.post_attention_layernorm.weight",
+ "language_model.model.layers.16.self_attn.qkv_proj.weight",
+ "language_model.model.layers.16.self_attn.qkv_proj.bias",
+ "language_model.model.layers.16.self_attn.o_proj.weight",
+ "language_model.model.layers.16.mlp.gate_up_proj.weight",
+ "language_model.model.layers.16.mlp.down_proj.weight",
+ "language_model.model.layers.16.input_layernorm.weight",
+ "language_model.model.layers.16.post_attention_layernorm.weight",
+ "language_model.model.layers.17.self_attn.qkv_proj.weight",
+ "language_model.model.layers.17.self_attn.qkv_proj.bias",
+ "language_model.model.layers.17.self_attn.o_proj.weight",
+ "language_model.model.layers.17.mlp.gate_up_proj.weight",
+ "language_model.model.layers.17.mlp.down_proj.weight",
+ "language_model.model.layers.17.input_layernorm.weight",
+ "language_model.model.layers.17.post_attention_layernorm.weight",
+ "language_model.model.layers.18.self_attn.qkv_proj.weight",
+ "language_model.model.layers.18.self_attn.qkv_proj.bias",
+ "language_model.model.layers.18.self_attn.o_proj.weight",
+ "language_model.model.layers.18.mlp.gate_up_proj.weight",
+ "language_model.model.layers.18.mlp.down_proj.weight",
+ "language_model.model.layers.18.input_layernorm.weight",
+ "language_model.model.layers.18.post_attention_layernorm.weight",
+ "language_model.model.layers.19.self_attn.qkv_proj.weight",
+ "language_model.model.layers.19.self_attn.qkv_proj.bias",
+ "language_model.model.layers.19.self_attn.o_proj.weight",
+ "language_model.model.layers.19.mlp.gate_up_proj.weight",
+ "language_model.model.layers.19.mlp.down_proj.weight",
+ "language_model.model.layers.19.input_layernorm.weight",
+ "language_model.model.layers.19.post_attention_layernorm.weight",
+ "language_model.model.layers.20.self_attn.qkv_proj.weight",
+ "language_model.model.layers.20.self_attn.qkv_proj.bias",
+ "language_model.model.layers.20.self_attn.o_proj.weight",
+ "language_model.model.layers.20.mlp.gate_up_proj.weight",
+ "language_model.model.layers.20.mlp.down_proj.weight",
+ "language_model.model.layers.20.input_layernorm.weight",
+ "language_model.model.layers.20.post_attention_layernorm.weight",
+ "language_model.model.layers.21.self_attn.qkv_proj.weight",
+ "language_model.model.layers.21.self_attn.qkv_proj.bias",
+ "language_model.model.layers.21.self_attn.o_proj.weight",
+ "language_model.model.layers.21.mlp.gate_up_proj.weight",
+ "language_model.model.layers.21.mlp.down_proj.weight",
+ "language_model.model.layers.21.input_layernorm.weight",
+ "language_model.model.layers.21.post_attention_layernorm.weight",
+ "language_model.model.layers.22.self_attn.qkv_proj.weight",
+ "language_model.model.layers.22.self_attn.qkv_proj.bias",
+ "language_model.model.layers.22.self_attn.o_proj.weight",
+ "language_model.model.layers.22.mlp.gate_up_proj.weight",
+ "language_model.model.layers.22.mlp.down_proj.weight",
+ "language_model.model.layers.22.input_layernorm.weight",
+ "language_model.model.layers.22.post_attention_layernorm.weight",
+ "language_model.model.layers.23.self_attn.qkv_proj.weight",
+ "language_model.model.layers.23.self_attn.qkv_proj.bias",
+ "language_model.model.layers.23.self_attn.o_proj.weight",
+ "language_model.model.layers.23.mlp.gate_up_proj.weight",
+ "language_model.model.layers.23.mlp.down_proj.weight",
+ "language_model.model.layers.23.input_layernorm.weight",
+ "language_model.model.layers.23.post_attention_layernorm.weight",
+ "language_model.model.layers.24.self_attn.qkv_proj.weight",
+ "language_model.model.layers.24.self_attn.qkv_proj.bias",
+ "language_model.model.layers.24.self_attn.o_proj.weight",
+ "language_model.model.layers.24.mlp.gate_up_proj.weight",
+ "language_model.model.layers.24.mlp.down_proj.weight",
+ "language_model.model.layers.24.input_layernorm.weight",
+ "language_model.model.layers.24.post_attention_layernorm.weight",
+ "language_model.model.layers.25.self_attn.qkv_proj.weight",
+ "language_model.model.layers.25.self_attn.qkv_proj.bias",
+ "language_model.model.layers.25.self_attn.o_proj.weight",
+ "language_model.model.layers.25.mlp.gate_up_proj.weight",
+ "language_model.model.layers.25.mlp.down_proj.weight",
+ "language_model.model.layers.25.input_layernorm.weight",
+ "language_model.model.layers.25.post_attention_layernorm.weight",
+ "language_model.model.layers.26.self_attn.qkv_proj.weight",
+ "language_model.model.layers.26.self_attn.qkv_proj.bias",
+ "language_model.model.layers.26.self_attn.o_proj.weight",
+ "language_model.model.layers.26.mlp.gate_up_proj.weight",
+ "language_model.model.layers.26.mlp.down_proj.weight",
+ "language_model.model.layers.26.input_layernorm.weight",
+ "language_model.model.layers.26.post_attention_layernorm.weight",
+ "language_model.model.layers.27.self_attn.qkv_proj.weight",
+ "language_model.model.layers.27.self_attn.qkv_proj.bias",
+ "language_model.model.layers.27.self_attn.o_proj.weight",
+ "language_model.model.layers.27.mlp.gate_up_proj.weight",
+ "language_model.model.layers.27.mlp.down_proj.weight",
+ "language_model.model.layers.27.input_layernorm.weight",
+ "language_model.model.layers.27.post_attention_layernorm.weight",
+ "language_model.model.layers.28.self_attn.qkv_proj.weight",
+ "language_model.model.layers.28.self_attn.qkv_proj.bias",
+ "language_model.model.layers.28.self_attn.o_proj.weight",
+ "language_model.model.layers.28.mlp.gate_up_proj.weight",
+ "language_model.model.layers.28.mlp.down_proj.weight",
+ "language_model.model.layers.28.input_layernorm.weight",
+ "language_model.model.layers.28.post_attention_layernorm.weight",
+ "language_model.model.layers.29.self_attn.qkv_proj.weight",
+ "language_model.model.layers.29.self_attn.qkv_proj.bias",
+ "language_model.model.layers.29.self_attn.o_proj.weight",
+ "language_model.model.layers.29.mlp.gate_up_proj.weight",
+ "language_model.model.layers.29.mlp.down_proj.weight",
+ "language_model.model.layers.29.input_layernorm.weight",
+ "language_model.model.layers.29.post_attention_layernorm.weight",
+ "language_model.model.layers.30.self_attn.qkv_proj.weight",
+ "language_model.model.layers.30.self_attn.qkv_proj.bias",
+ "language_model.model.layers.30.self_attn.o_proj.weight",
+ "language_model.model.layers.30.mlp.gate_up_proj.weight",
+ "language_model.model.layers.30.mlp.down_proj.weight",
+ "language_model.model.layers.30.input_layernorm.weight",
+ "language_model.model.layers.30.post_attention_layernorm.weight",
+ "language_model.model.layers.31.self_attn.qkv_proj.weight",
+ "language_model.model.layers.31.self_attn.qkv_proj.bias",
+ "language_model.model.layers.31.self_attn.o_proj.weight",
+ "language_model.model.layers.31.mlp.gate_up_proj.weight",
+ "language_model.model.layers.31.mlp.down_proj.weight",
+ "language_model.model.layers.31.input_layernorm.weight",
+ "language_model.model.layers.31.post_attention_layernorm.weight",
+ "language_model.model.layers.32.self_attn.qkv_proj.weight",
+ "language_model.model.layers.32.self_attn.qkv_proj.bias",
+ "language_model.model.layers.32.self_attn.o_proj.weight",
+ "language_model.model.layers.32.mlp.gate_up_proj.weight",
+ "language_model.model.layers.32.mlp.down_proj.weight",
+ "language_model.model.layers.32.input_layernorm.weight",
+ "language_model.model.layers.32.post_attention_layernorm.weight",
+ "language_model.model.layers.33.self_attn.qkv_proj.weight",
+ "language_model.model.layers.33.self_attn.qkv_proj.bias",
+ "language_model.model.layers.33.self_attn.o_proj.weight",
+ "language_model.model.layers.33.mlp.gate_up_proj.weight",
+ "language_model.model.layers.33.mlp.down_proj.weight",
+ "language_model.model.layers.33.input_layernorm.weight",
+ "language_model.model.layers.33.post_attention_layernorm.weight",
+ "language_model.model.layers.34.self_attn.qkv_proj.weight",
+ "language_model.model.layers.34.self_attn.qkv_proj.bias",
+ "language_model.model.layers.34.self_attn.o_proj.weight",
+ "language_model.model.layers.34.mlp.gate_up_proj.weight",
+ "language_model.model.layers.34.mlp.down_proj.weight",
+ "language_model.model.layers.34.input_layernorm.weight",
+ "language_model.model.layers.34.post_attention_layernorm.weight",
+ "language_model.model.layers.35.self_attn.qkv_proj.weight",
+ "language_model.model.layers.35.self_attn.qkv_proj.bias",
+ "language_model.model.layers.35.self_attn.o_proj.weight",
+ "language_model.model.layers.35.mlp.gate_up_proj.weight",
+ "language_model.model.layers.35.mlp.down_proj.weight",
+ "language_model.model.layers.35.input_layernorm.weight",
+ "language_model.model.layers.35.post_attention_layernorm.weight",
+ "language_model.model.norm.weight"
+ ]
+}
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..e6a1229dffbdbd8ac2743188d2ea216dcfc1fe4d
--- /dev/null
+++ b/README.md
@@ -0,0 +1,164 @@
+# ⏱️ TimeSearch-R: Adaptive Temporal Search for Long-Form Video Understanding via Self-Verification Reinforcement Learning
+
+*A model that learns to actively search for relevant temporal clips through end-to-end reinforcement learning.*
+
+[📄 [Paper](https://arxiv.org/abs/2511.05489)] [🤗 [Model](https://huggingface.co/Time-Search/TimeSearch-R)]
+
+## 📰 News
+
+🔥 **[2025/11/13]** Our [Model Checkpoint](https://huggingface.co/Time-Search/TimeSearch-R) has been released!
+
+## 👁️ Overview
+
+TimeSearch-R reformulates temporal search as interleaved text–video thinking, seamlessly integrating searching video clips into the reasoning process through reinforcement learning (RL).
+
+
+
+We introduce GRPO with Completeness Self-Verification (GRPO-CSV), which gathers searched video frames from the interleaved reasoning process and utilizes the same policy model to verify the adequacy of searched frames, thereby improving the completeness of video reasoning.
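+
+As a rough illustration of the CSV idea, consider the simplified sketch below. This is **not** the actual training code — the `policy` object and its `verify` method are hypothetical placeholders — it only shows the shape of the signal: the same policy that searched the frames judges whether they are sufficient, and that judgement feeds the reward.
+
+```python
+# Conceptual sketch of the completeness self-verification (CSV) signal.
+# `policy` and its `verify` method are hypothetical placeholders, not the repo's real API.
+def csv_reward(policy, question: str, searched_frames: list) -> float:
+    """The same policy model that performed the search judges whether the frames
+    it gathered are sufficient to answer the question; that judgement is used as
+    an additional reward term during GRPO training."""
+    verdict = policy.verify(
+        question=question,
+        frames=searched_frames,
+        prompt="Are the provided frames sufficient to answer the question? Answer yes or no.",
+    )
+    return 1.0 if verdict.strip().lower().startswith("yes") else 0.0
+```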
+
+
+
+## 🚀 Quick Start
+
+### 🏝️ Environmental Setup
+
+**Step 1:** Prepare the running environment.
+
+Prepare the environment with CUDA and PyTorch (CUDA 12.4 and PyTorch 2.6.0 in our experiments), and install the dependencies with `pip`.
+
+```bash
+pip install -r requirements.txt
+```
+
+**Step 2:** Run the CLIP server (serving the SigLIP model) for video frame retrieval.
+
+Download the pre-trained SigLIP model.
+```bash
+huggingface-cli download google/siglip-so400m-patch14-384 --local-dir /path/to/your/local/filedir
+```
+Modify `clip_as_service/server/clip_server/torch-flow.yml` to point to the downloaded local model path, then run the SigLIP server:
+
+```bash
+cd clip_as_service/server && pip3 install .
+
+export CUDA_VISIBLE_DEVICES=RR  # replace RR with the GPU index (or indices) to use
+export GRPC_VERBOSITY=debug
+export HF_HUB_OFFLINE=1
+export PYTHONPATH=$PYTHONPATH:.
+
+python3 -m clip_server
+```
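+
+Once the server is running, you can optionally sanity-check it with the `clip-client` package from CLIP-as-service. The snippet below is a minimal sketch: it assumes `clip-client` is installed and that the server listens on the default gRPC port `51000` (the same address used for `SIGLIP_URL` below); adjust the address to match your `torch-flow.yml`.
+
+```python
+# Minimal connectivity check for the SigLIP server started above.
+from clip_client import Client
+
+client = Client("grpc://127.0.0.1:51000")                  # address of the running clip_server
+embeddings = client.encode(["a man speaking in a room"])   # encode a text query
+print(embeddings.shape)                                    # e.g. (1, embedding_dim)
+```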
+
+### 📦️ Dataset & Model
+
+We provide the preprocessed JSON files for [Haystack-LVBench](https://huggingface.co/datasets/MLL-Lab/LongVideoHaystack). The corresponding `.mp4` video files can be downloaded from the original [LongVideoBench](https://huggingface.co/datasets/longvideobench/LongVideoBench) dataset.
+
+Download the pre-trained TimeSearch-R model.
+
+```bash
+huggingface-cli download --resume-download Time-Search/TimeSearch-R --local-dir /path/to/your/local/filedir
+```
+
+**(Recommended) Prepare the frame cache and feature cache.**
+To speed up inference and training, we recommend extracting the frames and features for the videos in advance.
+
+```bash
+python3 scripts/converts/prepare_frame_cache.py /path/to/your/local/data_root /path/to/your/local/haystack_lvbench_input.jsonl --num_workers 16 --target_fps 2
+python3 scripts/converts/prepare_feature_cache.py /path/to/your/local/data_root /path/to/your/local/haystack_lvbench_input.jsonl --num_workers 16
+```
+
+### 📋️ Inference & Evaluation
+
+**Step 1:** Run the TimeSearch-R inference.
+
+```bash
+# Address of the SigLIP server started in the previous step
+export SIGLIP_URL=grpc://127.0.0.1:51000
+
+torchrun \
+ --nproc_per_node=8 \
+ --master_port=24137 \
+ time_r1/inference.py \
+ --input_path /path/to/your/local/haystack_lvbench_input.jsonl \
+ --save_path /path/to/your/local/haystack_lvbench_output \
+ --data_root /path/to/your/local/data_root \
+ --model_base /path/to/your/local/checkpoint \
+ --prompt_template v4 \
+ --use_env True \
+ --use_vllm True \
+ --batch_size 1 \
+ --num_data_workers 2 \
+ --total_video_tokens 24000 \
+ --max_frames 768 \
+ --max_tokens 256
+```
+
+**Step 2:** Evaluate the temporal search and QA performance.
+
+The temporal search evaluation script is modified from [T*](https://github.com/mll-lab-nu/TStar).
+
+```bash
+# Temporal search evaluation
+python time_r1/eval/eval_temporal_search.py --search_result_path /path/to/your/local/haystack_lvbench_output.jsonl
+
+# QA evaluation
+python time_r1/eval/longvideobench_eval.py /path/to/your/local/haystack_lvbench_output.jsonl
+```
+
+### 🏗️ GRPO-CSV Training
+
+**Step 1:** Prepare the reward model.
+
+We use [Qwen2.5-72B-Instruct](https://huggingface.co/Qwen/Qwen2.5-72B-Instruct) as the reward model for LLM-as-a-judge verification.
+
+```bash
+# Download the Qwen2.5-72B-Instruct model
+huggingface-cli download --resume-download Qwen/Qwen2.5-72B-Instruct --local-dir /path/to/your/local/filedir
+```
+
+Start a vLLM server for [Qwen2.5-72B-Instruct](https://huggingface.co/Qwen/Qwen2.5-72B-Instruct) to perform LLM-as-a-judge verification.
+
+```bash
+vllm serve /path/to/your/local/filedir \
+ --port 18901 \
+ --gpu-memory-utilization 0.8 \
+ --max-model-len 32768 \
+ --tensor-parallel-size 8 \
+ --served-model-name "judge" \
+ --trust-remote-code \
+ --disable-log-requests
+```
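+
+The judge is served through vLLM's OpenAI-compatible API, so you can optionally verify it responds before launching training. The snippet below is a minimal sketch assuming the `openai` Python package is installed; the port and served model name are taken from the command above.
+
+```python
+# Quick smoke test of the judge server (OpenAI-compatible vLLM endpoint).
+from openai import OpenAI
+
+client = OpenAI(base_url="http://127.0.0.1:18901/v1", api_key="EMPTY")  # vLLM ignores the key
+resp = client.chat.completions.create(
+    model="judge",  # must match --served-model-name
+    messages=[{"role": "user", "content": "Reply with the single word OK."}],
+    max_tokens=8,
+)
+print(resp.choices[0].message.content)
+```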
+
+**Step 2:** Train TimeSearch-R with GRPO-CSV.
+
+We recommend at least 16 GPUs (2 nodes x 8 GPUs) for 7B training. Each node should have at least 1024 GB of CPU RAM, as the long-form videos in the training datasets can consume a large amount of memory.
+
+We provide the training script for TimeSearch-R with GRPO-CSV in `scripts/train.sh`.
+
+```bash
+bash scripts/train.sh
+```
+
+## 🔖 Citation
+
+If you find TimeSearch-R useful for your research and applications, please cite using this BibTeX:
+```bibtex
+@article{timesearch-r,
+ title={TimeSearch-R: Adaptive Temporal Search for Long-Form Video Understanding via Self-Verification Reinforcement Learning},
+ author={Pan, Junwen and Zhang, Qizhe and Zhang, Rui and Lu, Ming and Wan, Xin and Zhang, Yuan and Liu, Chang and She, Qi},
+ journal={arXiv preprint arXiv:2511.05489},
+ year={2025}
+}
+```
+
+## 🎟️ License
+
+This project is released under the [Apache 2.0 license](LICENSE).
+
+## 🏅 Acknowledgements
+
+We thank the authors of the following projects for their contributions:
+* [Qwen2.5-VL](https://github.com/QwenLM/Qwen2.5-VL)
+* [R1-V](https://github.com/Deep-Agent/R1-V)
+* [trl](https://github.com/huggingface/trl)
+* [T*](https://github.com/mll-lab-nu/TStar)
diff --git a/all_output_reflect.jsonl b/all_output_reflect.jsonl
new file mode 100644
index 0000000000000000000000000000000000000000..b1a3ba313d71b95d688b08f74c8a54eab5211eb0
--- /dev/null
+++ b/all_output_reflect.jsonl
@@ -0,0 +1,815 @@
+{"video_id": "7F9IrtSHmc0", "video_path": "/data/shuimu.chen/LongVideoBench/videos/7F9IrtSHmc0.mp4", "question": "In a room with a wall tiger and a map on the wall, there is a man wearing a white shirt. What is he doing?", "options": "A) drinking water\nB) playing with a cell phone\nC) speaking\nD) dancing", "answer": "C", "duration_group": 600, "gt_frame_index": [305], "duration": 422.66, "video": "/data/shuimu.chen/LongVideoBench/videos/7F9IrtSHmc0.mp4", "prediction": [{"role": "system", "content": [{"type": "text", "text": "You are a helpful video assistant.\n# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within XML tags:\n\n{\"type\": \"function\", \"function\": {\"name\": \"seek_video_frames\", \"description\": \"Search and select video frames according to textual query and temporal window. Time is in seconds.\", \"parameters\": {\"type\": \"object\", \"properties\": {\"query\": {\"type\": \"string\", \"description\": \"The query is used to describe the object, scene, or event of interest in the video thoroughly and clearly. \"}, \"start_time\": {\"type\": \"number\", \"description\": \"Start time of the segment of interest. \"}, \"end_time\": {\"type\": \"number\", \"description\": \"End time of the segment of interest. \"}, \"num_frames\": {\"type\": \"integer\", \"description\": \"Number of frames to sample (maximum 8). Default is 8.\"}}, \"required\": [\"query\"]}}}\n\n\nFor each function call, return a json object with function name and arguments within XML tags:\n\n{\"name\": , \"arguments\": }\n"}]}, {"role": "user", "content": [{"type": "video", "video": "