{
  "metadata": {
    "total_size": 5758342763
  },
  "weight_map": {
    "model.transformer.blocks.0.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.0.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.1.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.10.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.11.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.12.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.13.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.14.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.15.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.16.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.17.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.18.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.19.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.2.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.20.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.21.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.22.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.23.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.24.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.25.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.26.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.27.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.3.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.att_proj.bias": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.att_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.att_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.att_proj.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.attn_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.attn_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.attn_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.attn_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.ff_norm.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.ff_out.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.ff_out.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.ff_out.weight.quant_map": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.ff_proj.weight": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.ff_proj.weight.absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors",
    "model.transformer.blocks.4.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors",
|
"model.transformer.blocks.4.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.4.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.att_proj.bias": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.att_proj.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.att_proj.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.att_proj.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.attn_norm.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.attn_out.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.attn_out.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.attn_out.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.ff_norm.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.ff_out.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.ff_out.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.ff_out.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.ff_proj.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.ff_proj.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.5.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.att_proj.bias": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.att_proj.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.att_proj.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.att_proj.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.attn_norm.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.attn_out.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.attn_out.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.attn_out.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.ff_norm.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.ff_out.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.ff_out.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.ff_out.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.ff_proj.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.ff_proj.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.6.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.att_proj.bias": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.att_proj.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.att_proj.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.att_proj.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.attn_norm.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.attn_out.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.attn_out.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.attn_out.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.ff_norm.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.ff_out.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.ff_out.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.ff_out.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.ff_proj.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.ff_proj.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.7.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.att_proj.bias": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.att_proj.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.att_proj.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.att_proj.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.attn_norm.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.attn_out.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.attn_out.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.attn_out.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.ff_norm.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.ff_out.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.ff_out.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.ff_out.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.ff_proj.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.ff_proj.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.8.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.att_proj.bias": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.att_proj.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.att_proj.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.att_proj.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.att_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.att_proj.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.att_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.attn_norm.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.attn_out.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.attn_out.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.attn_out.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.attn_out.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.attn_out.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.attn_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.ff_norm.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.ff_out.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.ff_out.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.ff_out.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.ff_out.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.ff_out.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.ff_out.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.ff_proj.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.ff_proj.weight.absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.ff_proj.weight.nested_absmax": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.ff_proj.weight.nested_quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.ff_proj.weight.quant_map": "model-00001-of-00002.safetensors", |
|
"model.transformer.blocks.9.ff_proj.weight.quant_state.bitsandbytes__nf4": "model-00001-of-00002.safetensors", |
|
"model.transformer.ff_out.weight": "model-00002-of-00002.safetensors", |
|
"model.transformer.ln_f.weight": "model-00001-of-00002.safetensors", |
|
"model.transformer.wte.embedding": "model-00001-of-00002.safetensors", |
|
"model.transformer.wte.new_embedding": "model-00001-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_pooling_2d.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_projector.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_projector.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_projector.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_projector.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_projector.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_projector.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_projector.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_projector.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_projector.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_projector.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_projector.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_projector.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_projector.w3.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_projector.w3.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_projector.w3.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_projector.w3.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_projector.w3.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_projector.w3.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.class_embedding": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.patch_embedding.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.patch_embedding.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.patch_embedding.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.patch_embedding.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.patch_embedding.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.patch_embedding.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.positional_embedding": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.pre_ln.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.pre_ln.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.0.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.1.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.10.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.11.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.12.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.13.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.14.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.15.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.16.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.17.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.18.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.19.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.2.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.20.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.21.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.22.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.3.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.4.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.5.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.6.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.7.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.8.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wk.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wk.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wk.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wk.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wk.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wk.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wk.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wo.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wo.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wo.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wo.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wo.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wo.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wo.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wq.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wq.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wq.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wq.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wq.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wq.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wq.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wv.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wv.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wv.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wv.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wv.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wv.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention.wv.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.attention_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.feed_forward.w1.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.feed_forward.w1.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.feed_forward.w1.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.feed_forward.w1.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.feed_forward.w1.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.feed_forward.w1.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.feed_forward.w1.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.feed_forward.w2.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.feed_forward.w2.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.feed_forward.w2.weight.absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.feed_forward.w2.weight.nested_absmax": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.feed_forward.w2.weight.nested_quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.feed_forward.w2.weight.quant_map": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.feed_forward.w2.weight.quant_state.bitsandbytes__nf4": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.ffn_norm.bias": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.image_vit.transformer.resblocks.9.ffn_norm.weight": "model-00002-of-00002.safetensors", |
|
"model.vision_backbone.pad_embed": "model-00002-of-00002.safetensors" |
|
} |
|
} |
|
|