llama-2-7b-chat-12layers-T6-25000steps-llama-2-7b-chat-12layers-T6-25000steps-peft-lora-orpo-2 / model.safetensors.index.json
{
  "metadata": {
    "total_size": 5381529600
  },
  "weight_map": {
    "lm_head.weight": "model-00002-of-00002.safetensors",
    "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
    "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.11.input_layernorm.weight": "model-00002-of-00002.safetensors",
    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
    "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.norm.weight": "model-00002-of-00002.safetensors"
  }
}
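For reference, this index is a lookup table for the sharded checkpoint: "metadata.total_size" records the combined byte size of all shards, and "weight_map" maps each tensor name to the shard file that stores it. Below is a minimal sketch, assuming the two shard files named in the map sit next to this index in a local working directory and that PyTorch is installed, of how such an index is typically consumed with the safetensors library; load_tensor is a hypothetical helper introduced here only for illustration.

```python
import json
from safetensors import safe_open

# Read the index shown above; it lives alongside the shard files (assumption).
with open("model.safetensors.index.json") as f:
    index = json.load(f)

weight_map = index["weight_map"]  # tensor name -> shard filename


def load_tensor(name):
    """Resolve a tensor name to its shard via the weight_map and load only that tensor."""
    shard_file = weight_map[name]
    with safe_open(shard_file, framework="pt", device="cpu") as shard:
        return shard.get_tensor(name)


# Example: per the map above, the LM head is stored in the second shard.
lm_head = load_tensor("lm_head.weight")
print(lm_head.shape)
```

Because safetensors supports lazy, per-tensor reads, this pattern loads a single weight without pulling either shard fully into memory.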