princeton-nlp committed on
Commit
4eadd2f
1 Parent(s): ae0cd60

Upload LlamaForSequenceClassification

config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "_name_or_path": "checkpoints-preferences/preferences_princeton-nlp--Sheared-LLaMA-1.3b_bsz512_lr5e-5_epochs2_warmup0.1_conf0.5_labeltemp1.0_datasetnew_labelall_main/",
   "architectures": [
-    "LlamaForCausalLM"
+    "LlamaForSequenceClassification"
   ],
   "attention_bias": false,
   "bos_token_id": 1,
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7b2eaaec9410edc03daf6c8803cd8d1ac4a5afdb9119270e5bbd38a7bbde168f
-size 397435656
+oid sha256:9e526f05de4ed3ddfca217c9a9ef926bc0f11210023de99921db2c67afdbbb9c
+size 135324400
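model-00002-of-00002.safetensors is tracked with Git LFS, so the repository stores only a small pointer file holding the object's sha256 and byte size; the diff above swaps that pointer because the shard's contents changed, and the much smaller size is consistent with lm_head.weight being dropped from this shard. A small sketch for reading such a pointer, using a hypothetical helper name:

def parse_lfs_pointer(path: str) -> dict:
    """Parse a Git LFS pointer file ("key value" per line) into a dict."""
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            if key:
                fields[key] = value
    return fields

pointer = parse_lfs_pointer("model-00002-of-00002.safetensors")
print(pointer["oid"])   # sha256:9e526f05de4ed3ddfca217c9a9ef926bc0f11210023de99921db2c67afdbbb9c
print(pointer["size"])  # 135324400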
model.safetensors.index.json CHANGED
@@ -1,9 +1,8 @@
 {
   "metadata": {
-    "total_size": 5381693440
+    "total_size": 5119582208
   },
   "weight_map": {
-    "lm_head.weight": "model-00002-of-00002.safetensors",
     "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
     "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
     "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
@@ -221,6 +220,7 @@
     "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
     "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
     "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "model.norm.weight": "model-00002-of-00002.safetensors"
+    "model.norm.weight": "model-00002-of-00002.safetensors",
+    "score.weight": "model-00002-of-00002.safetensors"
   }
 }
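The index change mirrors the config change: metadata.total_size drops, lm_head.weight disappears from the weight map, and a small score.weight tensor (the sequence-classification head) is added to the second shard. A short sketch for inspecting the updated index, assuming only the standard file names from this commit:

import json

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])    # 5119582208 after this commit
weight_map = index["weight_map"]
print(weight_map.get("lm_head.weight"))   # None -- the LM head was removed
print(weight_map.get("score.weight"))     # model-00002-of-00002.safetensors

# Count how many tensors each shard holds.
per_shard = {}
for name, shard in weight_map.items():
    per_shard[shard] = per_shard.get(shard, 0) + 1
for shard, count in sorted(per_shard.items()):
    print(shard, count)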