sc2582 committed on
Commit
a915f91
1 Parent(s): 583d98f

Upload LlamaForCausalLM

adapter_config.json CHANGED
@@ -24,14 +24,14 @@
   "revision": null,
   "target_modules": [
     "q_proj",
-    "embed_tokens",
     "k_proj",
-    "o_proj",
+    "v_proj",
     "gate_proj",
-    "lm_head",
+    "embed_tokens",
+    "o_proj",
     "down_proj",
     "up_proj",
-    "v_proj"
+    "lm_head"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e07e1b7840b0ec1128ab4de0b59b4ae3a4bd55ecde02ad57984acd01cd03d3ff
+oid sha256:5773309530e96589d9ed42fde7bbd133d68c5d3cb3eb39c84eab57fba3be2d70
 size 2772497632
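adapter_model.safetensors is a Git LFS pointer, so this diff only swaps the sha256 oid of the underlying weights; the size is unchanged. A downloaded copy could be checked against the pointer roughly like this (a sketch; the local path is a placeholder assumption):

import hashlib
import os

# Values taken from the LFS pointer above; the path is a placeholder.
path = "adapter_model.safetensors"
expected_oid = "5773309530e96589d9ed42fde7bbd133d68c5d3cb3eb39c84eab57fba3be2d70"
expected_size = 2772497632

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert sha.hexdigest() == expected_oid, "sha256 mismatch"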
generation_config.json CHANGED
@@ -8,5 +8,5 @@
   "max_length": 4096,
   "temperature": 0.6,
   "top_p": 0.9,
-  "transformers_version": "4.42.3"
+  "transformers_version": "4.43.3"
 }
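The only change here is the transformers_version stamp, which transformers writes automatically when a generation config is saved. A sketch of producing such a file (do_sample=True and the output directory are assumptions; only max_length, temperature, and top_p are visible in the diff):

from transformers import GenerationConfig

# Sketch: the generation settings shown above. transformers_version is
# stamped on save, which is all this commit actually changed.
gen_config = GenerationConfig(
    do_sample=True,  # assumed; not visible in the diffed lines
    max_length=4096,
    temperature=0.6,
    top_p=0.9,
)
gen_config.save_pretrained("out_dir")  # placeholder directory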