rynmurdock committed commit 6a4a6f4 (1 parent: cb78d70)

Upload LlamaForCausalLM

adapter_config.json CHANGED
@@ -21,8 +21,8 @@
   "revision": null,
   "target_modules": [
     "q_proj",
-    "o_proj",
     "k_proj",
+    "o_proj",
     "v_proj"
   ],
   "task_type": "CAUSAL_LM",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:276ae5a3cc3358f25e4abde927f6b0d19e57cde6670d53e8b00a7c9590c1989c
+oid sha256:2f4a3a7efeca4b1a6355ba44fdd129460a297fb87f268d6697ad0f6632a40e7d
 size 1744866448
generation_config.json CHANGED
@@ -1,9 +1,12 @@
 {
+  "bos_token_id": 128000,
   "do_sample": true,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
   "temperature": 0.6,
   "top_p": 0.9,
-  "_from_model_config": true,
-  "bos_token_id": 128000,
-  "eos_token_id": 128001,
-  "transformers_version": "4.43.0.dev0"
+  "transformers_version": "4.45.0.dev0"
 }
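
This diff drops `_from_model_config`, bumps the recorded transformers version, and replaces the single `eos_token_id` with a list, so generation stops at any of the listed token ids. A minimal sketch (not part of this commit) of constructing the same settings in code:

```python
# Sketch only: build the updated generation settings with transformers.
# eos_token_id may be a list; generation stops at whichever id appears first.
from transformers import GenerationConfig

generation_config = GenerationConfig(
    bos_token_id=128000,
    do_sample=True,
    eos_token_id=[128001, 128008, 128009],
    temperature=0.6,
    top_p=0.9,
)

# Usage example (assumed): model.generate(**inputs, generation_config=generation_config)
```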