lemonilia committed
Commit 50c1efe
1 Parent(s): faaa6cf

Upload 3 files

Files changed (3)
  1. adapter_config.json +3 -3
  2. adapter_model.bin +1 -1
  3. config.json +2 -1
adapter_config.json CHANGED
@@ -16,12 +16,12 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
+    "gate_proj",
     "q_proj",
+    "o_proj",
     "k_proj",
+    "v_proj",
     "down_proj",
-    "gate_proj",
-    "o_proj",
     "up_proj"
   ],
   "task_type": "CAUSAL_LM"
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:821d951c6c8ab1d8654a98042729ce1f8b17a781cceb5ce9ee2ade05672daf0f
+oid sha256:9edefbd37dc5e961086923e49fc46f12c6f44bc81b5ca3e8cad19106997a9cd3
 size 84046925
config.json CHANGED
@@ -3,6 +3,7 @@
   "architectures": [
     "MistralForCausalLM"
   ],
+  "attention_dropout": 0.0,
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "silu",
@@ -31,7 +32,7 @@
   "sliding_window": 4096,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.35.1",
+  "transformers_version": "4.36.0",
   "use_cache": false,
   "vocab_size": 32000
 }
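
The config.json change adds the attention_dropout field expected by newer MistralConfig versions and bumps transformers_version from 4.35.1 to 4.36.0. A minimal loading sketch under that assumption follows; both repo IDs are placeholders, not taken from this commit.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

# Minimal loading sketch; both repo IDs below are placeholders, not taken
# from this commit. transformers >= 4.36.0 is assumed, matching the
# updated config.json.
base_model_id = "mistralai/Mistral-7B-v0.1"  # assumed Mistral base model
adapter_id = "your-username/your-adapter"    # placeholder adapter repo

tokenizer = AutoTokenizer.from_pretrained(base_model_id)
model = AutoModelForCausalLM.from_pretrained(base_model_id, torch_dtype=torch.bfloat16)
model = PeftModel.from_pretrained(model, adapter_id)  # applies the LoRA adapter weights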