phoebeklett committed on
Commit
8295b3c
·
verified ·
1 Parent(s): 9ae5225

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +15 -1
config.json CHANGED
@@ -4,7 +4,21 @@
4
  "ExtendedMptForCausalLM"
5
  ],
6
  "attn_config": {
7
- "model_type": ""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8
  },
9
  "auto_map": {
10
  "AutoConfig": "configuration.ExtendedMptConfig",
 
4
  "ExtendedMptForCausalLM"
5
  ],
6
  "attn_config": {
7
+ "alibi": true,
8
+ "alibi_bias_max": 8,
9
+ "attn_impl": "torch",
10
+ "attn_pdrop": 0,
11
+ "attn_type": "multihead_attention",
12
+ "attn_uses_sequence_id": false,
13
+ "clip_qkv": null,
14
+ "mask_by_sim": true,
15
+ "memory_type": "manual",
16
+ "prefix_lm": false,
17
+ "qk_ln": false,
18
+ "sim_threshold": 0.25,
19
+ "softmax_scale": null,
20
+ "topk": 10,
21
+ "use_active_externalism": true
22
  },
23
  "auto_map": {
24
  "AutoConfig": "configuration.ExtendedMptConfig",