ybelkada committed
Commit 464ab3b
1 Parent(s): 318c118

Update config.json

Files changed (1): config.json +2 -10
config.json CHANGED
@@ -3,17 +3,9 @@
   "alibi": false,
   "apply_residual_connection_post_layernorm": false,
   "architectures": [
-    "RWForCausalLM"
+    "FalconForCausalLM"
   ],
   "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoConfig": "tiiuae/falcon-7b--configuration_RW.RWConfig",
-    "AutoModel": "tiiuae/falcon-7b--modelling_RW.RWModel",
-    "AutoModelForCausalLM": "tiiuae/falcon-7b--modelling_RW.RWForCausalLM",
-    "AutoModelForQuestionAnswering": "tiiuae/falcon-7b--modelling_RW.RWForQuestionAnswering",
-    "AutoModelForSequenceClassification": "tiiuae/falcon-7b--modelling_RW.RWForSequenceClassification",
-    "AutoModelForTokenClassification": "tiiuae/falcon-7b--modelling_RW.RWForTokenClassification"
-  },
   "bias": false,
   "bos_token_id": 11,
   "eos_token_id": 11,
@@ -21,7 +13,7 @@
   "hidden_size": 4544,
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
-  "model_type": "RefinedWebModel",
+  "model_type": "falcon",
   "multi_query": true,
   "n_head": 71,
   "n_layer": 32,