thejagstudio committed on
Commit
6131aed
1 Parent(s): 089b53e

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +1 -4
config.json CHANGED
@@ -10,9 +10,6 @@
10
  "AutoConfig": "tiiuae/falcon-7b--configuration_RW.RWConfig",
11
  "AutoModel": "tiiuae/falcon-7b--modelling_RW.RWModel",
12
  "AutoModelForCausalLM": "tiiuae/falcon-7b--modelling_RW.RWForCausalLM",
13
- "AutoModelForQuestionAnswering": "tiiuae/falcon-7b--modelling_RW.RWForQuestionAnswering",
14
- "AutoModelForSequenceClassification": "tiiuae/falcon-7b--modelling_RW.RWForSequenceClassification",
15
- "AutoModelForTokenClassification": "tiiuae/falcon-7b--modelling_RW.RWForTokenClassification"
16
  },
17
  "bias": false,
18
  "bos_token_id": 11,
@@ -21,7 +18,7 @@
21
  "hidden_size": 4544,
22
  "initializer_range": 0.02,
23
  "layer_norm_epsilon": 1e-05,
24
- "model_type": "RefinedWebModel",
25
  "multi_query": true,
26
  "n_head": 71,
27
  "n_layer": 32,
 
10
  "AutoConfig": "tiiuae/falcon-7b--configuration_RW.RWConfig",
11
  "AutoModel": "tiiuae/falcon-7b--modelling_RW.RWModel",
12
  "AutoModelForCausalLM": "tiiuae/falcon-7b--modelling_RW.RWForCausalLM",
 
 
 
13
  },
14
  "bias": false,
15
  "bos_token_id": 11,
 
18
  "hidden_size": 4544,
19
  "initializer_range": 0.02,
20
  "layer_norm_epsilon": 1e-05,
21
+ "model_type": "MT5Config",
22
  "multi_query": true,
23
  "n_head": 71,
24
  "n_layer": 32,