susnato committed
Commit 0b08351
1 Parent(s): 0aa7fc9

add attn_pdrop and auto_map

Files changed (1): config.json (+5, -0)
config.json CHANGED
@@ -2,6 +2,10 @@
   "architectures": [
     "PhiForCausalLM"
   ],
+  "auto_map": {
+    "AutoConfig": "configuration_phi.PhiConfig",
+    "AutoModelForCausalLM": "modeling_phi.PhiForCausalLM"
+  },
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "gelu_new",
@@ -15,6 +19,7 @@
   "pretraining_tp": 1,
   "resid_pdrop": 0.0,
   "embd_pdrop": 0.0,
+  "attention_dropout": 0.0,
   "layer_norm_eps": 1e-05,
   "rope_scaling": null,
   "rope_theta": 10000.0,