susnato committed
Commit 85d9497
1 Parent(s): ff4e06f

add attn_pdrop and auto_map

Files changed (1): config.json (+6 -1)
config.json CHANGED

@@ -2,6 +2,10 @@
   "architectures": [
     "PhiForCausalLM"
   ],
+  "auto_map": {
+    "AutoConfig": "configuration_phi.PhiConfig",
+    "AutoModelForCausalLM": "modeling_phi.PhiForCausalLM"
+  },
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "gelu_new",
@@ -15,6 +19,7 @@
   "pretraining_tp": 1,
   "resid_pdrop": 0.0,
   "embd_pdrop": 0.0,
+  "attention_dropout": 0.0,
   "layer_norm_eps": 1e-05,
   "rope_scaling": null,
   "rope_theta": 10000.0,
@@ -24,4 +29,4 @@
   "transformers_version": "4.34.0.dev0",
   "use_cache": true,
   "vocab_size": 51200
-}
+}
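For context, the added "auto_map" entries tell the transformers Auto classes which custom modules in this repository to load the config and model classes from when remote code is trusted, and "attention_dropout" becomes an attribute on the loaded config. A minimal sketch of loading the model after this change; the repository id used here is a placeholder, not the actual repo name:

# Sketch only: the repo id below is a placeholder for this repository.
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "your-username/phi-model"  # placeholder repository id

# AutoConfig resolves to configuration_phi.PhiConfig via the auto_map entry;
# trust_remote_code=True allows the custom code in the repo to be executed.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.attention_dropout)  # 0.0, the value added in this commit

# AutoModelForCausalLM resolves to modeling_phi.PhiForCausalLM the same way.
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)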