PeepDaSlan9 committed
Commit c465df7 (verified)
Parent(s): e9b5c83

Update config.json

Files changed (1): config.json (+7 −3)
config.json CHANGED
@@ -1,13 +1,15 @@
+```json
 {
-  "_name_or_path": "microsoft/phi-2",
+  "_name_or_path": "./models/dolphin-2_6-phi-2",
   "activation_function": "gelu_new",
   "architectures": [
     "PhiForCausalLM"
   ],
   "attn_pdrop": 0.0,
   "auto_map": {
-    "AutoConfig": "microsoft/phi-2--configuration_phi.PhiConfig",
-    "AutoModelForCausalLM": "microsoft/phi-2--modeling_phi.PhiForCausalLM"
+    "AutoConfig": "configuration_phi.PhiConfig",
+    "AutoModel": "modeling_phi.PhiForCausalLM",
+    "AutoModelForCausalLM": "modeling_phi.PhiForCausalLM"
   },
   "embd_pdrop": 0.0,
   "flash_attn": false,
@@ -19,6 +21,8 @@
   "model_type": "phi-msft",
   "n_embd": 2560,
   "n_head": 32,
+  "n_head_kv": null,
+  "n_inner": null,
   "n_layer": 32,
   "n_positions": 2048,
   "resid_pdrop": 0.1,