matthewkenney committed
Commit 5ea2e8f
Parent: 7759b47

Update config.json

Files changed (1): config.json (+2 -1)
config.json CHANGED
@@ -10,11 +10,12 @@
   "AutoModelForCausalLM": "microsoft/phi-1_5--modeling_phi.PhiForCausalLM"
   },
   "embd_pdrop": 0.0,
+  "flash_attn": false,
   "flash_rotary": false,
   "fused_dense": false,
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
-  "model_type": "mixformer-sequential",
+  "model_type": "phi",
   "n_embd": 2048,
   "n_head": 32,
   "n_head_kv": null,