satpalsr committed on
Commit
c5758cf
1 Parent(s): 6e609aa

Training in progress, step 118

Files changed (2)
  1. config.json +15 -16
  2. training_args.bin +1 -1
config.json CHANGED
@@ -1,31 +1,30 @@
 {
   "_name_or_path": "microsoft/phi-2",
+  "activation_function": "gelu_new",
   "architectures": [
     "PhiForCausalLM"
   ],
-  "attention_dropout": 0.0,
+  "attn_pdrop": 0.0,
   "auto_map": {
     "AutoConfig": "microsoft/phi-2--configuration_phi.PhiConfig",
     "AutoModelForCausalLM": "microsoft/phi-2--modeling_phi.PhiForCausalLM"
   },
-  "bos_token_id": null,
   "embd_pdrop": 0.0,
-  "eos_token_id": null,
-  "hidden_act": "gelu_new",
-  "hidden_size": 2560,
+  "flash_attn": false,
+  "flash_rotary": false,
+  "fused_dense": false,
+  "img_processor": null,
   "initializer_range": 0.02,
-  "intermediate_size": 10240,
-  "layer_norm_eps": 1e-05,
-  "max_position_embeddings": 2048,
-  "model_type": "phi",
-  "num_attention_heads": 32,
-  "num_hidden_layers": 32,
-  "num_key_value_heads": 32,
-  "partial_rotary_factor": 0.4,
-  "qk_layernorm": false,
+  "layer_norm_epsilon": 1e-05,
+  "model_type": "phi-msft",
+  "n_embd": 2560,
+  "n_head": 32,
+  "n_head_kv": null,
+  "n_inner": null,
+  "n_layer": 32,
+  "n_positions": 2048,
   "resid_pdrop": 0.1,
-  "rope_scaling": null,
-  "rope_theta": 10000.0,
+  "rotary_dim": 32,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
   "transformers_version": "4.37.0.dev0",
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6945732dc97ffe13097af00878127f2e1892b6625039d9d6d97d420a6263fc97
+oid sha256:c67201b896b290970d7f4b444c7f49a41ad5190ca72e99ef204736c0d49d9279
 size 5240
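Only the LFS pointer's oid changed for training_args.bin; the size stayed at 5240 bytes. As a side note, not part of the commit: a Git LFS oid is the SHA-256 of the object's raw bytes, so a downloaded copy can be checked against the new pointer like this.

# Sketch: verify a downloaded training_args.bin against the LFS pointer's oid.
import hashlib

def lfs_sha256(path: str) -> str:
    # Hash the file in 1 MiB chunks to avoid reading it all into memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "c67201b896b290970d7f4b444c7f49a41ad5190ca72e99ef204736c0d49d9279"
assert lfs_sha256("training_args.bin") == expected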