nluai committed
Commit c6572f3
1 Parent(s): 7ccd88b

Update config.json

Files changed (1):
  1. config.json +10 -16
config.json CHANGED
```diff
@@ -1,22 +1,14 @@
 {
+  "_name_or_path": "vinai/PhoGPT-4B-Chat",
   "architectures": [
-    "MPTForCausalLM"
+    "MptForCausalLM"
   ],
   "attn_config": {
-    "alibi": true,
-    "alibi_bias_max": 8,
-    "attn_impl": "torch",
-    "attn_pdrop": 0.0,
-    "attn_type": "multihead_attention",
-    "attn_uses_sequence_id": false,
-    "clip_qkv": null,
-    "prefix_lm": false,
-    "qk_ln": false,
-    "softmax_scale": null
+    "model_type": ""
   },
   "auto_map": {
-    "AutoConfig": "configuration_mpt.MPTConfig",
-    "AutoModelForCausalLM": "modeling_mpt.MPTForCausalLM"
+    "AutoConfig": "vinai/PhoGPT-4B-Chat--configuration_mpt.MPTConfig",
+    "AutoModelForCausalLM": "vinai/PhoGPT-4B-Chat--modeling_mpt.MPTForCausalLM"
   },
   "d_model": 3072,
   "emb_pdrop": 0.0,
@@ -34,6 +26,8 @@
     "verbose": 0
   },
   "init_device": "cpu",
+  "initializer_range": 0.02,
+  "layer_norm_epsilon": 1e-05,
   "learned_pos_emb": true,
   "logit_scale": null,
   "max_seq_len": 8192,
@@ -43,9 +37,9 @@
   "no_bias": false,
   "norm_type": "low_precision_layernorm",
   "resid_pdrop": 0.0,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.30.2",
+  "torch_dtype": "float16",
+  "transformers_version": "4.41.1",
   "use_cache": false,
   "verbose": 0,
   "vocab_size": 20480
-}
+}
```
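In short: the architecture name moves from the MosaicML-era `MPTForCausalLM` to `MptForCausalLM` (the casing used by the Transformers-native MPT port), the MPT-specific `attn_config` keys are dropped, and `auto_map` now uses the repo-qualified `repo--module.Class` form so the `configuration_mpt.py` and `modeling_mpt.py` modules are fetched from the `vinai/PhoGPT-4B-Chat` repository itself. The sketch below is not part of the commit; it is a minimal example, assuming standard Transformers loading calls, of how a checkpoint carrying this `config.json` would typically be used.

```python
import torch
from transformers import AutoConfig, AutoModelForCausalLM

# trust_remote_code=True is required: the "auto_map" entries point at Python
# modules hosted in the vinai/PhoGPT-4B-Chat repo, not at classes bundled
# with the transformers package.
config = AutoConfig.from_pretrained("vinai/PhoGPT-4B-Chat", trust_remote_code=True)
print(config.d_model, config.max_seq_len)  # 3072 8192, per this config.json

model = AutoModelForCausalLM.from_pretrained(
    "vinai/PhoGPT-4B-Chat",
    trust_remote_code=True,
    torch_dtype=torch.float16,  # matches the "torch_dtype": "float16" set in this commit
)
```

Because the `auto_map` entries are repo-qualified, a fork or local copy of this `config.json` still resolves the modeling code from `vinai/PhoGPT-4B-Chat` rather than expecting the `.py` files to sit next to the checkpoint.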