valhalla committed on
Commit 445a8ae
1 Parent(s): c578154

update config

Files changed (1)
  1. config.json +1 -12
config.json CHANGED
@@ -1,22 +1,12 @@
 {
-  "_name_or_path": "xglm-564M",
-  "activation_dropout": 0.0,
+  "activation_dropout": 0,
   "activation_function": "gelu",
-  "architectures": [
-    "XGLMForCausalLM"
-  ],
   "attention_dropout": 0.1,
   "attention_heads": 16,
   "bos_token_id": 0,
-  "classifier_dropout": 0.0,
   "d_model": 1024,
-  "decoder_attention_heads": 16,
-  "decoder_ffn_dim": 4096,
-  "decoder_layerdrop": 0.0,
-  "decoder_layers": 24,
   "decoder_start_token_id": 2,
   "dropout": 0.1,
-  "encoder_layerdrop": 0.0,
   "eos_token_id": 2,
   "ffn_dim": 4096,
   "init_std": 0.02,
@@ -26,7 +16,6 @@
   "num_layers": 24,
   "pad_token_id": 1,
   "scale_embedding": true,
-  "torch_dtype": "float16",
   "transformers_version": "4.16.0.dev0",
   "use_cache": true,
   "vocab_size": 256008