patrickvonplaten committed
Commit b309735
1 Parent(s): bd8ae41
Files changed (1)
  1. config.json +1 -7
config.json CHANGED
@@ -7,26 +7,20 @@
   "attention_dropout": 0.0,
   "bos_token_id": 0,
   "d_model": 7168,
-  "decoder_layernorm": false,
-  "decoder_start_token_id": 2,
   "do_layer_norm_before": true,
   "dropout": 0.1,
   "eos_token_id": 2,
   "ffn_dim": 28672,
-  "forced_eos_token_id": 2,
   "init_std": 0.02,
   "layerdrop": 0.0,
   "max_position_embeddings": 2048,
   "model_type": "opt",
   "num_attention_heads": 56,
   "num_hidden_layers": 48,
-  "output_projection": true,
   "pad_token_id": 1,
-  "scale_embedding": false,
-  "share_input_output_embed": true,
   "torch_dtype": "float16",
   "transformers_version": "4.19.0.dev0",
-  "use_cache": false,
+  "use_cache": true,
   "vocab_size": 50272,
   "word_embed_proj_dim": 7168
 }
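After this change, `use_cache` is re-enabled and several non-standard keys are dropped. A minimal sketch of checking the result (assuming a recent `transformers` install; the `facebook/opt-30b` repo id is an assumption inferred from the shape values, since d_model 7168, 48 layers, and 56 heads match OPT-30B):

```python
# A minimal sketch: load the config and confirm the fields this commit touched.
# Assumption: the repo id below; substitute the actual model id if it differs.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("facebook/opt-30b")

print(config.model_type)  # "opt"
# With use_cache=True, generate() reuses cached key/value states at each
# decoding step instead of recomputing attention over the whole prefix.
print(config.use_cache)   # True
```

The removed keys (decoder_layernorm, output_projection, scale_embedding, share_input_output_embed, and so on) appear to be Fairseq-era conversion leftovers that the transformers OPT implementation does not read, so dropping them keeps config.json limited to attributes the model code actually consumes.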