kamalkraj committed on
Commit 65f02c7
1 Parent(s): d1412a1

Update config.json

Files changed (1)
  1. config.json +3 -0
config.json CHANGED
@@ -1,4 +1,5 @@
 {
+  "activation_dropout": 0.0,
   "architectures": [
     "BioGptLMHeadModel"
   ],
@@ -11,11 +12,13 @@
   "initializer_range": 0.02,
   "intermediate_size": 4096,
   "layer_norm_eps": 1e-12,
+  "layerdrop": 0.0,
   "max_position_embeddings": 1024,
   "model_type": "biogpt",
   "num_attention_heads": 16,
   "num_hidden_layers": 24,
   "pad_token_id": 1,
+  "scale_embedding": true,
   "transformers_version": "4.25.0.dev0",
   "use_cache": true,
   "vocab_size": 42384