system HF staff committed on
Commit 07ab17e
1 Parent(s): a12dc79

Update config.json

Files changed (1)
  1. config.json +9 -4
config.json CHANGED
@@ -3,7 +3,6 @@
     "ReformerModelWithLMHead"
   ],
   "attention_head_size": 64,
-  "attention_probs_dropout_prob": 0.1,
   "attn_layers": [
     "local",
     "lsh",
@@ -24,15 +23,23 @@
   ],
   "chunk_size_feed_forward": 0,
   "chunk_size_lm_head": 0,
+  "eos_token_id": 2,
   "feed_forward_size": 512,
+  "hash_seed": null,
   "hidden_act": "relu",
-  "hidden_dropout_prob": 0.05,
+  "hidden_dropout_prob": 0.0,
   "hidden_size": 256,
   "initializer_range": 0.02,
   "is_decoder": true,
   "layer_norm_eps": 1e-12,
+  "local_attention_probs_dropout_prob": 0.0,
   "local_attn_chunk_length": 64,
+  "local_num_chunks_after": 0,
+  "local_num_chunks_before": 1,
+  "lsh_attention_probs_dropout_prob": 0.0,
   "lsh_attn_chunk_length": 64,
+  "lsh_num_chunks_after": 0,
+  "lsh_num_chunks_before": 1,
   "max_position_embeddings": 524288,
   "model_type": "reformer",
   "num_attention_heads": 2,
@@ -40,8 +47,6 @@
     64,
     128
   ],
-  "num_chunks_after": 0,
-  "num_chunks_before": 1,
   "num_hashes": 1,
   "num_hidden_layers": 6,
   "pad_token_id": 0,