{
  "num_layers": 24,
  "vocab_size": 30592,
  "hidden_size": 1024,
  "num_attention_heads":16,
  "embedding_dropout_prob":0.1,
  "attention_dropout_prob":0.1,
  "output_dropout_prob":0.1,
  "max_sequence_length":512,
  "max_memory_length":0,
  "checkpoint_activations": false,
  "checkpoint_num_layers":1 ,
  "parallel_output": true,
  "relative_encoding": false,
  "block_position_encoding": true,
  "output_predict": true,
  "spell_length": null,
  "spell_func": "lstm",
  "attention_scale":1.0
}