config.json
{
  "num_layers": 32,
  "vocab_size": 50048,
  "hidden_size": 1600,
  "num_attention_heads": 25,
  "embedding_dropout_prob": 0.1,
  "attention_dropout_prob": 0.1,
  "output_dropout_prob": 0.1,
  "max_sequence_length": 512,
  "max_memory_length": 512,
  "checkpoint_activations": false,
  "checkpoint_num_layers": 1,
  "parallel_output": true,
  "relative_encoding": true
}
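
For reference, a minimal sketch (assuming the file above is saved locally as config.json) that loads these hyperparameters with Python's standard library and derives the per-head dimension; the field names come directly from the config, while the file path and printed summary are illustrative only:

import json

# Load the hyperparameters shown above (assumes the file is saved as config.json).
with open("config.json") as f:
    cfg = json.load(f)

# The per-head dimension follows from hidden_size / num_attention_heads:
# 1600 / 25 = 64, so each of the 25 attention heads works on a 64-dimensional slice.
head_dim, remainder = divmod(cfg["hidden_size"], cfg["num_attention_heads"])
assert remainder == 0, "hidden_size must be divisible by num_attention_heads"

print(f"{cfg['num_layers']} layers, {cfg['num_attention_heads']} heads x {head_dim} dims per head")
print(f"max sequence length: {cfg['max_sequence_length']} tokens, "
      f"memory length: {cfg['max_memory_length']} tokens")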