{
"num_layers":40,
"vocab_size":50176,
"hidden_size":3072,
"num_attention_heads":48,
"embedding_dropout_prob":0.1,
"attention_dropout_prob":0.1,
"output_dropout_prob":0.1,
"max_sequence_length":1024,
"max_memory_length":512,
"checkpoint_activations":false,
"checkpoint_num_layers":1,
"parallel_output":true,
"relative_encoding":true
}