Training time: 220.16 minutes; memory: 9.4 GB.
{
  "architectures": [
    "ReformerModelWithLMHead"
  ],
  "attention_head_size": 32,
  "attn_layers": [
    "local",
    "lsh",
    "local",
    "lsh",
    "local",
    "lsh"
  ],
  "axial_norm_std": 1.0,
  "axial_pos_embds": true,
  "axial_pos_embds_dim": [
    11,
    11
  ],
  "axial_pos_shape": [
    32,
    32
  ],
  "chunk_size_lm_head": 0,
  "classifier_dropout": null,
  "eos_token_id": 50256,
  "feed_forward_size": 32,
  "hash_seed": null,
  "hidden_act": "relu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 22,
  "initializer_range": 0.02,
  "is_decoder": true,
  "layer_norm_eps": 1e-12,
  "local_attention_probs_dropout_prob": 0.0,
  "local_attn_chunk_length": 64,
  "local_num_chunks_after": 0,
  "local_num_chunks_before": 1,
  "lsh_attention_probs_dropout_prob": 0.0,
  "lsh_attn_chunk_length": 64,
  "lsh_num_chunks_after": 0,
  "lsh_num_chunks_before": 1,
  "max_position_embeddings": 1024,
  "model_type": "reformer",
  "num_attention_heads": 3,
  "num_buckets": 32,
  "num_hashes": 1,
  "num_hidden_layers": 6,
  "pad_token_id": 50257,
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.35.2",
  "use_cache": true,
  "vocab_size": 50259
}
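As a quick sanity check, here is a minimal sketch (not part of the original post) of how this config could be loaded with the transformers library and verified for internal consistency. The file name "config.json" is an assumption; the model is instantiated with random weights, not the trained checkpoint.

    # Minimal sketch: load the Reformer config above and check the
    # constraints the architecture imposes on its axial embeddings.
    from transformers import ReformerConfig, ReformerModelWithLMHead

    # Assumes the JSON above is saved locally as "config.json".
    config = ReformerConfig.from_json_file("config.json")

    # The axial embedding dims must sum to hidden_size: 11 + 11 == 22.
    assert sum(config.axial_pos_embds_dim) == config.hidden_size

    # The axial position shape must factorize the sequence length:
    # 32 * 32 == 1024 == max_position_embeddings.
    assert (config.axial_pos_shape[0] * config.axial_pos_shape[1]
            == config.max_position_embeddings)

    # Instantiate the model from the config (randomly initialized weights).
    model = ReformerModelWithLMHead(config)
    print(f"{sum(p.numel() for p in model.parameters()):,} parameters")

With hidden_size 22, feed_forward_size 32, and six alternating local/LSH layers, this is a deliberately tiny Reformer, which is consistent with the modest 9.4 GB memory figure reported above.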