{
  "activation_dropout": 0.0,
  "activation_fn": "swish",
  "architectures": [
    "RetNetForCausalLM"
  ],
  "decoder_embed_dim": 1280,
  "decoder_ffn_embed_dim": 2560,
  "decoder_layers": 12,
  "decoder_normalize_before": true,
  "decoder_retention_heads": 8,
  "decoder_value_embed_dim": 2560,
  "deepnorm": false,
  "drop_path_rate": 0.0,
  "dropout": 0.0,
  "eos_token_id": 50256,
  "forward_impl": "parallel",
  "initializer_range": 0.02,
  "is_decoder": true,
  "layernorm_embedding": true,
  "layernorm_eps": 1e-06,
  "model_type": "retnet",
  "no_scale_embedding": false,
  "output_retentions": false,
  "pad_token_id": 50256,
  "recurrent_chunk_size": 512,
  "subln": true,
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.34.0",
  "unk_token_id": 50256,
  "use_cache": true,
  "use_ffn_rms_norm": false,
  "use_glu": true,
  "use_lm_decay": false,
  "vocab_size": 50257,
  "z_loss_coeff": 0.0
}
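
A minimal sketch of loading this config with the transformers library. Since "retnet" is not a built-in architecture in transformers 4.34.0, loading assumes the repository ships custom RetNet modeling code and is used with trust_remote_code=True; the repo id below is a hypothetical placeholder.

    # Sketch only: "example-org/retnet-base" is a hypothetical repo id,
    # and custom RetNet modeling code is assumed to be present in that repo.
    from transformers import AutoConfig, AutoModelForCausalLM

    config = AutoConfig.from_pretrained(
        "example-org/retnet-base",  # placeholder repo containing this config.json
        trust_remote_code=True,     # RetNet is not a native transformers architecture
    )
    print(config.decoder_embed_dim, config.decoder_retention_heads)  # 1280, 8

    # Instantiate an untrained model from the config values above.
    model = AutoModelForCausalLM.from_config(config, trust_remote_code=True)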