FAT5-large-flan-en / config.json
{
"alibi_mode": "symetric",
"architectures": [
"FlashT5ForConditionalGeneration"
],
"attention_dropout_rate": 0.0,
"attention_scale": 1.0,
"attention_type": "ref",
"auto_map": {
"AutoConfig": "configuration_flash_t5.FlashT5Config",
"AutoModel": "modeling_flash_t5.FlashT5ForConditionalGeneration",
"AutoModelForQuestionAnswering": "custom_heads_flash_t5.FlashT5ForQuestionAnswering",
"AutoModelForSeq2SeqLM": "modeling_flash_t5.FlashT5ForConditionalGeneration",
"AutoModelForSequenceClassification": "custom_heads_flash_t5.FlashT5ForSequenceClassification",
"AutoModelForTokenClassification": "custom_heads_flash_t5.FlashT5ForTokenClassification"
},
"classifier_dropout": 0.0,
"d_ff": 2816,
"d_kv": 64,
"d_model": 1024,
"decoder_start_token_id": 0,
"dense_act_fn": "relu",
"dropout_rate": 0.0,
"eos_token_id": 1,
"feed_forward_proj": "relu",
"fire_mlp_width": 32,
"initializer_factor": 1.0,
"is_encoder_decoder": false,
"is_gated_act": false,
"label_smoothing": 0.1,
"layer_norm_epsilon": 1e-06,
"max_sequence_length": 1024,
"model_type": "flash_t5",
"num_decoder_layers": 24,
"num_heads": 16,
"num_layers": 24,
"pad_token_id": 0,
"position_encoding_type": "t5",
"relative_attention_max_distance": 128,
"relative_attention_num_buckets": 32,
"rotary_base": 10000,
"rotary_emb_fraction": 1.0,
"rotary_interleaved": false,
"rotary_scale_base": null,
"tie_word_embeddings": false,
"torch_dtype": "float32",
"transformers_version": "4.39.3",
"use_cache": true,
"use_flash_attention": "triton",
"use_full_bias_size": false,
"use_gelu_act": true,
"use_glu_mlp": true,
"use_masking": false,
"use_randomized_position_encoding": false,
"use_triton_crossentropy": false,
"use_triton_gated_mlp": false,
"use_triton_layernorm": false,
"vocab_size": 32128,
"z_loss": 0.0001
}
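
A minimal loading sketch for this config. Because auto_map points at custom classes (configuration_flash_t5.FlashT5Config, modeling_flash_t5.FlashT5ForConditionalGeneration) shipped in the repository rather than bundled with transformers, trust_remote_code=True is required. The repo id below is an assumption inferred from the file path (substitute the actual Hub repository), and since use_flash_attention is "triton", a CUDA environment with Triton kernels may be needed for the optimized attention path.

```python
from transformers import AutoConfig, AutoModelForSeq2SeqLM, AutoTokenizer

repo_id = "FAT5-large-flan-en"  # assumed repo id; replace with the real Hub path

# The custom model_type "flash_t5" resolves through auto_map to FlashT5Config.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.num_layers, config.num_heads, config.d_model)  # 24 16 1024

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForSeq2SeqLM.from_pretrained(repo_id, trust_remote_code=True)

# Illustrative instruction-style prompt for a FLAN-tuned checkpoint.
inputs = tokenizer("Translate to French: Hello, world!", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```

The auto_map block also registers custom heads (custom_heads_flash_t5), so the same pattern works with AutoModelForSequenceClassification, AutoModelForTokenClassification, or AutoModelForQuestionAnswering in place of AutoModelForSeq2SeqLM.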