bilma / config.json
{
"_name_or_path": "w",
"architectures": [
"lma"
],
"auto_map": {
"AutoConfig": "configuration_bilma.BilmaConfig",
"TFAutoModelForMaskedLM": "modeling_bilma.Bilma"
},
"drop_rate": 0.1,
"embedding_dim": 512,
"model_type": "bilma",
"name": "xxx",
"num_attention_heads": 4,
"num_encoders": 2,
"seq_max_length": 280,
"transformers_version": "4.30.2",
"vocab_size": 28949,
"weights": "spanish"
}
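
This config resolves its classes through auto_map, which points at custom code shipped in the repository (configuration_bilma.BilmaConfig and the TensorFlow model class modeling_bilma.Bilma), so loading it requires trust_remote_code=True. Below is a minimal sketch of loading the config and model with the transformers library; the repository id "guillermoruiz/bilma" is an assumption inferred from the page context and may need to be adjusted.

# Sketch: loading the Bilma config and TF model via transformers.
# Assumption: the repo id "guillermoruiz/bilma" is a guess, not confirmed by the config itself.
from transformers import AutoConfig, TFAutoModelForMaskedLM

repo_id = "guillermoruiz/bilma"  # hypothetical repo id; replace with the actual one

# trust_remote_code=True is needed because auto_map maps AutoConfig and
# TFAutoModelForMaskedLM to custom classes stored in the repository.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.embedding_dim, config.num_attention_heads, config.seq_max_length)

# The auto_map entry uses TFAutoModelForMaskedLM, so the checkpoint is a TensorFlow model.
model = TFAutoModelForMaskedLM.from_pretrained(repo_id, trust_remote_code=True)

The printed values should match the fields above (512, 4, 280); any mismatch suggests the wrong repository or revision was loaded.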