{
  "architectures": [
    "lma"
  ],
  "auto_map": {
    "AutoConfig": "configuration_bilma.BilmaConfig",
    "TFAutoModelForMaskedLM": "modeling_bilma.Bilma"
  },
  "drop_rate": 0.1,
  "embedding_dim": 512,
  "model_type": "bilma",
  "num_attention_heads": 4,
  "num_encoders": 2,
  "transformers_version": "4.30.2",
  "vocab_size": 28949,
  "weights": "spanish"
}