{
  "_name_or_path": "w",
  "architectures": [
    "Bilma"
  ],
  "auto_map": {
    "AutoConfig": "configuration_bilma.BilmaConfig",
    "TFAutoModelForMaskedLM": "modeling_bilma.Bilma"
  },
  "drop_rate": 0.1,
  "embedding_dim": 512,
  "model_type": "bilma",
  "name": "xxx",
  "num_attention_heads": 4,
  "num_encoders": 2,
  "seq_max_length": 280,
  "transformers_version": "4.30.2",
  "vocab_size": 28949,
  "weights": "spanish"
}