{ "add_head": null, "architectures": [ "Bilma" ], "auto_map": { "AutoConfig": "configuration_bilma.BilmaConfig", "TFAutoModel": "modeling_bilma.TFBilma" }, "hidden_dropout_prob": 0.1, "hidden_size": 512, "include_top": true, "model_type": "bilma", "num_attention_heads": 4, "num_hidden_layers": 2, "pooling": null, "seq_max_length": 280, "transformers_version": "4.30.2", "vocab_size": 29025, "weights": "AR" }