{
  "architectures": [
    "DEBERTAMultiGATAttentionModel"
  ],
  "auto_map": {
    "AutoConfig": "config.BERTMultiGATAttentionConfig",
    "AutoModel": "model.DEBERTAMultiGATAttentionModel"
  },
  "dropout": 0.07,
  "gnn_hidden_dim": 768,
  "gnn_input_dim": 768,
  "hidden_size": 768,
  "model_type": "deberta_semantic_similarity",
  "num_heads": 8,
  "torch_dtype": "float32",
  "transformer_model": "microsoft/deberta-v3-base",
  "transformers_version": "4.37.2"
}
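
The "auto_map" entries route transformers' AutoConfig and AutoModel to custom classes defined in the repo's own config.py and model.py, so loading this model requires trust_remote_code=True. Below is a minimal sketch of how such a config is typically consumed; the repo id is a hypothetical placeholder, not something stated in the file.

# Minimal loading sketch for a repo with a custom auto_map.
# Assumption: repo_id is a placeholder; substitute the actual Hub repo.
from transformers import AutoConfig, AutoModel

repo_id = "KhaldiAbderrhmane/your-repo-name"  # hypothetical placeholder

# trust_remote_code=True lets transformers execute the repo's config.py and
# model.py to build BERTMultiGATAttentionConfig and
# DEBERTAMultiGATAttentionModel instead of a stock architecture.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModel.from_pretrained(repo_id, trust_remote_code=True)

print(type(config).__name__)  # BERTMultiGATAttentionConfig
print(type(model).__name__)   # DEBERTAMultiGATAttentionModel

Note that the custom model wraps the "transformer_model" backbone (microsoft/deberta-v3-base, hidden size 768) with a GNN head whose dimensions are set by "gnn_input_dim" and "gnn_hidden_dim" in this config.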