roberta-large-sst_pfeiffer / head_config.json
Add adapter roberta-large-sst_pfeiffer version 1
{
  "config": {
    "activation_function": "tanh",
    "bias": true,
    "dropout_prob": null,
    "head_type": "classification",
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1
    },
    "layers": 2,
    "num_labels": 2,
    "use_pooler": false
  },
  "hidden_size": 1024,
  "model_class": "RobertaAdapterModel",
  "model_name": "roberta-large",
  "model_type": "roberta",
  "name": "sst-2",
  "version": "0.2.0"
}
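
A minimal sketch of how a head config like this is consumed, assuming the adapters library (successor to adapter-transformers) and assuming the adapter is published on the Hugging Face Hub under the AdapterHub namespace; the repo ID below is an assumption, not confirmed by this file. Loading the adapter attaches the 2-layer tanh classification head described above to a RobertaAdapterModel.

# Sketch: load the adapter and its classification head (repo ID is assumed).
import torch
from transformers import AutoTokenizer
from adapters import AutoAdapterModel

tokenizer = AutoTokenizer.from_pretrained("roberta-large")
model = AutoAdapterModel.from_pretrained("roberta-large")

# load_adapter reads head_config.json and adds the classification head
# (2 layers, tanh activation, num_labels=2, no pooler) alongside the adapter.
adapter_name = model.load_adapter("AdapterHub/roberta-large-sst_pfeiffer")
model.set_active_adapters(adapter_name)

inputs = tokenizer("a gripping, well-acted drama", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
print(logits.argmax(dim=-1))  # index maps to LABEL_0 / LABEL_1 via label2id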