{
"config": {
"activation_function": "tanh",
"bias": true,
"head_type": "classification",
"label2id": {
"acceptable": 1,
"unacceptable": 0
},
"layers": 2,
"num_labels": 2,
"use_pooler": false
},
"hidden_size": 768,
"model_class": "BertModelWithHeads",
"model_name": "bert-base-uncased",
"model_type": "bert",
"name": "glue_cola"
}