roberta-base-pf-emotion / head_config.json
{
  "config": {
    "activation_function": "tanh",
    "bias": true,
    "head_type": "classification",
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1,
      "LABEL_2": 2,
      "LABEL_3": 3,
      "LABEL_4": 4,
      "LABEL_5": 5
    },
    "layers": 2,
    "num_labels": 6,
    "use_pooler": false
  },
  "hidden_size": 768,
  "model_class": "RobertaModelWithHeads",
  "model_name": "roberta-base",
  "model_type": "roberta",
  "name": "emotion"
}
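
The file above describes the prediction head shipped with this adapter: a 2-layer classification head named "emotion" with tanh activation, a bias term, 6 labels, and no pooler, attached on top of roberta-base (hidden size 768) through the RobertaModelWithHeads class. Below is a minimal usage sketch, assuming the legacy adapter-transformers package (which patches transformers with the *WithHeads classes) and assuming the Hub repo id "AdapterHub/roberta-base-pf-emotion"; the example sentence is also illustrative, not taken from this file.

# Minimal sketch, assuming the legacy adapter-transformers package and the
# (assumed) Hub repo id "AdapterHub/roberta-base-pf-emotion".
import torch
from transformers import RobertaTokenizer, RobertaModelWithHeads

tokenizer = RobertaTokenizer.from_pretrained("roberta-base")
model = RobertaModelWithHeads.from_pretrained("roberta-base")

# Load the adapter together with the classification head described in
# head_config.json, then activate it for inference.
adapter_name = model.load_adapter("AdapterHub/roberta-base-pf-emotion", source="hf")
model.active_adapters = adapter_name

inputs = tokenizer("I can't believe how happy this makes me!", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # shape (1, 6), matching num_labels in the config
predicted = logits.argmax(dim=-1).item()
print(f"Predicted class id: {predicted}")  # maps to LABEL_0..LABEL_5 via label2id above

Note that the head config only carries generic LABEL_0..LABEL_5 names; mapping them to emotion categories requires the label scheme of the dataset the adapter was trained on, which is not recorded in this file.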