{
"config": {
"activation_function": "gelu",
"bias": true,
"embedding_size": 768,
"head_type": "masked_lm",
"label2id": null,
"layer_norm": true,
"layers": 2,
"shift_labels": false,
"vocab_size": 50265
},
"hidden_size": 768,
"model_class": "RobertaAdapterModel",
"model_name": "roberta-base",
"model_type": "roberta",
"name": "tapt_seq_bn",
"version": "0.1.2"
}