{
  "config": {
    "activation_function": "gelu",
    "bias": true,
    "dropout_prob": null,
    "embedding_size": 768,
    "head_type": "causal_lm",
    "label2id": null,
    "layer_norm": true,
    "layers": 2,
    "shift_labels": true,
    "vocab_size": 50257
  },
  "hidden_size": 768,
  "model_class": "GPT2AdapterModel",
  "model_name": "distilgpt2",
  "model_type": "gpt2",
  "name": "pm_task",
  "version": "adapters.1.0.0"
}
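
Judging by the version field (`adapters.1.0.0`), the model_class (`GPT2AdapterModel`), and the head_type (`causal_lm`), this is the head_config.json that the AdapterHub `adapters` library writes when saving a causal language-modeling head named `pm_task` on top of `distilgpt2`. Below is a minimal sketch of how such a file could be produced. It assumes the `adapters` package and PyTorch are installed; the output directory name is a placeholder, and if your library version's head defaults differ from the values in the file, the corresponding keyword arguments (e.g. `layers`, `layer_norm`) would need to be passed explicitly.

```python
# Minimal sketch, assuming the AdapterHub `adapters` package (>= 1.0.0)
# is installed. The head name "pm_task" is taken from the config above;
# the directory "pm_task_head/" is a placeholder.
from adapters import GPT2AdapterModel

# GPT2AdapterModel wraps the base model with flexible prediction-head support.
model = GPT2AdapterModel.from_pretrained("distilgpt2")

# Add a causal language-modeling head named "pm_task". Head hyperparameters
# such as layers=2 or layer_norm=True in the file above may reflect
# non-default arguments, depending on the library version.
model.add_causal_lm_head("pm_task")

# save_head() writes a head_config.json like the file shown above,
# alongside the head weights, into the target directory.
model.save_head("pm_task_head/", "pm_task")

# The saved head can later be restored onto a compatible model:
# model.load_head("pm_task_head/")
```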