{ "config": { "activation_function": "gelu", "bias": true, "dropout_prob": null, "embedding_size": 768, "head_type": "causal_lm", "label2id": null, "layer_norm": true, "layers": 2, "shift_labels": true, "vocab_size": 50257 }, "hidden_size": 768, "model_class": "GPT2AdapterModel", "model_name": "distilgpt2", "model_type": "gpt2", "name": "pm_task", "version": "adapters.1.0.0" }