config.json
"{\n\"activation_function\": \"gelu_new\",\n  \"architectures\": [\n    \"GPT2LMHeadModel\"\n  ],\n  \"attn_pdrop\": 0.1,\n  \"bos_token_id\": 50256,\n  \"embd_pdrop\": 0.1,\n  \"eos_token_id\": 50256,\n  \"initializer_range\": 0.02,\n  \"layer_norm_epsilon\": 1e-05,\n  \"model_type\": \"gpt2\",\n  \"n_ctx\": 1024,\n  \"n_embd\": 768,\n  \"n_head\": 12,\n  \"n_layer\": 12,\n  \"n_positions\": 1024,\n  \"resid_pdrop\": 0.1,\n  \"summary_activation\": null,\n  \"summary_first_dropout\": 0.1,\n  \"summary_proj_to_labels\": true,\n  \"summary_type\": \"cls_index\",\n  \"summary_use_proj\": true,\n  \"task_specific_params\": {\n    \"text-generation\": {\n      \"do_sample\": true,\n      \"max_length\": 50\n    }\n  },\n  \"vocab_size\": 50257\n}"