system HF staff committed on
Commit
3acd70e
1 Parent(s): 92211f4

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -1 +1 @@
1
- "{\n\"activation_function\": \"gelu_new\",\n \"architectures\": [\n \"GPT2LMHeadModel\"\n ],\n \"attn_pdrop\": 0.1,\n \"bos_token_id\": 50256,\n \"embd_pdrop\": 0.1,\n \"eos_token_id\": 50256,\n \"initializer_range\": 0.02,\n \"layer_norm_epsilon\": 1e-05,\n \"model_type\": \"gpt2\",\n \"n_ctx\": 1024,\n \"n_embd\": 768,\n \"n_head\": 12,\n \"n_layer\": 12,\n \"n_positions\": 1024,\n \"resid_pdrop\": 0.1,\n \"summary_activation\": null,\n \"summary_first_dropout\": 0.1,\n \"summary_proj_to_labels\": true,\n \"summary_type\": \"cls_index\",\n \"summary_use_proj\": true,\n \"task_specific_params\": {\n \"text-generation\": {\n \"do_sample\": true,\n \"max_length\": 50\n }\n },\n \"vocab_size\": 50257\n}"
1
+ {"activation_function": "gelu_new", "architectures": ["GPT2LMHeadModel"], "attn_pdrop": 0.1, "bos_token_id": 50256, "embd_pdrop": 0.1, "eos_token_id": 50256, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt2", "n_ctx": 1024, "n_embd": 768, "n_head": 12, "n_layer": 12, "n_positions": 1024, "resid_pdrop": 0.1, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "task_specific_params": {"text-generation": {"do_sample": true, "max_length": 50}}, "vocab_size": 50257}