system (HF staff) committed on
Commit 6796ccf
1 Parent(s): f9dac2b

Update config.json

Files changed (1)
  1. config.json +1 -30
config.json CHANGED
@@ -1,30 +1 @@
- {"activation_function": "gelu_new",
- "architectures": [
- "GPT2LMHeadModel"
- ],
- "attn_pdrop": 0.1,
- "bos_token_id": 50256,
- "embd_pdrop": 0.1,
- "eos_token_id": 50256,
- "initializer_range": 0.02,
- "layer_norm_epsilon": 1e-05,
- "model_type": "gpt2",
- "n_ctx": 1024,
- "n_embd": 768,
- "n_head": 12,
- "n_layer": 12,
- "n_positions": 1024,
- "resid_pdrop": 0.1,
- "summary_activation": null,
- "summary_first_dropout": 0.1,
- "summary_proj_to_labels": true,
- "summary_type": "cls_index",
- "summary_use_proj": true,
- "task_specific_params": {
- "text-generation": {
- "do_sample": true,
- "max_length": 50
- }
- },
- "vocab_size": 50257
- }
+ "{\n\"activation_function\": \"gelu_new\",\n \"architectures\": [\n \"GPT2LMHeadModel\"\n ],\n \"attn_pdrop\": 0.1,\n \"bos_token_id\": 50256,\n \"embd_pdrop\": 0.1,\n \"eos_token_id\": 50256,\n \"initializer_range\": 0.02,\n \"layer_norm_epsilon\": 1e-05,\n \"model_type\": \"gpt2\",\n \"n_ctx\": 1024,\n \"n_embd\": 768,\n \"n_head\": 12,\n \"n_layer\": 12,\n \"n_positions\": 1024,\n \"resid_pdrop\": 0.1,\n \"summary_activation\": null,\n \"summary_first_dropout\": 0.1,\n \"summary_proj_to_labels\": true,\n \"summary_type\": \"cls_index\",\n \"summary_use_proj\": true,\n \"task_specific_params\": {\n \"text-generation\": {\n \"do_sample\": true,\n \"max_length\": 50\n }\n },\n \"vocab_size\": 50257\n}"