ScyKindness committed on
Commit
c8ccb21
1 Parent(s): c44430f

Upload config.json

Browse files
Files changed (1) hide show
  1. config.json +9 -4
config.json CHANGED
@@ -11,9 +11,9 @@
11
  "layer_norm_epsilon": 1e-05,
12
  "model_type": "gpt2",
13
  "n_ctx": 1024,
14
- "n_embd": 1280,
15
- "n_head": 20,
16
- "n_layer": 36,
17
  "n_positions": 1024,
18
  "resid_pdrop": 0.1,
19
  "summary_activation": null,
@@ -21,5 +21,10 @@
21
  "summary_proj_to_labels": true,
22
  "summary_type": "cls_index",
23
  "summary_use_proj": true,
 
 
 
 
 
24
  "vocab_size": 50257
25
- }
 
11
  "layer_norm_epsilon": 1e-05,
12
  "model_type": "gpt2",
13
  "n_ctx": 1024,
14
+ "n_embd": 768,
15
+ "n_head": 12,
16
+ "n_layer": 12,
17
  "n_positions": 1024,
18
  "resid_pdrop": 0.1,
19
  "summary_activation": null,
 
21
  "summary_proj_to_labels": true,
22
  "summary_type": "cls_index",
23
  "summary_use_proj": true,
24
+ "task_specific_params": {
25
+ "conversational": {
26
+ "max_length": 1000
27
+ }
28
+ },
29
  "vocab_size": 50257
30
+ }