ItelAi committed on
Commit
09289f7
1 Parent(s): a5db4d3

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +5 -6
config.json CHANGED
@@ -1,6 +1,5 @@
1
-
2
  {
3
- "_name_or_path": "output-small",
4
  "activation_function": "gelu_new",
5
  "architectures": [
6
  "GPT2LMHeadModel"
@@ -14,10 +13,10 @@
14
  "layer_norm_epsilon": 1e-05,
15
  "model_type": "gpt2",
16
  "n_ctx": 1024,
17
- "n_embd": 768,
18
- "n_head": 12,
19
  "n_inner": null,
20
- "n_layer": 12,
21
  "n_positions": 1024,
22
  "resid_pdrop": 0.1,
23
  "scale_attn_weights": true,
@@ -34,4 +33,4 @@
34
  "transformers_version": "4.6.1",
35
  "use_cache": true,
36
  "vocab_size": 50257
37
- }
 
 
1
  {
2
+ "_name_or_path": "output-medium",
3
  "activation_function": "gelu_new",
4
  "architectures": [
5
  "GPT2LMHeadModel"
 
13
  "layer_norm_epsilon": 1e-05,
14
  "model_type": "gpt2",
15
  "n_ctx": 1024,
16
+ "n_embd": 1024,
17
+ "n_head": 16,
18
  "n_inner": null,
19
+ "n_layer": 24,
20
  "n_positions": 1024,
21
  "resid_pdrop": 0.1,
22
  "scale_attn_weights": true,
 
33
  "transformers_version": "4.6.1",
34
  "use_cache": true,
35
  "vocab_size": 50257
36
+ }