quintic committed
Commit 37266f9
1 Parent(s): 5367b63
Files changed (1)
  1. config.json +3 -3
config.json CHANGED
@@ -10,13 +10,13 @@
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
   "model_type": "gptj",
-  "n_embd": 1024,
+  "n_embd": 4096,
   "n_head": 16,
   "n_inner": null,
-  "n_layer": 20,
+  "n_layer": 33,
   "n_positions": 2048,
   "resid_pdrop": 0.0,
-  "rotary_dim": 32,
+  "rotary_dim": 64,
   "scale_attn_weights": true,
   "tie_word_embeddings": false,
   "tokenizer_class": "CodeGenTokenizer",
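For context, the three updated values are internally consistent: an embedding width (n_embd) of 4096 split across 16 heads (n_head) gives 256 dimensions per head, and the rotary dimension (rotary_dim) of 64 fits within that per-head width. The sketch below rebuilds the updated configuration with the transformers library's GPTJConfig (matching the "model_type": "gptj" entry above) to check those relationships; the parameter values are taken from this diff, while the checks themselves are an illustrative addition, not part of the commit.

from transformers import GPTJConfig

# Rebuild the configuration with the values from this commit.
cfg = GPTJConfig(
    n_embd=4096,               # was 1024
    n_head=16,
    n_layer=33,                # was 20
    n_positions=2048,
    n_inner=None,
    rotary_dim=64,             # was 32
    resid_pdrop=0.0,
    layer_norm_epsilon=1e-05,
    initializer_range=0.02,
    tie_word_embeddings=False,
)

# The embedding width must divide evenly across attention heads,
# and the rotary dimension must fit inside a single head.
head_dim = cfg.n_embd // cfg.n_head
assert cfg.n_embd % cfg.n_head == 0   # 4096 / 16 = 256 dims per head
assert cfg.rotary_dim <= head_dim     # 64 <= 256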