esceptico committed
Commit 1e095b6
1 Parent(s): d3f7644

Upload GPTJForCausalLM

Files changed (2):
  1. config.json +3 -3
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -12,10 +12,10 @@
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
   "model_type": "gptj",
-  "n_embd": 16,
+  "n_embd": 32,
   "n_head": 4,
-  "n_inner": 32,
-  "n_layer": 3,
+  "n_inner": 64,
+  "n_layer": 4,
   "n_positions": 2048,
   "resid_pdrop": 0.0,
   "rotary": true,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:fff073017204167fde7e5d5c11743a62cd17d0cddcea36adf6a52350facbab25
-size 19274604
+oid sha256:f52677357c641fd15386ac9f72103a67b4ff5d521907a244655c22687478201e
+size 30032208
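
pytorch_model.bin is tracked with Git LFS, so the diff above changes only the pointer file: oid is the SHA-256 of the new weight file and size its byte count (30,032,208 bytes, up from 19,274,604, consistent with the larger architecture). A minimal sketch of verifying a downloaded copy against the new pointer, assuming the file sits in the current directory:

    import hashlib
    import os

    # Values copied from the new LFS pointer above.
    EXPECTED_OID = "f52677357c641fd15386ac9f72103a67b4ff5d521907a244655c22687478201e"
    EXPECTED_SIZE = 30032208

    path = "pytorch_model.bin"  # assumed local path to the downloaded file
    assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"

    sha = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
            sha.update(chunk)
    assert sha.hexdigest() == EXPECTED_OID, "checksum mismatch"
    print("pytorch_model.bin matches the LFS pointer")
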