jtatman committed (verified)
Commit: bf58674 · Parent: ce61eed

Upload GPTNeoXForCausalLM

Files changed (3):
  1. config.json +5 -5
  2. generation_config.json +1 -1
  3. model.safetensors +2 -2
config.json CHANGED
@@ -10,21 +10,21 @@
   "eos_token_id": 0,
   "hidden_act": "gelu",
   "hidden_dropout": 0.0,
-  "hidden_size": 8192,
+  "hidden_size": 2048,
   "initializer_range": 0.02,
-  "intermediate_size": 57344,
+  "intermediate_size": 7168,
   "layer_norm_eps": 1e-05,
-  "max_position_embeddings": 8192,
+  "max_position_embeddings": 2048,
   "model_type": "gpt_neox",
   "num_attention_heads": 8,
-  "num_hidden_layers": 8,
+  "num_hidden_layers": 16,
   "pad_token_id": 0,
   "rope_scaling": null,
   "rotary_emb_base": 10000,
   "rotary_pct": 0.5,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
-  "transformers_version": "4.41.2",
+  "transformers_version": "4.38.2",
   "use_cache": true,
   "use_parallel_residual": true,
   "vocab_size": 50281
generation_config.json CHANGED
@@ -3,5 +3,5 @@
   "bos_token_id": 0,
   "eos_token_id": 0,
   "pad_token_id": 0,
-  "transformers_version": "4.41.2"
+  "transformers_version": "4.38.2"
 }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:383f0994660dcb4c92601cf8ed1def53ae28daeac30456dc7b8496d6c35ddaad
- size 2357751968
+ oid sha256:d706f84ddf1399b349d7f6e3be5d4af4a9fcc8254804651d009a641ea87653cd
+ size 3891683344
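
For completeness, a short stdlib-only sketch (the local file path is an assumption) to verify that a downloaded weight file matches the sha256 oid in the new LFS pointer:

import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file in 1 MiB chunks so large checkpoints fit in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "d706f84ddf1399b349d7f6e3be5d4af4a9fcc8254804651d009a641ea87653cd"
assert sha256_of("model.safetensors") == expected, "checksum mismatch"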