fragata committed on
Commit 16a56dd
1 Parent(s): d6c5b8e

Upload config.json

Files changed (1):
  config.json (+7 -1)
config.json CHANGED
@@ -1,10 +1,15 @@
 {
+  "_name_or_path": "NYTK/PULI-GPTrio",
   "architectures": [
     "GPTNeoXForCausalLM"
   ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
   "bos_token_id": 7,
+  "classifier_dropout": 0.1,
   "eos_token_id": 7,
   "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
   "hidden_size": 4096,
   "initializer_range": 0.02,
   "intermediate_size": 16384,
@@ -13,11 +18,12 @@
   "model_type": "gpt_neox",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
+  "rope_scaling": null,
   "rotary_emb_base": 10000,
   "rotary_pct": 1.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.24.0",
+  "transformers_version": "4.37.0",
   "use_cache": true,
   "use_parallel_residual": false,
   "vocab_size": 150016