DopeorNope committed
Commit d5d54f8
1 Parent(s): 4569847

Update config.json

Files changed (1):
  1. config.json +12 -10
config.json CHANGED
@@ -1,27 +1,29 @@
 {
-  "_name_or_path": "./",
+  "_name_or_path": "beomi/KoAlpaca-Polyglot-5.8B",
   "architectures": [
     "GPTNeoXForCausalLM"
   ],
+  "attention_dropout": 0.0,
   "bos_token_id": 0,
   "classifier_dropout": 0.1,
   "eos_token_id": 0,
   "hidden_act": "gelu",
-  "hidden_size": 5120,
+  "hidden_dropout": 0.0,
+  "hidden_size": 4096,
   "initializer_range": 0.02,
-  "intermediate_size": 20480,
+  "intermediate_size": 16384,
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 2048,
   "model_type": "gpt_neox",
-  "num_attention_heads": 40,
-  "num_hidden_layers": 40,
-  "num_steps": "global_step15930",
+  "num_attention_heads": 16,
+  "num_hidden_layers": 28,
+  "num_steps": "global_step320000",
   "rotary_emb_base": 10000,
-  "rotary_pct": 0.5,
+  "rotary_pct": 0.25,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.29.2",
-  "use_cache": true,
+  "transformers_version": "4.31.0.dev0",
+  "use_cache": false,
   "use_parallel_residual": true,
-  "vocab_size": 30003
+  "vocab_size": 30080
 }
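
The updated values describe a GPT-NeoX model with a 4096 hidden size, 28 layers, 16 attention heads, and a rotary_pct of 0.25, matching the "_name_or_path" of beomi/KoAlpaca-Polyglot-5.8B given in the new config. A minimal sanity-check sketch using the Hugging Face transformers library, assuming it is run from a local clone of this repository (the asserts simply mirror the "+" lines in the diff above):

    from transformers import AutoConfig

    # Load config.json from the current directory (a local clone of this repo).
    config = AutoConfig.from_pretrained("./")

    # Verify the values introduced by this commit.
    assert config.model_type == "gpt_neox"
    assert config.hidden_size == 4096          # was 5120
    assert config.intermediate_size == 16384   # was 20480
    assert config.num_hidden_layers == 28      # was 40
    assert config.num_attention_heads == 16    # was 40
    assert config.rotary_pct == 0.25           # was 0.5
    assert config.vocab_size == 30080          # was 30003
    print(config)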