BonjinKim committed
Commit 995b9e1
1 Parent(s): 7bf7192

Update config.json

Files changed (1):
  config.json +0 -8
config.json CHANGED
@@ -1,10 +1,8 @@
 {
-  "_name_or_path": "./checkpoints/checkpoint-5090",
   "architectures": [
     "BertForPreTraining"
   ],
   "attention_probs_dropout_prob": 0.1,
-  "gradient_checkpointing": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
@@ -12,16 +10,10 @@
   "intermediate_size": 3072,
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
-  "model_name_or_path": "dsksd/bert-ko-small-minimal",
   "model_type": "bert",
-  "n_gate": 5,
   "num_attention_heads": 12,
   "num_hidden_layers": 6,
   "pad_token_id": 0,
-  "position_embedding_type": "absolute",
-  "proj_dim": null,
-  "transformers_version": "4.5.1",
   "type_vocab_size": 2,
-  "use_cache": true,
   "vocab_size": 35000
 }
 
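The commit strips eight keys from config.json. Several of them (`n_gate`, `proj_dim`, `model_name_or_path`, `_name_or_path`) appear to be run-specific metadata or custom fields rather than stock BertConfig attributes, leaving a plain BERT configuration behind. A minimal sketch of loading the cleaned file, assuming it is meant to be consumed through the standard Hugging Face transformers API (the local path "config.json" is illustrative, not taken from the commit):

# A minimal sketch, assuming the cleaned config is intended to load via the
# stock transformers config machinery; "config.json" stands in for a local
# copy of the file shown in this diff.
from transformers import BertConfig

config = BertConfig.from_json_file("config.json")

# Every key kept by this commit maps onto a standard BertConfig attribute.
print(config.model_type)         # bert
print(config.num_hidden_layers)  # 6
print(config.vocab_size)         # 35000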