keonju committed on
Commit 3636d2a
1 Parent(s): cdc8458

Training in progress, epoch 1

.gitattributes CHANGED
@@ -32,3 +32,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,3 @@
+{
+  "[MASK]": 250101
+}
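This file pins the [MASK] token to id 250101 on top of the SentencePiece vocabulary. A minimal sketch of checking that id after loading the tokenizer; the local checkpoint path is an assumption, not part of this commit:

```python
from transformers import AutoTokenizer

# Assumption: a local clone of this repository.
tokenizer = AutoTokenizer.from_pretrained("path/to/this/checkpoint")
print(tokenizer.convert_tokens_to_ids("[MASK]"))  # expected: 250101
print(tokenizer.mask_token_id)                    # expected: 250101
```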
config.json CHANGED
@@ -8,24 +8,16 @@
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
   "id2label": {
-    "0": "LABEL_0",
-    "1": "LABEL_1",
-    "2": "LABEL_2",
-    "3": "LABEL_3",
-    "4": "LABEL_4",
-    "5": "LABEL_5",
-    "6": "LABEL_6"
+    "0": "\ubd80\uc815",
+    "1": "\uae0d\uc815",
+    "2": "\uc911\ub9bd"
   },
   "initializer_range": 0.02,
   "intermediate_size": 3072,
   "label2id": {
-    "LABEL_0": 0,
-    "LABEL_1": 1,
-    "LABEL_2": 2,
-    "LABEL_3": 3,
-    "LABEL_4": 4,
-    "LABEL_5": 5,
-    "LABEL_6": 6
+    "\uae0d\uc815": 1,
+    "\ubd80\uc815": 0,
+    "\uc911\ub9bd": 2
   },
   "layer_norm_eps": 1e-07,
   "max_position_embeddings": 512,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:008afb4347aa45f9fcfb6f44cf2ab28e122102375d3492e3c7ee9ba8e4665988
-size 1115332921
+oid sha256:b932374739a4eef1248ba92c26dd147f5463843babaf6e7a89b1151bde6d9f9b
+size 1115320633
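The weights are stored through Git LFS, so the diff only shows the pointer file: oid is the SHA-256 of the real binary and size is its byte count. A minimal sketch for verifying a downloaded pytorch_model.bin against the new pointer; the local path is an assumption:

```python
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    # Hash the file in chunks to avoid loading ~1.1 GB into memory at once.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "b932374739a4eef1248ba92c26dd147f5463843babaf6e7a89b1151bde6d9f9b"
assert sha256_of("pytorch_model.bin") == expected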
special_tokens_map.json ADDED
@@ -0,0 +1,9 @@
+{
+  "bos_token": "[CLS]",
+  "cls_token": "[CLS]",
+  "eos_token": "[SEP]",
+  "mask_token": "[MASK]",
+  "pad_token": "[PAD]",
+  "sep_token": "[SEP]",
+  "unk_token": "[UNK]"
+}
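These are the standard DeBERTa-v2/v3 special tokens, and the tokenizer wraps each encoded example as [CLS] ... [SEP]. A minimal sketch, assuming a local clone of this checkpoint:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/this/checkpoint")
ids = tokenizer("안녕하세요")["input_ids"]
print(tokenizer.convert_ids_to_tokens(ids))  # starts with [CLS], ends with [SEP]
```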
spm.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:13c8d666d62a7bc4ac8f040aab68e942c861f93303156cc28f5c7e885d86d6e3
+size 4305025
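spm.model is the SentencePiece model that the slow DebertaV2Tokenizer wraps (vocab_type "spm" in tokenizer_config.json below). A minimal sketch for inspecting it directly with the sentencepiece package; the local path is an assumption:

```python
import sentencepiece as spm

sp = spm.SentencePieceProcessor(model_file="spm.model")
print(sp.vocab_size())                       # size of the base subword vocabulary
print(sp.encode("안녕하세요", out_type=str))  # subword pieces for a sample sentence
```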
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6f06fc3bbcbbc8f07be861b9a6a69e177247bc549b2bcbf0483e4dd98c06b6f6
+size 16331301
tokenizer_config.json ADDED
@@ -0,0 +1,17 @@
+{
+  "bos_token": "[CLS]",
+  "cls_token": "[CLS]",
+  "do_lower_case": false,
+  "eos_token": "[SEP]",
+  "mask_token": "[MASK]",
+  "model_max_length": 1000000000000000019884624838656,
+  "name_or_path": "lighthouse/mdeberta-v3-base-kor-further",
+  "pad_token": "[PAD]",
+  "sep_token": "[SEP]",
+  "sp_model_kwargs": {},
+  "special_tokens_map_file": null,
+  "split_by_punct": false,
+  "tokenizer_class": "DebertaV2Tokenizer",
+  "unk_token": "[UNK]",
+  "vocab_type": "spm"
+}
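name_or_path records that the tokenizer was taken from lighthouse/mdeberta-v3-base-kor-further, and model_max_length holds the "no limit" sentinel value, so the sequence length has to be capped explicitly at tokenization time; 512 matches max_position_embeddings in config.json. A minimal sketch, with the local path as an assumption:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/this/checkpoint")
batch = tokenizer(
    ["첫 번째 문장", "두 번째 문장"],
    padding=True,
    truncation=True,
    max_length=512,   # explicit cap, since model_max_length is effectively unset
    return_tensors="pt",
)
print(batch["input_ids"].shape)
```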
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e66e8c8c30ffc4369beaafddd3a06d76fc9c66f18f90d843c23b4561cc80e96d
+oid sha256:dddfa1fa546b2c1119ac611e2f98cd3de9f285b5229a98e2491f73cd76ab25b0
 size 3451
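training_args.bin is the pickled TrainingArguments object that the transformers Trainer writes next to each checkpoint; it can be loaded back to inspect the hyperparameters behind this epoch. A minimal sketch; transformers must be importable for unpickling, and on newer torch versions weights_only=False may be required:

```python
import torch

# Unpickles a transformers.TrainingArguments instance saved by Trainer.
args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)
```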