kevin-yang committed on
Commit
5335db1
1 Parent(s): 49e3e89

initial commit

Browse files
README.md ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Finetuning
2
+
3
+ ## Result
4
+
5
+ ### Base Model
6
+
7
+ | | Size | **NSMC**<br/>(acc) | **Naver NER**<br/>(F1) | **PAWS**<br/>(acc) | **KorNLI**<br/>(acc) | **KorSTS**<br/>(spearman) | **Question Pair**<br/>(acc) | **KorQuAD (Dev)**<br/>(EM/F1) | **Korean-Hate-Speech (Dev)**<br/>(F1) |
8
+ | :-------------------- | :---: | :----------------: | :--------------------: | :----------------: | :------------------: | :-----------------------: | :-------------------------: | :---------------------------: | :-----------------------------------: |
9
+ | KoBERT | 351M | 89.59 | 87.92 | 81.25 | 79.62 | 81.59 | 94.85 | 51.75 / 79.15 | 66.21 |
10
+ | XLM-Roberta-Base | 1.03G | 89.03 | 86.65 | 82.80 | 80.23 | 78.45 | 93.80 | 64.70 / 88.94 | 64.06 |
11
+ | HanBERT | 614M | 90.06 | 87.70 | 82.95 | 80.32 | 82.73 | 94.72 | 78.74 / 92.02 | 68.32 |
12
+ | KoELECTRA-Base-v3 | 431M | 90.63 | 88.11 | 84.45 | 82.24 | 85.53 | 95.25 | 84.83 / 93.45 | 67.61 |
13
+ | Soongsil-BERT | 370M | **91.2** | - | - | - | 76 | 94 | - | **69** |
14
+
15
+ ### Small Model
16
+
17
+ | | Size | **NSMC**<br/>(acc) | **Naver NER**<br/>(F1) | **PAWS**<br/>(acc) | **KorNLI**<br/>(acc) | **KorSTS**<br/>(spearman) | **Question Pair**<br/>(acc) | **KorQuAD (Dev)**<br/>(EM/F1) | **Korean-Hate-Speech (Dev)**<br/>(F1) |
18
+ | :--------------------- | :--: | :----------------: | :--------------------: | :----------------: | :------------------: | :-----------------------: | :-------------------------: | :---------------------------: | :-----------------------------------: |
19
+ | DistilKoBERT | 108M | 88.60 | 84.65 | 60.50 | 72.00 | 72.59 | 92.48 | 54.40 / 77.97 | 60.72 |
20
+ | KoELECTRA-Small-v3 | 54M | 89.36 | 85.40 | 77.45 | 78.60 | 80.79 | 94.85 | 82.11 / 91.13 | 63.07 |
21
+ | Soongsil-BERT | 213M | **90.7** | 84 | 69.1 | 76 | - | 92 | - | **66** |
22
+
23
+ ## Reference
24
+ - [Transformers Examples](https://github.com/huggingface/transformers/blob/master/examples/README.md)
25
+ - [NSMC](https://github.com/e9t/nsmc)
26
+ - [Naver NER Dataset](https://github.com/naver/nlp-challenge)
27
+ - [PAWS](https://github.com/google-research-datasets/paws)
28
+ - [KorNLI/KorSTS](https://github.com/kakaobrain/KorNLUDatasets)
29
+ - [Question Pair](https://github.com/songys/Question_pair)
30
+ - [KorQuAD](https://korquad.github.io/category/1.0_KOR.html)
31
+ - [Korean Hate Speech](https://github.com/kocohub/korean-hate-speech)
32
+ - [KoELECTRA](https://github.com/monologg/KoELECTRA)
33
+ - [KoBERT](https://github.com/SKTBrain/KoBERT)
34
+ - [HanBERT](https://github.com/tbai2019/HanBert-54k-N)
35
+ - [HanBert Transformers](https://github.com/monologg/HanBert-Transformers)
config.json ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "RobertaForSequenceClassification"
4
+ ],
5
+ "attention_probs_dropout_prob": 0.1,
6
+ "bos_token_id": 0,
7
+ "eos_token_id": 2,
8
+ "gradient_checkpointing": false,
9
+ "hidden_act": "gelu",
10
+ "hidden_dropout_prob": 0.1,
11
+ "hidden_size": 768,
12
+ "id2label": {
13
+ "0": "none",
14
+ "1": "offensive",
15
+ "2": "hate"
16
+ },
17
+ "initializer_range": 0.02,
18
+ "intermediate_size": 3072,
19
+ "label2id": {
20
+ "hate": 2,
21
+ "none": 0,
22
+ "offensive": 1
23
+ },
24
+ "layer_norm_eps": 1e-05,
25
+ "max_position_embeddings": 514,
26
+ "model_type": "roberta",
27
+ "num_attention_heads": 12,
28
+ "num_hidden_layers": 12,
29
+ "pad_token_id": 1,
30
+ "total_flos": 8.217624121867606e+19,
31
+ "type_vocab_size": 1,
32
+ "vocab_size": 16000
33
+ }
merges.txt ADDED
The diff for this file is too large to render. See raw diff
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4bf1435f5b6f9727c9b7fbf89ba4ef5cf3e8cf68b4365d0868187f2f32ef69f7
3
+ size 393420233
special_tokens_map.json ADDED
@@ -0,0 +1 @@
 
1
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
 
1
+ {"max_len": 512, "special_tokens_map_file": null, "full_tokenizer_file": null}
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:17405666aa60e70369a597d3349370e9143c329ab67f7e8f9ba33f2abb679861
3
+ size 1519
vocab.json ADDED
The diff for this file is too large to render. See raw diff