binhquoc committed
Commit 81e58bd · 1 Parent(s): 0b56d9c

commit files to HF hub
config.json ADDED
@@ -0,0 +1,30 @@
+{
+  "architectures": [
+    "DebertaV2ForMaskedLM"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "bos_token_id": 2,
+  "eos_token_id": 3,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-07,
+  "max_position_embeddings": 1024,
+  "max_relative_positions": -1,
+  "model_type": "deberta-v2",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 6,
+  "pad_token_id": 0,
+  "pooler_dropout": 0,
+  "pooler_hidden_act": "gelu",
+  "pooler_hidden_size": 768,
+  "pos_att_type": null,
+  "position_biased_input": true,
+  "relative_attention": false,
+  "torch_dtype": "float32",
+  "transformers_version": "4.21.2",
+  "type_vocab_size": 0,
+  "vocab_size": 40000
+}
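For orientation, config.json above describes a small 6-layer DeBERTa-v2 encoder (768 hidden size, 12 heads, 40,000-token vocabulary). A minimal sketch of instantiating it with the transformers library; the local directory name is an assumption:

    # Minimal sketch: build the model described by config.json above.
    # "./vie-deberta-small" is a hypothetical local clone of this repo.
    from transformers import DebertaV2Config, DebertaV2ForMaskedLM

    config = DebertaV2Config.from_pretrained("./vie-deberta-small")
    model = DebertaV2ForMaskedLM(config)   # random init; from_pretrained() would load pytorch_model.bin
    print(f"{model.num_parameters():,}")   # ~75M parameters, matching the ~299 MB float32 weight file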
deberta_result.log ADDED
@@ -0,0 +1,28 @@
+Before load dataset, RAM used: 0.36 GB | Available: 233.95 GB | Left: 233.59 GB
+Before load dataset, RAM used: 0.36 GB | Available: 233.95 GB | Left: 233.59 GB
+Dataset({
+    features: ['text'],
+    num_rows: 112759113
+})
+After load dataset, RAM used: 6.51 GB | Available: 233.75 GB | Left: 227.23 GB
+After Prepare Dataloader, RAM used: 37.51 GB | Available: 234.01 GB | Left: 196.51 GB
+After epoch 1, RAM used: 38.33 GB | Available: 214.05 GB | Left: 175.72 GB
+
+>>> Epoch 1: Perplexity: 12.33546676560462 Loss: 2.109314857722502
+Loss improved inf -> 2.109314857722502
+Saved training checkpoint
+After epoch 2, RAM used: 38.32 GB | Available: 211.02 GB | Left: 172.71 GB
+
+>>> Epoch 2: Perplexity: 9.42495984722383 Loss: 1.9333453324274112
+Loss improved 2.109314857722502 -> 1.9333453324274112
+Saved training checkpoint
+After epoch 3, RAM used: 38.32 GB | Available: 214.43 GB | Left: 176.11 GB
+
+>>> Epoch 3: Perplexity: 7.812616130125877 Loss: 1.8017513724170475
+Loss improved 1.9333453324274112 -> 1.8017513724170475
+Saved training checkpoint
+After epoch 4, RAM used: 38.32 GB | Available: 213.09 GB | Left: 174.77 GB
+
+>>> Epoch 4: Perplexity: 5.710954100841271 Loss: 1.6808245233119454
+Loss improved 1.8017513724170475 -> 1.6808245233119454
+Saved training checkpoint
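The RAM lines in this log look like the output of a small psutil helper. A hypothetical reconstruction (the training script is not part of this commit, so the function name and label format are guesses):

    # Hypothetical reconstruction of the memory logger behind the lines above.
    import psutil

    GB = 1024 ** 3

    def log_ram(stage: str) -> None:
        vm = psutil.virtual_memory()
        used, avail = vm.used / GB, vm.available / GB
        print(f"{stage}, RAM used: {used:.2f} GB | Available: {avail:.2f} GB "
              f"| Left: {avail - used:.2f} GB")

    log_ram("Before load dataset")

The "Left" column is consistent with available minus used (e.g. 233.95 - 0.36 = 233.59 GB in the first line).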
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:09d3360ba64d2d6e7e8b056015e9bf99d5adfd3cf4f30db2116b9c0d36a88371
+size 298712813
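pytorch_model.bin is stored as a Git LFS pointer rather than the weights themselves: "oid" is the SHA-256 of the real file and "size" its length in bytes. A quick integrity check after downloading, with the file path assumed:

    # Verify a downloaded LFS object against the pointer's oid and size.
    import hashlib
    from pathlib import Path

    path = Path("pytorch_model.bin")  # assumed location
    h = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            h.update(chunk)
    assert path.stat().st_size == 298712813
    assert h.hexdigest() == "09d3360ba64d2d6e7e8b056015e9bf99d5adfd3cf4f30db2116b9c0d36a88371"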
special_tokens_map.json ADDED
@@ -0,0 +1,9 @@
+{
+  "bos_token": "[BOS]",
+  "cls_token": "[CLS]",
+  "eos_token": "[EOS]",
+  "mask_token": "[MASK]",
+  "pad_token": "[PAD]",
+  "sep_token": "[SEP]",
+  "unk_token": "[UNK]"
+}
spm.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dd1d1a303889e650d36230ceeb5cbdfbc1bc879ec50ac577993fcfeaa2963051
+size 880522
tokenizer.json ADDED
The diff for this file is too large to render.
tokenizer_config.json ADDED
@@ -0,0 +1,15 @@
+{
+  "bos_token": "[BOS]",
+  "cls_token": "[CLS]",
+  "do_lower_case": false,
+  "eos_token": "[EOS]",
+  "mask_token": "[MASK]",
+  "name_or_path": "binhquoc/vie-tokenizer-v1",
+  "pad_token": "[PAD]",
+  "sep_token": "[SEP]",
+  "sp_model_kwargs": {},
+  "special_tokens_map_file": "/root/.cache/huggingface/transformers/423d759425088f736b8d0250ed15b43c61a902f755ab79aa13b5a8f6eb182fef.de27e148691aa9d25a1377400b3c8e2c5bacda9e4df4d3cacf9bb66ecdb3d5de",
+  "split_by_punct": false,
+  "tokenizer_class": "DebertaV2Tokenizer",
+  "unk_token": "[UNK]"
+}
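Taken together, spm.model, tokenizer.json, and the two JSON files above configure a SentencePiece-based DebertaV2Tokenizer. A minimal usage sketch, again assuming a hypothetical local clone of the repo:

    # Load the tokenizer configured above (directory name is an assumption).
    from transformers import DebertaV2Tokenizer

    tokenizer = DebertaV2Tokenizer.from_pretrained("./vie-deberta-small")
    enc = tokenizer("Xin chào thế giới")  # Vietnamese for "hello world"
    print(enc.input_ids)         # ids from the 40,000-entry SentencePiece vocab
    print(tokenizer.mask_token)  # "[MASK]", per special_tokens_map.json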
vie_deberta_small_checkpoint_1.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f06575dfbcf560aa28ecfebb6613989d9ff1b2bad886d54ee7443770c705a903
+size 298733549
vie_deberta_small_checkpoint_2.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:464c792c652d041a07cc591ca2b44d748479a59c93f6e9bfa236ccf10d9cc371
+size 298733613
vie_deberta_small_checkpoint_3.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:76e506672ee8d73c6c334e405334641cc0c7bbf2e5e1906029a45d914f737cb6
+size 298733613
vie_deberta_small_checkpoint_4.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:996a942b53169ab87c9646eee1a077738ce1eb1bc23711f5102df81672d09b08
+size 298733613
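The four .pt files line up with the four "Saved training checkpoint" messages in deberta_result.log, one per epoch. Their internal layout is not visible from the LFS pointers, but at ~299 MB each they are barely larger than the float32 weight file, which suggests model weights only (Adam optimizer state would roughly triple the size). A hedged loading sketch under that assumption:

    # Hedged sketch: inspect one epoch checkpoint; its structure is an assumption.
    import torch

    ckpt = torch.load("vie_deberta_small_checkpoint_4.pt", map_location="cpu")
    print(type(ckpt))  # expected: an OrderedDict of parameter tensors (a state_dict)
    # If it is a plain state_dict, it drops into the model built from config.json:
    # model.load_state_dict(ckpt)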