hieule committed on
Commit
11a40e3
1 Parent(s): b9fceb5

DeBERTa: replace linear layer with depth-wise convolution

Browse files
config.json ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "checkpoints/deberta-vie-small-3",
3
+ "architectures": [
4
+ "DebertaV2ForMaskedLM"
5
+ ],
6
+ "attention_probs_dropout_prob": 0.1,
7
+ "bos_token_id": 0,
8
+ "eos_token_id": 2,
9
+ "hidden_act": "gelu",
10
+ "hidden_dropout_prob": 0.1,
11
+ "hidden_size": 768,
12
+ "initializer_range": 0.02,
13
+ "intermediate_size": 3072,
14
+ "layer_norm_eps": 1e-07,
15
+ "max_position_embeddings": 512,
16
+ "max_relative_positions": -1,
17
+ "model_type": "deberta-v2",
18
+ "num_attention_heads": 12,
19
+ "num_hidden_layers": 6,
20
+ "pad_token_id": 1,
21
+ "pooler_dropout": 0,
22
+ "pooler_hidden_act": "gelu",
23
+ "pooler_hidden_size": 768,
24
+ "pos_att_type": null,
25
+ "position_biased_input": true,
26
+ "relative_attention": false,
27
+ "torch_dtype": "float32",
28
+ "transformers_version": "4.21.1",
29
+ "type_vocab_size": 0,
30
+ "vocab_size": 40030
31
+ }
deberta_train.log ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ DATASET: corpus_v1_10000000 | Model: deberta-vie-small
2
+ >>> Train Epochs: 1 | Losses: 2.423206581025543 | lr: 0.00013761917313292747
3
+ >>> Eval Epochs: 1 | Losses: 1.8951771660055028 | Perplexity: 6.673118307905794
4
+ >>> Train Epochs: 2 | Losses: 1.8172417448716809 | lr: 6.880958656646373e-05
5
+ >>> Eval Epochs: 2 | Losses: 1.6318174320577783 | Perplexity: 5.106666322469493
6
+ >>> Train Epochs: 3 | Losses: 1.6091945579320812 | lr: 0.0
7
+ >>> Eval Epochs: 3 | Losses: 1.487273468930144 | Perplexity: 4.307366906292279
dict.txt ADDED
The diff for this file is too large to render. See raw diff
 
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d49c32945d0e7ae1871a591cba2ae36a7a0b32d3ee9a23ad4ec2495d8f95b560
3
+ size 297229737
sentencepiece.bpe.model ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cfc8146abe2a0488e9e2a0c56de7952f7c11ab059eca145a0a727afce0db2865
3
+ size 5069051
special_tokens_map.json ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": "<s>",
3
+ "cls_token": "<s>",
4
+ "eos_token": "</s>",
5
+ "mask_token": {
6
+ "content": "<mask>",
7
+ "lstrip": true,
8
+ "normalized": true,
9
+ "rstrip": false,
10
+ "single_word": false
11
+ },
12
+ "pad_token": "<pad>",
13
+ "sep_token": "</s>",
14
+ "unk_token": "<unk>"
15
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": "<s>",
3
+ "cls_token": "<s>",
4
+ "eos_token": "</s>",
5
+ "mask_token": {
6
+ "__type": "AddedToken",
7
+ "content": "<mask>",
8
+ "lstrip": true,
9
+ "normalized": true,
10
+ "rstrip": false,
11
+ "single_word": false
12
+ },
13
+ "model_max_length": 1024,
14
+ "name_or_path": "checkpoints/deberta-vie-small-3",
15
+ "pad_token": "<pad>",
16
+ "sep_token": "</s>",
17
+ "sp_model_kwargs": {},
18
+ "special_tokens_map_file": null,
19
+ "tokenizer_class": "BartphoTokenizer",
20
+ "unk_token": "<unk>"
21
+ }