ShengdingHu committed
Commit 38b4eb3
1 Parent(s): 54bfd27

Training in progress, step 200

added_tokens.json ADDED
@@ -0,0 +1 @@
+ {"[MASK]": 128000}
config.json CHANGED
@@ -1,12 +1,9 @@
  {
- "_name_or_path": "../../../../plm_cache/roberta-base",
+ "_name_or_path": "microsoft/deberta-v3-base",
  "architectures": [
- "RobertaForSequenceClassification"
+ "DebertaV2ForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
- "bos_token_id": 0,
- "classifier_dropout": null,
- "eos_token_id": 2,
  "finetuning_task": "sst2",
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
@@ -21,17 +18,27 @@
  "negative": 0,
  "positive": 1
  },
- "layer_norm_eps": 1e-05,
- "max_position_embeddings": 514,
- "model_type": "roberta",
+ "layer_norm_eps": 1e-07,
+ "max_position_embeddings": 512,
+ "max_relative_positions": -1,
+ "model_type": "deberta-v2",
+ "norm_rel_ebd": "layer_norm",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
- "pad_token_id": 1,
- "position_embedding_type": "absolute",
- "problem_type": "single_label_classification",
+ "pad_token_id": 0,
+ "pooler_dropout": 0,
+ "pooler_hidden_act": "gelu",
+ "pooler_hidden_size": 768,
+ "pos_att_type": [
+ "p2c",
+ "c2p"
+ ],
+ "position_biased_input": false,
+ "position_buckets": 256,
+ "relative_attention": true,
+ "share_att_key": true,
  "torch_dtype": "float32",
  "transformers_version": "4.16.0.dev0",
- "type_vocab_size": 1,
- "use_cache": true,
- "vocab_size": 50265
+ "type_vocab_size": 0,
+ "vocab_size": 128100
  }
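
Note (not part of the commit): the config.json change above swaps the backbone from a locally cached roberta-base to microsoft/deberta-v3-base while keeping the SST-2 label mapping (negative: 0, positive: 1). A minimal sketch of inspecting the new config with transformers follows; the path ./checkpoint-200 is a hypothetical local copy of this commit's files, not something stored in the repo.

```python
# Sketch only, assuming the files from this commit sit in ./checkpoint-200.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./checkpoint-200")

print(config.model_type)   # "deberta-v2" after this commit (was "roberta")
print(config.label2id)     # {"negative": 0, "positive": 1} for SST-2
print(config.vocab_size)   # 128100, matching the DeBERTa-v3 SentencePiece vocab
```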
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:73d84baa77eefecefaf90806cdfc1109a0adc20ba7b257a432204d06d5073c3d
- size 2689947
+ oid sha256:664411322d02ebf40368ed16d41551d603f34add02d71bd36c6c656a0c116a34
+ size 328099
runs/Feb04_17-04-49_node1/1643965559.8010118/events.out.tfevents.1643965559.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1530362fe5c9709e51c54b106f3daa48dc09892ac3766900f3ed1b59fa5acf2c
+ size 4603
runs/Feb04_17-04-49_node1/events.out.tfevents.1643965559.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f3e056ab9f5508c47ff7499a28ad925e5cbef64865061fc8272fb0ef56f4bf17
+ size 3629
runs/Feb04_17-11-16_node1/1643965920.1866007/events.out.tfevents.1643965920.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1d0e262b3c381ce7cdb152644b0e35fbdb442a8636ec3bd8fe2f97405d0b9251
+ size 4603
runs/Feb04_17-11-16_node1/events.out.tfevents.1643965920.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:38c929504cf87f864bd69e06196e9b53797efbccf13ec2b20ceb3cd65a2d6f6d
+ size 3629
runs/Feb04_17-14-46_node1/1643966130.986056/events.out.tfevents.1643966130.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f7066f946eb20c6bbc3513da2f68837a952bcf3c4cc3a17faba5f1a12f83de37
+ size 4603
runs/Feb04_17-14-46_node1/events.out.tfevents.1643966130.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7a0600859408eacbbde69c9fca047e2044b6cfe0a0a43a9a1339e82ae782f860
+ size 3629
runs/Feb04_17-16-34_node1/1643966237.2940464/events.out.tfevents.1643966237.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cdd5379afe08fe95141039c354aa5a8e4d408b2fb44e5d3305008bb9246cde62
+ size 4603
runs/Feb04_17-16-34_node1/events.out.tfevents.1643966237.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:18456d527f73637e05367df27682768f42d91253b485e686097b2ea9333459cb
+ size 3629
runs/Feb04_17-20-56_node1/1643966502.604528/events.out.tfevents.1643966502.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:59674945681dd4eaf202e8db18b9cf5a3b3a913719dc8628e11611f1008c5f3b
+ size 4603
runs/Feb04_17-20-56_node1/events.out.tfevents.1643966502.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5aaa5dbfa0282adb7408e1858fdbef473021c5cb24d752f9e19947bcca8305c4
+ size 3629
runs/Feb04_17-22-32_node1/1643966598.464753/events.out.tfevents.1643966598.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cf27372d5aaa31250536f12712d3da4ffd6a4ef9cf28f6b75eef7089ee9509a1
+ size 4603
runs/Feb04_17-22-32_node1/events.out.tfevents.1643966598.node1 ADDED
File without changes
runs/Feb04_17-48-33_node1/1643968157.2634227/events.out.tfevents.1643968157.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:89c42d99d7f6c303e17b260ba2faa2772b77e5726d6e8aa8dc4f686169ac493c
+ size 4603
runs/Feb04_17-48-33_node1/events.out.tfevents.1643968157.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f46cdbe93ab5ddca743620294a0ac93e2b303732e9fcd004841fe5eb89eb78eb
+ size 3629
special_tokens_map.json CHANGED
@@ -1 +1 @@
- {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}}
+ {"bos_token": "[CLS]", "eos_token": "[SEP]", "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
spm.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c679fbf93643d19aab7ee10c0b99e460bdbc02fedf34b92b05af343b4af586fd
+ size 2464616
tokenizer_config.json CHANGED
@@ -1 +1 @@
- {"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "trim_offsets": true, "special_tokens_map_file": null, "name_or_path": "../../../../plm_cache/roberta-base", "tokenizer_class": "RobertaTokenizer"}
+ {"do_lower_case": false, "bos_token": "[CLS]", "eos_token": "[SEP]", "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "split_by_punct": false, "sp_model_kwargs": {}, "vocab_type": "spm", "special_tokens_map_file": null, "tokenizer_file": null, "name_or_path": "microsoft/deberta-v3-base", "tokenizer_class": "DebertaV2Tokenizer"}
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:78029fac8d1dfbd7c020fa1131ffa54ae3973cb91c98a62efd855de13a244c04
+ oid sha256:b985d0a9a6f4706067765568bbaca516e554bc6a768f0e84862bf9a20fce6db7
  size 2991