ShengdingHu committed on
Commit e6d5fae (1 parent: 9c4e7a0)

Training in progress, step 200

Files changed (28)
  1. config.json +50 -37
  2. pytorch_model.bin +2 -2
  3. runs/Feb04_15-25-43_node1/1643959673.5207505/events.out.tfevents.1643959673.node1 +3 -0
  4. runs/Feb04_15-25-43_node1/events.out.tfevents.1643959673.node1 +3 -0
  5. runs/Feb04_15-35-14_node1/1643960186.2194066/events.out.tfevents.1643960186.node1 +3 -0
  6. runs/Feb04_15-35-14_node1/events.out.tfevents.1643960186.node1 +0 -0
  7. runs/Feb04_15-37-23_node1/1643960322.0006874/events.out.tfevents.1643960322.node1 +3 -0
  8. runs/Feb04_15-37-23_node1/events.out.tfevents.1643960321.node1 +3 -0
  9. runs/Feb04_15-43-18_node1/1643960708.4925878/events.out.tfevents.1643960708.node1 +3 -0
  10. runs/Feb04_15-43-18_node1/events.out.tfevents.1643960708.node1 +3 -0
  11. runs/Feb04_15-49-40_node1/1643961071.8329544/events.out.tfevents.1643961071.node1 +3 -0
  12. runs/Feb04_15-49-40_node1/events.out.tfevents.1643961071.node1 +0 -0
  13. runs/Feb04_16-01-17_node1/1643962091.2497125/events.out.tfevents.1643962091.node1 +3 -0
  14. runs/Feb04_16-01-17_node1/events.out.tfevents.1643962091.node1 +3 -0
  15. runs/Feb04_17-48-45_node1/1643968245.0512648/events.out.tfevents.1643968245.node1 +3 -0
  16. runs/Feb04_17-48-45_node1/events.out.tfevents.1643968245.node1 +0 -0
  17. runs/Feb04_17-56-07_node1/1643969834.9487028/events.out.tfevents.1643969834.node1 +3 -0
  18. runs/Feb04_17-56-07_node1/events.out.tfevents.1643969834.node1 +3 -0
  19. runs/Feb04_18-19-48_node1/1643970168.767643/events.out.tfevents.1643970168.node1 +3 -0
  20. runs/Feb04_18-19-48_node1/events.out.tfevents.1643970168.node1 +3 -0
  21. runs/Feb04_18-26-56_node1/1643970487.6814518/events.out.tfevents.1643970487.node1 +3 -0
  22. runs/Feb04_18-26-56_node1/events.out.tfevents.1643970487.node1 +3 -0
  23. runs/Feb04_18-32-48_node1/1643970832.3296053/events.out.tfevents.1643970832.node1 +3 -0
  24. runs/Feb04_18-32-48_node1/events.out.tfevents.1643970832.node1 +3 -0
  25. special_tokens_map.json +1 -1
  26. tokenizer.json +0 -0
  27. tokenizer_config.json +1 -1
  28. training_args.bin +2 -2
config.json CHANGED
@@ -1,44 +1,57 @@
  {
- "_name_or_path": "microsoft/deberta-v3-base",
+ "_name_or_path": "../../../../plm_cache/t5-base",
  "architectures": [
- "DebertaV2ForSequenceClassification"
+ "T5ForConditionalGeneration"
  ],
- "attention_probs_dropout_prob": 0.1,
- "finetuning_task": "sst2",
- "hidden_act": "gelu",
- "hidden_dropout_prob": 0.1,
- "hidden_size": 768,
- "id2label": {
- "0": "negative",
- "1": "positive"
- },
- "initializer_range": 0.02,
- "intermediate_size": 3072,
- "label2id": {
- "negative": 0,
- "positive": 1
- },
- "layer_norm_eps": 1e-07,
- "max_position_embeddings": 512,
- "max_relative_positions": -1,
- "model_type": "deberta-v2",
- "norm_rel_ebd": "layer_norm",
- "num_attention_heads": 12,
- "num_hidden_layers": 12,
+ "d_ff": 3072,
+ "d_kv": 64,
+ "d_model": 768,
+ "decoder_start_token_id": 0,
+ "dropout_rate": 0.1,
+ "eos_token_id": 1,
+ "feed_forward_proj": "relu",
+ "initializer_factor": 1.0,
+ "is_encoder_decoder": true,
+ "layer_norm_epsilon": 1e-06,
+ "model_type": "t5",
+ "n_positions": 512,
+ "num_decoder_layers": 12,
+ "num_heads": 12,
+ "num_layers": 12,
+ "output_past": true,
  "pad_token_id": 0,
- "pooler_dropout": 0,
- "pooler_hidden_act": "gelu",
- "pooler_hidden_size": 768,
- "pos_att_type": [
- "p2c",
- "c2p"
- ],
- "position_biased_input": false,
- "position_buckets": 256,
- "relative_attention": true,
- "share_att_key": true,
+ "relative_attention_num_buckets": 32,
+ "task_specific_params": {
+ "summarization": {
+ "early_stopping": true,
+ "length_penalty": 2.0,
+ "max_length": 200,
+ "min_length": 30,
+ "no_repeat_ngram_size": 3,
+ "num_beams": 4,
+ "prefix": "summarize: "
+ },
+ "translation_en_to_de": {
+ "early_stopping": true,
+ "max_length": 300,
+ "num_beams": 4,
+ "prefix": "translate English to German: "
+ },
+ "translation_en_to_fr": {
+ "early_stopping": true,
+ "max_length": 300,
+ "num_beams": 4,
+ "prefix": "translate English to French: "
+ },
+ "translation_en_to_ro": {
+ "early_stopping": true,
+ "max_length": 300,
+ "num_beams": 4,
+ "prefix": "translate English to Romanian: "
+ }
+ },
  "torch_dtype": "float32",
  "transformers_version": "4.16.0.dev0",
- "type_vocab_size": 0,
- "vocab_size": 128100
+ "use_cache": true,
+ "vocab_size": 32100
  }
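
The new config.json describes a t5-base encoder-decoder (model_type "t5", 12 encoder and 12 decoder layers, vocab_size 32100) in place of the earlier deberta-v2 sequence classifier. A minimal sketch of inspecting the updated config with transformers; the local file path is a placeholder for a clone of this repository, not something stored in the commit:

```python
# Sketch: read the committed config.json and print a few of the fields changed above.
# "config.json" is assumed to be a local copy pulled from this repository.
from transformers import T5Config

config = T5Config.from_json_file("config.json")
print(config.model_type)    # "t5"
print(config.d_model)       # 768
print(config.num_layers)    # 12 encoder layers (num_decoder_layers is also 12)
print(config.vocab_size)    # 32100
```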
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:29680e30c548ecabdc3f24279214ee32acb419fc7e3acf053097b67a400ebec6
- size 328099
+ oid sha256:5d8fb1479604bdd6db5771723ac04661603ceb2e7f6e4c61754f5c2788c9de74
+ size 335851
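
pytorch_model.bin is tracked with Git LFS, so the diff only shows the pointer file: the sha256 object id and the byte size change, not the weights themselves. A small sketch, assuming the binary has already been fetched (for example with `git lfs pull`), for checking a local copy against the new pointer:

```python
# Sketch: verify a locally fetched pytorch_model.bin against the LFS pointer above.
# The oid in an LFS pointer is the SHA-256 digest of the file contents.
import hashlib

with open("pytorch_model.bin", "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

expected = "5d8fb1479604bdd6db5771723ac04661603ceb2e7f6e4c61754f5c2788c9de74"
print("match" if digest == expected else "mismatch")
```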
runs/Feb04_15-25-43_node1/1643959673.5207505/events.out.tfevents.1643959673.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9e4542fa8afa1aab7e61bc5f8b868969f2a2e75531732a284b28dedc1ed2a4f9
+ size 5026
runs/Feb04_15-25-43_node1/events.out.tfevents.1643959673.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dc524562647feaa839b128fb99fa86e05050fde034c5657e43c32601c305a435
+ size 4312
runs/Feb04_15-35-14_node1/1643960186.2194066/events.out.tfevents.1643960186.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:60a0c762de03fecd2c05c26b149e908695ec28d8b396e37d1387e2aaaf7d0e7b
+ size 5026
runs/Feb04_15-35-14_node1/events.out.tfevents.1643960186.node1 ADDED
File without changes
runs/Feb04_15-37-23_node1/1643960322.0006874/events.out.tfevents.1643960322.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:01613bad5945e4bd6b1697ffa5b881573186bc31194ecee365c0b9f62ce5793a
+ size 5026
runs/Feb04_15-37-23_node1/events.out.tfevents.1643960321.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eb460bf53403e4255ad1634e708ecfe249941d9e91f9a33274df63545f9112ea
+ size 4312
runs/Feb04_15-43-18_node1/1643960708.4925878/events.out.tfevents.1643960708.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d5b09b2e0c025ebcec19e6f70087467b6d6908014061fefef6ce126607f17722
+ size 5026
runs/Feb04_15-43-18_node1/events.out.tfevents.1643960708.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:87dfe6269a49f3a8f198a8dcdbfeb9f29b9535ed1264e8550a5028d59c34207c
+ size 4312
runs/Feb04_15-49-40_node1/1643961071.8329544/events.out.tfevents.1643961071.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dfb09b3198a19776683ae923d2e5a09f2eea1d3e171a349a64b10f88a0e47437
+ size 5026
runs/Feb04_15-49-40_node1/events.out.tfevents.1643961071.node1 ADDED
File without changes
runs/Feb04_16-01-17_node1/1643962091.2497125/events.out.tfevents.1643962091.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dc9876509235f183f7de4c5982a148554241e3f4224bc1a60ca976265404b8e7
+ size 5026
runs/Feb04_16-01-17_node1/events.out.tfevents.1643962091.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76a8c48ad3c8685f375fef1007ed5154a00ec750643e30aee334ed8e7942d31d
+ size 4312
runs/Feb04_17-48-45_node1/1643968245.0512648/events.out.tfevents.1643968245.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c5c7fa85d76f91d51a7717661d9ebb32171dbabfe6974d132bf4f0bda834806
+ size 5026
runs/Feb04_17-48-45_node1/events.out.tfevents.1643968245.node1 ADDED
File without changes
runs/Feb04_17-56-07_node1/1643969834.9487028/events.out.tfevents.1643969834.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:026434a66d5ca053d28e2c04f5c3dbc22bc4579d8bfcfbb805cafdb6d11c4fb7
+ size 5026
runs/Feb04_17-56-07_node1/events.out.tfevents.1643969834.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:24707a91c85dc3311c4b4921ed3a1c34793140256ee142b46a9919f39c13f33e
+ size 4312
runs/Feb04_18-19-48_node1/1643970168.767643/events.out.tfevents.1643970168.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:88e1492e417e7388e6068f92c410bf1ca5b9c5ce8f207c2c6ba7e0d9cfbd9a6b
+ size 5026
runs/Feb04_18-19-48_node1/events.out.tfevents.1643970168.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eef06e76199407fda8937a61d266513f5db546b84f79541ea523eb7a99d8f452
+ size 4312
runs/Feb04_18-26-56_node1/1643970487.6814518/events.out.tfevents.1643970487.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fcf8b12a33ba0a9a473bd4999d20dde0975bd841af236d5b11e5666101f23018
+ size 5026
runs/Feb04_18-26-56_node1/events.out.tfevents.1643970487.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b493c6be6088442aaa0470139173686319017f8589954c2bf9a371a7e74942dc
+ size 4312
runs/Feb04_18-32-48_node1/1643970832.3296053/events.out.tfevents.1643970832.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:246f0d102406e4aa9006efe3b9e6f91fdfafcbd2f141441fc023ebcd910c0457
+ size 5026
runs/Feb04_18-32-48_node1/events.out.tfevents.1643970832.node1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d7f3b4f514cd64f124a68d0c61d398193943fe6d1cc1cb8fed9573c7c9641528
+ size 4312
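
The runs/ entries are TensorBoard event files written during training, again stored as LFS pointers; each timestamped directory corresponds to one launch of the trainer, with the nested file holding the hyperparameter dump. A hedged sketch for reading the logged scalars from a pulled copy with the tensorboard package; the tag name "train/loss" is an assumption and should be checked against Tags() first:

```python
# Sketch: inspect scalars from one of the committed TensorBoard runs.
# Assumes the event files were pulled from LFS and `tensorboard` is installed.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Feb04_18-32-48_node1")
acc.Reload()

print(acc.Tags()["scalars"])              # tags actually logged in this run
for event in acc.Scalars("train/loss"):   # "train/loss" is an assumed tag name
    print(event.step, event.value)
```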
special_tokens_map.json CHANGED
@@ -1 +1 @@
- {"bos_token": "[CLS]", "eos_token": "[SEP]", "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
+ {"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "additional_special_tokens": ["<extra_id_0>", "<extra_id_1>", "<extra_id_2>", "<extra_id_3>", "<extra_id_4>", "<extra_id_5>", "<extra_id_6>", "<extra_id_7>", "<extra_id_8>", "<extra_id_9>", "<extra_id_10>", "<extra_id_11>", "<extra_id_12>", "<extra_id_13>", "<extra_id_14>", "<extra_id_15>", "<extra_id_16>", "<extra_id_17>", "<extra_id_18>", "<extra_id_19>", "<extra_id_20>", "<extra_id_21>", "<extra_id_22>", "<extra_id_23>", "<extra_id_24>", "<extra_id_25>", "<extra_id_26>", "<extra_id_27>", "<extra_id_28>", "<extra_id_29>", "<extra_id_30>", "<extra_id_31>", "<extra_id_32>", "<extra_id_33>", "<extra_id_34>", "<extra_id_35>", "<extra_id_36>", "<extra_id_37>", "<extra_id_38>", "<extra_id_39>", "<extra_id_40>", "<extra_id_41>", "<extra_id_42>", "<extra_id_43>", "<extra_id_44>", "<extra_id_45>", "<extra_id_46>", "<extra_id_47>", "<extra_id_48>", "<extra_id_49>", "<extra_id_50>", "<extra_id_51>", "<extra_id_52>", "<extra_id_53>", "<extra_id_54>", "<extra_id_55>", "<extra_id_56>", "<extra_id_57>", "<extra_id_58>", "<extra_id_59>", "<extra_id_60>", "<extra_id_61>", "<extra_id_62>", "<extra_id_63>", "<extra_id_64>", "<extra_id_65>", "<extra_id_66>", "<extra_id_67>", "<extra_id_68>", "<extra_id_69>", "<extra_id_70>", "<extra_id_71>", "<extra_id_72>", "<extra_id_73>", "<extra_id_74>", "<extra_id_75>", "<extra_id_76>", "<extra_id_77>", "<extra_id_78>", "<extra_id_79>", "<extra_id_80>", "<extra_id_81>", "<extra_id_82>", "<extra_id_83>", "<extra_id_84>", "<extra_id_85>", "<extra_id_86>", "<extra_id_87>", "<extra_id_88>", "<extra_id_89>", "<extra_id_90>", "<extra_id_91>", "<extra_id_92>", "<extra_id_93>", "<extra_id_94>", "<extra_id_95>", "<extra_id_96>", "<extra_id_97>", "<extra_id_98>", "<extra_id_99>"]}
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1 +1 @@
- {"do_lower_case": false, "bos_token": "[CLS]", "eos_token": "[SEP]", "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "split_by_punct": false, "sp_model_kwargs": {}, "vocab_type": "spm", "special_tokens_map_file": null, "tokenizer_file": null, "name_or_path": "microsoft/deberta-v3-base", "tokenizer_class": "DebertaV2Tokenizer"}
+ {"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "extra_ids": 100, "additional_special_tokens": ["<extra_id_0>", "<extra_id_1>", "<extra_id_2>", "<extra_id_3>", "<extra_id_4>", "<extra_id_5>", "<extra_id_6>", "<extra_id_7>", "<extra_id_8>", "<extra_id_9>", "<extra_id_10>", "<extra_id_11>", "<extra_id_12>", "<extra_id_13>", "<extra_id_14>", "<extra_id_15>", "<extra_id_16>", "<extra_id_17>", "<extra_id_18>", "<extra_id_19>", "<extra_id_20>", "<extra_id_21>", "<extra_id_22>", "<extra_id_23>", "<extra_id_24>", "<extra_id_25>", "<extra_id_26>", "<extra_id_27>", "<extra_id_28>", "<extra_id_29>", "<extra_id_30>", "<extra_id_31>", "<extra_id_32>", "<extra_id_33>", "<extra_id_34>", "<extra_id_35>", "<extra_id_36>", "<extra_id_37>", "<extra_id_38>", "<extra_id_39>", "<extra_id_40>", "<extra_id_41>", "<extra_id_42>", "<extra_id_43>", "<extra_id_44>", "<extra_id_45>", "<extra_id_46>", "<extra_id_47>", "<extra_id_48>", "<extra_id_49>", "<extra_id_50>", "<extra_id_51>", "<extra_id_52>", "<extra_id_53>", "<extra_id_54>", "<extra_id_55>", "<extra_id_56>", "<extra_id_57>", "<extra_id_58>", "<extra_id_59>", "<extra_id_60>", "<extra_id_61>", "<extra_id_62>", "<extra_id_63>", "<extra_id_64>", "<extra_id_65>", "<extra_id_66>", "<extra_id_67>", "<extra_id_68>", "<extra_id_69>", "<extra_id_70>", "<extra_id_71>", "<extra_id_72>", "<extra_id_73>", "<extra_id_74>", "<extra_id_75>", "<extra_id_76>", "<extra_id_77>", "<extra_id_78>", "<extra_id_79>", "<extra_id_80>", "<extra_id_81>", "<extra_id_82>", "<extra_id_83>", "<extra_id_84>", "<extra_id_85>", "<extra_id_86>", "<extra_id_87>", "<extra_id_88>", "<extra_id_89>", "<extra_id_90>", "<extra_id_91>", "<extra_id_92>", "<extra_id_93>", "<extra_id_94>", "<extra_id_95>", "<extra_id_96>", "<extra_id_97>", "<extra_id_98>", "<extra_id_99>"], "special_tokens_map_file": null, "name_or_path": "../../../../plm_cache/t5-base", "tokenizer_class": "T5Tokenizer"}
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:b985d0a9a6f4706067765568bbaca516e554bc6a768f0e84862bf9a20fce6db7
- size 2991
+ oid sha256:3c439ad69205bdf400bbc3794245f7e239fb8d7e46e596e1557d697860012541
+ size 3183
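
training_args.bin is the pickled TrainingArguments object that the Hugging Face Trainer saves alongside checkpoints; the changed hash and size simply reflect a new argument set for this run. A sketch for inspecting it, assuming a transformers version compatible with the one that wrote it (4.16.0.dev0 per the config above):

```python
# Sketch: unpickle training_args.bin to see the arguments used for this run.
# Requires torch and a compatible transformers installation on the unpickling side.
import torch

args = torch.load("training_args.bin")
print(type(args).__name__)   # TrainingArguments (or a Seq2SeqTrainingArguments subclass)
print(args.per_device_train_batch_size, args.learning_rate, args.max_steps)
```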