aristotletan
committed on
Commit
•
826e7dc
1
Parent(s):
355a3ca
add model
Browse files- .gitignore +1 -0
- README.md +81 -0
- config.json +57 -0
- pytorch_model.bin +3 -0
- runs/Jul21_23-21-14_51606f7d4e97/1626909807.29645/events.out.tfevents.1626909807.51606f7d4e97.172.1 +3 -0
- runs/Jul21_23-21-14_51606f7d4e97/events.out.tfevents.1626909807.51606f7d4e97.172.0 +3 -0
- runs/Jul21_23-23-43_51606f7d4e97/1626909838.6081355/events.out.tfevents.1626909838.51606f7d4e97.172.3 +3 -0
- runs/Jul21_23-23-43_51606f7d4e97/events.out.tfevents.1626909838.51606f7d4e97.172.2 +3 -0
- runs/Jul21_23-24-11_51606f7d4e97/1626909862.662255/events.out.tfevents.1626909862.51606f7d4e97.172.5 +3 -0
- runs/Jul21_23-24-11_51606f7d4e97/events.out.tfevents.1626909862.51606f7d4e97.172.4 +3 -0
- runs/Jul21_23-26-22_51606f7d4e97/1626909995.7680657/events.out.tfevents.1626909995.51606f7d4e97.557.1 +3 -0
- runs/Jul21_23-26-22_51606f7d4e97/events.out.tfevents.1626909995.51606f7d4e97.557.0 +3 -0
- runs/Jul21_23-32-35_51606f7d4e97/1626910375.855908/events.out.tfevents.1626910375.51606f7d4e97.703.1 +3 -0
- runs/Jul21_23-32-35_51606f7d4e97/events.out.tfevents.1626910375.51606f7d4e97.703.0 +3 -0
- runs/Jul21_23-54-04_51606f7d4e97/1626911660.8439035/events.out.tfevents.1626911660.51606f7d4e97.1017.1 +3 -0
- runs/Jul21_23-54-04_51606f7d4e97/events.out.tfevents.1626911660.51606f7d4e97.1017.0 +3 -0
- runs/Jul22_00-00-33_51606f7d4e97/1626912043.5967755/events.out.tfevents.1626912043.51606f7d4e97.1179.1 +3 -0
- runs/Jul22_00-00-33_51606f7d4e97/events.out.tfevents.1626912043.51606f7d4e97.1179.0 +3 -0
- special_tokens_map.json +1 -0
- tokenizer.json +0 -0
- tokenizer_config.json +1 -0
- training_args.bin +3 -0
.gitignore
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
checkpoint-*/
|
README.md
ADDED
@@ -0,0 +1,81 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
---
|
2 |
+
license: apache-2.0
|
3 |
+
tags:
|
4 |
+
- generated_from_trainer
|
5 |
+
datasets:
|
6 |
+
- wsj_markets
|
7 |
+
metrics:
|
8 |
+
- rouge
|
9 |
+
model_index:
|
10 |
+
- name: t5-small-finetuned-xsum
|
11 |
+
results:
|
12 |
+
- task:
|
13 |
+
name: Sequence-to-sequence Language Modeling
|
14 |
+
type: text2text-generation
|
15 |
+
dataset:
|
16 |
+
name: wsj_markets
|
17 |
+
type: wsj_markets
|
18 |
+
args: default
|
19 |
+
metric:
|
20 |
+
name: Rouge1
|
21 |
+
type: rouge
|
22 |
+
value: 10.4492
|
23 |
+
---
|
24 |
+
|
25 |
+
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
|
26 |
+
should probably proofread and complete it, then remove this comment. -->
|
27 |
+
|
28 |
+
# t5-small-finetuned-xsum
|
29 |
+
|
30 |
+
This model is a fine-tuned version of [t5-small](https://huggingface.co/t5-small) on the wsj_markets dataset.
|
31 |
+
It achieves the following results on the evaluation set:
|
32 |
+
- Loss: 1.1447
|
33 |
+
- Rouge1: 10.4492
|
34 |
+
- Rouge2: 3.9563
|
35 |
+
- Rougel: 9.3368
|
36 |
+
- Rougelsum: 9.9828
|
37 |
+
- Gen Len: 19.0
|
38 |
+
|
39 |
+
## Model description
|
40 |
+
|
41 |
+
More information needed
|
42 |
+
|
43 |
+
## Intended uses & limitations
|
44 |
+
|
45 |
+
More information needed
|
46 |
+
|
47 |
+
## Training and evaluation data
|
48 |
+
|
49 |
+
More information needed
|
50 |
+
|
51 |
+
## Training procedure
|
52 |
+
|
53 |
+
### Training hyperparameters
|
54 |
+
|
55 |
+
The following hyperparameters were used during training:
|
56 |
+
- learning_rate: 2e-05
|
57 |
+
- train_batch_size: 4
|
58 |
+
- eval_batch_size: 4
|
59 |
+
- seed: 42
|
60 |
+
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
|
61 |
+
- lr_scheduler_type: linear
|
62 |
+
- num_epochs: 5
|
63 |
+
- mixed_precision_training: Native AMP
|
64 |
+
|
65 |
+
### Training results
|
66 |
+
|
67 |
+
| Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum | Gen Len |
|
68 |
+
|:-------------:|:-----:|:----:|:---------------:|:-------:|:------:|:------:|:---------:|:-------:|
|
69 |
+
| 2.2742 | 1.0 | 868 | 1.3135 | 9.4644 | 2.618 | 8.4048 | 8.9764 | 19.0 |
|
70 |
+
| 1.4607 | 2.0 | 1736 | 1.2134 | 9.6327 | 3.8535 | 9.0703 | 9.2466 | 19.0 |
|
71 |
+
| 1.3579 | 3.0 | 2604 | 1.1684 | 10.1616 | 3.5498 | 9.2294 | 9.4507 | 19.0 |
|
72 |
+
| 1.3314 | 4.0 | 3472 | 1.1514 | 10.0621 | 3.6907 | 9.1635 | 9.4955 | 19.0 |
|
73 |
+
| 1.3084 | 5.0 | 4340 | 1.1447 | 10.4492 | 3.9563 | 9.3368 | 9.9828 | 19.0 |
|
74 |
+
|
75 |
+
|
76 |
+
### Framework versions
|
77 |
+
|
78 |
+
- Transformers 4.8.2
|
79 |
+
- Pytorch 1.9.0+cu102
|
80 |
+
- Datasets 1.10.0
|
81 |
+
- Tokenizers 0.10.3
|
config.json
ADDED
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"_name_or_path": "t5-small",
|
3 |
+
"architectures": [
|
4 |
+
"T5ForConditionalGeneration"
|
5 |
+
],
|
6 |
+
"d_ff": 2048,
|
7 |
+
"d_kv": 64,
|
8 |
+
"d_model": 512,
|
9 |
+
"decoder_start_token_id": 0,
|
10 |
+
"dropout_rate": 0.1,
|
11 |
+
"eos_token_id": 1,
|
12 |
+
"feed_forward_proj": "relu",
|
13 |
+
"gradient_checkpointing": false,
|
14 |
+
"initializer_factor": 1.0,
|
15 |
+
"is_encoder_decoder": true,
|
16 |
+
"layer_norm_epsilon": 1e-06,
|
17 |
+
"model_type": "t5",
|
18 |
+
"n_positions": 512,
|
19 |
+
"num_decoder_layers": 6,
|
20 |
+
"num_heads": 8,
|
21 |
+
"num_layers": 6,
|
22 |
+
"output_past": true,
|
23 |
+
"pad_token_id": 0,
|
24 |
+
"relative_attention_num_buckets": 32,
|
25 |
+
"task_specific_params": {
|
26 |
+
"summarization": {
|
27 |
+
"early_stopping": true,
|
28 |
+
"length_penalty": 2.0,
|
29 |
+
"max_length": 200,
|
30 |
+
"min_length": 30,
|
31 |
+
"no_repeat_ngram_size": 3,
|
32 |
+
"num_beams": 4,
|
33 |
+
"prefix": "summarize: "
|
34 |
+
},
|
35 |
+
"translation_en_to_de": {
|
36 |
+
"early_stopping": true,
|
37 |
+
"max_length": 300,
|
38 |
+
"num_beams": 4,
|
39 |
+
"prefix": "translate English to German: "
|
40 |
+
},
|
41 |
+
"translation_en_to_fr": {
|
42 |
+
"early_stopping": true,
|
43 |
+
"max_length": 300,
|
44 |
+
"num_beams": 4,
|
45 |
+
"prefix": "translate English to French: "
|
46 |
+
},
|
47 |
+
"translation_en_to_ro": {
|
48 |
+
"early_stopping": true,
|
49 |
+
"max_length": 300,
|
50 |
+
"num_beams": 4,
|
51 |
+
"prefix": "translate English to Romanian: "
|
52 |
+
}
|
53 |
+
},
|
54 |
+
"transformers_version": "4.8.2",
|
55 |
+
"use_cache": true,
|
56 |
+
"vocab_size": 32128
|
57 |
+
}
|
pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:182307b56bf3139b64de754180887364b56a7aada7c45f3c67b936d7fe3ec92f
|
3 |
+
size 242085627
|
runs/Jul21_23-21-14_51606f7d4e97/1626909807.29645/events.out.tfevents.1626909807.51606f7d4e97.172.1
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:b779646fa580429a366560bf38ece3a6b75b6b05715889c4d186b38c74a429e0
|
3 |
+
size 4334
|
runs/Jul21_23-21-14_51606f7d4e97/events.out.tfevents.1626909807.51606f7d4e97.172.0
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:e1afb8969fa496083676ea9db5e80db3577bd9512e19fbb85819349610f67ccb
|
3 |
+
size 3772
|
runs/Jul21_23-23-43_51606f7d4e97/1626909838.6081355/events.out.tfevents.1626909838.51606f7d4e97.172.3
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:27eb244477ea4f9de9f2b9cbaf36c18994b8d665eeaca234db5bd326b97880be
|
3 |
+
size 4334
|
runs/Jul21_23-23-43_51606f7d4e97/events.out.tfevents.1626909838.51606f7d4e97.172.2
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:cdd5a44cdfb74a1ce5783f34250302640eb7e33f0e7b57c63fff23754b52270c
|
3 |
+
size 3770
|
runs/Jul21_23-24-11_51606f7d4e97/1626909862.662255/events.out.tfevents.1626909862.51606f7d4e97.172.5
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:25867e9743f31657e74c00e3fb3fc40cb8c0e3da6fa1c7fc2bb78d3e813f6ffa
|
3 |
+
size 4334
|
runs/Jul21_23-24-11_51606f7d4e97/events.out.tfevents.1626909862.51606f7d4e97.172.4
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:f3676289380b65aa0dbc75da722095e720b603c346ef26f0b4ae1f24ad83505a
|
3 |
+
size 3770
|
runs/Jul21_23-26-22_51606f7d4e97/1626909995.7680657/events.out.tfevents.1626909995.51606f7d4e97.557.1
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:855e0bfc162258c1fae06b11d4c7938183f47702406d532b468e058a52d3149b
|
3 |
+
size 4334
|
runs/Jul21_23-26-22_51606f7d4e97/events.out.tfevents.1626909995.51606f7d4e97.557.0
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:4ea0e6ce1f10976915ae28f2c6667454d379576bf22946580e6b2be2e92a3e63
|
3 |
+
size 3927
|
runs/Jul21_23-32-35_51606f7d4e97/1626910375.855908/events.out.tfevents.1626910375.51606f7d4e97.703.1
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:518d4c430600576abcf1d44bd5919cd5dd190ce4ac3675d4911fb1e5c74cfd6e
|
3 |
+
size 4334
|
runs/Jul21_23-32-35_51606f7d4e97/events.out.tfevents.1626910375.51606f7d4e97.703.0
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:4cacaff9ba272a67fbba724c21f8cb8bed48fa06fd5da8ea6a578802a4629b8b
|
3 |
+
size 8005
|
runs/Jul21_23-54-04_51606f7d4e97/1626911660.8439035/events.out.tfevents.1626911660.51606f7d4e97.1017.1
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:116bb7f0495adb66df89db9196b52a57e1f1f06d670da06348ae7d05b475be8a
|
3 |
+
size 4334
|
runs/Jul21_23-54-04_51606f7d4e97/events.out.tfevents.1626911660.51606f7d4e97.1017.0
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:68edb1c6a19fbb098f66454872ee59e3f61b4f1c39be109d8bde3e4080db00a2
|
3 |
+
size 3927
|
runs/Jul22_00-00-33_51606f7d4e97/1626912043.5967755/events.out.tfevents.1626912043.51606f7d4e97.1179.1
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:00019ed9b95a90df0228566ca4564f54ff82b5d1149f6a0d2907bf397f7fb032
|
3 |
+
size 4334
|
runs/Jul22_00-00-33_51606f7d4e97/events.out.tfevents.1626912043.51606f7d4e97.1179.0
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:c2eeed47732066a5ba7f856b553ef51a712169fdb5bb457970afc6d2805e64f2
|
3 |
+
size 8005
|
special_tokens_map.json
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
{"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "additional_special_tokens": ["<extra_id_0>", "<extra_id_1>", "<extra_id_2>", "<extra_id_3>", "<extra_id_4>", "<extra_id_5>", "<extra_id_6>", "<extra_id_7>", "<extra_id_8>", "<extra_id_9>", "<extra_id_10>", "<extra_id_11>", "<extra_id_12>", "<extra_id_13>", "<extra_id_14>", "<extra_id_15>", "<extra_id_16>", "<extra_id_17>", "<extra_id_18>", "<extra_id_19>", "<extra_id_20>", "<extra_id_21>", "<extra_id_22>", "<extra_id_23>", "<extra_id_24>", "<extra_id_25>", "<extra_id_26>", "<extra_id_27>", "<extra_id_28>", "<extra_id_29>", "<extra_id_30>", "<extra_id_31>", "<extra_id_32>", "<extra_id_33>", "<extra_id_34>", "<extra_id_35>", "<extra_id_36>", "<extra_id_37>", "<extra_id_38>", "<extra_id_39>", "<extra_id_40>", "<extra_id_41>", "<extra_id_42>", "<extra_id_43>", "<extra_id_44>", "<extra_id_45>", "<extra_id_46>", "<extra_id_47>", "<extra_id_48>", "<extra_id_49>", "<extra_id_50>", "<extra_id_51>", "<extra_id_52>", "<extra_id_53>", "<extra_id_54>", "<extra_id_55>", "<extra_id_56>", "<extra_id_57>", "<extra_id_58>", "<extra_id_59>", "<extra_id_60>", "<extra_id_61>", "<extra_id_62>", "<extra_id_63>", "<extra_id_64>", "<extra_id_65>", "<extra_id_66>", "<extra_id_67>", "<extra_id_68>", "<extra_id_69>", "<extra_id_70>", "<extra_id_71>", "<extra_id_72>", "<extra_id_73>", "<extra_id_74>", "<extra_id_75>", "<extra_id_76>", "<extra_id_77>", "<extra_id_78>", "<extra_id_79>", "<extra_id_80>", "<extra_id_81>", "<extra_id_82>", "<extra_id_83>", "<extra_id_84>", "<extra_id_85>", "<extra_id_86>", "<extra_id_87>", "<extra_id_88>", "<extra_id_89>", "<extra_id_90>", "<extra_id_91>", "<extra_id_92>", "<extra_id_93>", "<extra_id_94>", "<extra_id_95>", "<extra_id_96>", "<extra_id_97>", "<extra_id_98>", "<extra_id_99>"]}
|
tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
|
|
tokenizer_config.json
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
{"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "extra_ids": 100, "additional_special_tokens": ["<extra_id_0>", "<extra_id_1>", "<extra_id_2>", "<extra_id_3>", "<extra_id_4>", "<extra_id_5>", "<extra_id_6>", "<extra_id_7>", "<extra_id_8>", "<extra_id_9>", "<extra_id_10>", "<extra_id_11>", "<extra_id_12>", "<extra_id_13>", "<extra_id_14>", "<extra_id_15>", "<extra_id_16>", "<extra_id_17>", "<extra_id_18>", "<extra_id_19>", "<extra_id_20>", "<extra_id_21>", "<extra_id_22>", "<extra_id_23>", "<extra_id_24>", "<extra_id_25>", "<extra_id_26>", "<extra_id_27>", "<extra_id_28>", "<extra_id_29>", "<extra_id_30>", "<extra_id_31>", "<extra_id_32>", "<extra_id_33>", "<extra_id_34>", "<extra_id_35>", "<extra_id_36>", "<extra_id_37>", "<extra_id_38>", "<extra_id_39>", "<extra_id_40>", "<extra_id_41>", "<extra_id_42>", "<extra_id_43>", "<extra_id_44>", "<extra_id_45>", "<extra_id_46>", "<extra_id_47>", "<extra_id_48>", "<extra_id_49>", "<extra_id_50>", "<extra_id_51>", "<extra_id_52>", "<extra_id_53>", "<extra_id_54>", "<extra_id_55>", "<extra_id_56>", "<extra_id_57>", "<extra_id_58>", "<extra_id_59>", "<extra_id_60>", "<extra_id_61>", "<extra_id_62>", "<extra_id_63>", "<extra_id_64>", "<extra_id_65>", "<extra_id_66>", "<extra_id_67>", "<extra_id_68>", "<extra_id_69>", "<extra_id_70>", "<extra_id_71>", "<extra_id_72>", "<extra_id_73>", "<extra_id_74>", "<extra_id_75>", "<extra_id_76>", "<extra_id_77>", "<extra_id_78>", "<extra_id_79>", "<extra_id_80>", "<extra_id_81>", "<extra_id_82>", "<extra_id_83>", "<extra_id_84>", "<extra_id_85>", "<extra_id_86>", "<extra_id_87>", "<extra_id_88>", "<extra_id_89>", "<extra_id_90>", "<extra_id_91>", "<extra_id_92>", "<extra_id_93>", "<extra_id_94>", "<extra_id_95>", "<extra_id_96>", "<extra_id_97>", "<extra_id_98>", "<extra_id_99>"], "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "t5-small", "tokenizer_class": "T5Tokenizer"}
|
training_args.bin
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:3f03c7c2f2463138193b5d6da8d3201cabe4c4fc389df2f0818fdfa61b8a7d92
|
3 |
+
size 2735
|