satyanshu404 committed
Commit: 377771d
Parent(s): a4e18d0
End of training

Files changed:
- README.md +109 -109
- config.json +1 -1
- generation_config.json +1 -1
- pytorch_model.bin +2 -2
- special_tokens_map.json +7 -1
- tokenizer.json +5 -5
- tokenizer_config.json +5 -6
- training_args.bin +2 -2
README.md CHANGED

@@ -13,11 +13,11 @@ should probably proofread and complete it, then remove this comment. -->

# bart-large-cnn-finetuned-prompt_generation

This model is a fine-tuned version of [facebook/bart-large-cnn](https://huggingface.co/facebook/bart-large-cnn) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 2.8294
- Map: 0.4211
- Ndcg@10: 0.6088

## Model description

@@ -37,8 +37,8 @@ More information needed

The following hyperparameters were used during training:
- learning_rate: 3e-07
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear

@@ -48,111 +48,111 @@ The following hyperparameters were used during training:

| Training Loss | Epoch | Step | Validation Loss | Map | Ndcg@10 |
|:-------------:|:-----:|:----:|:---------------:|:------:|:-------:|
| No log | 1.0 | 2 | 3.6607 | 0.3400 | 0.4882 |
| No log | 2.0 | 4 | 3.6575 | 0.3 | 0.4282 |
| No log | 3.0 | 6 | 3.6485 | 0.3183 | 0.5016 |
| No log | 4.0 | 8 | 3.6279 | 0.3183 | 0.4899 |
| No log | 5.0 | 10 | 3.6199 | 0.3183 | 0.4899 |
| No log | 6.0 | 12 | 3.6119 | 0.3123 | 0.5016 |
| No log | 7.0 | 14 | 3.6076 | 0.3323 | 0.5299 |
| No log | 8.0 | 16 | 3.5413 | 0.3523 | 0.5733 |
| No log | 9.0 | 18 | 3.5274 | 0.345 | 0.5333 |
| No log | 10.0 | 20 | 3.5184 | 0.3200 | 0.4816 |
| No log | 11.0 | 22 | 3.5041 | 0.3200 | 0.5016 |
| No log | 12.0 | 24 | 3.4935 | 0.3133 | 0.4899 |
| No log | 13.0 | 26 | 3.4858 | 0.31 | 0.4951 |
| No log | 14.0 | 28 | 3.4763 | 0.31 | 0.5068 |
| No log | 15.0 | 30 | 3.3761 | 0.34 | 0.5434 |
| No log | 16.0 | 32 | 3.3314 | 0.345 | 0.5751 |
| No log | 17.0 | 34 | 3.3103 | 0.3283 | 0.5468 |
| No log | 18.0 | 36 | 3.2951 | 0.3233 | 0.5151 |
| No log | 19.0 | 38 | 3.2811 | 0.3233 | 0.5034 |
| No log | 20.0 | 40 | 3.2708 | 0.3167 | 0.4834 |
| No log | 21.0 | 42 | 3.2625 | 0.3233 | 0.4834 |
| No log | 22.0 | 44 | 3.2471 | 0.3133 | 0.4834 |
| No log | 23.0 | 46 | 3.2308 | 0.3067 | 0.5034 |
| No log | 24.0 | 48 | 3.2171 | 0.2867 | 0.4634 |
| No log | 25.0 | 50 | 3.2068 | 0.2933 | 0.4751 |
| No log | 26.0 | 52 | 3.1972 | 0.2890 | 0.4803 |
| No log | 27.0 | 54 | 3.1892 | 0.2757 | 0.4252 |
| No log | 28.0 | 56 | 3.1812 | 0.2823 | 0.4252 |
| No log | 29.0 | 58 | 3.1681 | 0.309 | 0.4769 |
| No log | 30.0 | 60 | 3.1422 | 0.3223 | 0.4969 |
| No log | 31.0 | 62 | 3.1154 | 0.309 | 0.4769 |
| No log | 32.0 | 64 | 3.0906 | 0.369 | 0.5539 |
| No log | 33.0 | 66 | 3.0680 | 0.3850 | 0.5486 |
| No log | 34.0 | 68 | 3.0476 | 0.3567 | 0.5139 |
| No log | 35.0 | 70 | 3.0301 | 0.3347 | 0.4909 |
| No log | 36.0 | 72 | 3.0159 | 0.2861 | 0.4581 |
| No log | 37.0 | 74 | 3.0040 | 0.2887 | 0.4678 |
| No log | 38.0 | 76 | 2.9937 | 0.3003 | 0.4374 |
| No log | 39.0 | 78 | 2.9842 | 0.2723 | 0.3950 |
| No log | 40.0 | 80 | 2.9759 | 0.3052 | 0.4695 |
| No log | 41.0 | 82 | 2.9686 | 0.2867 | 0.4459 |
| No log | 42.0 | 84 | 2.9622 | 0.3099 | 0.4764 |
| No log | 43.0 | 86 | 2.9565 | 0.3141 | 0.5019 |
| No log | 44.0 | 88 | 2.9512 | 0.325 | 0.5204 |
| No log | 45.0 | 90 | 2.9462 | 0.3050 | 0.5004 |
| No log | 46.0 | 92 | 2.9416 | 0.325 | 0.5151 |
| No log | 47.0 | 94 | 2.9372 | 0.3183 | 0.4951 |
| No log | 48.0 | 96 | 2.9325 | 0.318 | 0.5235 |
| No log | 49.0 | 98 | 2.9278 | 0.318 | 0.5269 |
| No log | 50.0 | 100 | 2.9228 | 0.3155 | 0.5380 |
| No log | 51.0 | 102 | 2.9178 | 0.2795 | 0.4823 |
| No log | 52.0 | 104 | 2.9127 | 0.3329 | 0.5655 |
| No log | 53.0 | 106 | 2.9081 | 0.3127 | 0.5455 |
| No log | 54.0 | 108 | 2.9037 | 0.3195 | 0.5642 |
| No log | 55.0 | 110 | 2.8995 | 0.3145 | 0.5442 |
| No log | 56.0 | 112 | 2.8957 | 0.3245 | 0.5759 |
| No log | 57.0 | 114 | 2.8922 | 0.3798 | 0.6383 |
| No log | 58.0 | 116 | 2.8886 | 0.3788 | 0.6405 |
| No log | 59.0 | 118 | 2.8854 | 0.3920 | 0.6502 |
| No log | 60.0 | 120 | 2.8822 | 0.3920 | 0.6376 |
| No log | 61.0 | 122 | 2.8793 | 0.4255 | 0.6796 |
| No log | 62.0 | 124 | 2.8766 | 0.4288 | 0.7089 |
| No log | 63.0 | 126 | 2.8738 | 0.4340 | 0.7048 |
| No log | 64.0 | 128 | 2.8712 | 0.4273 | 0.6889 |
| No log | 65.0 | 130 | 2.8688 | 0.4173 | 0.7067 |
| No log | 66.0 | 132 | 2.8665 | 0.4233 | 0.6802 |
| No log | 67.0 | 134 | 2.8642 | 0.3973 | 0.6309 |
| No log | 68.0 | 136 | 2.8620 | 0.4107 | 0.6574 |
| No log | 69.0 | 138 | 2.8599 | 0.4173 | 0.6774 |
| No log | 70.0 | 140 | 2.8580 | 0.3907 | 0.6109 |
| No log | 71.0 | 142 | 2.8560 | 0.4407 | 0.6596 |
| No log | 72.0 | 144 | 2.8542 | 0.4007 | 0.6196 |
| No log | 73.0 | 146 | 2.8525 | 0.4207 | 0.6396 |
| No log | 74.0 | 148 | 2.8508 | 0.4173 | 0.6596 |
| No log | 75.0 | 150 | 2.8491 | 0.4107 | 0.6303 |
| No log | 76.0 | 152 | 2.8476 | 0.3973 | 0.5986 |
| No log | 77.0 | 154 | 2.8460 | 0.4040 | 0.6186 |
| No log | 78.0 | 156 | 2.8447 | 0.414 | 0.6747 |
| No log | 79.0 | 158 | 2.8433 | 0.4167 | 0.6673 |
| No log | 80.0 | 160 | 2.8420 | 0.4457 | 0.6813 |
| No log | 81.0 | 162 | 2.8409 | 0.4257 | 0.6512 |
| No log | 82.0 | 164 | 2.8397 | 0.4607 | 0.7073 |
| No log | 83.0 | 166 | 2.8387 | 0.4257 | 0.6048 |
| No log | 84.0 | 168 | 2.8377 | 0.4207 | 0.6048 |
| No log | 85.0 | 170 | 2.8366 | 0.369 | 0.5248 |
| No log | 86.0 | 172 | 2.8357 | 0.4111 | 0.5971 |
| No log | 87.0 | 174 | 2.8350 | 0.389 | 0.5448 |
| No log | 88.0 | 176 | 2.8342 | 0.4028 | 0.5771 |
| No log | 89.0 | 178 | 2.8334 | 0.374 | 0.5448 |
| No log | 90.0 | 180 | 2.8328 | 0.374 | 0.5565 |
| No log | 91.0 | 182 | 2.8321 | 0.4078 | 0.5971 |
| No log | 92.0 | 184 | 2.8316 | 0.4011 | 0.5888 |
| No log | 93.0 | 186 | 2.8311 | 0.374 | 0.5565 |
| No log | 94.0 | 188 | 2.8308 | 0.3811 | 0.5688 |
| No log | 95.0 | 190 | 2.8304 | 0.374 | 0.5565 |
| No log | 96.0 | 192 | 2.8302 | 0.3911 | 0.5888 |
| No log | 97.0 | 194 | 2.8300 | 0.3611 | 0.5488 |
| No log | 98.0 | 196 | 2.8297 | 0.414 | 0.5848 |
| No log | 99.0 | 198 | 2.8295 | 0.3878 | 0.5888 |
| No log | 100.0 | 200 | 2.8294 | 0.4211 | 0.6088 |

### Framework versions

- Transformers 4.34.1
- Pytorch 2.1.0+cu118
- Datasets 2.14.6
- Tokenizers 0.14.1
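The updated card documents only metrics, hyperparameters, and framework versions. As a minimal usage sketch (not part of this commit), the checkpoint can be loaded with the standard `transformers` seq2seq API; the repository id below is an assumption based on the commit author and model name.

```python
# Minimal sketch: load the fine-tuned checkpoint and generate from an input document.
# NOTE: the repo id is assumed from the commit author and model name; it is not stated in the card.
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

repo_id = "satyanshu404/bart-large-cnn-finetuned-prompt_generation"  # assumed
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForSeq2SeqLM.from_pretrained(repo_id)

inputs = tokenizer("Example input document ...", return_tensors="pt", truncation=True)
output_ids = model.generate(**inputs, num_beams=4, max_new_tokens=64)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```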
config.json CHANGED

@@ -64,7 +64,7 @@
     }
   },
   "torch_dtype": "float32",
-  "transformers_version": "4.34.
+  "transformers_version": "4.34.1",
   "use_cache": true,
   "vocab_size": 50264
 }
generation_config.json CHANGED

@@ -11,5 +11,5 @@
   "no_repeat_ngram_size": 3,
   "num_beams": 4,
   "pad_token_id": 1,
-  "transformers_version": "4.34.
+  "transformers_version": "4.34.1"
 }
pytorch_model.bin CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:9710c8311c67ba940ec84258793176126d5dcd3bc838091005c7c57e27fd9777
+size 1625537802
special_tokens_map.json CHANGED

@@ -2,7 +2,13 @@
   "bos_token": "<s>",
   "cls_token": "<s>",
   "eos_token": "</s>",
-  "mask_token":
+  "mask_token": {
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
   "pad_token": "<pad>",
   "sep_token": "</s>",
   "unk_token": "<unk>"
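This change expands the mask token from a bare string to a full added-token specification (`lstrip: true`, `normalized: true`). A quick check of how the loaded tokenizer exposes it, assuming the same repository id as above:

```python
# Minimal check (assumed repo id): inspect the <mask> special token after loading the tokenizer.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("satyanshu404/bart-large-cnn-finetuned-prompt_generation")
print(tokenizer.mask_token)                        # "<mask>"
print(tokenizer.mask_token_id)                     # vocabulary id of the mask token
print(tokenizer.special_tokens_map["mask_token"])  # string form from special_tokens_map.json
```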
tokenizer.json CHANGED

@@ -14,7 +14,7 @@
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
-      "normalized":
+      "normalized": true,
       "special": true
     },
     {
@@ -23,7 +23,7 @@
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
-      "normalized":
+      "normalized": true,
       "special": true
     },
     {
@@ -32,7 +32,7 @@
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
-      "normalized":
+      "normalized": true,
       "special": true
     },
     {
@@ -41,7 +41,7 @@
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
-      "normalized":
+      "normalized": true,
       "special": true
     },
     {
@@ -50,7 +50,7 @@
       "single_word": false,
       "lstrip": true,
       "rstrip": false,
-      "normalized":
+      "normalized": true,
       "special": true
     }
   ],
tokenizer_config.json CHANGED

@@ -4,7 +4,7 @@
     "0": {
       "content": "<s>",
       "lstrip": false,
-      "normalized":
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
@@ -12,7 +12,7 @@
     "1": {
       "content": "<pad>",
       "lstrip": false,
-      "normalized":
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
@@ -20,7 +20,7 @@
     "2": {
       "content": "</s>",
       "lstrip": false,
-      "normalized":
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
@@ -28,7 +28,7 @@
     "3": {
       "content": "<unk>",
       "lstrip": false,
-      "normalized":
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
@@ -36,13 +36,12 @@
     "50264": {
       "content": "<mask>",
       "lstrip": true,
-      "normalized":
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
     }
   },
-  "additional_special_tokens": [],
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": true,
   "cls_token": "<s>",
training_args.bin CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:6c1fef4034e9d3b69ddf1ad60d56e567f6dca3e250ba144cd518c34a38b42ec6
+size 4728
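`training_args.bin` is the `TrainingArguments` object that the `transformers` `Trainer` serializes with `torch.save` at the end of training. A minimal inspection sketch, assuming `transformers` and a compatible `torch` (the card lists 2.1.0) are installed locally:

```python
# Minimal sketch: unpickle the saved TrainingArguments and compare against the values in the README.
# Requires transformers to be importable, since the file pickles a TrainingArguments instance.
import torch

args = torch.load("training_args.bin")  # torch 2.1 unpickles full objects by default
print(args.learning_rate)                # compare with the card's 3e-07
print(args.per_device_train_batch_size,  # compare with the card's train/eval batch sizes
      args.per_device_eval_batch_size)
print(args.lr_scheduler_type, args.seed) # compare with the card's scheduler type and seed
```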