germanchura committed · Commit 3efe037 · 1 Parent(s): ff4183c

Model save

Files changed:
- README.md +49 -199
- model.safetensors +1 -1
- runs/May27_21-10-12_34ec97677d65/events.out.tfevents.1716844213.34ec97677d65.1835.9 +3 -0
- runs/May27_21-10-23_34ec97677d65/events.out.tfevents.1716844223.34ec97677d65.1835.10 +3 -0
- runs/May27_21-10-23_34ec97677d65/events.out.tfevents.1716844248.34ec97677d65.1835.11 +3 -0
- training_args.bin +1 -1
README.md
CHANGED
@@ -15,7 +15,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [distilroberta-base](https://huggingface.co/distilroberta-base) on the None dataset.
 It achieves the following results on the evaluation set:
-- Loss: 2.
+- Loss: 2.9755
 
 ## Model description
 
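As a usage sketch only: the card does not name the repository or the task head, so the repository id below is a placeholder and the fill-mask pipeline is an assumption based on the distilroberta-base starting point.

```python
# Minimal usage sketch. The repo id is hypothetical and the fill-mask task is an
# assumption; adjust both to match the actual published model.
from transformers import pipeline

fill_mask = pipeline("fill-mask", model="your-username/your-finetuned-distilroberta")  # placeholder id
print(fill_mask("The goal of this project is to <mask> the model."))
```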
@@ -40,7 +40,7 @@ The following hyperparameters were used during training:
 - seed: 42
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
 - lr_scheduler_type: linear
-- num_epochs:
+- num_epochs: 50
 
 ### Training results
 
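For reference, a minimal sketch of how the hyperparameters in this hunk map onto `transformers.TrainingArguments`; only the seed, Adam settings, scheduler type, and the updated epoch count come from the card, the remaining values are placeholders.

```python
# Sketch of the listed hyperparameters expressed as TrainingArguments.
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="out",                # placeholder
    seed=42,                         # from the card
    num_train_epochs=50,             # from the card (updated in this commit)
    lr_scheduler_type="linear",      # from the card
    learning_rate=2e-5,              # placeholder; not shown in this hunk
    per_device_train_batch_size=8,   # placeholder; not shown in this hunk
    adam_beta1=0.9,                  # from the card: Adam with betas=(0.9, 0.999)
    adam_beta2=0.999,
    adam_epsilon=1e-8,               # from the card: epsilon=1e-08
)
```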
@@ -48,204 +48,54 @@ The following hyperparameters were used during training:
 |:-------------:|:-----:|:----:|:---------------:|
 | No log | 1.0 | 2 | 3.0764 |
 | No log | 2.0 | 4 | 2.9682 |
-| No log | 3.0 | 6 | 2.
+| No log | 3.0 | 6 | 2.9192 |
 | No log | 4.0 | 8 | 2.7053 |
-| No log | 5.0 | 10 | 3.
-| No log | 6.0 | 12 | 3.
-| No log | 7.0 | 14 | 2.
-| No log | 8.0 | 16 | 2.
-| No log | 9.0 | 18 | 3.
-| No log | 10.0 | 20 | 2.
-| No log | 11.0 | 22 | 2.
-| No log | 12.0 | 24 | 2.
-| No log | 13.0 | 26 | 2.
-| No log | 14.0 | 28 | 2.
-| No log | 15.0 | 30 | 2.
-| No log | 16.0 | 32 | 2.
-| No log | 17.0 | 34 | 3.
-| No log | 18.0 | 36 | 3.
-| No log | 19.0 | 38 | 3.
-| No log | 20.0 | 40 | 2.
-| No log | 21.0 | 42 | 2.
-| No log | 22.0 | 44 | 2.
-| No log | 23.0 | 46 | 3.
-| No log | 24.0 | 48 | 3.
-| No log | 25.0 | 50 | 3.
-| No log | 26.0 | 52 | 3.
-| No log | 27.0 | 54 | 2.
-| No log | 28.0 | 56 | 2.
-| No log | 29.0 | 58 | 2.
-| No log | 30.0 | 60 | 2.
-| No log | 31.0 | 62 | 2.
-| No log | 32.0 | 64 | 2.
-| No log | 33.0 | 66 | 2.
-| No log | 34.0 | 68 | 2.
-| No log | 35.0 | 70 | 2.
-| No log | 36.0 | 72 | 2.
-| No log | 37.0 | 74 | 3.
-| No log | 38.0 | 76 | 2.
-| No log | 39.0 | 78 | 2.
-| No log | 40.0 | 80 | 2.
-| No log | 41.0 | 82 | 3.
-| No log | 42.0 | 84 | 2.
-| No log | 43.0 | 86 | 2.
-| No log | 44.0 | 88 | 2.
-| No log | 45.0 | 90 | 2.
-| No log | 46.0 | 92 | 3.
-| No log | 47.0 | 94 | 2.
-| No log | 48.0 | 96 | 2.
-| No log | 49.0 | 98 | 2.
-| No log | 50.0 | 100 | 3.
-| No log | 51.0 | 102 | 2.9377 |
-| No log | 52.0 | 104 | 2.7061 |
-| No log | 53.0 | 106 | 2.7856 |
-| No log | 54.0 | 108 | 2.6921 |
-| No log | 55.0 | 110 | 3.2429 |
-| No log | 56.0 | 112 | 2.7693 |
-| No log | 57.0 | 114 | 2.9776 |
-| No log | 58.0 | 116 | 2.4738 |
-| No log | 59.0 | 118 | 3.0559 |
-| No log | 60.0 | 120 | 2.5750 |
-| No log | 61.0 | 122 | 2.6638 |
-| No log | 62.0 | 124 | 2.5890 |
-| No log | 63.0 | 126 | 3.1511 |
-| No log | 64.0 | 128 | 2.5229 |
-| No log | 65.0 | 130 | 2.4948 |
-| No log | 66.0 | 132 | 2.7710 |
-| No log | 67.0 | 134 | 3.0031 |
-| No log | 68.0 | 136 | 2.8321 |
-| No log | 69.0 | 138 | 2.7744 |
-| No log | 70.0 | 140 | 2.9219 |
-| No log | 71.0 | 142 | 2.9745 |
-| No log | 72.0 | 144 | 3.0993 |
-| No log | 73.0 | 146 | 2.7376 |
-| No log | 74.0 | 148 | 2.7306 |
-| No log | 75.0 | 150 | 2.7114 |
-| No log | 76.0 | 152 | 2.6933 |
-| No log | 77.0 | 154 | 2.6704 |
-| No log | 78.0 | 156 | 2.8832 |
-| No log | 79.0 | 158 | 2.8868 |
-| No log | 80.0 | 160 | 3.0212 |
-| No log | 81.0 | 162 | 2.8588 |
-| No log | 82.0 | 164 | 2.7770 |
-| No log | 83.0 | 166 | 3.0724 |
-| No log | 84.0 | 168 | 2.9872 |
-| No log | 85.0 | 170 | 2.6315 |
-| No log | 86.0 | 172 | 2.6071 |
-| No log | 87.0 | 174 | 2.8929 |
-| No log | 88.0 | 176 | 2.6301 |
-| No log | 89.0 | 178 | 2.7766 |
-| No log | 90.0 | 180 | 2.7941 |
-| No log | 91.0 | 182 | 2.9732 |
-| No log | 92.0 | 184 | 3.3441 |
-| No log | 93.0 | 186 | 2.7296 |
-| No log | 94.0 | 188 | 2.9715 |
-| No log | 95.0 | 190 | 2.9928 |
-| No log | 96.0 | 192 | 2.8593 |
-| No log | 97.0 | 194 | 3.0503 |
-| No log | 98.0 | 196 | 2.8252 |
-| No log | 99.0 | 198 | 2.8479 |
-| No log | 100.0 | 200 | 3.0803 |
-| No log | 101.0 | 202 | 2.6038 |
-| No log | 102.0 | 204 | 2.8628 |
-| No log | 103.0 | 206 | 3.0348 |
-| No log | 104.0 | 208 | 2.9459 |
-| No log | 105.0 | 210 | 2.8926 |
-| No log | 106.0 | 212 | 2.9431 |
-| No log | 107.0 | 214 | 2.7569 |
-| No log | 108.0 | 216 | 2.7986 |
-| No log | 109.0 | 218 | 2.4914 |
-| No log | 110.0 | 220 | 2.7286 |
-| No log | 111.0 | 222 | 2.7306 |
-| No log | 112.0 | 224 | 2.8102 |
-| No log | 113.0 | 226 | 2.8561 |
-| No log | 114.0 | 228 | 2.8805 |
-| No log | 115.0 | 230 | 2.9698 |
-| No log | 116.0 | 232 | 3.2196 |
-| No log | 117.0 | 234 | 2.8678 |
-| No log | 118.0 | 236 | 2.7799 |
-| No log | 119.0 | 238 | 2.7113 |
-| No log | 120.0 | 240 | 2.9522 |
-| No log | 121.0 | 242 | 3.0367 |
-| No log | 122.0 | 244 | 2.8870 |
-| No log | 123.0 | 246 | 2.9976 |
-| No log | 124.0 | 248 | 3.2540 |
-| No log | 125.0 | 250 | 2.8957 |
-| No log | 126.0 | 252 | 2.7145 |
-| No log | 127.0 | 254 | 2.5635 |
-| No log | 128.0 | 256 | 2.8628 |
-| No log | 129.0 | 258 | 3.0154 |
-| No log | 130.0 | 260 | 2.8085 |
-| No log | 131.0 | 262 | 3.1380 |
-| No log | 132.0 | 264 | 2.9547 |
-| No log | 133.0 | 266 | 2.7659 |
-| No log | 134.0 | 268 | 2.7255 |
-| No log | 135.0 | 270 | 3.0261 |
-| No log | 136.0 | 272 | 2.6833 |
-| No log | 137.0 | 274 | 2.8733 |
-| No log | 138.0 | 276 | 3.0000 |
-| No log | 139.0 | 278 | 3.1210 |
-| No log | 140.0 | 280 | 2.9426 |
-| No log | 141.0 | 282 | 2.6732 |
-| No log | 142.0 | 284 | 2.4303 |
-| No log | 143.0 | 286 | 2.5880 |
-| No log | 144.0 | 288 | 2.8467 |
-| No log | 145.0 | 290 | 2.8371 |
-| No log | 146.0 | 292 | 2.6999 |
-| No log | 147.0 | 294 | 3.1099 |
-| No log | 148.0 | 296 | 2.7373 |
-| No log | 149.0 | 298 | 3.0492 |
-| No log | 150.0 | 300 | 3.1728 |
-| No log | 151.0 | 302 | 2.7651 |
-| No log | 152.0 | 304 | 2.8977 |
-| No log | 153.0 | 306 | 2.9967 |
-| No log | 154.0 | 308 | 3.1278 |
-| No log | 155.0 | 310 | 2.6165 |
-| No log | 156.0 | 312 | 2.8693 |
-| No log | 157.0 | 314 | 2.9361 |
-| No log | 158.0 | 316 | 3.1438 |
-| No log | 159.0 | 318 | 2.9013 |
-| No log | 160.0 | 320 | 2.7092 |
-| No log | 161.0 | 322 | 2.9289 |
-| No log | 162.0 | 324 | 2.9755 |
-| No log | 163.0 | 326 | 2.9121 |
-| No log | 164.0 | 328 | 2.7537 |
-| No log | 165.0 | 330 | 2.5412 |
-| No log | 166.0 | 332 | 2.5208 |
-| No log | 167.0 | 334 | 2.6742 |
-| No log | 168.0 | 336 | 2.6050 |
-| No log | 169.0 | 338 | 2.9525 |
-| No log | 170.0 | 340 | 2.8997 |
-| No log | 171.0 | 342 | 2.9220 |
-| No log | 172.0 | 344 | 2.8448 |
-| No log | 173.0 | 346 | 2.8170 |
-| No log | 174.0 | 348 | 2.6726 |
-| No log | 175.0 | 350 | 2.8006 |
-| No log | 176.0 | 352 | 2.4927 |
-| No log | 177.0 | 354 | 2.9843 |
-| No log | 178.0 | 356 | 2.9055 |
-| No log | 179.0 | 358 | 2.9204 |
-| No log | 180.0 | 360 | 2.7443 |
-| No log | 181.0 | 362 | 3.0418 |
-| No log | 182.0 | 364 | 2.9705 |
-| No log | 183.0 | 366 | 2.9550 |
-| No log | 184.0 | 368 | 2.8749 |
-| No log | 185.0 | 370 | 2.9289 |
-| No log | 186.0 | 372 | 2.8038 |
-| No log | 187.0 | 374 | 2.6857 |
-| No log | 188.0 | 376 | 3.1484 |
-| No log | 189.0 | 378 | 2.5875 |
-| No log | 190.0 | 380 | 2.9859 |
-| No log | 191.0 | 382 | 2.8266 |
-| No log | 192.0 | 384 | 2.7974 |
-| No log | 193.0 | 386 | 2.6254 |
-| No log | 194.0 | 388 | 2.6446 |
-| No log | 195.0 | 390 | 2.7015 |
-| No log | 196.0 | 392 | 2.9253 |
-| No log | 197.0 | 394 | 2.7782 |
-| No log | 198.0 | 396 | 2.6561 |
-| No log | 199.0 | 398 | 2.9984 |
-| No log | 200.0 | 400 | 2.8353 |
+| No log | 5.0 | 10 | 3.2641 |
+| No log | 6.0 | 12 | 3.0686 |
+| No log | 7.0 | 14 | 2.8964 |
+| No log | 8.0 | 16 | 2.9286 |
+| No log | 9.0 | 18 | 3.2297 |
+| No log | 10.0 | 20 | 2.8838 |
+| No log | 11.0 | 22 | 2.6667 |
+| No log | 12.0 | 24 | 2.7971 |
+| No log | 13.0 | 26 | 2.5930 |
+| No log | 14.0 | 28 | 2.8104 |
+| No log | 15.0 | 30 | 2.7695 |
+| No log | 16.0 | 32 | 2.6098 |
+| No log | 17.0 | 34 | 3.0375 |
+| No log | 18.0 | 36 | 3.1342 |
+| No log | 19.0 | 38 | 3.0716 |
+| No log | 20.0 | 40 | 2.6655 |
+| No log | 21.0 | 42 | 2.5342 |
+| No log | 22.0 | 44 | 2.8461 |
+| No log | 23.0 | 46 | 3.1634 |
+| No log | 24.0 | 48 | 3.0142 |
+| No log | 25.0 | 50 | 3.2181 |
+| No log | 26.0 | 52 | 3.0536 |
+| No log | 27.0 | 54 | 2.6519 |
+| No log | 28.0 | 56 | 2.8154 |
+| No log | 29.0 | 58 | 2.9385 |
+| No log | 30.0 | 60 | 2.8281 |
+| No log | 31.0 | 62 | 2.7531 |
+| No log | 32.0 | 64 | 2.9408 |
+| No log | 33.0 | 66 | 2.5850 |
+| No log | 34.0 | 68 | 2.8823 |
+| No log | 35.0 | 70 | 2.5177 |
+| No log | 36.0 | 72 | 2.9296 |
+| No log | 37.0 | 74 | 3.0441 |
+| No log | 38.0 | 76 | 2.8714 |
+| No log | 39.0 | 78 | 2.7857 |
+| No log | 40.0 | 80 | 2.9850 |
+| No log | 41.0 | 82 | 3.3792 |
+| No log | 42.0 | 84 | 2.9246 |
+| No log | 43.0 | 86 | 2.9392 |
+| No log | 44.0 | 88 | 2.4090 |
+| No log | 45.0 | 90 | 2.8323 |
+| No log | 46.0 | 92 | 3.2173 |
+| No log | 47.0 | 94 | 2.5451 |
+| No log | 48.0 | 96 | 2.7456 |
+| No log | 49.0 | 98 | 2.7570 |
+| No log | 50.0 | 100 | 3.0471 |
 
 
 ### Framework versions
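A small sketch for working with the table above: it scans the markdown rows for validation losses and reports the best one. It assumes the updated README has been saved locally as README.md.

```python
# Parse "| No log | <epoch> | <step> | <eval loss> |" rows and find the minimum loss.
import re

rows = []
with open("README.md", encoding="utf-8") as f:
    for line in f:
        m = re.match(r"\|\s*No log\s*\|\s*([\d.]+)\s*\|\s*(\d+)\s*\|\s*([\d.]+)\s*\|", line)
        if m:
            rows.append((float(m.group(1)), int(m.group(2)), float(m.group(3))))

best = min(rows, key=lambda r: r[2])
print(f"best eval loss {best[2]:.4f} at epoch {best[0]:.0f} (step {best[1]})")
```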
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:6e007aab4428d848faf9d80f0f41a78dffae43c445d47103cb09664a576c1ce4
 size 328693404
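The lines above are a Git LFS pointer, so the repository stores only the hash and size of the weights. A hedged sketch, assuming model.safetensors has been downloaded to the working directory, of checking it against the new pointer's oid:

```python
# Compare the SHA-256 of a downloaded model.safetensors with the oid in the LFS pointer.
import hashlib

def sha256_of(path: str, chunk: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk):
            h.update(block)
    return h.hexdigest()

expected = "6e007aab4428d848faf9d80f0f41a78dffae43c445d47103cb09664a576c1ce4"  # from the pointer above
print(sha256_of("model.safetensors") == expected)
```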
runs/May27_21-10-12_34ec97677d65/events.out.tfevents.1716844213.34ec97677d65.1835.9
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e6659f92ec6da780f2b932e4a340de8e83d2033f8ad216ea50ad77fbeac29193
+size 5893
runs/May27_21-10-23_34ec97677d65/events.out.tfevents.1716844223.34ec97677d65.1835.10
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:074ed4990b021eeda963c1f40d3cc48457ff623f03c505b69d13b57740889c38
+size 18476
runs/May27_21-10-23_34ec97677d65/events.out.tfevents.1716844248.34ec97677d65.1835.11
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:73f6608848428f99a9532090208af46706ae57a0219a0f59c873be97ad809761
+size 354
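The added runs/ files are TensorBoard event logs. A sketch, assuming the tensorboard package is installed and the files have been fetched locally, of listing and reading the scalars they contain:

```python
# Inspect the scalar series logged in one of the added tfevents files.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

ea = EventAccumulator("runs/May27_21-10-23_34ec97677d65/events.out.tfevents.1716844223.34ec97677d65.1835.10")
ea.Reload()

tags = ea.Tags()["scalars"]
print(tags)  # available scalar tags; exact names depend on what the Trainer logged
if tags:
    for event in ea.Scalars(tags[0]):  # first scalar series, e.g. an eval or train loss
        print(event.step, event.value)
```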
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:10b9e645baeb2e7620aaf9afb82ca016dd13c7a39f2235a14e8814865f187577
 size 5112
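training_args.bin is the pickled TrainingArguments object that the Trainer saves alongside the model. A sketch of inspecting it; the local path and the weights_only=False flag (needed on newer PyTorch, since this is not a plain tensor file) are the only assumptions:

```python
# Load and inspect the saved TrainingArguments.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs, args.seed, args.lr_scheduler_type)
```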