Started at: 15:33:47
norbert, 5e-06, 128
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 32922, '_commit_hash': '075d4e3705390691013e859faffc5696d071e33b'}, {})
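The three fields above are the model tag, the learning rate, and a third hyperparameter (most likely the maximum sequence length or the batch size), followed by the BERT configuration of the loaded checkpoint. The training script itself is not part of this log, so the following is only a minimal sketch of how such a run could be set up with the Hugging Face transformers API; the checkpoint name "ltg/norbert", the MSE training loss, and all helper names are assumptions.

# Hypothetical reconstruction of the setup behind this log; every name below
# is an assumption, since the actual training script is not included here.
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

MODEL_NAME = "ltg/norbert"   # assumed checkpoint behind the "norbert" tag
LEARNING_RATE = 5e-06        # second field of the "norbert, 5e-06, 128" line
MAX_LENGTH = 128             # third field; could instead be the batch size

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
# A single regression head, since the log tracks MAE rather than accuracy.
model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME, num_labels=1)
optimizer = torch.optim.AdamW(model.parameters(), lr=LEARNING_RATE)

def run_epoch(loader, train=True):
    """Return (mean loss, mean MAE) over one pass, mirroring the per-epoch log lines.
    The loss is assumed to be MSE; the log only shows that loss and MAE are reported separately."""
    model.train(train)
    total_loss, total_mae, n = 0.0, 0.0, 0
    with torch.set_grad_enabled(train):
        for batch in loader:  # dict with input_ids, attention_mask, and a float 'labels' tensor
            preds = model(input_ids=batch["input_ids"],
                          attention_mask=batch["attention_mask"]).logits.squeeze(-1)
            loss = torch.nn.functional.mse_loss(preds, batch["labels"].float())
            if train:
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
            total_loss += loss.item() * len(preds)
            total_mae += (preds - batch["labels"]).abs().sum().item()
            n += len(preds)
    return total_loss / n, total_mae / n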
Epoch: 0
Training loss: 0.3591454255580902 - MAE: 0.45179795905328163
Validation loss : 0.1890380075749229 - MAE: 0.3379535505810491
Epoch: 1
Training loss: 0.17689531534910202 - MAE: 0.3208967210731947
Validation loss : 0.1767409689286176 - MAE: 0.3246970355944142
Epoch: 2
Training loss: 0.16872187942266464 - MAE: 0.3125690919113166
Validation loss : 0.16847357066238627 - MAE: 0.3156110621676475
Epoch: 3
Training loss: 0.15907202735543252 - MAE: 0.3024783283365623
Validation loss : 0.16047895699739456 - MAE: 0.30389921152602795
Epoch: 4
Training loss: 0.1539549046754837 - MAE: 0.29788259924346105
Validation loss : 0.15662892557242336 - MAE: 0.29814380669050355
Epoch: 5
Training loss: 0.1503433680534363 - MAE: 0.2949152129950798
Validation loss : 0.1525153215317165 - MAE: 0.29214486485502245
Epoch: 6
Training loss: 0.14536995470523834 - MAE: 0.2877235071131655
Validation loss : 0.14888650003601522 - MAE: 0.2886239349463809
Epoch: 7
Training loss: 0.14128359347581865 - MAE: 0.28447937593043593
Validation loss : 0.14314944340902216 - MAE: 0.2826997923209342
Epoch: 8
Training loss: 0.1375480856001377 - MAE: 0.2799352840783482
Validation loss : 0.14027191654724233 - MAE: 0.2795930297413221
Epoch: 9
Training loss: 0.13393679924309254 - MAE: 0.2763389379987558
Validation loss : 0.13631205786676967 - MAE: 0.27739130260099404
Epoch: 10
Training loss: 0.12704791828989984 - MAE: 0.26849840650013085
Validation loss : 0.13081551124067867 - MAE: 0.27080877530759406
Epoch: 11
Training loss: 0.12236514694988727 - MAE: 0.26361909948112355
Validation loss : 0.12586704028003357 - MAE: 0.26506955371674007
Epoch: 12
Training loss: 0.11977985665202141 - MAE: 0.2607141841131485
Validation loss : 0.12720872417968862 - MAE: 0.2672541535157223
Epoch: 13
Training loss: 0.11567172214388848 - MAE: 0.2562471491060205
Validation loss : 0.1232336930492345 - MAE: 0.26517212424314035
Epoch: 14
Training loss: 0.11422013014554977 - MAE: 0.25530885712870843
Validation loss : 0.12000418542062535 - MAE: 0.2587850452919145
Epoch: 15
Training loss: 0.10793282993137837 - MAE: 0.24747668924503544
Validation loss : 0.11827676085864797 - MAE: 0.2578255408005992
Epoch: 16
Training loss: 0.10461243025958539 - MAE: 0.24322533124245482
Validation loss : 0.11895233348888509 - MAE: 0.2602667994208777
Epoch: 17
Training loss: 0.10394789524376392 - MAE: 0.24387488659668008
Validation loss : 0.11770302889978185 - MAE: 0.2584240275180989
Epoch: 18
Training loss: 0.10249279104173184 - MAE: 0.2412635314683213
Validation loss : 0.11409987728385364 - MAE: 0.2538274607858908
Epoch: 19
Training loss: 0.10047173336148262 - MAE: 0.23884923636114294
Validation loss : 0.113751100266681 - MAE: 0.2532091686905342
Prediction MAE: 0.2497
Finished at: 15:33:47
Time taken: 1675 s.
0 days 0 hours 27 minutes 55 seconds