Started at: 15:24:47
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
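(Hedged sketch only: the training script itself is not part of this log. The snippet below shows one way per-epoch "loss - MAE" lines like those that follow could be produced from the config above, assuming a single-target regression fine-tune with Hugging Face transformers and PyTorch; the checkpoint path, data loaders, and learning rate are placeholders, not taken from the log.)

```python
import torch
from transformers import AutoModelForSequenceClassification

model = AutoModelForSequenceClassification.from_pretrained(
    "path/to/pretrained-bert",   # placeholder for the base checkpoint
    num_labels=1,                # single regression target
    problem_type="regression",   # MSE training loss, reported alongside MAE
)
optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)  # assumed learning rate

def run_epoch(loader, train):
    """Return average loss and mean absolute error over one pass of `loader`."""
    model.train(train)
    total_loss = total_mae = 0.0
    for batch in loader:
        with torch.set_grad_enabled(train):
            out = model(**batch)          # batch is assumed to include "labels"
        if train:
            optimizer.zero_grad()
            out.loss.backward()
            optimizer.step()
        preds = out.logits.squeeze(-1)
        total_loss += out.loss.item()
        total_mae += (preds - batch["labels"]).abs().mean().item()
    return total_loss / len(loader), total_mae / len(loader)

# for epoch in range(n_epochs):
#     print(f"Epoch: {epoch}")
#     print("Training loss: {} - MAE: {}".format(*run_epoch(train_loader, True)))
#     print("Validation loss : {} - MAE: {}".format(*run_epoch(val_loader, False)))
```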
Epoch: 0
Training loss: 0.48961155652999877 - MAE: 0.5667749118208294
Validation loss : 0.2341225031349394 - MAE: 0.3800908488488268
Epoch: 1
Training loss: 0.23933333277702332 - MAE: 0.3816049737297095
Validation loss : 0.2157200359635883 - MAE: 0.36262968535385853
Epoch: 2
Training loss: 0.2227860403060913 - MAE: 0.362914211886953
Validation loss : 0.20576173894935185 - MAE: 0.351088469157324
Epoch: 3
Training loss: 0.21454387307167053 - MAE: 0.355829191485835
Validation loss : 0.20201933715078566 - MAE: 0.3476490434817412
Epoch: 4
Training loss: 0.20782955586910248 - MAE: 0.3492763092693279
Validation loss : 0.19840116136603886 - MAE: 0.34438782184298933
Epoch: 5
Training loss: 0.20974749684333802 - MAE: 0.3499676263302698
Validation loss : 0.19500345322820875 - MAE: 0.3412627339955124
Epoch: 6
Training loss: 0.20287642538547515 - MAE: 0.3440909850190504
Validation loss : 0.19372935758696663 - MAE: 0.3405082616642143
Epoch: 7
Training loss: 0.20112746179103852 - MAE: 0.3424945919935925
Validation loss : 0.19070245987839168 - MAE: 0.33745151530681705
Epoch: 8
Training loss: 0.19633454322814942 - MAE: 0.3393076079842827
Validation loss : 0.18811953398916456 - MAE: 0.33480433699694667
Epoch: 9
Training loss: 0.19513990223407746 - MAE: 0.33655391167095905
Validation loss : 0.18763755758603415 - MAE: 0.33493136829479836
Epoch: 10
Training loss: 0.1945616537332535 - MAE: 0.3339688628047326
Validation loss : 0.18520050081941816 - MAE: 0.3322033838398172
Epoch: 11
Training loss: 0.1919734501838684 - MAE: 0.3348057854308913
Validation loss : 0.18356686499383715 - MAE: 0.3305542036966663
Epoch: 12
Training loss: 0.18977773189544678 - MAE: 0.33093456128200255
Validation loss : 0.18238851262463462 - MAE: 0.3295210611243764
Epoch: 13
Training loss: 0.18959189653396608 - MAE: 0.33203631813041046
Validation loss : 0.18165012366241878 - MAE: 0.32894981523820216
Epoch: 14
Training loss: 0.188436096906662 - MAE: 0.3289166590553581
Validation loss : 0.18062874178091684 - MAE: 0.3279516450745761
Epoch: 15
Training loss: 0.1888059014081955 - MAE: 0.32938225827858314
Validation loss : 0.17884226640065512 - MAE: 0.32618892569948843
Epoch: 16
Training loss: 0.18457180321216582 - MAE: 0.32768304932122155
Validation loss : 0.1773804161283705 - MAE: 0.3244846629889762
Epoch: 17
Training loss: 0.1834845507144928 - MAE: 0.32513390306967505
Validation loss : 0.17784527275297377 - MAE: 0.325394248134103
Epoch: 18
Training loss: 0.17817132532596588 - MAE: 0.3205999365056764
Validation loss : 0.1772983885473675 - MAE: 0.3250120704872884
Epoch: 19
Training loss: 0.18241925418376922 - MAE: 0.32608043975119405
Validation loss : 0.1762605524725384 - MAE: 0.3239821380292207
Epoch: 20
Training loss: 0.1826972782611847 - MAE: 0.3236272201051217
Validation loss : 0.17492684887515175 - MAE: 0.32241326907282936
Epoch: 21
Training loss: 0.17828085482120515 - MAE: 0.32068803213274405
Validation loss : 0.17474046183956993 - MAE: 0.322295118924069
Epoch: 22
Training loss: 0.1751816302537918 - MAE: 0.3174733856779282
Validation loss : 0.17336992588308123 - MAE: 0.3207070293999617
Epoch: 23
Training loss: 0.17363301634788514 - MAE: 0.316815118397366
Validation loss : 0.17201685905456543 - MAE: 0.3189948372036725
Epoch: 24
Training loss: 0.1767018473148346 - MAE: 0.31913121630235014
Validation loss : 0.17214929395251805 - MAE: 0.31945983617544055
Epoch: 25
Training loss: 0.17439736306667328 - MAE: 0.31661741321206066
Validation loss : 0.17139916784233517 - MAE: 0.31865817509617683
Epoch: 26
Training loss: 0.1737448263168335 - MAE: 0.31659987435815284
Validation loss : 0.1706781718466017 - MAE: 0.318036288529697
Epoch: 27
Training loss: 0.17146194338798523 - MAE: 0.3118728405354431
Validation loss : 0.1697299944029914 - MAE: 0.31685192311272076
Epoch: 28
Training loss: 0.1718020862340927 - MAE: 0.3131538647314398
Validation loss : 0.16994635926352608 - MAE: 0.3175874889959908
Epoch: 29
Training loss: 0.1683420032262802 - MAE: 0.31101584504763863
Validation loss : 0.16925568216376835 - MAE: 0.3167970568735372
Epoch: 30
Training loss: 0.16941484928131104 - MAE: 0.3106103617384786
Validation loss : 0.1684651639726427 - MAE: 0.3159602735208132
Epoch: 31
Training loss: 0.1693895423412323 - MAE: 0.3118286356685513
Validation loss : 0.16787751846843296 - MAE: 0.3153364280899408
Epoch: 32
Training loss: 0.16764336854219436 - MAE: 0.3100005090011756
Validation loss : 0.1668115324444241 - MAE: 0.3138394321127746
Epoch: 33
Training loss: 0.16786101460456848 - MAE: 0.30754219991076703
Validation loss : 0.16722293363677132 - MAE: 0.3147444447232888
Epoch: 34
Training loss: 0.16658054411411286 - MAE: 0.3089389341184129
Validation loss : 0.1656692756546868 - MAE: 0.31278805971203405
Epoch: 35
Training loss: 0.16597358763217926 - MAE: 0.3086932329966142
Validation loss : 0.1659683949417538 - MAE: 0.31345250928240936
Epoch: 36
Training loss: 0.16379758477210998 - MAE: 0.30606558255167177
Validation loss : 0.16556445923116472 - MAE: 0.3130740202247091
Epoch: 37
Training loss: 0.16658478558063508 - MAE: 0.3094187850216587