Started at: 21:34:05
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
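
# The tuple above is the printed (config_dict, unused_kwargs) pair for the BERT
# checkpoint, and the per-epoch lines below report loss and MAE for training and
# validation passes. What follows is a minimal sketch, NOT the original training
# script: it rebuilds a BertConfig from that dict and shows the kind of loop that
# would emit such lines. The regression head, MSE loss, data loaders, and all
# names (BertRegressor, run_epoch, train_loader, val_loader) are assumptions.
import torch
from torch import nn
from transformers import BertConfig, BertModel

config_dict = {
    "architectures": ["BertForMaskedLM"],
    "attention_probs_dropout_prob": 0.1,
    "hidden_act": "gelu",
    "hidden_dropout_prob": 0.1,
    "hidden_size": 768,
    "initializer_range": 0.02,
    "intermediate_size": 3072,
    "max_position_embeddings": 512,
    "model_type": "bert",
    "num_attention_heads": 12,
    "num_hidden_layers": 12,
    "type_vocab_size": 2,
    "vocab_size": 50104,
}
config = BertConfig(**config_dict)  # same shape as the dict printed above

class BertRegressor(nn.Module):
    """Hypothetical regression head; the MAE metric in the log suggests a scalar target."""
    def __init__(self, config):
        super().__init__()
        self.bert = BertModel(config)
        self.head = nn.Linear(config.hidden_size, 1)

    def forward(self, input_ids, attention_mask=None):
        out = self.bert(input_ids=input_ids, attention_mask=attention_mask)
        return self.head(out.pooler_output).squeeze(-1)

def run_epoch(model, loader, loss_fn, optimizer=None, device="cpu"):
    """One pass over `loader`; returns (mean loss, mean absolute error) per batch."""
    training = optimizer is not None
    model.train(training)
    total_loss, total_mae, n_batches = 0.0, 0.0, 0
    for batch in loader:  # batch assumed to hold input_ids, attention_mask, labels
        ids = batch["input_ids"].to(device)
        mask = batch["attention_mask"].to(device)
        labels = batch["labels"].float().to(device)
        with torch.set_grad_enabled(training):
            preds = model(ids, attention_mask=mask)
            loss = loss_fn(preds, labels)
        if training:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item()
        total_mae += (preds - labels).abs().mean().item()
        n_batches += 1
    return total_loss / n_batches, total_mae / n_batches

# A loop like the following would print lines in the format of the log below
# (loss function assumed to be MSE; the actual choice is not recorded here):
# for epoch in range(num_epochs):
#     print(f"Epoch: {epoch}")
#     tr_loss, tr_mae = run_epoch(model, train_loader, nn.MSELoss(), optimizer, device)
#     print(f"Training loss: {tr_loss} - MAE: {tr_mae}")
#     va_loss, va_mae = run_epoch(model, val_loader, nn.MSELoss(), device=device)
#     print(f"Validation loss : {va_loss} - MAE: {va_mae}")
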
Epoch: 0
Training loss: 0.32175946950912476 - MAE: 0.4437903503551826
Validation loss : 0.19621961481041378 - MAE: 0.3388283822410594
Epoch: 1
Training loss: 0.21072698533535003 - MAE: 0.35116530289867337
Validation loss : 0.1876767095592287 - MAE: 0.33628140968919695
Epoch: 2
Training loss: 0.1893942326307297 - MAE: 0.3312522516218666
Validation loss : 0.1748287214173211 - MAE: 0.3213797447248068
Epoch: 3
Training loss: 0.17969159841537474 - MAE: 0.3215382240664255
Validation loss : 0.1697687026527193 - MAE: 0.31446870224950263
Epoch: 4
Training loss: 0.17220519423484804 - MAE: 0.3129526796594085
Validation loss : 0.16811848680178323 - MAE: 0.3117262145282266
Epoch: 5
Training loss: 0.17296144247055054 - MAE: 0.31494873624825814
Validation loss : 0.16849000917540657 - MAE: 0.31104514179599985
Epoch: 6
Training loss: 0.16641035348176955 - MAE: 0.30768475494362546
Validation loss : 0.16421516074074638 - MAE: 0.30715220958263423
Epoch: 7
Training loss: 0.16057054400444032 - MAE: 0.30329494898002457
Validation loss : 0.161027698053254 - MAE: 0.304297376785587
Epoch: 8
Training loss: 0.1566170272231102 - MAE: 0.29957014870206655
Validation loss : 0.15982638630602095 - MAE: 0.30397747682822174
Epoch: 9
Training loss: 0.15673983812332154 - MAE: 0.3000540458404174
Validation loss : 0.159184659520785 - MAE: 0.3035235378542429
Epoch: 10
Training loss: 0.15630604147911073 - MAE: 0.29690365842430627
Validation loss : 0.15921704471111298 - MAE: 0.30240467396157955
Epoch: 11
Training loss: 0.15595020622015 - MAE: 0.29754135005638765
Validation loss : 0.15686986181471083 - MAE: 0.30119996351152656
Epoch: 12
Training loss: 0.15274528950452804 - MAE: 0.2944161972508913
Validation loss : 0.15652166141404045 - MAE: 0.3012713923501961
Epoch: 13
Training loss: 0.15205995708703995 - MAE: 0.29424324063629004
Validation loss : 0.1575678288936615 - MAE: 0.3004794443503115
Epoch: 14
Training loss: 0.15087180227041244 - MAE: 0.292504579327589
Validation loss : 0.15574459234873453 - MAE: 0.29932356111019714
Epoch: 15
Training loss: 0.1518820345401764 - MAE: 0.2940904971897683
Validation loss : 0.15581322544150883 - MAE: 0.30048684662514935
Epoch: 16
Training loss: 0.14916340589523316 - MAE: 0.2900369708605816
Validation loss : 0.15502946575482687 - MAE: 0.2986623058669649
Epoch: 17
Training loss: 0.14969733864068985 - MAE: 0.2916869417468108
Validation loss : 0.15631223718325296 - MAE: 0.2994015598567933
Epoch: 18
Training loss: 0.14621972769498826 - MAE: 0.2875086269286061
Validation loss : 0.1557358337773217 - MAE: 0.29931970436403404
Epoch: 19
Training loss: 0.14798570185899734 - MAE: 0.29024787778757843
Validation loss : 0.15424930387073094 - MAE: 0.2984811820958494
Epoch: 20
Training loss: 0.14769238144159316 - MAE: 0.2896109423923894
Validation loss : 0.1536422868569692 - MAE: 0.2987445844262027
Epoch: 21
Training loss: 0.14361368536949157 - MAE: 0.2842206176667335
Validation loss : 0.1533755792511834 - MAE: 0.29642898867488277
Epoch: 22
Training loss: 0.14456430345773696 - MAE: 0.2860957867005398
Validation loss : 0.1542035871081882 - MAE: 0.2971775973100257
Epoch: 23
Training loss: 0.1426533755660057 - MAE: 0.28512021628758083
Validation loss : 0.15245803362793392 - MAE: 0.2961228783995986
Epoch: 24
Training loss: 0.14542177826166153 - MAE: 0.2855342243309425
Validation loss : 0.15294104317824045 - MAE: 0.29643245601047447
Epoch: 25
Training loss: 0.14522234290838243 - MAE: 0.2851345876886818
Validation loss : 0.15342944694889915 - MAE: 0.2958033843008907
Epoch: 26
Training loss: 0.14386597275733948 - MAE: 0.2838243646140143
Validation loss : 0.1515038808186849 - MAE: 0.2936464496284943
Epoch: 27
Training loss: 0.1415349954366684 - MAE: 0.2810586437104536
Validation loss : 0.1507236527072059 - MAE: 0.29409404478735646
Epoch: 28
Training loss: 0.14115683376789093 - MAE: 0.2815221038197439
Validation loss : 0.15061336921321022 - MAE: 0.29280129784932213
Epoch: 29
Training loss: 0.14186541587114335 - MAE: 0.28239653007887255
Validation loss : 0.15151139597098032 - MAE: 0.2953867482779256
Epoch: 30
Training loss: 0.14276256740093232 - MAE: 0.283208699006944
Validation loss : 0.1504600097735723 - MAE: 0.29276670315056585
Epoch: 31
Training loss: 0.14241950929164887 - MAE: 0.28277882078383293
Validation loss : 0.1505332812666893 - MAE: 0.29293145394981956
Epoch: 32
Training loss: 0.14204519629478454 - MAE: 0.28192363607736504
Validation loss : 0.15044088496102226 - MAE: 0.2929076596067834
Epoch: 33
Training loss: 0.13920597046613692 - MAE: 0.2793298976614221
Validation loss : 0.15006180769867367 - MAE: 0.29292432343849906
Epoch: 34
Training loss: 0.1410813584923744 - MAE: 0.2815379053911258
Validation loss : 0.14930015967951882 - MAE: 0.2933547268525188
Epoch: 35
Training loss: 0.14150760889053346 - MAE: 0.2832031330097777
Validation loss : 0.15295125875208113 - MAE: 0.2936935655701593
Epoch: 36
Training loss: 0.1405733221769333 - MAE: 0.2821643593441243
Validation loss : 0.14867112785577774 - MAE: 0.2921335742814868
Epoch: 37
Training loss: 0.14206359952688216 - MAE: 0.28261241361934286
Validation loss : 0.14988169405195448 - MAE: 0.2927604772074144
Epoch: 38
Training loss: 0.14064243495464324 - MAE: 0.28185439579063915
Validation loss : 0.1515509072277281 - MAE: 0.29319069832718325
Epoch: 39
Training loss: 0.14297345608472825 - MAE: 0.282847817609895
Validation loss : 0.15042786465750801 - MAE: 0.29282722784595605
Epoch: 40
Training loss: 0.13990240722894667 - MAE: 0.27964537481988466
Validation loss : 0.14972211172183356 - MAE: 0.29260355981671565
Epoch: 41