Started at: 14:55:50
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
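The tuple above appears to be the raw repr of a Hugging Face transformers BERT configuration, with the trailing empty dict presumably holding unused kwargs. As a minimal sketch, assuming the standard transformers API and dropping the internal '_commit_hash' entry, a config like this could be rebuilt and turned into a freshly initialised model as follows; the names are illustrative only and this is not the training script that produced this log:

# Hypothetical sketch: rebuild the config logged above (transformers assumed installed).
from transformers import BertConfig, BertForMaskedLM

cfg_dict = {
    "architectures": ["BertForMaskedLM"],
    "attention_probs_dropout_prob": 0.1,
    "hidden_act": "gelu",
    "hidden_dropout_prob": 0.1,
    "hidden_size": 768,
    "initializer_range": 0.02,
    "intermediate_size": 3072,
    "max_position_embeddings": 512,
    "model_type": "bert",
    "num_attention_heads": 12,
    "num_hidden_layers": 12,
    "type_vocab_size": 2,
    "vocab_size": 50104,  # non-default vocab size, which suggests a custom tokenizer
}

config = BertConfig(**cfg_dict)   # unrecognised keys are kept as extra config attributes
model = BertForMaskedLM(config)   # randomly initialised weights, not the trained checkpoint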
Epoch: 0
Training loss: 0.2707270306348801 - MAE: 0.4014548746281838
Validation loss : 0.18827014460283167 - MAE: 0.3350395914193632
Epoch: 1
Training loss: 0.18397963613271714 - MAE: 0.3274942520732981
Validation loss : 0.17896055035731373 - MAE: 0.32625804300542655
Epoch: 2
Training loss: 0.17076618894934653 - MAE: 0.3156013590381467
Validation loss : 0.17423390377970302 - MAE: 0.32323377271249604
Epoch: 3
Training loss: 0.1643083082139492 - MAE: 0.3095641840936019
Validation loss : 0.16728906508754282 - MAE: 0.3149824083534162
Epoch: 4
Training loss: 0.15957146167755126 - MAE: 0.3037565492992936
Validation loss : 0.16289721429347992 - MAE: 0.3089864069106985
Epoch: 5
Training loss: 0.15854436449706555 - MAE: 0.301500893432691
Validation loss : 0.16129758033682318 - MAE: 0.308202317708313
Epoch: 6
Training loss: 0.15570181787014006 - MAE: 0.29918709351469924
Validation loss : 0.16344413292758606 - MAE: 0.30938825983369334
Epoch: 7
Training loss: 0.15292704798281193 - MAE: 0.2961117975066867
Validation loss : 0.16081692103077383 - MAE: 0.3033625257590202
Epoch: 8
Training loss: 0.15216302141547203 - MAE: 0.2959306926556599
Validation loss : 0.15965510378865636 - MAE: 0.30640949969727455
Epoch: 9
Training loss: 0.1456899941712618 - MAE: 0.2888247514317626
Validation loss : 0.15742756864603827 - MAE: 0.3014585494849406
Epoch: 10
Training loss: 0.1467710939049721 - MAE: 0.2907957975079187
Validation loss : 0.15912020206451416 - MAE: 0.30151619716495687
Epoch: 11
Training loss: 0.14659826673567294 - MAE: 0.2893981534314371
Validation loss : 0.1602539621731814 - MAE: 0.3022486893374817
Epoch: 12
Training loss: 0.14781650044023992 - MAE: 0.29275159014985785
Validation loss : 0.15743818177896388 - MAE: 0.3011467784998341
Epoch: 13
Training loss: 0.14709322392940521 - MAE: 0.2905235164202929
Validation loss : 0.15774143706349766 - MAE: 0.30035432342912505
Epoch: 14
Training loss: 0.14297918483614921 - MAE: 0.2870641656789985
Validation loss : 0.16193263846285202 - MAE: 0.3045518551046045
Epoch: 15
Training loss: 0.14410636112093925 - MAE: 0.2889387564652954
Validation loss : 0.15722107492825566 - MAE: 0.299146052737687
Epoch: 16
Training loss: 0.14345928631722926 - MAE: 0.28585450120342
Validation loss : 0.15835655086180744 - MAE: 0.2999799426627903
Epoch: 17
Training loss: 0.14413826659321785 - MAE: 0.2881013153573912
Validation loss : 0.15604389984818065 - MAE: 0.29791508329064814
Epoch: 18
Training loss: 0.14309908427298068 - MAE: 0.2863054251735944
Validation loss : 0.15559008366921367 - MAE: 0.2975231734983981
Epoch: 19
Training loss: 0.14340508081018924 - MAE: 0.2865255988742361
Validation loss : 0.15817041651291006 - MAE: 0.30007397316689965
Epoch: 20
Training loss: 0.14153706684708595 - MAE: 0.28535710740880754
Validation loss : 0.15578039034324534 - MAE: 0.29770187771554596
Epoch: 21
Training loss: 0.1428557775169611 - MAE: 0.285682293954449
Validation loss : 0.16232867451275096 - MAE: 0.3028449341916031
Epoch: 22
Training loss: 0.1438133302330971 - MAE: 0.2875040690451074
Validation loss : 0.15943333638065002 - MAE: 0.30116253984475694
Epoch: 23
Training loss: 0.1429821538925171 - MAE: 0.2868833888762416
Validation loss : 0.16224380889359644 - MAE: 0.3035733609314583
Epoch: 24
Training loss: 0.1443456995487213 - MAE: 0.28863224872068594
Validation loss : 0.16213381728705237 - MAE: 0.30267648828913235
Epoch: 25
Training loss: 0.1441011916846037 - MAE: 0.2873510918998528
Validation loss : 0.1571306345217368 - MAE: 0.29800568097664254
Epoch: 26
Training loss: 0.1413027948886156 - MAE: 0.2845599852463436
Validation loss : 0.16157625484115937 - MAE: 0.302082414790674
Epoch: 27
Training loss: 0.1430261830240488 - MAE: 0.28583150224560333
Validation loss : 0.15557704515316906 - MAE: 0.2962375028009326
Epoch: 28
Training loss: 0.13997980147600175 - MAE: 0.2843562749380958
Validation loss : 0.15982679803581798 - MAE: 0.3009880212003318
Epoch: 29
Training loss: 0.14315382800996304 - MAE: 0.286772557351704
Validation loss : 0.1625496679369141 - MAE: 0.3033521796959797
Epoch: 30
Training loss: 0.14252791218459607 - MAE: 0.2867981758810579
Validation loss : 0.16115986687295578 - MAE: 0.3021782659616576
Epoch: 31
Training loss: 0.1429748132824898 - MAE: 0.2856617862509035
Validation loss : 0.157088626833523 - MAE: 0.2981007278628173
Epoch: 32
Training loss: 0.14145717337727548 - MAE: 0.28519633331447963
Validation loss : 0.15426941435126698 - MAE: 0.29495634573875945
Epoch: 33
Training loss: 0.1397652292251587 - MAE: 0.2824192674074606
Validation loss : 0.16185673939831116 - MAE: 0.30235075639458764
Epoch: 34
Training loss: 0.14383683420717716 - MAE: 0.28783451246641834
Validation loss : 0.1621221777270822 - MAE: 0.30186752792956195
Epoch: 35
Training loss: 0.14246633373200893 - MAE: 0.2877907118856343
Validation loss : 0.16278734321103377 - MAE: 0.30304530587385786
Epoch: 36
Training loss: 0.143770419806242 - MAE: 0.28817301816229635
Validation loss : 0.15517584848053315 - MAE: 0.2976560542863815
Epoch: 37
Training loss: 0.14138652928173542 - MAE: 0.2845921870039854
Validation loss : 0.16119667800033793 - MAE: 0.30265920196722157
Epoch: 38
Training loss: 0.14421981260180472 - MAE: 0.2889433584101701
Validation loss : 0.15571293322479024 - MAE: 0.29703595724259496
Epoch: 39
Training loss: 0.14046346701681614 - MAE: 0.2838468482556049
Validation loss : 0.15588921790613847 - MAE: 0.29754393996593387
Epoch: 40
Training loss: 0.14273824714124203 - MAE: 0.28727502690499407
Validation loss : 0.159224230576964 - MAE: 0.30038544147161034
Epoch: 41
Training loss: 0.13994729705154896 - MAE: 0.2830982256220656
Validation loss : 0.1574905299965073 - MAE: 0.29930840416808857
Epoch: 42
Training loss: 0.14540876604616643 - MAE: 0.2895460241769239
Validation loss : 0.15893673107904546 - MAE: 0.3014190039637334
Epoch: 43
Training loss: 0.14422410406172276 - MAE: 0.287621172506065
Validation loss : 0.15546253805651383 - MAE: 0.29675135359607124
Epoch: 44
Training loss: 0.14043312780559064 - MAE: 0.2845528497770886
Validation loss : 0.16211417755659888 - MAE: 0.3020204260618487
Epoch: 45
Training loss: 0.1428066013008356 - MAE: 0.28634184678654917
Validation loss : 0.1597718447446823 - MAE: 0.3001012446190728
Epoch: 46
Training loss: 0.14156132243573666 - MAE: 0.2859391534657045
Validation loss : 0.157582988633829 - MAE: 0.2976848072799099
Epoch: 47
Training loss: 0.14282805427908898 - MAE: 0.2871811233175576
Validation loss : 0.15926887533243964 - MAE: 0.30029825826740925
Epoch: 48
Training loss: 0.14263289153575898 - MAE: 0.2864816366339791
Validation loss : 0.15377109673093348 - MAE: 0.29632490944484824
Epoch: 49
Training loss: 0.13977760344743728 - MAE: 0.28464647238203133
Validation loss : 0.15436991479466944 - MAE: 0.29544529781828566
Prediction MAE: 0.2784
Finished at: 14:55:50
Time taken: 10942 s.
0 days 3 hours 2 minutes 22 seconds
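As an optional follow-up (a hedged sketch, not part of the original run): the per-epoch lines above follow a fixed format, so they can be parsed back into a training history and the best validation MAE located. The file name "train.log" is an assumption.

# Hypothetical sketch: parse the epoch/loss/MAE lines of a log in the format above.
import re

epoch_re = re.compile(r"^Epoch: (\d+)")
train_re = re.compile(r"^Training loss: ([\d.]+) - MAE: ([\d.]+)")
val_re = re.compile(r"^Validation loss : ([\d.]+) - MAE: ([\d.]+)")  # space before the colon, as in the log

history = []  # one dict per epoch
with open("train.log") as fh:  # assumed file name
    for raw in fh:
        line = raw.strip()
        if m := epoch_re.match(line):
            history.append({"epoch": int(m.group(1))})
        elif m := train_re.match(line):
            history[-1].update(train_loss=float(m.group(1)), train_mae=float(m.group(2)))
        elif m := val_re.match(line):
            history[-1].update(val_loss=float(m.group(1)), val_mae=float(m.group(2)))

best = min(history, key=lambda e: e["val_mae"])
print(f"Best validation MAE {best['val_mae']:.4f} at epoch {best['epoch']}")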