Started at: 14:22:52
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 32922, '_commit_hash': '075d4e3705390691013e859faffc5696d071e33b'}, {})
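The tuple above is a dumped (config dict, unused kwargs) pair for a BERT checkpoint with a 32,922-token vocabulary; the architectures field shows it was pretrained as a masked LM, while the MAE metric reported below suggests it is fine-tuned here as a single-output regressor. A minimal loading sketch under those assumptions follows; the checkpoint path and the num_labels=1 regression head are placeholders, not taken from this log.

# Minimal loading sketch. Assumptions: placeholder checkpoint path and a
# single-output regression head; neither is stated in the log itself.
from transformers import (AutoConfig, AutoModelForSequenceClassification,
                          AutoTokenizer, BertConfig)

CHECKPOINT = "path/to/bert-checkpoint"  # placeholder, not the actual model used

config = AutoConfig.from_pretrained(CHECKPOINT, num_labels=1)
tokenizer = AutoTokenizer.from_pretrained(CHECKPOINT)
model = AutoModelForSequenceClassification.from_pretrained(CHECKPOINT, config=config)

# The (dict, {}) pair printed above resembles the output of
# BertConfig.get_config_dict(), which returns the raw configuration
# dictionary together with any keyword arguments it did not consume.
print(BertConfig.get_config_dict(CHECKPOINT))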
Epoch: 0
Training loss: 0.6967193094583658 - MAE: 0.6750990625330823
Validation loss : 0.5263436496257782 - MAE: 0.613448569191522
Epoch: 1
Training loss: 0.26826212727106535 - MAE: 0.4087051125792896
Validation loss : 0.22296061515808105 - MAE: 0.3555378972565709
Epoch: 2
Training loss: 0.2124968572304799 - MAE: 0.3476390507866737
Validation loss : 0.20461321175098418 - MAE: 0.3532590132614058
Epoch: 3
Training loss: 0.18429063145930952 - MAE: 0.32677963264803717
Validation loss : 0.17646469473838805 - MAE: 0.3203760276514041
Epoch: 4
Training loss: 0.17768853444319505 - MAE: 0.31684026520272845
Validation loss : 0.17874470055103303 - MAE: 0.3257858562177419
Epoch: 5
Training loss: 0.17304767782871538 - MAE: 0.3139755194179104
Validation loss : 0.1718216747045517 - MAE: 0.31812428100953294
Epoch: 6
Training loss: 0.17041566165593955 - MAE: 0.31176170900212385
Validation loss : 0.17110097408294678 - MAE: 0.3175205683975872
Epoch: 7
Training loss: 0.16822290764405176 - MAE: 0.3093251221653364
Validation loss : 0.17010048031806946 - MAE: 0.3165555384963895
Epoch: 8
Training loss: 0.16749977606993455 - MAE: 0.309235729717861
Validation loss : 0.1681269407272339 - MAE: 0.31405053463780375
Epoch: 9
Training loss: 0.1660219534085347 - MAE: 0.30707150650440435
Validation loss : 0.16768858432769776 - MAE: 0.3134797613244496
Epoch: 10
Training loss: 0.16200380256542793 - MAE: 0.3036754525218348
Validation loss : 0.1665838599205017 - MAE: 0.3121892924671111
Epoch: 11
Training loss: 0.16332647777520692 - MAE: 0.3039458888319623
Validation loss : 0.16546964943408965 - MAE: 0.3107545377167428
Epoch: 12
Training loss: 0.16287456223597893 - MAE: 0.30448042997749564
Validation loss : 0.16524331867694855 - MAE: 0.3106786227351389
Epoch: 13
Training loss: 0.15990304029904878 - MAE: 0.30016709856440854
Validation loss : 0.16489878594875335 - MAE: 0.31031377115036685
Epoch: 14
Training loss: 0.15767766993779403 - MAE: 0.2997244083849813
Validation loss : 0.16339748501777648 - MAE: 0.3083185140265562
Epoch: 15
Training loss: 0.15951268031046942 - MAE: 0.29912324704552734
Validation loss : 0.16375492811203002 - MAE: 0.3090144460164269
Epoch: 16
Training loss: 0.15623677808504838 - MAE: 0.2973088492735953
Validation loss : 0.16299248337745667 - MAE: 0.30820454237494843
Epoch: 17
Training loss: 0.1564337038076841 - MAE: 0.29861382568944933
Validation loss : 0.16230234503746033 - MAE: 0.30725784051523525
Epoch: 18
Training loss: 0.15549989617787874 - MAE: 0.2960598878509324
Validation loss : 0.16213322579860687 - MAE: 0.3070436707425703
Epoch: 19
Training loss: 0.1569853677199437 - MAE: 0.29836095928185685
Validation loss : 0.16146726608276368 - MAE: 0.30629696980134635
Epoch: 20
Training loss: 0.1547408069555576 - MAE: 0.2970990607700864
Validation loss : 0.16105090379714965 - MAE: 0.3058310992073678
Epoch: 21
Training loss: 0.154588805941435 - MAE: 0.2957786660002471
Validation loss : 0.16075867414474487 - MAE: 0.30553690240829345
Epoch: 22
Training loss: 0.153683405656081 - MAE: 0.29670572424115915
Validation loss : 0.16087317764759063 - MAE: 0.3056201053962598
Epoch: 23
Training loss: 0.15514581363934737 - MAE: 0.2955321868620246
Validation loss : 0.16066641211509705 - MAE: 0.30531659865767746
Epoch: 24
Training loss: 0.15376647619100717 - MAE: 0.2947861379473795
Validation loss : 0.1603192001581192 - MAE: 0.3047779291624614
Epoch: 25
Training loss: 0.15424712231526008 - MAE: 0.295517101740558
Validation loss : 0.16027258038520814 - MAE: 0.30490525127423435
Epoch: 26
Training loss: 0.1534990152487388 - MAE: 0.2947872614603403
Validation loss : 0.15944191813468933 - MAE: 0.3040211334248351
Epoch: 27
Training loss: 0.15433337367497957 - MAE: 0.29452155782914224
Validation loss : 0.1589920550584793 - MAE: 0.3033883763699271
Epoch: 28
Training loss: 0.15299253509594843 - MAE: 0.29417555280935553
Validation loss : 0.15820957720279694 - MAE: 0.30212984699933354
Epoch: 29
Training loss: 0.1528388296182339 - MAE: 0.29439108542262243
Validation loss : 0.15762979090213775 - MAE: 0.30137274414478366
Epoch: 30
Training loss: 0.15239140964471376 - MAE: 0.2931581262311504
Validation loss : 0.1580193370580673 - MAE: 0.30197420343316406
Epoch: 31
Training loss: 0.15259050176693842 - MAE: 0.29443273043635304
Validation loss : 0.15747585594654084 - MAE: 0.30127911144588826
Epoch: 32
Training loss: 0.1526124311181215 - MAE: 0.29437440890737165
Validation loss : 0.15704381465911865 - MAE: 0.30065229981507346
Epoch: 33
Training loss: 0.15283041046215937 - MAE: 0.29381278442156483
Validation loss : 0.15643138587474822 - MAE: 0.300023304194808
Epoch: 34
Training loss: 0.15295472053381112 - MAE: 0.2943630839679987
Validation loss : 0.15613988637924195 - MAE: 0.2997042731770326
Epoch: 35
Training loss: 0.1538951786664816 - MAE: 0.2953855544885684
Validation loss : 0.15575254261493682 - MAE: 0.2986546943191013
Epoch: 36
Training loss: 0.15247866969842178 - MAE: 0.2940545876422218
Validation loss : 0.15558441281318663 - MAE: 0.29878957189644934
Epoch: 37
Training loss: 0.15140113463768592 - MAE: 0.2934594637632915
Validation loss : 0.15546987056732178 - MAE: 0.2987917791428303
Epoch: 38
Training loss: 0.15235514824207014 - MAE: 0.29373894252168026
Validation loss : 0.1554083526134491 - MAE: 0.29874726278048536
Stopped after 38 epochs.
Prediction MAE: 0.2850
Finished at: 14:22:52
Time taken: 3069 s.
0 days 0 hours 51 minutes 9 seconds
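Judging by the plateauing validation loss, the run appears to stop early once validation loss stops improving, then reports a held-out prediction MAE and the wall-clock time in both seconds and a days/hours/minutes/seconds breakdown. Below is a rough, hypothetical reconstruction of a loop that would emit log lines in this shape; the optimizer, learning rate, patience value, and data loaders are assumptions, not taken from the log.

# Hypothetical reconstruction of the training/validation loop behind this
# log. Optimizer choice, learning rate, patience, and loaders are assumed.
import time
import torch
from torch.nn import L1Loss, MSELoss

def run_training(model, train_loader, val_loader, device, max_epochs=150, patience=5):
    print(f"Started at: {time.strftime('%H:%M:%S')}")
    start = time.time()
    optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)
    mse, mae = MSELoss(), L1Loss()
    best_val, bad_epochs = float("inf"), 0

    for epoch in range(max_epochs):
        print(f"Epoch: {epoch}")
        # Training pass first, then validation pass, mirroring the log order.
        for name, loader, training in (("Training", train_loader, True),
                                       ("Validation", val_loader, False)):
            model.train(training)
            total_loss = total_mae = 0.0
            with torch.set_grad_enabled(training):
                for batch in loader:
                    labels = batch.pop("labels").float().to(device)
                    inputs = {k: v.to(device) for k, v in batch.items()}
                    preds = model(**inputs).logits.squeeze(-1)
                    loss = mse(preds, labels)
                    if training:
                        optimizer.zero_grad()
                        loss.backward()
                        optimizer.step()
                    total_loss += loss.item()
                    total_mae += mae(preds, labels).item()
            avg_loss = total_loss / len(loader)
            avg_mae = total_mae / len(loader)
            print(f"{name} loss: {avg_loss} - MAE: {avg_mae}")
        # After the inner loop, avg_loss holds the validation average
        # (validation runs second); use it for early stopping.
        if avg_loss < best_val:
            best_val, bad_epochs = avg_loss, 0
        else:
            bad_epochs += 1
            if bad_epochs >= patience:
                break
    print(f"Stopped after {epoch} epochs.")

    elapsed = int(time.time() - start)
    print(f"Finished at: {time.strftime('%H:%M:%S')}")
    print(f"Time taken: {elapsed} s.")
    days, rem = divmod(elapsed, 86400)
    hours, rem = divmod(rem, 3600)
    minutes, seconds = divmod(rem, 60)
    print(f"{days} days {hours} hours {minutes} minutes {seconds} seconds")

The final "Prediction MAE: 0.2850" line would come from an analogous, gradient-free pass over a held-out test set, reporting only the mean absolute error of the predictions.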