Started at: 13:54:54
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 32922, '_commit_hash': '44815f7e109b53547cccdf3c6847f4c28b989816'}, {})
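The tuple above is the raw dump of the BERT encoder configuration (plus empty extra kwargs) loaded at startup. As a rough illustration only, a config dump like this is typically produced by loading a pretrained checkpoint with Hugging Face transformers; the checkpoint name below is a hypothetical placeholder, not the model actually used in this run:

# Sketch only: how a config dump like the one above is typically obtained.
# "some-org/some-bert-checkpoint" is a placeholder, not the real checkpoint.
from transformers import AutoConfig, AutoModel

config = AutoConfig.from_pretrained("some-org/some-bert-checkpoint")
print(config.to_dict())   # yields a dict similar to the one logged above
model = AutoModel.from_pretrained("some-org/some-bert-checkpoint", config=config)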
Epoch: 0
Training loss: 0.4353753740065976 - MSE: 0.4631018830797368
Validation loss : 0.14201142988167703 - MSE: 0.292727476662094
Epoch: 1
Training loss: 0.16082680688092582 - MSE: 0.3039855683631176
Validation loss : 0.14794749207794666 - MSE: 0.29952175843163786
Epoch: 2
Training loss: 0.15888278323568797 - MSE: 0.30232633269739506
Validation loss : 0.12977031315676868 - MSE: 0.2779232412353849
Epoch: 3
Training loss: 0.1516539692878723 - MSE: 0.29570308847882737
Validation loss : 0.1342183407396078 - MSE: 0.28315487688041685
Epoch: 4
Training loss: 0.14969512843771984 - MSE: 0.29376316316788137
Validation loss : 0.13651572703383863 - MSE: 0.286356273434194
Epoch: 5
Training loss: 0.14825457373732015 - MSE: 0.29236288646470276
Validation loss : 0.13325480837374926 - MSE: 0.28235259219172804
Epoch: 6
Training loss: 0.14587709978222846 - MSE: 0.29145181360233374
Validation loss : 0.14294268772937357 - MSE: 0.29422283310077546
Epoch: 7
Training loss: 0.148439571535901 - MSE: 0.29440375310883926
Validation loss : 0.14034398854710162 - MSE: 0.2907182476260459
Epoch: 8
Training loss: 0.14813685872052845 - MSE: 0.2931949322407332
Validation loss : 0.12979571800678968 - MSE: 0.2782447249405777
Epoch: 9
Training loss: 0.14627560220266644 - MSE: 0.2917503264555033
Validation loss : 0.1504771877080202 - MSE: 0.30260855387996344
Epoch: 10
Training loss: 0.14390636309981347 - MSE: 0.29031416543032734
Validation loss : 0.13953579450026155 - MSE: 0.2890264796219526
Epoch: 11
Training loss: 0.14922026603629715 - MSE: 0.292561392520073
Validation loss : 0.1444824270438403 - MSE: 0.2944073201865649
Epoch: 12
Training loss: 0.14153452319534202 - MSE: 0.285389527479883
Validation loss : 0.1263227758463472 - MSE: 0.27377457326610966
Epoch: 13
Training loss: 0.14998281272618394 - MSE: 0.2927288895743673
Validation loss : 0.12917391466908157 - MSE: 0.27674699036970196
Epoch: 14
Training loss: 0.1430773071160442 - MSE: 0.2892675879716682
Validation loss : 0.14142304356209934 - MSE: 0.2941539923554046
Epoch: 15
Training loss: 0.15678064901577798 - MSE: 0.3018205089469717
Validation loss : 0.12917302176356316 - MSE: 0.27513299298107086
Epoch: 16
Training loss: 0.1494230675854181 - MSE: 0.2951704079181553
Validation loss : 0.1280019199475646 - MSE: 0.2731706016706994
Epoch: 17
Training loss: 0.14316309780666703 - MSE: 0.28891571660292187
Validation loss : 0.12425365822855383 - MSE: 0.27112690135936646
Epoch: 18
Training loss: 0.1434906577593402 - MSE: 0.28606728956978134
Validation loss : 0.15053234971128404 - MSE: 0.30457389003231583
Epoch: 19
Training loss: 0.14480229098545877 - MSE: 0.2907542990574569
Validation loss : 0.1469436811748892 - MSE: 0.3000679240572026
Epoch: 20
Training loss: 0.1462563581372562 - MSE: 0.2903288957751347
Validation loss : 0.16302492283284664 - MSE: 0.3174909429867512
Epoch: 21
Training loss: 0.14268505094867004 - MSE: 0.28808178734771134
Validation loss : 0.12645738548599184 - MSE: 0.2734583813939935
Epoch: 22
Training loss: 0.1431882705735533 - MSE: 0.29013400485776225
Validation loss : 0.14717783266678452 - MSE: 0.3002806941290146
Epoch: 23
Training loss: 0.1427597692922542 - MSE: 0.2874371575758064
Validation loss : 0.12963575683534145 - MSE: 0.2767674714336863
Epoch: 24
Training loss: 0.14602290623282133 - MSE: 0.29132534981254526
Validation loss : 0.15562276146374643 - MSE: 0.30841544617533145
Epoch: 25
Training loss: 0.14057497789985254 - MSE: 0.2866385142998687
Validation loss : 0.12258221080992371 - MSE: 0.2676300164089298
Epoch: 26
Training loss: 0.13991084024310113 - MSE: 0.2862323305934003
Validation loss : 0.14549001771956682 - MSE: 0.29673826599128006
Epoch: 27
Training loss: 0.13917529892764594 - MSE: 0.2846490242675938
Validation loss : 0.1338302984368056 - MSE: 0.2824601715906283
Epoch: 28
Training loss: 0.1368718992330526 - MSE: 0.28348191048819527
Validation loss : 0.13304794486612082 - MSE: 0.28124178885582296
Epoch: 29
Training loss: 0.137821614467784 - MSE: 0.28195915514916314
Validation loss : 0.1356566457543522 - MSE: 0.2844219791945761
Epoch: 30
Training loss: 0.1361325614154339 - MSE: 0.2826986827887364
Validation loss : 0.13432824215851724 - MSE: 0.28257486897155104
Epoch: 31
Training loss: 0.13651365814240354 - MSE: 0.28391514279453545
Validation loss : 0.131905903108418 - MSE: 0.2781696847882813
Epoch: 32
Training loss: 0.13995016409378302 - MSE: 0.28707461995686306
Validation loss : 0.12570394831709564 - MSE: 0.27155404622999413
Epoch: 33
Training loss: 0.13649120593541547 - MSE: 0.28244032196195645
Validation loss : 0.14095223206095397 - MSE: 0.28911527057744024
Epoch: 34
Training loss: 0.1340673217647954 - MSE: 0.27867030067386744
Validation loss : 0.13333224435336888 - MSE: 0.2808691598252153
Epoch: 35
Training loss: 0.13995135018700047 - MSE: 0.28584392441572315
Validation loss : 0.1441046737600118 - MSE: 0.2924803690762019
Epoch: 36
Training loss: 0.13585356917036207 - MSE: 0.28205852994357916
Validation loss : 0.12641809089109302 - MSE: 0.2719647221742889
Epoch: 37
Training loss: 0.1334524358181577 - MSE: 0.27814886716867226
Validation loss : 0.1252909282920882 - MSE: 0.26961817654182596
Epoch: 38
Training loss: 0.13675130664518004 - MSE: 0.2819899654138859
Validation loss : 0.12670658458955586 - MSE: 0.2694715199536404
Epoch: 39
Training loss: 0.14203872606158258 - MSE: 0.2871730161628519
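The per-epoch lines above are consistent with a standard PyTorch fine-tuning loop that averages an MSE loss over batches and separately reports an MSE metric per split. The data pipeline, model head, and the exact scale of the reported MSE are not recoverable from the log, so the following is an illustrative sketch under those assumptions, not the script that produced this output:

# Sketch only (assumptions): `model` returns a tensor of regression scores, and
# train_loader / val_loader yield dicts with input_ids, attention_mask, labels.
import torch
from torch import nn

def run_epoch(model, loader, optimizer=None, device="cuda"):
    training = optimizer is not None
    model.train() if training else model.eval()
    loss_fn = nn.MSELoss()
    total_loss, preds, targets = 0.0, [], []
    with torch.set_grad_enabled(training):
        for batch in loader:
            batch = {k: v.to(device) for k, v in batch.items()}
            scores = model(input_ids=batch["input_ids"],
                           attention_mask=batch["attention_mask"]).squeeze(-1)
            loss = loss_fn(scores, batch["labels"].float())
            if training:
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
            total_loss += loss.item()
            preds.append(scores.detach().cpu())
            targets.append(batch["labels"].float().cpu())
    # MSE metric recomputed over the whole split (one plausible reading of the log).
    mse = nn.functional.mse_loss(torch.cat(preds), torch.cat(targets)).item()
    return total_loss / len(loader), mse

# for epoch in range(40):
#     tr_loss, tr_mse = run_epoch(model, train_loader, optimizer)
#     va_loss, va_mse = run_epoch(model, val_loader)
#     print(f"Epoch: {epoch}")
#     print(f"Training loss: {tr_loss} - MSE: {tr_mse}")
#     print(f"Validation loss : {va_loss} - MSE: {va_mse}")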