Started at: 10:48:19
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'afb829e3d0b861bd5f8cda6522b32ca0b097d7eb'}, {})
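The tuple above matches the (config_dict, unused_kwargs) pair returned by Hugging Face transformers when a pretrained config is resolved, though the training script itself is not part of this log, so that reading is an assumption. Below is a minimal sketch of rebuilding the same BERT configuration from the printed values; the BertConfig class and its keyword arguments are standard transformers API, and every numeric value is copied from the line above rather than from the original script.

# Minimal sketch, assuming the log was produced by a script using the
# Hugging Face `transformers` library. All values below are copied from the
# config dict printed in this log; nothing else is taken from the original code.
from transformers import BertConfig

config = BertConfig(
    vocab_size=50104,
    hidden_size=768,
    num_hidden_layers=12,
    num_attention_heads=12,
    intermediate_size=3072,
    hidden_act="gelu",
    hidden_dropout_prob=0.1,
    attention_probs_dropout_prob=0.1,
    max_position_embeddings=512,
    type_vocab_size=2,
    initializer_range=0.02,
)

print(config)  # prints the reconstructed configuration for inspection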
Epoch: 0
Training loss: 0.1924845103344639 - MSE: 0.3322236655963751
Validation loss : 0.17660720612515102 - MSE: 0.324802509627284
Epoch: 1
Training loss: 0.1750537462078678 - MSE: 0.3180729018571566
Validation loss : 0.15942594849250533 - MSE: 0.303487836083192
Epoch: 2
Training loss: 0.16873684886672774 - MSE: 0.31160564513902284
Validation loss : 0.15245808395698215 - MSE: 0.29108694559682735
Epoch: 3
Training loss: 0.1641668309528513 - MSE: 0.30635566679511805
Validation loss : 0.15943744812499394 - MSE: 0.2995468239484796
Epoch: 4
Training loss: 0.16207607389252804 - MSE: 0.30587689084159564
Validation loss : 0.15767109924645134 - MSE: 0.30170587154140893
Epoch: 5
Training loss: 0.1646450460713527 - MSE: 0.30737025815788666
Validation loss : 0.15219450781516958 - MSE: 0.29307598227245624
Epoch: 6
Training loss: 0.15751262988687167 - MSE: 0.2992461270486002
Validation loss : 0.14895058451502613 - MSE: 0.2874865253358561
Epoch: 7
Training loss: 0.16588557305447946 - MSE: 0.3104177206449217
Validation loss : 0.15715703639117154 - MSE: 0.2987129812343567
Epoch: 8
Training loss: 0.15816156373259985 - MSE: 0.301503453503837
Validation loss : 0.15006912420644905 - MSE: 0.288090082300874
Epoch: 9
Training loss: 0.16364081340090272 - MSE: 0.30751142892997274
Validation loss : 0.15224947774726333 - MSE: 0.29088555215174683
Epoch: 10
Training loss: 0.15624311541769711 - MSE: 0.29860838299589704
Validation loss : 0.15263732759789986 - MSE: 0.28903691439119805
Epoch: 11
Training loss: 0.15317542393346728 - MSE: 0.29561704899634816
Validation loss : 0.1465996318129879 - MSE: 0.2883039288153636
Epoch: 12
Training loss: 0.15394915797308012 - MSE: 0.29761309661938545
Validation loss : 0.14934668831075681 - MSE: 0.2889731477139799
Epoch: 13
Training loss: 0.1515518317405645 - MSE: 0.293403737656915
Validation loss : 0.14790588775367447 - MSE: 0.2870366854261706
Epoch: 14
Training loss: 0.15473641882541822 - MSE: 0.2978288996002309
Validation loss : 0.14924548374432506 - MSE: 0.2876623407733963
Epoch: 15
Training loss: 0.15178937489535602 - MSE: 0.29491923654080343
Validation loss : 0.1417709912991885 - MSE: 0.28157878501973976
Epoch: 16
Training loss: 0.1528880622635033 - MSE: 0.29616020437308754
Validation loss : 0.14535920316296996 - MSE: 0.2877532904456922
Epoch: 17
Training loss: 0.14908536092430202 - MSE: 0.29369922969737966
Validation loss : 0.14267539989316103 - MSE: 0.2845677065163917
Epoch: 18
Training loss: 0.1528856832401704 - MSE: 0.29543899790480355
Validation loss : 0.14451625288435907 - MSE: 0.2880325385683692
Epoch: 19
Training loss: 0.14715763338401838 - MSE: 0.2894570824084226
Validation loss : 0.1392400273206559 - MSE: 0.28102629808100493
Epoch: 20
Training loss: 0.1496212546935844 - MSE: 0.2936362344117262
Validation loss : 0.14045483167424347 - MSE: 0.2781410631520079
Epoch: 21
Training loss: 0.15020131709309398 - MSE: 0.29264906647405664
Validation loss : 0.14210618603410144 - MSE: 0.279907465990816
Epoch: 22
Training loss: 0.14703214569442769 - MSE: 0.2902175784407383
Validation loss : 0.15019087527285924 - MSE: 0.29736463912534333
Epoch: 23
Training loss: 0.14753472385218905 - MSE: 0.2909915520698562
Validation loss : 0.14351169821439366 - MSE: 0.28804395697824053
Epoch: 24
Training loss: 0.14944542596485408 - MSE: 0.29246118657753634
Validation loss : 0.1458486494692889 - MSE: 0.2909268395457848
Epoch: 25
Training loss: 0.1488783634669587 - MSE: 0.2935176709923817
Validation loss : 0.15622048407341493 - MSE: 0.3037462926966687
Epoch: 26
Training loss: 0.1580004657283047 - MSE: 0.30098549026857363
Validation loss : 0.14231884614987808 - MSE: 0.28691459123960655
Epoch: 27
Training loss: 0.15134912697116132 - MSE: 0.2957935972907406
Validation loss : 0.14332976147080911 - MSE: 0.28577034088777054
Epoch: 28
Training loss: 0.15213962139438858 - MSE: 0.2972412166773918
Validation loss : 0.14392689954150806 - MSE: 0.2874576256659167
Epoch: 29
Training loss: 0.15294972841207147 - MSE: 0.2961479841129994
Validation loss : 0.14877552177869913 - MSE: 0.2962307486115943
Epoch: 30
Training loss: 0.14635942581341352 - MSE: 0.28953058687721034
Validation loss : 0.1395587156893629 - MSE: 0.28314074548913326
Epoch: 31
Training loss: 0.14861172377155518 - MSE: 0.29207846836634094
Validation loss : 0.13993756952836658 - MSE: 0.28164475508498127
Epoch: 32
Training loss: 0.14971404562368612 - MSE: 0.2942225782866977
Validation loss : 0.1402660423720425 - MSE: 0.2809878029065933
Epoch: 33
Training loss: 0.14612065743522595 - MSE: 0.28930121538091785
Validation loss : 0.14097531207583167 - MSE: 0.2808437031164752
Epoch: 34
Training loss: 0.14832691235590706 - MSE: 0.2931213777167157
Validation loss : 0.14187540943649682 - MSE: 0.28367678066771107
Epoch: 35
Training loss: 0.14736021030221494 - MSE: 0.29049721034771264
Validation loss : 0.13767617585306818 - MSE: 0.27810918026328907
Epoch: 36
Training loss: 0.1519300071948071 - MSE: 0.2955017029009571
Validation loss : 0.14467720444681068 - MSE: 0.28518483911658266
Epoch: 37
Training loss: 0.14739096877614255 - MSE: 0.29104104119321234