Started at: 12:15:30
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
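Note: the tuple above is the raw (config_dict, unused_kwargs) pair for the BERT checkpoint used in this run. As an illustrative, hedged sketch only (the actual checkpoint name and loading code are not part of this log), a call along the following lines with the transformers library prints a pair of this shape:

    # Illustrative sketch only; "some-org/some-bert-checkpoint" is a placeholder,
    # not the checkpoint actually used for this run.
    from transformers import BertConfig

    # get_config_dict() returns (config_dict, unused_kwargs); with no extra kwargs
    # the second element is an empty dict, matching the "({...}, {})" line above.
    config_dict, unused_kwargs = BertConfig.get_config_dict("some-org/some-bert-checkpoint")
    print((config_dict, unused_kwargs))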
Epoch: 0
Training loss: 0.32175946950912476 - MAE: 0.4437903503551826
Validation loss : 0.19621961481041378 - MAE: 0.3388283822410594
Epoch: 1
Training loss: 0.21072698533535003 - MAE: 0.35116530289867337
Validation loss : 0.1876767095592287 - MAE: 0.33628140968919695
Epoch: 2
Training loss: 0.1893942326307297 - MAE: 0.3312522516218666
Validation loss : 0.1748287214173211 - MAE: 0.3213797447248068
Epoch: 3
Training loss: 0.17969159841537474 - MAE: 0.3215382240664255
Validation loss : 0.1697687026527193 - MAE: 0.31446870224950263
Epoch: 4
Training loss: 0.17220519423484804 - MAE: 0.3129526796594085
Validation loss : 0.16811848680178323 - MAE: 0.3117262145282266
Epoch: 5
Training loss: 0.17296144247055054 - MAE: 0.31494873624825814
Validation loss : 0.16849000917540657 - MAE: 0.31104514179599985
Epoch: 6
Training loss: 0.16641035348176955 - MAE: 0.30768475494362546
Validation loss : 0.16421516074074638 - MAE: 0.30715220958263423
Epoch: 7
Training loss: 0.16057054400444032 - MAE: 0.30329494898002457
Validation loss : 0.161027698053254 - MAE: 0.304297376785587
Epoch: 8
Training loss: 0.1566170272231102 - MAE: 0.29957014870206655
Validation loss : 0.15982638630602095 - MAE: 0.30397747682822174
Epoch: 9
Training loss: 0.15673983812332154 - MAE: 0.3000540458404174
Validation loss : 0.159184659520785 - MAE: 0.3035235378542429
Epoch: 10
Training loss: 0.15630604147911073 - MAE: 0.29690365842430627
Validation loss : 0.15921704471111298 - MAE: 0.30240467396157955
Epoch: 11
Training loss: 0.15595020622015 - MAE: 0.29754135005638765
Validation loss : 0.15686986181471083 - MAE: 0.30119996351152656
Epoch: 12
Training loss: 0.15274528950452804 - MAE: 0.2944161972508913
Validation loss : 0.15652166141404045 - MAE: 0.3012713923501961
Epoch: 13
Training loss: 0.15205995708703995 - MAE: 0.29424324063629004
Validation loss : 0.1575678288936615 - MAE: 0.3004794443503115
Epoch: 14
Training loss: 0.15087180227041244 - MAE: 0.292504579327589
Validation loss : 0.15574459234873453 - MAE: 0.29932356111019714
Epoch: 15
Training loss: 0.1518820345401764 - MAE: 0.2940904971897683
Validation loss : 0.15581322544150883 - MAE: 0.30048684662514935
Epoch: 16
Training loss: 0.14916340589523316 - MAE: 0.2900369708605816
Validation loss : 0.15502946575482687 - MAE: 0.2986623058669649
Epoch: 17
Training loss: 0.14969733864068985 - MAE: 0.2916869417468108
Validation loss : 0.15631223718325296 - MAE: 0.2994015598567933
Epoch: 18
Training loss: 0.14621972769498826 - MAE: 0.2875086269286061
Validation loss : 0.1557358337773217 - MAE: 0.29931970436403404
Epoch: 19
Training loss: 0.14798570185899734 - MAE: 0.29024787778757843
Validation loss : 0.15424930387073094 - MAE: 0.2984811820958494
Epoch: 20
Training loss: 0.14769238144159316 - MAE: 0.2896109423923894
Validation loss : 0.1536422868569692 - MAE: 0.2987445844262027
Epoch: 21
Training loss: 0.14361368536949157 - MAE: 0.2842206176667335
Validation loss : 0.1533755792511834 - MAE: 0.29642898867488277
Epoch: 22
Training loss: 0.14456430345773696 - MAE: 0.2860957867005398
Validation loss : 0.1542035871081882 - MAE: 0.2971775973100257
Epoch: 23
Training loss: 0.1426533755660057 - MAE: 0.28512021628758083
Validation loss : 0.15245803362793392 - MAE: 0.2961228783995986
Epoch: 24
Training loss: 0.14542177826166153 - MAE: 0.2855342243309425
Validation loss : 0.15294104317824045 - MAE: 0.29643245601047447
Epoch: 25
Training loss: 0.14522234290838243 - MAE: 0.2851345876886818
Validation loss : 0.15342944694889915 - MAE: 0.2958033843008907
Epoch: 26
Training loss: 0.14386597275733948 - MAE: 0.2838243646140143
Validation loss : 0.1515038808186849 - MAE: 0.2936464496284943
Epoch: 27
Training loss: 0.1415349954366684 - MAE: 0.2810586437104536
Validation loss : 0.1507236527072059 - MAE: 0.29409404478735646
Epoch: 28
Training loss: 0.14115683376789093 - MAE: 0.2815221038197439
Validation loss : 0.15061336921321022 - MAE: 0.29280129784932213
Epoch: 29
Training loss: 0.14186541587114335 - MAE: 0.28239653007887255
Validation loss : 0.15151139597098032 - MAE: 0.2953867482779256
Epoch: 30