Started at: 15:10:27
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
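
Note (editorial): the dictionary printed above is a standard 12-layer BERT-base configuration with a 50,104-token vocabulary, originally saved under the BertForMaskedLM architecture. The log does not name the pretrained checkpoint or the fine-tuning head, so the sketch below is only an illustration of how that configuration could be rebuilt with Hugging Face transformers; the single-output regression head (and the implied MSE training loss) is an assumption inferred from the MAE values logged per epoch, not something the log states.

from transformers import BertConfig, BertForSequenceClassification

# Rebuild the logged configuration by hand (values copied from the log line above).
config = BertConfig(
    vocab_size=50104,
    hidden_size=768,
    num_hidden_layers=12,
    num_attention_heads=12,
    intermediate_size=3072,
    hidden_act="gelu",
    hidden_dropout_prob=0.1,
    attention_probs_dropout_prob=0.1,
    max_position_embeddings=512,
    type_vocab_size=2,
    initializer_range=0.02,
    # Assumed fine-tuning setup: one continuous target, so the model uses MSE loss
    # and MAE is tracked separately as the evaluation metric seen below.
    num_labels=1,
    problem_type="regression",
)

# Randomly initialised model with this configuration; in the actual run the
# encoder weights would instead be loaded from the (unnamed) pretrained checkpoint.
model = BertForSequenceClassification(config)
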
Epoch: 0
Training loss: 0.23905933226900872 - MAE: 0.37286070239059566
Validation loss : 0.20273769037290054 - MAE: 0.34199745756205613
Epoch: 1
Training loss: 0.1739810389852283 - MAE: 0.3179558368563117
Validation loss : 0.17964895408261905 - MAE: 0.3195786666383862
Epoch: 2
Training loss: 0.16324252100905987 - MAE: 0.3064914498437487
Validation loss : 0.1667684671102148 - MAE: 0.3071639189113823
Epoch: 3
Training loss: 0.16300387642901354 - MAE: 0.3079134637659216
Validation loss : 0.16075672886588357 - MAE: 0.3023037822758914
Epoch: 4
Training loss: 0.15850964098265677 - MAE: 0.30209297763585696
Validation loss : 0.15993016551841388 - MAE: 0.2998915062426691
Epoch: 5
Training loss: 0.15355758436701514 - MAE: 0.2971466778583582
Validation loss : 0.16042366962541232 - MAE: 0.3002886265052834
Epoch: 6
Training loss: 0.1536776212730793 - MAE: 0.29592730138926
Validation loss : 0.15445094194376108 - MAE: 0.29839474560784734
Epoch: 7
Training loss: 0.1528735202972335 - MAE: 0.2964794843059913
Validation loss : 0.15745532535242313 - MAE: 0.29677731000751756
Epoch: 8
Training loss: 0.15427004425513624 - MAE: 0.29751606123388424
Validation loss : 0.15396082672205838 - MAE: 0.2950934395209802
Epoch: 9
Training loss: 0.149500017590595 - MAE: 0.2916245596857604
Validation loss : 0.15473474675055707 - MAE: 0.2949315575643968
Epoch: 10
Training loss: 0.15341697580585575 - MAE: 0.2974883490072689
Validation loss : 0.15178079225800253 - MAE: 0.29400432445830815
Epoch: 11
Training loss: 0.1485531846381197 - MAE: 0.29181963231047375
Validation loss : 0.1521155567783298 - MAE: 0.2917507828206561
Epoch: 12
Training loss: 0.15231723398572267 - MAE: 0.2955196249722615
Validation loss : 0.15099699817823642 - MAE: 0.29205374747278257
Epoch: 13
Training loss: 0.14968526845026497 - MAE: 0.29204745513801594
Validation loss : 0.15112860636277634 - MAE: 0.29562209819035995
Epoch: 14
Training loss: 0.14701649937966857 - MAE: 0.29068898034530744
Validation loss : 0.15004436694311374 - MAE: 0.29160289952934304