Started at: 12:09:53
norbert2, 5e-06, 128
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
Epoch: 0
Training loss: 0.3536522166295485 - MAE: 0.46532984683044315
Validation loss : 0.2076082869961455 - MAE: 0.35413302846041045
Epoch: 1
Training loss: 0.18833580565723507 - MAE: 0.34120423112289355
Validation loss : 0.14335447590093356 - MAE: 0.2961027090254163
Epoch: 2
Training loss: 0.14451369047164916 - MAE: 0.29981097306125726
Validation loss : 0.11381873065555417 - MAE: 0.26195181306766296
Epoch: 3
Training loss: 0.11552442915060303 - MAE: 0.2678326714288848
Validation loss : 0.0960532952402089 - MAE: 0.23623789074857693
Epoch: 4
Training loss: 0.0972470148043199 - MAE: 0.24429521841448945
Validation loss : 0.08230455451317735 - MAE: 0.21549894983672038
Epoch: 5
Training loss: 0.0810245130211115 - MAE: 0.22129605621910198
Validation loss : 0.07145167736185563 - MAE: 0.19650661532600605
Epoch: 6
Training loss: 0.07072198672050779 - MAE: 0.20592124684891044
Validation loss : 0.06396911907437686 - MAE: 0.1829232461219717
Epoch: 7
Training loss: 0.062372697077014226 - MAE: 0.19333134304162353
Validation loss : 0.06492970934188044 - MAE: 0.1827321360406301
Epoch: 8
Training loss: 0.055565989491614426 - MAE: 0.18100666911037178
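
The following is a minimal, self-contained sketch (PyTorch + Hugging Face transformers) of the kind of fine-tuning loop that could have produced a log in this format. It assumes "norbert2" refers to the ltg/norbert2 checkpoint on the Hugging Face Hub, that 5e-06 is the learning rate and 128 the batch size, and that the task is single-output regression trained with MSE and reported with MAE. The dataset and the exact regression head are not shown in the log, so hypothetical dummy tensors stand in for the real data; in a real run the text would be encoded with the model's AutoTokenizer.

import time
import torch
from torch import nn
from torch.utils.data import DataLoader, TensorDataset
from transformers import AutoConfig, AutoModel

MODEL_NAME = "ltg/norbert2"   # assumed checkpoint behind "norbert2" in the log
LEARNING_RATE = 5e-06         # assumed meaning of "5e-06"
BATCH_SIZE = 128              # assumed meaning of "128"
EPOCHS = 9                    # the log shows at least epochs 0-8

print(f"Started at: {time.strftime('%H:%M:%S')}")
print(f"norbert2, {LEARNING_RATE}, {BATCH_SIZE}")

config = AutoConfig.from_pretrained(MODEL_NAME)
print((config.to_dict(), {}))  # the log prints the config dict followed by an empty dict
encoder = AutoModel.from_pretrained(MODEL_NAME)

class BertRegressor(nn.Module):
    """BERT encoder with a single-output regression head (hypothetical head)."""
    def __init__(self, encoder, hidden_size):
        super().__init__()
        self.encoder = encoder
        self.head = nn.Linear(hidden_size, 1)

    def forward(self, input_ids, attention_mask):
        out = self.encoder(input_ids=input_ids, attention_mask=attention_mask)
        return self.head(out.last_hidden_state[:, 0]).squeeze(-1)  # [CLS] representation

def run_epoch(model, loader, loss_fn, optimizer=None):
    """One pass over the loader; returns (mean loss, mean absolute error)."""
    training = optimizer is not None
    model.train(training)
    total_loss, total_mae, n_batches = 0.0, 0.0, 0
    with torch.set_grad_enabled(training):
        for input_ids, attention_mask, targets in loader:
            preds = model(input_ids, attention_mask)
            loss = loss_fn(preds, targets)
            if training:
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
            total_loss += loss.item()
            total_mae += (preds - targets).abs().mean().item()
            n_batches += 1
    return total_loss / n_batches, total_mae / n_batches

def dummy_loader(n=8, seq_len=16):
    """Placeholder data so the sketch runs; the real dataset is not in the log."""
    ids = torch.randint(0, config.vocab_size, (n, seq_len))
    mask = torch.ones_like(ids)
    targets = torch.rand(n)
    return DataLoader(TensorDataset(ids, mask, targets), batch_size=BATCH_SIZE)

train_loader, val_loader = dummy_loader(), dummy_loader()
model = BertRegressor(encoder, config.hidden_size)
optimizer = torch.optim.AdamW(model.parameters(), lr=LEARNING_RATE)
loss_fn = nn.MSELoss()

for epoch in range(EPOCHS):
    print(f"Epoch: {epoch}")
    train_loss, train_mae = run_epoch(model, train_loader, loss_fn, optimizer)
    print(f"Training loss: {train_loss} - MAE: {train_mae}")
    val_loss, val_mae = run_epoch(model, val_loader, loss_fn)
    print(f"Validation loss : {val_loss} - MAE: {val_mae}")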