Started at: 16:42:33
norbert2, 0.001, 64
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
Epoch: 0
Training loss: 0.23396679352630267 - MAE: 0.36809709172719646
Validation loss : 0.2051041704235655 - MAE: 0.344271905329582
Epoch: 1
Training loss: 0.17601347044862883 - MAE: 0.31801457103064673
Validation loss : 0.17097288595907617 - MAE: 0.31198182098215826
Epoch: 2
Training loss: 0.1654679064798837 - MAE: 0.3077319078236449
Validation loss : 0.16556653854521838 - MAE: 0.30692441994971376
Epoch: 3
Training loss: 0.16253817901767867 - MAE: 0.30606647005336457
Validation loss : 0.16399860449812628 - MAE: 0.30376864503593026
Epoch: 4
Training loss: 0.15459998937869313 - MAE: 0.29829892569804944
Validation loss : 0.16551078472173575 - MAE: 0.30465684222110123
Epoch: 5
Training loss: 0.15705167742991688 - MAE: 0.29957479958928274
Validation loss : 0.15921335802836853 - MAE: 0.2992412879561597
Epoch: 6
Training loss: 0.1556796274251408 - MAE: 0.298213584499331
Validation loss : 0.160054537608768 - MAE: 0.2985385847578787
Epoch: 7
Training loss: 0.15384320147109753 - MAE: 0.2976113847178801
Validation loss : 0.15325851286902573 - MAE: 0.29697666022376784
Epoch: 8
Training loss: 0.15232176303562492 - MAE: 0.2951661477439879
Validation loss : 0.15668316027431778 - MAE: 0.2951631450870871
Epoch: 9
Training loss: 0.1500442037076661 - MAE: 0.2939315468042274
Validation loss : 0.15931695473916602 - MAE: 0.29820807065440036
Epoch: 10
Training loss: 0.1502898669227807 - MAE: 0.2937852761340091
Validation loss : 0.1528170628078056 - MAE: 0.2929869784264181
Epoch: 11
Training loss: 0.14972644735767385 - MAE: 0.2936454548304151
Validation loss : 0.149495699188926 - MAE: 0.29191969764094317
Epoch: 12
Training loss: 0.1476658664719023 - MAE: 0.29071602013599546
Validation loss : 0.14979074330944003 - MAE: 0.2911997110268614
Epoch: 13
Training loss: 0.14847442575476386 - MAE: 0.2920681939055207
Validation loss : 0.15192810400868906 - MAE: 0.29221937702014605
Epoch: 14