Started at: 14:58:07
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
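The config printed above describes a 12-layer BERT encoder (hidden size 768, 12 attention heads, vocabulary of 50,104 tokens) saved as a BertForMaskedLM checkpoint. The following is a minimal sketch of how an equivalent model could be instantiated for the regression fine-tune that the MAE metric below implies; the numeric values are copied from the printed config, while the sequence-classification head, `num_labels=1`, and `problem_type="regression"` are assumptions not shown in the log.

```python
from transformers import BertConfig, BertForSequenceClassification

# Values below are copied from the printed config; the regression head
# settings (num_labels, problem_type) are assumptions, not taken from the log.
config = BertConfig(
    vocab_size=50104,
    hidden_size=768,
    num_hidden_layers=12,
    num_attention_heads=12,
    intermediate_size=3072,
    hidden_act="gelu",
    hidden_dropout_prob=0.1,
    attention_probs_dropout_prob=0.1,
    max_position_embeddings=512,
    type_vocab_size=2,
    initializer_range=0.02,
    num_labels=1,                 # single regression target (MAE is tracked below)
    problem_type="regression",
)
model = BertForSequenceClassification(config)
```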
Epoch: 0
Training loss: 0.7675554938614368 - MAE: 0.7218268842869793
Validation loss : 0.5840958555539449 - MAE: 0.5827894046582064
Epoch: 1
Training loss: 0.49415092915296555 - MAE: 0.5986839248419658
Validation loss : 0.4720619022846222 - MAE: 0.585200722348146
Epoch: 2
Training loss: 0.4484046511352062 - MAE: 0.554433520788571
Validation loss : 0.4777786135673523 - MAE: 0.5403046027523047
Epoch: 3
Training loss: 0.415366031229496 - MAE: 0.5435317536295668
Validation loss : 0.42990968624750775 - MAE: 0.5419536063906305
Epoch: 4
Training loss: 0.39412142150104046 - MAE: 0.5245434911285064
Validation loss : 0.4350344041983287 - MAE: 0.5241025617347218
Epoch: 5
Training loss: 0.38853260688483715 - MAE: 0.5237674086717746
Validation loss : 0.4117620090643565 - MAE: 0.5211508578778722
Epoch: 6
Training loss: 0.3667535427957773 - MAE: 0.5072329589138348
Validation loss : 0.40754977862040204 - MAE: 0.5151992849713838
Epoch: 7
Training loss: 0.3623247407376766 - MAE: 0.5045013681358567
Validation loss : 0.4099322458108266 - MAE: 0.5114030400583374
Epoch: 8
Training loss: 0.3549719639122486 - MAE: 0.4960748857304838
Validation loss : 0.3965168197949727 - MAE: 0.5095656718918984
Epoch: 9
Training loss: 0.35677218809723854 - MAE: 0.4991656174770718
Validation loss : 0.3989155093828837 - MAE: 0.504121969526425
Epoch: 10
Training loss: 0.35797534696757793 - MAE: 0.4965052647383213
Validation loss : 0.3919292589028676 - MAE: 0.5021860128100027
Epoch: 11
Training loss: 0.3498771656304598 - MAE: 0.4923322100425394
Validation loss : 0.3922814130783081 - MAE: 0.49962354808867465
Epoch: 12
Training loss: 0.35045805759727955 - MAE: 0.49545303288574233
Validation loss : 0.38426289955774945 - MAE: 0.4981140455265725
Epoch: 13
Training loss: 0.34572333469986916 - MAE: 0.4875545829879762
Validation loss : 0.3801858226458232 - MAE: 0.49636689515641264
Epoch: 14
Training loss: 0.33835710026323795 - MAE: 0.4836895945561605
Validation loss : 0.3836107651392619 - MAE: 0.49250687287180367
Epoch: 15
Training loss: 0.3261668477207422 - MAE: 0.47392514293900473
Validation loss : 0.37519946694374084 - MAE: 0.49084831912535476
Epoch: 16
Training loss: 0.3351526018232107 - MAE: 0.4787258875098658
Validation loss : 0.37367749214172363 - MAE: 0.48954017311969195
Epoch: 17
Training loss: 0.3310759160667658 - MAE: 0.4774447066315486
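The per-epoch lines above follow a fixed pattern: one training pass and one validation pass, each reporting a mean loss and a mean absolute error. Below is a hypothetical loop that would emit lines in this format; the MSE objective, optimizer, dataloaders, and batch layout are assumptions, since the log does not show the training code itself.

```python
# Hypothetical epoch loop matching the log format above. Only the printed
# pattern ("Training loss: ... - MAE: ...") is taken from the log; the loss
# function, optimizer, and data pipeline are assumptions.
import torch
from torch.nn.functional import mse_loss, l1_loss

def run_epoch(model, loader, optimizer=None, device="cpu"):
    training = optimizer is not None
    model.train() if training else model.eval()
    total_loss, total_mae, n_batches = 0.0, 0.0, 0
    with torch.set_grad_enabled(training):
        for batch in loader:
            batch = {k: v.to(device) for k, v in batch.items()}
            labels = batch.pop("labels").float()
            preds = model(**batch).logits.squeeze(-1)
            loss = mse_loss(preds, labels)          # assumed regression objective
            if training:
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
            total_loss += loss.item()
            total_mae += l1_loss(preds, labels).item()
            n_batches += 1
    return total_loss / n_batches, total_mae / n_batches

# for epoch in range(num_epochs):
#     print(f"Epoch: {epoch}")
#     tr_loss, tr_mae = run_epoch(model, train_loader, optimizer, device)
#     print(f"Training loss: {tr_loss} - MAE: {tr_mae}")
#     va_loss, va_mae = run_epoch(model, val_loader, device=device)
#     print(f"Validation loss : {va_loss} - MAE: {va_mae}")
```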