Started at: 13:13:42
norbert2, 1e-06, 128
({'architectures': ['BertForMaskedLM'],
  'attention_probs_dropout_prob': 0.1,
  'hidden_act': 'gelu',
  'hidden_dropout_prob': 0.1,
  'hidden_size': 768,
  'initializer_range': 0.02,
  'intermediate_size': 3072,
  'max_position_embeddings': 512,
  'model_type': 'bert',
  'num_attention_heads': 12,
  'num_hidden_layers': 12,
  'type_vocab_size': 2,
  'vocab_size': 50104,
  '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'},
 {})
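The tuple above pairs the model's config dict with an empty kwargs dict, which matches the (config_dict, unused_kwargs) shape returned by transformers' PretrainedConfig.get_config_dict(). For orientation, here is a minimal setup sketch consistent with this header, assuming that norbert2 refers to the ltg/norbert2 checkpoint on the Hugging Face hub, that 1e-06 and 128 in the run header are the learning rate and batch size (128 could instead be a max sequence length), and that the MaskedLM checkpoint is fine-tuned with a single-output regression head (inferred from the MAE metric reported below); none of this is confirmed by the log itself:

# Hedged setup sketch. The hub id, the regression head, and the meaning of
# "1e-06, 128" are assumptions inferred from the log, not stated in it.
import torch
from transformers import (AutoConfig, AutoModelForSequenceClassification,
                          AutoTokenizer)

MODEL_NAME = "ltg/norbert2"  # assumed hub id behind "norbert2"
LEARNING_RATE = 1e-06        # from the run header
BATCH_SIZE = 128             # from the run header; might instead be max length

# num_labels=1 makes transformers treat the task as regression (MSE loss),
# which would explain the loss/MAE pairs reported for each epoch below.
config = AutoConfig.from_pretrained(MODEL_NAME, num_labels=1)
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME, config=config)
optimizer = torch.optim.AdamW(model.parameters(), lr=LEARNING_RATE)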
Epoch: 0
Training loss: 0.4574310235001824 - MAE: 0.5401737275380348
Validation loss: 0.35341861844062805 - MAE: 0.47187966068458054
Epoch: 1
Training loss: 0.3368046994913708 - MAE: 0.45609539357970186
Validation loss: 0.2778044169819033 - MAE: 0.40998613461344674
Epoch: 2
Training loss: 0.27038083604790947 - MAE: 0.4062241308124049
Validation loss: 0.2211051559126055 - MAE: 0.36538234947589154
Epoch: 3
Training loss: 0.22609539072621954 - MAE: 0.3727795396986739
Validation loss: 0.19445732558095777 - MAE: 0.34284239765259944
Epoch: 4
Training loss: 0.20361147035251964 - MAE: 0.35519470788487506
Validation loss: 0.17550719630073858 - MAE: 0.32641527623486516
Epoch: 5
Training loss: 0.18411212834444912 - MAE: 0.3379493529377132
Validation loss: 0.16247832895936193 - MAE: 0.31452419997685577
Epoch: 6
Training loss: 0.17290322509678926 - MAE: 0.32789149054718714
Validation loss: 0.15116736534479502 - MAE: 0.3037515159088559
Epoch: 7
Training loss: 0.16105539609085429 - MAE: 0.31728356194001767
Validation loss: 0.142264607186253 - MAE: 0.29460984957701103
Epoch: 8
Training loss: 0.15058780922131104 - MAE: 0.3055342884649308
Validation loss: 0.1350672460488371 - MAE: 0.28639660845044046
Epoch: 9
Training loss: 0.14364351305094633 - MAE: 0.2989912100126961
Validation loss: 0.12766022436522148 - MAE: 0.27749170646852833
Epoch: 10
Training loss: 0.13358757495880128 - MAE: 0.28905194688112457
Validation loss: 0.12071017498100126 - MAE: 0.2692566464152902
Epoch: 11
Training loss: 0.1279048427939415 - MAE: 0.28203779707723375
Validation loss: 0.11521857413085732 - MAE: 0.26214218933398814
Epoch: 12
Training loss: 0.12143478021025658 - MAE: 0.274182189252005
Validation loss: 0.1105714681986216 - MAE: 0.25604724451556926
Epoch: 13
Training loss: 0.1161456900564107 - MAE: 0.26758501849982064
Validation loss: 0.1055235407642416 - MAE: 0.24891725350169466
Epoch: 14
Training loss: 0.1095995495942506 - MAE: 0.2595437667646864
Validation loss: 0.10096074298426912 - MAE: 0.2434076435586873
Epoch: 15
Training loss: 0.10467073944481936 - MAE: 0.2538297558675485
Validation loss: 0.09589002422384314 - MAE: 0.23538500517817879
Epoch: 16
Training loss: 0.09896753112023526 - MAE: 0.2474362879127264
Validation loss: 0.09334073678867237 - MAE: 0.2316093617533846
Epoch: 17
Training loss: 0.09564962861212817 - MAE: 0.24124600934071788
Validation loss: 0.08950809832360293 - MAE: 0.2258600694670798
Epoch: 18
Training loss: 0.0920025158334862 - MAE: 0.23655202234154926
Validation loss: 0.0849252098516838 - MAE: 0.21879456808109843
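Each epoch above reports one training pass and one validation pass, each with a loss and an MAE over the same predictions, and both decrease steadily across the excerpt. Here is a minimal sketch of an epoch loop that would emit lines in exactly this shape, assuming MSE as the training loss (inferred from the loss/MAE pairing, not stated in the log) and reusing model and optimizer from the setup sketch above; train_loader and val_loader stand in for DataLoaders of tokenized batches carrying a labels field:

# Hedged sketch of the epoch loop implied by the log lines above.
import torch
import torch.nn.functional as F

device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)
num_epochs = 19  # epochs 0-18 appear in this excerpt; the real run may differ

def run_epoch(loader, train=False):
    """One pass over loader; updates weights only when train=True."""
    model.train(train)
    total_loss = total_mae = 0.0
    for batch in loader:
        labels = batch.pop("labels").float().to(device)
        inputs = {k: v.to(device) for k, v in batch.items()}
        # Build the graph only when training; validation runs without grads.
        with torch.set_grad_enabled(train):
            preds = model(**inputs).logits.squeeze(-1)
            loss = F.mse_loss(preds, labels)
        if train:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item()
        total_mae += (preds - labels).abs().mean().item()
    # Average the batch-level loss and MAE over the epoch, as the log does.
    return total_loss / len(loader), total_mae / len(loader)

for epoch in range(num_epochs):
    print(f"Epoch: {epoch}")
    train_loss, train_mae = run_epoch(train_loader, train=True)
    print(f"Training loss: {train_loss} - MAE: {train_mae}")
    val_loss, val_mae = run_epoch(val_loader)
    print(f"Validation loss: {val_loss} - MAE: {val_mae}")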