Started at: 14:11:08
norbert2, 0.001, 512
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
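
As a reference before the epoch-by-epoch log below, here is a minimal sketch of how a run with the settings from the header lines (learning rate 0.001, max sequence length 512, the NorBERT2 checkpoint whose config is printed above) could be set up with Hugging Face Transformers. The model ID "ltg/norbert2", the use of AutoModelForSequenceClassification with a single regression output, and the mae helper are assumptions for illustration, not taken from this log.

import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Assumed Hugging Face ID for the "norbert2" checkpoint named above.
model_name = "ltg/norbert2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
# num_labels=1 with problem_type="regression" gives an MSE training loss,
# consistent with the per-epoch loss/MAE pairs reported below.
model = AutoModelForSequenceClassification.from_pretrained(
    model_name, num_labels=1, problem_type="regression"
)

optimizer = torch.optim.AdamW(model.parameters(), lr=0.001)  # lr from the header line

def mae(preds: torch.Tensor, targets: torch.Tensor) -> float:
    # Mean absolute error, the metric reported next to each loss.
    return torch.mean(torch.abs(preds - targets)).item()

# One illustrative training step on a dummy batch (max_length=512 from the header line).
batch = tokenizer(["et eksempel på en setning"], truncation=True, max_length=512,
                  padding=True, return_tensors="pt")
labels = torch.tensor([[0.5]])
outputs = model(**batch, labels=labels)
outputs.loss.backward()
optimizer.step()
print(outputs.loss.item(), mae(outputs.logits.detach(), labels))
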
Epoch: 0
Training loss: 0.35933828124633205 - MAE: 0.4751237658554443
Validation loss : 0.23144497871398925 - MAE: 0.3734726961030864
Epoch: 1
Training loss: 0.23247717320919037 - MAE: 0.3732509105049625
Validation loss : 0.20686508417129518 - MAE: 0.3514252349695031
Epoch: 2
Training loss: 0.2124561335031803 - MAE: 0.35437058404120675
Validation loss : 0.1896565705537796 - MAE: 0.3355357861287618
Epoch: 3
Training loss: 0.2004259446492562 - MAE: 0.33960825331495564
Validation loss : 0.1811432033777237 - MAE: 0.32659469019295384
Epoch: 4
Training loss: 0.1885477098134848 - MAE: 0.3281006162329132
Validation loss : 0.1764070361852646 - MAE: 0.32144223855301585
Epoch: 5
Training loss: 0.18467810062261727 - MAE: 0.3265258309195063
Validation loss : 0.17094221711158752 - MAE: 0.31634747500053095
Epoch: 6
Training loss: 0.17537888999168688 - MAE: 0.31988395789738044
Validation loss : 0.16810469925403596 - MAE: 0.3138018870663194
Epoch: 7
Training loss: 0.1734414536219377 - MAE: 0.3134416996838174
Validation loss : 0.16584380269050597 - MAE: 0.3113483650285477
Epoch: 8
Training loss: 0.17009368538856506 - MAE: 0.3113253722143749
Validation loss : 0.16499556303024293 - MAE: 0.31061547000081563
Epoch: 9
Training loss: 0.16696236683772162 - MAE: 0.3090170955211275
Validation loss : 0.1626953661441803 - MAE: 0.30815754798418404
Epoch: 10
Training loss: 0.16321299053155458 - MAE: 0.30656158219089447
Validation loss : 0.16109866201877593 - MAE: 0.3066560629112524
Epoch: 11
Training loss: 0.16488801974516648 - MAE: 0.30673736154366793
Validation loss : 0.1611247330904007 - MAE: 0.3061875016992714
Epoch: 12
Training loss: 0.16279753011006576 - MAE: 0.3052001668581482
Validation loss : 0.15895505845546723 - MAE: 0.3041957691400855
Epoch: 13
Training loss: 0.16205430260071388 - MAE: 0.30296354010373683
Validation loss : 0.1576485514640808 - MAE: 0.30286596250441605
Epoch: 14
Training loss: 0.15873357882866493 - MAE: 0.29913588520156115
Validation loss : 0.15669676959514617 - MAE: 0.30180299394532806
Epoch: 15
Training loss: 0.1584533154964447 - MAE: 0.2998023957789063
Validation loss : 0.15532913208007812 - MAE: 0.30010611438559937
Epoch: 16
Training loss: 0.15160657465457916 - MAE: 0.29512139346045474
Validation loss : 0.155458864569664 - MAE: 0.30031144473289956
Epoch: 17
Training loss: 0.154553166948832 - MAE: 0.2959875188179299
Validation loss : 0.15447661876678467 - MAE: 0.29856949716613224
Epoch: 18
Training loss: 0.15589827528366676 - MAE: 0.2969922193921997
Validation loss : 0.15317214727401735 - MAE: 0.29797493440552103
Epoch: 19
Training loss: 0.15659963970000929 - MAE: 0.29645999002357104
Validation loss : 0.15315419733524321 - MAE: 0.2979769588090472
Prediction MAE: 0.2823
Finished at: 14:11:08
Time taken: 1954 s.
0 days 0 hours 32 minutes 34 seconds
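
For reference, the elapsed-time line can be checked against the seconds count with the standard library; this is only a sanity check of the conversion, not part of the original script.

from datetime import timedelta
# 1954 s is 0 hours, 32 minutes, 34 seconds.
print(timedelta(seconds=1954))  # -> 0:32:34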