Started at: 15:42:32
nb-bert-base, 5e-06, 128
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
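For context, here is a minimal sketch of how a run matching the header above might be set up with Hugging Face Transformers and PyTorch. This is an assumption-laden reconstruction, not the original script: it assumes "nb-bert-base" refers to the public NbAiLab/nb-bert-base checkpoint (the local step4_8 path in the config is left untouched), that 5e-06 is the learning rate and 128 the maximum sequence length, and that the task is single-target regression, since the log reports MAE next to an MSE-style loss. The dataloader and its column layout are placeholders.

# Hedged sketch of a comparable setup; the hyperparameters come from the header above,
# everything else (model id, dataloader, regression head) is an assumption.
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

model_name = "NbAiLab/nb-bert-base"  # assumed public counterpart of the local checkpoint
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name, num_labels=1)  # regression head

optimizer = torch.optim.AdamW(model.parameters(), lr=5e-6)
loss_fn = torch.nn.MSELoss()

def run_epoch(loader, train=True):
    """One pass over a (texts, targets) dataloader; returns mean loss and MAE."""
    model.train(train)
    total_loss, total_mae, n = 0.0, 0.0, 0
    for texts, targets in loader:  # placeholder dataloader yielding raw strings and float targets
        batch = tokenizer(list(texts), truncation=True, max_length=128,
                          padding=True, return_tensors="pt")
        targets = targets.float()
        with torch.set_grad_enabled(train):
            preds = model(**batch).logits.squeeze(-1)
            loss = loss_fn(preds, targets)
            if train:
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
        total_loss += loss.item() * len(targets)
        total_mae += (preds - targets).abs().sum().item()
        n += len(targets)
    return total_loss / n, total_mae / n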
Epoch: 0
Training loss: 0.33608432710170744 - MAE: 0.43243840742639095
Validation loss : 0.1794234970036675 - MAE: 0.32278704808536035
Epoch: 1
Training loss: 0.1674842369556427 - MAE: 0.31167954456795716
Validation loss : 0.1640064299983137 - MAE: 0.3063768331901366
Epoch: 2
Training loss: 0.1580326673388481 - MAE: 0.3004621400964959
Validation loss : 0.15081930116695516 - MAE: 0.2939987153720106
Epoch: 3
Training loss: 0.14433746680617332 - MAE: 0.2853955953382289
Validation loss : 0.13955745363936706 - MAE: 0.2805828900770382
Epoch: 4
Training loss: 0.12773224733769895 - MAE: 0.2682634582590704
Validation loss : 0.12777050628381617 - MAE: 0.26856869819032614
Epoch: 5
Training loss: 0.11507055133581162 - MAE: 0.2548698064814896
Validation loss : 0.11819195002317429 - MAE: 0.2559857516534626
Epoch: 6
Training loss: 0.10605520464479923 - MAE: 0.24685413996018704
Validation loss : 0.11385800119708567 - MAE: 0.2514239431805721
Epoch: 7
Training loss: 0.09987761311233044 - MAE: 0.23910935102620393
Validation loss : 0.10999320841887418 - MAE: 0.2490825267050983
Epoch: 8
Training loss: 0.09503072924911976 - MAE: 0.23424281710060202
Validation loss : 0.10815327263930265 - MAE: 0.24874807522133224
Epoch: 9
Training loss: 0.09119642566889524 - MAE: 0.229760940773865
Validation loss : 0.10649637790287242 - MAE: 0.2485144665264264
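As a closing note on reading the two columns per epoch, here is a hedged toy illustration (not taken from the original script) of the usual computation, assuming the loss column is mean squared error, which its pairing with MAE suggests.

# Hypothetical toy example of the per-epoch columns:
# "loss" assumed to be mean squared error, "MAE" is mean absolute error.
import torch
preds   = torch.tensor([0.42, 0.10, 0.77])   # made-up predictions
targets = torch.tensor([0.50, 0.00, 0.80])   # made-up targets
mse = torch.mean((preds - targets) ** 2)     # -> "loss" column (assumed MSE)
mae = torch.mean((preds - targets).abs())    # -> "MAE" column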