Started at: 14:40:58
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
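The tuple above is the printed result of loading the checkpoint's BERT configuration (the second element is the dict of unused kwargs, empty here). Below is a minimal sketch, not the original training script, of how such a config and model could be reloaded with Hugging Face transformers; the checkpoint path is taken from the log, while num_labels=1 (a single regression output, consistent with the MAE metric reported per epoch) is an assumption.

# Hypothetical reconstruction; only the checkpoint path comes from the log above.
from transformers import BertConfig, BertForSequenceClassification, BertTokenizer

ckpt = "/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/"

# return_unused_kwargs=True yields a (config, unused_kwargs) tuple, matching the
# "({...}, {})" shape printed above (the exact key set may differ slightly).
config, unused = BertConfig.from_pretrained(ckpt, return_unused_kwargs=True)
print((config.to_dict(), unused))

# Assumed single-output regression head, so the model predicts one score per input.
model = BertForSequenceClassification.from_pretrained(ckpt, num_labels=1)
tokenizer = BertTokenizer.from_pretrained(ckpt)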
Epoch: 0
Training loss: 0.39730866372585294 - MAE: 0.4859462215712381
Validation loss : 0.2196155587832133 - MAE: 0.358251958619135
Epoch: 1
Training loss: 0.17908839583396913 - MAE: 0.32397780285924654
Validation loss : 0.1558608834942182 - MAE: 0.30258585682891376
Epoch: 2
Training loss: 0.1575879180431366 - MAE: 0.29877002595895036
Validation loss : 0.14711609482765198 - MAE: 0.29037275250782246
Epoch: 3
Training loss: 0.15081047505140305 - MAE: 0.2927416915612918
Validation loss : 0.14418130036857393 - MAE: 0.2855868255353847
Epoch: 4
Training loss: 0.1486131688952446 - MAE: 0.2895620071780584
Validation loss : 0.1416353558500608 - MAE: 0.2827090052807201
Epoch: 5
Training loss: 0.1443140760064125 - MAE: 0.2864368585237521
Validation loss : 0.1415707692503929 - MAE: 0.2815798797681286
Epoch: 6
Training loss: 0.14458888828754424 - MAE: 0.2867332379237455
Validation loss : 0.13991272449493408 - MAE: 0.2813442573034388
Epoch: 7
Training loss: 0.1426651358604431 - MAE: 0.2830702813173875
Validation loss : 0.14005657616588804 - MAE: 0.28040410864338733
Epoch: 8
Training loss: 0.14226840168237687 - MAE: 0.2838351485433796
Validation loss : 0.14099578393830192 - MAE: 0.2833797767431743
Epoch: 9
Training loss: 0.14148026525974275 - MAE: 0.2832427237151646
Validation loss : 0.14043273280064264 - MAE: 0.28271952708757264
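For reference, a minimal sketch of a training/evaluation loop that would emit per-epoch lines in the format above, assuming MSE as the training objective and mean absolute error (MAE) as the monitored metric on a single-output regression head; model, optimizer, train_loader and val_loader are placeholder names, not taken from the original script.

import torch
import torch.nn.functional as F

def run_epoch(model, loader, optimizer=None, device="cuda"):
    # With an optimizer we train; without one we only evaluate.
    training = optimizer is not None
    if training:
        model.train()
    else:
        model.eval()
    total_loss = total_mae = 0.0
    for batch in loader:
        labels = batch.pop("labels").float().to(device)
        inputs = {k: v.to(device) for k, v in batch.items()}
        with torch.set_grad_enabled(training):
            preds = model(**inputs).logits.squeeze(-1)  # shape: (batch,)
            loss = F.mse_loss(preds, labels)
        if training:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item()
        total_mae += (preds - labels).abs().mean().item()
    n = len(loader)
    return total_loss / n, total_mae / n

# for epoch in range(10):
#     print(f"Epoch: {epoch}")
#     tr_loss, tr_mae = run_epoch(model, train_loader, optimizer)
#     print(f"Training loss: {tr_loss} - MAE: {tr_mae}")
#     va_loss, va_mae = run_epoch(model, val_loader)
#     print(f"Validation loss : {va_loss} - MAE: {va_mae}")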