Started at: 09:20:17
nb-bert-base, 0.001, 512
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
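(Not part of the original log: a minimal sketch of how a model matching the config above might be set up, assuming the Hugging Face transformers API. The Hub id NbAiLab/nb-bert-base and the reading of the header line as learning rate 0.001 and max length 512 are assumptions; the real run loads a local checkpoint path that is truncated here.)

```python
# Hypothetical reconstruction -- the original training script is not included in this log.
# Loads an nb-bert-base encoder with a single-output regression head.
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

MODEL_NAME = "NbAiLab/nb-bert-base"   # assumed Hub id for nb-bert-base
LEARNING_RATE = 0.001                 # assumed meaning of "0.001" in the header line
MAX_LENGTH = 512                      # matches max_position_embeddings in the config

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSequenceClassification.from_pretrained(
    MODEL_NAME,
    num_labels=1,                     # single regression target (MSE loss, MAE metric)
    problem_type="regression",
)
optimizer = torch.optim.AdamW(model.parameters(), lr=LEARNING_RATE)
```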
Epoch: 0
Training loss: 0.6738183131584754 - MAE: 0.671559791949798
Validation loss : 0.3496964991092682 - MAE: 0.481417017739273
Epoch: 1
Training loss: 0.2604064620458163 - MAE: 0.3969383760031138
Validation loss : 0.22093867957592012 - MAE: 0.35313969813939866
Epoch: 2
Training loss: 0.20208562452059525 - MAE: 0.3434821083990275
Validation loss : 0.1883617550134659 - MAE: 0.3379014120649767
Epoch: 3
Training loss: 0.1782165696987739 - MAE: 0.31999390375888026
Validation loss : 0.16966865658760072 - MAE: 0.31801389648176803
Epoch: 4
Training loss: 0.16990482119413522 - MAE: 0.31338787739566665
Validation loss : 0.1638330489397049 - MAE: 0.3106934884794657
Epoch: 5
Training loss: 0.1681439051261315 - MAE: 0.31001855013012786
Validation loss : 0.16361375153064728 - MAE: 0.3107548141570412
Epoch: 6
Training loss: 0.16585055337502405 - MAE: 0.3068083279056544
Validation loss : 0.15926897823810576 - MAE: 0.30532344804199296
Epoch: 7
Training loss: 0.1640594372382531 - MAE: 0.30565590681538174
Validation loss : 0.15682196021080017 - MAE: 0.3021364879085161
Epoch: 8
Training loss: 0.15877233560268694 - MAE: 0.30019124717441487
Validation loss : 0.15556727051734925 - MAE: 0.3009043710813451
Epoch: 9
Training loss: 0.157441843014497 - MAE: 0.29782973353451203
Validation loss : 0.15489959716796875 - MAE: 0.3003483373637247
Epoch: 10
Training loss: 0.1569079951598094 - MAE: 0.29850384416372977
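(Also not part of the original log: a hedged sketch of a per-epoch loop that could emit lines of the shape above, assuming an MSE training objective and mean absolute error as the reported metric. The DataLoader construction, dataset, and device handling are assumed.)

```python
# Hypothetical sketch of the epoch loop behind the "Training loss ... - MAE: ..." lines.
import torch
from torch.nn.functional import l1_loss

def run_epoch(model, loader, device, optimizer=None):
    """Return (mean loss, mean MAE) over one pass; trains when an optimizer is given."""
    training = optimizer is not None
    model.train(training)
    total_loss, total_mae, n_batches = 0.0, 0.0, 0
    for batch in loader:
        batch = {k: v.to(device) for k, v in batch.items()}
        with torch.set_grad_enabled(training):
            out = model(**batch)                      # labels in batch -> MSE loss
            preds = out.logits.squeeze(-1)
        if training:
            optimizer.zero_grad()
            out.loss.backward()
            optimizer.step()
        total_loss += out.loss.item()
        total_mae += l1_loss(preds, batch["labels"].float()).item()
        n_batches += 1
    return total_loss / n_batches, total_mae / n_batches

# for epoch in range(num_epochs):
#     train_loss, train_mae = run_epoch(model, train_loader, device, optimizer)
#     val_loss, val_mae = run_epoch(model, val_loader, device)
#     print(f"Epoch: {epoch}")
#     print(f"Training loss: {train_loss} - MAE: {train_mae}")
#     print(f"Validation loss : {val_loss} - MAE: {val_mae}")
```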